Merge lp:~gz/brz/py3_bootstrap2 into lp:brz
- py3_bootstrap2
- Merge into trunk
Proposed by: Martin Packman
Status: Merged
Approved by: Martin Packman
Approved revision: no longer in the source branch.
Merge reported by: The Breezy Bot
Merged at revision: not available
Proposed branch: lp:~gz/brz/py3_bootstrap2
Merge into: lp:brz
Diff against target:
2983 lines (+650/-616) 27 files modified
breezy/_chk_map_py.py (+22/-21) breezy/_chunks_to_lines_py.py (+3/-3) breezy/_dirstate_helpers_py.py (+2/-2) breezy/_groupcompress_py.py (+10/-10) breezy/bzrworkingtree.py (+5/-5) breezy/chk_map.py (+38/-38) breezy/commit.py (+2/-2) breezy/config.py (+13/-7) breezy/dirstate.py (+5/-5) breezy/groupcompress.py (+54/-52) breezy/index.py (+2/-2) breezy/inventory.py (+59/-56) breezy/inventory_delta.py (+52/-52) breezy/osutils.py (+11/-11) breezy/pack.py (+7/-7) breezy/repofmt/groupcompress_repo.py (+7/-7) breezy/repofmt/pack_repo.py (+3/-3) breezy/repository.py (+2/-1) breezy/revision.py (+10/-5) breezy/sixish.py (+10/-0) breezy/tests/test__chk_map.py (+96/-94) breezy/tests/test__chunks_to_lines.py (+40/-38) breezy/tests/test_inv.py (+37/-36) breezy/tests/test_inventory_delta.py (+147/-146) breezy/transport/memory.py (+1/-1) breezy/versionedfile.py (+10/-10) breezy/xml_serializer.py (+2/-2) |
To merge this branch: | bzr merge lp:~gz/brz/py3_bootstrap2 |
Related bugs: |
| Reviewer | Review Type | Date Requested | Status |
|---|---|---|---|
| Jelmer Vernooij | Approve | | |
Review via email:
Commit message
More progress towards Python 3 support
Description of the change
Another somewhat large branch mostly sorting out string semantics across a range of modules.
Also includes some test fixes so that Python 3 starts passing, particularly tests under bt.test_* modules.
To post a comment you must log in.
Revision history for this message
Jelmer Vernooij (jelmer):
review: Approve
Revision history for this message
The Breezy Bot (the-breezy-bot) wrote:
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'breezy/_chk_map_py.py' | |||
2 | --- breezy/_chk_map_py.py 2017-05-22 00:56:52 +0000 | |||
3 | +++ breezy/_chk_map_py.py 2017-06-11 01:47:17 +0000 | |||
4 | @@ -21,6 +21,7 @@ | |||
5 | 21 | import zlib | 21 | import zlib |
6 | 22 | import struct | 22 | import struct |
7 | 23 | 23 | ||
8 | 24 | from .sixish import bytesintern | ||
9 | 24 | from .static_tuple import StaticTuple | 25 | from .static_tuple import StaticTuple |
10 | 25 | 26 | ||
11 | 26 | _LeafNode = None | 27 | _LeafNode = None |
12 | @@ -44,7 +45,7 @@ | |||
13 | 44 | 45 | ||
14 | 45 | def _search_key_16(key): | 46 | def _search_key_16(key): |
15 | 46 | """Map the key tuple into a search key string which has 16-way fan out.""" | 47 | """Map the key tuple into a search key string which has 16-way fan out.""" |
17 | 47 | return '\x00'.join(['%08X' % _crc32(bit) for bit in key]) | 48 | return b'\x00'.join([b'%08X' % _crc32(bit) for bit in key]) |
18 | 48 | 49 | ||
19 | 49 | 50 | ||
20 | 50 | def _search_key_255(key): | 51 | def _search_key_255(key): |
21 | @@ -53,11 +54,11 @@ | |||
22 | 53 | We use 255-way because '\n' is used as a delimiter, and causes problems | 54 | We use 255-way because '\n' is used as a delimiter, and causes problems |
23 | 54 | while parsing. | 55 | while parsing. |
24 | 55 | """ | 56 | """ |
30 | 56 | bytes = '\x00'.join([struct.pack('>L', _crc32(bit)) for bit in key]) | 57 | data = b'\x00'.join([struct.pack('>L', _crc32(bit)) for bit in key]) |
31 | 57 | return bytes.replace('\n', '_') | 58 | return data.replace(b'\n', b'_') |
32 | 58 | 59 | ||
33 | 59 | 60 | ||
34 | 60 | def _deserialise_leaf_node(bytes, key, search_key_func=None): | 61 | def _deserialise_leaf_node(data, key, search_key_func=None): |
35 | 61 | """Deserialise bytes, with key key, into a LeafNode. | 62 | """Deserialise bytes, with key key, into a LeafNode. |
36 | 62 | 63 | ||
37 | 63 | :param bytes: The bytes of the node. | 64 | :param bytes: The bytes of the node. |
38 | @@ -72,13 +73,13 @@ | |||
39 | 72 | result = _LeafNode(search_key_func=search_key_func) | 73 | result = _LeafNode(search_key_func=search_key_func) |
40 | 73 | # Splitlines can split on '\r' so don't use it, split('\n') adds an | 74 | # Splitlines can split on '\r' so don't use it, split('\n') adds an |
41 | 74 | # extra '' if the bytes ends in a final newline. | 75 | # extra '' if the bytes ends in a final newline. |
43 | 75 | lines = bytes.split('\n') | 76 | lines = data.split(b'\n') |
44 | 76 | trailing = lines.pop() | 77 | trailing = lines.pop() |
46 | 77 | if trailing != '': | 78 | if trailing != b'': |
47 | 78 | raise AssertionError('We did not have a final newline for %s' | 79 | raise AssertionError('We did not have a final newline for %s' |
48 | 79 | % (key,)) | 80 | % (key,)) |
49 | 80 | items = {} | 81 | items = {} |
51 | 81 | if lines[0] != 'chkleaf:': | 82 | if lines[0] != b'chkleaf:': |
52 | 82 | raise ValueError("not a serialised leaf node: %r" % bytes) | 83 | raise ValueError("not a serialised leaf node: %r" % bytes) |
53 | 83 | maximum_size = int(lines[1]) | 84 | maximum_size = int(lines[1]) |
54 | 84 | width = int(lines[2]) | 85 | width = int(lines[2]) |
55 | @@ -87,7 +88,7 @@ | |||
56 | 87 | pos = 5 | 88 | pos = 5 |
57 | 88 | while pos < len(lines): | 89 | while pos < len(lines): |
58 | 89 | line = prefix + lines[pos] | 90 | line = prefix + lines[pos] |
60 | 90 | elements = line.split('\x00') | 91 | elements = line.split(b'\x00') |
61 | 91 | pos += 1 | 92 | pos += 1 |
62 | 92 | if len(elements) != width + 1: | 93 | if len(elements) != width + 1: |
63 | 93 | raise AssertionError( | 94 | raise AssertionError( |
64 | @@ -96,7 +97,7 @@ | |||
65 | 96 | num_value_lines = int(elements[-1]) | 97 | num_value_lines = int(elements[-1]) |
66 | 97 | value_lines = lines[pos:pos+num_value_lines] | 98 | value_lines = lines[pos:pos+num_value_lines] |
67 | 98 | pos += num_value_lines | 99 | pos += num_value_lines |
69 | 99 | value = '\n'.join(value_lines) | 100 | value = b'\n'.join(value_lines) |
70 | 100 | items[StaticTuple.from_sequence(elements[:-1])] = value | 101 | items[StaticTuple.from_sequence(elements[:-1])] = value |
71 | 101 | if len(items) != length: | 102 | if len(items) != length: |
72 | 102 | raise AssertionError("item count (%d) mismatch for key %s," | 103 | raise AssertionError("item count (%d) mismatch for key %s," |
73 | @@ -115,12 +116,12 @@ | |||
74 | 115 | else: | 116 | else: |
75 | 116 | result._search_prefix = _unknown | 117 | result._search_prefix = _unknown |
76 | 117 | result._common_serialised_prefix = prefix | 118 | result._common_serialised_prefix = prefix |
78 | 118 | if len(bytes) != result._current_size(): | 119 | if len(data) != result._current_size(): |
79 | 119 | raise AssertionError('_current_size computed incorrectly') | 120 | raise AssertionError('_current_size computed incorrectly') |
80 | 120 | return result | 121 | return result |
81 | 121 | 122 | ||
82 | 122 | 123 | ||
84 | 123 | def _deserialise_internal_node(bytes, key, search_key_func=None): | 124 | def _deserialise_internal_node(data, key, search_key_func=None): |
85 | 124 | global _unknown, _LeafNode, _InternalNode | 125 | global _unknown, _LeafNode, _InternalNode |
86 | 125 | if _InternalNode is None: | 126 | if _InternalNode is None: |
87 | 126 | from breezy import chk_map | 127 | from breezy import chk_map |
88 | @@ -131,12 +132,12 @@ | |||
89 | 131 | # Splitlines can split on '\r' so don't use it, remove the extra '' | 132 | # Splitlines can split on '\r' so don't use it, remove the extra '' |
90 | 132 | # from the result of split('\n') because we should have a trailing | 133 | # from the result of split('\n') because we should have a trailing |
91 | 133 | # newline | 134 | # newline |
94 | 134 | lines = bytes.split('\n') | 135 | lines = data.split(b'\n') |
95 | 135 | if lines[-1] != '': | 136 | if lines[-1] != b'': |
96 | 136 | raise ValueError("last line must be ''") | 137 | raise ValueError("last line must be ''") |
97 | 137 | lines.pop(-1) | 138 | lines.pop(-1) |
98 | 138 | items = {} | 139 | items = {} |
100 | 139 | if lines[0] != 'chknode:': | 140 | if lines[0] != b'chknode:': |
101 | 140 | raise ValueError("not a serialised internal node: %r" % bytes) | 141 | raise ValueError("not a serialised internal node: %r" % bytes) |
102 | 141 | maximum_size = int(lines[1]) | 142 | maximum_size = int(lines[1]) |
103 | 142 | width = int(lines[2]) | 143 | width = int(lines[2]) |
104 | @@ -144,7 +145,7 @@ | |||
105 | 144 | common_prefix = lines[4] | 145 | common_prefix = lines[4] |
106 | 145 | for line in lines[5:]: | 146 | for line in lines[5:]: |
107 | 146 | line = common_prefix + line | 147 | line = common_prefix + line |
109 | 147 | prefix, flat_key = line.rsplit('\x00', 1) | 148 | prefix, flat_key = line.rsplit(b'\x00', 1) |
110 | 148 | items[prefix] = StaticTuple(flat_key,) | 149 | items[prefix] = StaticTuple(flat_key,) |
111 | 149 | if len(items) == 0: | 150 | if len(items) == 0: |
112 | 150 | raise AssertionError("We didn't find any item for %s" % key) | 151 | raise AssertionError("We didn't find any item for %s" % key) |
113 | @@ -161,9 +162,9 @@ | |||
114 | 161 | return result | 162 | return result |
115 | 162 | 163 | ||
116 | 163 | 164 | ||
118 | 164 | def _bytes_to_text_key(bytes): | 165 | def _bytes_to_text_key(data): |
119 | 165 | """Take a CHKInventory value string and return a (file_id, rev_id) tuple""" | 166 | """Take a CHKInventory value string and return a (file_id, rev_id) tuple""" |
123 | 166 | sections = bytes.split('\n') | 167 | sections = data.split(b'\n') |
124 | 167 | kind, file_id = sections[0].split(': ') | 168 | kind, file_id = sections[0].split(b': ') |
125 | 168 | return (intern(file_id), intern(sections[3])) | 169 | return (bytesintern(file_id), bytesintern(sections[3])) |
126 | 169 | 170 | ||
127 | 170 | 171 | ||
128 | === modified file 'breezy/_chunks_to_lines_py.py' | |||
129 | --- breezy/_chunks_to_lines_py.py 2017-05-21 18:10:28 +0000 | |||
130 | +++ breezy/_chunks_to_lines_py.py 2017-06-11 01:47:17 +0000 | |||
131 | @@ -41,10 +41,10 @@ | |||
132 | 41 | if not chunk: | 41 | if not chunk: |
133 | 42 | # Empty strings are never valid lines | 42 | # Empty strings are never valid lines |
134 | 43 | break | 43 | break |
136 | 44 | elif '\n' in chunk[:-1]: | 44 | elif b'\n' in chunk[:-1]: |
137 | 45 | # This chunk has an extra '\n', so we will have to split it | 45 | # This chunk has an extra '\n', so we will have to split it |
138 | 46 | break | 46 | break |
140 | 47 | elif chunk[-1] != '\n': | 47 | elif chunk[-1:] != b'\n': |
141 | 48 | # This chunk does not have a trailing newline | 48 | # This chunk does not have a trailing newline |
142 | 49 | last_no_newline = True | 49 | last_no_newline = True |
143 | 50 | else: | 50 | else: |
144 | @@ -56,4 +56,4 @@ | |||
145 | 56 | 56 | ||
146 | 57 | # These aren't simple lines, just join and split again. | 57 | # These aren't simple lines, just join and split again. |
147 | 58 | from breezy import osutils | 58 | from breezy import osutils |
149 | 59 | return osutils._split_lines(''.join(chunks)) | 59 | return osutils._split_lines(b''.join(chunks)) |
150 | 60 | 60 | ||
151 | === modified file 'breezy/_dirstate_helpers_py.py' | |||
152 | --- breezy/_dirstate_helpers_py.py 2017-06-10 01:57:23 +0000 | |||
153 | +++ breezy/_dirstate_helpers_py.py 2017-06-11 01:47:17 +0000 | |||
154 | @@ -184,7 +184,7 @@ | |||
155 | 184 | if not isinstance(path2, bytes): | 184 | if not isinstance(path2, bytes): |
156 | 185 | raise TypeError("'path2' must be a plain string, not %s: %r" | 185 | raise TypeError("'path2' must be a plain string, not %s: %r" |
157 | 186 | % (type(path2), path2)) | 186 | % (type(path2), path2)) |
159 | 187 | return path1.split('/') < path2.split('/') | 187 | return path1.split(b'/') < path2.split(b'/') |
160 | 188 | 188 | ||
161 | 189 | 189 | ||
162 | 190 | def _lt_path_by_dirblock(path1, path2): | 190 | def _lt_path_by_dirblock(path1, path2): |
163 | @@ -207,7 +207,7 @@ | |||
164 | 207 | dirname1, basename1 = os.path.split(path1) | 207 | dirname1, basename1 = os.path.split(path1) |
165 | 208 | key1 = (dirname1.split(b'/'), basename1) | 208 | key1 = (dirname1.split(b'/'), basename1) |
166 | 209 | dirname2, basename2 = os.path.split(path2) | 209 | dirname2, basename2 = os.path.split(path2) |
168 | 210 | key2 = (dirname2.split('/'), basename2) | 210 | key2 = (dirname2.split(b'/'), basename2) |
169 | 211 | return key1 < key2 | 211 | return key1 < key2 |
170 | 212 | 212 | ||
171 | 213 | 213 | ||
172 | 214 | 214 | ||
173 | === modified file 'breezy/_groupcompress_py.py' | |||
174 | --- breezy/_groupcompress_py.py 2017-06-04 18:09:30 +0000 | |||
175 | +++ breezy/_groupcompress_py.py 2017-06-11 01:47:17 +0000 | |||
176 | @@ -290,7 +290,7 @@ | |||
177 | 290 | if bytes_length is None: | 290 | if bytes_length is None: |
178 | 291 | bytes_length = sum(map(len, new_lines)) | 291 | bytes_length = sum(map(len, new_lines)) |
179 | 292 | # reserved for content type, content length | 292 | # reserved for content type, content length |
181 | 293 | out_lines = ['', '', encode_base128_int(bytes_length)] | 293 | out_lines = [b'', b'', encode_base128_int(bytes_length)] |
182 | 294 | index_lines = [False, False, False] | 294 | index_lines = [False, False, False] |
183 | 295 | output_handler = _OutputHandler(out_lines, index_lines, | 295 | output_handler = _OutputHandler(out_lines, index_lines, |
184 | 296 | self._MIN_MATCH_BYTES) | 296 | self._MIN_MATCH_BYTES) |
185 | @@ -316,26 +316,26 @@ | |||
186 | 316 | 316 | ||
187 | 317 | def encode_base128_int(val): | 317 | def encode_base128_int(val): |
188 | 318 | """Convert an integer into a 7-bit lsb encoding.""" | 318 | """Convert an integer into a 7-bit lsb encoding.""" |
190 | 319 | bytes = [] | 319 | data = bytearray() |
191 | 320 | count = 0 | 320 | count = 0 |
192 | 321 | while val >= 0x80: | 321 | while val >= 0x80: |
194 | 322 | bytes.append(chr((val | 0x80) & 0xFF)) | 322 | data.append((val | 0x80) & 0xFF) |
195 | 323 | val >>= 7 | 323 | val >>= 7 |
201 | 324 | bytes.append(chr(val)) | 324 | data.append(val) |
202 | 325 | return ''.join(bytes) | 325 | return bytes(data) |
203 | 326 | 326 | ||
204 | 327 | 327 | ||
205 | 328 | def decode_base128_int(bytes): | 328 | def decode_base128_int(data): |
206 | 329 | """Decode an integer from a 7-bit lsb encoding.""" | 329 | """Decode an integer from a 7-bit lsb encoding.""" |
207 | 330 | offset = 0 | 330 | offset = 0 |
208 | 331 | val = 0 | 331 | val = 0 |
209 | 332 | shift = 0 | 332 | shift = 0 |
211 | 333 | bval = ord(bytes[offset]) | 333 | bval = ord(data[offset]) |
212 | 334 | while bval >= 0x80: | 334 | while bval >= 0x80: |
213 | 335 | val |= (bval & 0x7F) << shift | 335 | val |= (bval & 0x7F) << shift |
214 | 336 | shift += 7 | 336 | shift += 7 |
215 | 337 | offset += 1 | 337 | offset += 1 |
217 | 338 | bval = ord(bytes[offset]) | 338 | bval = ord(data[offset]) |
218 | 339 | val |= bval << shift | 339 | val |= bval << shift |
219 | 340 | offset += 1 | 340 | offset += 1 |
220 | 341 | return val, offset | 341 | return val, offset |
221 | 342 | 342 | ||
222 | === modified file 'breezy/bzrworkingtree.py' | |||
223 | --- breezy/bzrworkingtree.py 2017-06-10 18:44:23 +0000 | |||
224 | +++ breezy/bzrworkingtree.py 2017-06-11 01:47:17 +0000 | |||
225 | @@ -78,11 +78,11 @@ | |||
226 | 78 | ) | 78 | ) |
227 | 79 | 79 | ||
228 | 80 | 80 | ||
230 | 81 | MERGE_MODIFIED_HEADER_1 = "BZR merge-modified list format 1" | 81 | MERGE_MODIFIED_HEADER_1 = b"BZR merge-modified list format 1" |
231 | 82 | # TODO: Modifying the conflict objects or their type is currently nearly | 82 | # TODO: Modifying the conflict objects or their type is currently nearly |
232 | 83 | # impossible as there is no clear relationship between the working tree format | 83 | # impossible as there is no clear relationship between the working tree format |
233 | 84 | # and the conflict list file format. | 84 | # and the conflict list file format. |
235 | 85 | CONFLICT_HEADER_1 = "BZR conflict list format 1" | 85 | CONFLICT_HEADER_1 = b"BZR conflict list format 1" |
236 | 86 | 86 | ||
237 | 87 | 87 | ||
238 | 88 | class InventoryWorkingTree(WorkingTree,MutableInventoryTree): | 88 | class InventoryWorkingTree(WorkingTree,MutableInventoryTree): |
239 | @@ -382,7 +382,7 @@ | |||
240 | 382 | return _mod_conflicts.ConflictList() | 382 | return _mod_conflicts.ConflictList() |
241 | 383 | try: | 383 | try: |
242 | 384 | try: | 384 | try: |
244 | 385 | if next(confile) != CONFLICT_HEADER_1 + '\n': | 385 | if next(confile) != CONFLICT_HEADER_1 + b'\n': |
245 | 386 | raise errors.ConflictFormatError() | 386 | raise errors.ConflictFormatError() |
246 | 387 | except StopIteration: | 387 | except StopIteration: |
247 | 388 | raise errors.ConflictFormatError() | 388 | raise errors.ConflictFormatError() |
248 | @@ -650,7 +650,7 @@ | |||
249 | 650 | 650 | ||
250 | 651 | def _put_rio(self, filename, stanzas, header): | 651 | def _put_rio(self, filename, stanzas, header): |
251 | 652 | self._must_be_locked() | 652 | self._must_be_locked() |
253 | 653 | my_file = _mod_rio.rio_file(stanzas, header.encode('ascii')) | 653 | my_file = _mod_rio.rio_file(stanzas, header) |
254 | 654 | self._transport.put_file(filename, my_file, | 654 | self._transport.put_file(filename, my_file, |
255 | 655 | mode=self.controldir._get_file_mode()) | 655 | mode=self.controldir._get_file_mode()) |
256 | 656 | 656 | ||
257 | @@ -680,7 +680,7 @@ | |||
258 | 680 | try: | 680 | try: |
259 | 681 | merge_hashes = {} | 681 | merge_hashes = {} |
260 | 682 | try: | 682 | try: |
262 | 683 | if next(hashfile) != MERGE_MODIFIED_HEADER_1 + '\n': | 683 | if next(hashfile) != MERGE_MODIFIED_HEADER_1 + b'\n': |
263 | 684 | raise errors.MergeModifiedFormatError() | 684 | raise errors.MergeModifiedFormatError() |
264 | 685 | except StopIteration: | 685 | except StopIteration: |
265 | 686 | raise errors.MergeModifiedFormatError() | 686 | raise errors.MergeModifiedFormatError() |
266 | 687 | 687 | ||
267 | === modified file 'breezy/chk_map.py' | |||
268 | --- breezy/chk_map.py 2017-06-05 20:48:31 +0000 | |||
269 | +++ breezy/chk_map.py 2017-06-11 01:47:17 +0000 | |||
270 | @@ -100,7 +100,7 @@ | |||
271 | 100 | 100 | ||
272 | 101 | def _search_key_plain(key): | 101 | def _search_key_plain(key): |
273 | 102 | """Map the key tuple into a search string that just uses the key bytes.""" | 102 | """Map the key tuple into a search string that just uses the key bytes.""" |
275 | 103 | return '\x00'.join(key) | 103 | return b'\x00'.join(key) |
276 | 104 | 104 | ||
277 | 105 | 105 | ||
278 | 106 | search_key_registry = registry.Registry() | 106 | search_key_registry = registry.Registry() |
279 | @@ -197,8 +197,8 @@ | |||
280 | 197 | self._ensure_root() | 197 | self._ensure_root() |
281 | 198 | res = self._dump_tree_node(self._root_node, prefix='', indent='', | 198 | res = self._dump_tree_node(self._root_node, prefix='', indent='', |
282 | 199 | include_keys=include_keys) | 199 | include_keys=include_keys) |
285 | 200 | res.append('') # Give a trailing '\n' | 200 | res.append(b'') # Give a trailing '\n' |
286 | 201 | return '\n'.join(res) | 201 | return b'\n'.join(res) |
287 | 202 | 202 | ||
288 | 203 | def _dump_tree_node(self, node, prefix, indent, include_keys=True): | 203 | def _dump_tree_node(self, node, prefix, indent, include_keys=True): |
289 | 204 | """For this node and all children, generate a string representation.""" | 204 | """For this node and all children, generate a string representation.""" |
290 | @@ -208,11 +208,11 @@ | |||
291 | 208 | else: | 208 | else: |
292 | 209 | node_key = node.key() | 209 | node_key = node.key() |
293 | 210 | if node_key is not None: | 210 | if node_key is not None: |
295 | 211 | key_str = ' %s' % (node_key[0],) | 211 | key_str = b' %s' % (node_key[0],) |
296 | 212 | else: | 212 | else: |
300 | 213 | key_str = ' None' | 213 | key_str = b' None' |
301 | 214 | result.append('%s%r %s%s' % (indent, prefix, node.__class__.__name__, | 214 | result.append(b'%s%r %s%s' % (indent, prefix, node.__class__.__name__, |
302 | 215 | key_str)) | 215 | key_str)) |
303 | 216 | if isinstance(node, InternalNode): | 216 | if isinstance(node, InternalNode): |
304 | 217 | # Trigger all child nodes to get loaded | 217 | # Trigger all child nodes to get loaded |
305 | 218 | list(node._iter_nodes(self._store)) | 218 | list(node._iter_nodes(self._store)) |
306 | @@ -223,7 +223,7 @@ | |||
307 | 223 | for key, value in sorted(viewitems(node._items)): | 223 | for key, value in sorted(viewitems(node._items)): |
308 | 224 | # Don't use prefix nor indent here to line up when used in | 224 | # Don't use prefix nor indent here to line up when used in |
309 | 225 | # tests in conjunction with assertEqualDiff | 225 | # tests in conjunction with assertEqualDiff |
311 | 226 | result.append(' %r %r' % (tuple(key), value)) | 226 | result.append(b' %r %r' % (tuple(key), value)) |
312 | 227 | return result | 227 | return result |
313 | 228 | 228 | ||
314 | 229 | @classmethod | 229 | @classmethod |
315 | @@ -680,7 +680,7 @@ | |||
316 | 680 | if not common_prefix: | 680 | if not common_prefix: |
317 | 681 | # if common_prefix is the empty string, then we know it won't | 681 | # if common_prefix is the empty string, then we know it won't |
318 | 682 | # change further | 682 | # change further |
320 | 683 | return '' | 683 | return b'' |
321 | 684 | return common_prefix | 684 | return common_prefix |
322 | 685 | 685 | ||
323 | 686 | 686 | ||
324 | @@ -786,7 +786,7 @@ | |||
325 | 786 | # TODO: Should probably be done without actually joining the key, but | 786 | # TODO: Should probably be done without actually joining the key, but |
326 | 787 | # then that can be done via the C extension | 787 | # then that can be done via the C extension |
327 | 788 | return (len(self._serialise_key(key)) + 1 | 788 | return (len(self._serialise_key(key)) + 1 |
329 | 789 | + len(str(value.count('\n'))) + 1 | 789 | + len(str(value.count(b'\n'))) + 1 |
330 | 790 | + len(value) + 1) | 790 | + len(value) + 1) |
331 | 791 | 791 | ||
332 | 792 | def _search_key(self, key): | 792 | def _search_key(self, key): |
333 | @@ -853,7 +853,7 @@ | |||
334 | 853 | # may get a '\00' node anywhere, but won't have keys of | 853 | # may get a '\00' node anywhere, but won't have keys of |
335 | 854 | # different lengths. | 854 | # different lengths. |
336 | 855 | if len(prefix) < split_at: | 855 | if len(prefix) < split_at: |
338 | 856 | prefix += '\x00'*(split_at - len(prefix)) | 856 | prefix += b'\x00'*(split_at - len(prefix)) |
339 | 857 | if prefix not in result: | 857 | if prefix not in result: |
340 | 858 | node = LeafNode(search_key_func=self._search_key_func) | 858 | node = LeafNode(search_key_func=self._search_key_func) |
341 | 859 | node.set_maximum_size(self._maximum_size) | 859 | node.set_maximum_size(self._maximum_size) |
342 | @@ -889,7 +889,7 @@ | |||
343 | 889 | raise AssertionError('%r must be known' % self._search_prefix) | 889 | raise AssertionError('%r must be known' % self._search_prefix) |
344 | 890 | return self._search_prefix, [("", self)] | 890 | return self._search_prefix, [("", self)] |
345 | 891 | 891 | ||
347 | 892 | _serialise_key = '\x00'.join | 892 | _serialise_key = b'\x00'.join |
348 | 893 | 893 | ||
349 | 894 | def serialise(self, store): | 894 | def serialise(self, store): |
350 | 895 | """Serialise the LeafNode to store. | 895 | """Serialise the LeafNode to store. |
351 | @@ -897,22 +897,22 @@ | |||
352 | 897 | :param store: A VersionedFiles honouring the CHK extensions. | 897 | :param store: A VersionedFiles honouring the CHK extensions. |
353 | 898 | :return: An iterable of the keys inserted by this operation. | 898 | :return: An iterable of the keys inserted by this operation. |
354 | 899 | """ | 899 | """ |
359 | 900 | lines = ["chkleaf:\n"] | 900 | lines = [b"chkleaf:\n"] |
360 | 901 | lines.append("%d\n" % self._maximum_size) | 901 | lines.append(b"%d\n" % self._maximum_size) |
361 | 902 | lines.append("%d\n" % self._key_width) | 902 | lines.append(b"%d\n" % self._key_width) |
362 | 903 | lines.append("%d\n" % self._len) | 903 | lines.append(b"%d\n" % self._len) |
363 | 904 | if self._common_serialised_prefix is None: | 904 | if self._common_serialised_prefix is None: |
365 | 905 | lines.append('\n') | 905 | lines.append(b'\n') |
366 | 906 | if len(self._items) != 0: | 906 | if len(self._items) != 0: |
367 | 907 | raise AssertionError('If _common_serialised_prefix is None' | 907 | raise AssertionError('If _common_serialised_prefix is None' |
368 | 908 | ' we should have no items') | 908 | ' we should have no items') |
369 | 909 | else: | 909 | else: |
371 | 910 | lines.append('%s\n' % (self._common_serialised_prefix,)) | 910 | lines.append(b'%s\n' % (self._common_serialised_prefix,)) |
372 | 911 | prefix_len = len(self._common_serialised_prefix) | 911 | prefix_len = len(self._common_serialised_prefix) |
373 | 912 | for key, value in sorted(viewitems(self._items)): | 912 | for key, value in sorted(viewitems(self._items)): |
374 | 913 | # Always add a final newline | 913 | # Always add a final newline |
377 | 914 | value_lines = osutils.chunks_to_lines([value + '\n']) | 914 | value_lines = osutils.chunks_to_lines([value + b'\n']) |
378 | 915 | serialized = "%s\x00%s\n" % (self._serialise_key(key), | 915 | serialized = b"%s\x00%d\n" % (self._serialise_key(key), |
379 | 916 | len(value_lines)) | 916 | len(value_lines)) |
380 | 917 | if not serialized.startswith(self._common_serialised_prefix): | 917 | if not serialized.startswith(self._common_serialised_prefix): |
381 | 918 | raise AssertionError('We thought the common prefix was %r' | 918 | raise AssertionError('We thought the common prefix was %r' |
382 | @@ -921,11 +921,11 @@ | |||
383 | 921 | lines.append(serialized[prefix_len:]) | 921 | lines.append(serialized[prefix_len:]) |
384 | 922 | lines.extend(value_lines) | 922 | lines.extend(value_lines) |
385 | 923 | sha1, _, _ = store.add_lines((None,), (), lines) | 923 | sha1, _, _ = store.add_lines((None,), (), lines) |
389 | 924 | self._key = StaticTuple("sha1:" + sha1,).intern() | 924 | self._key = StaticTuple(b"sha1:" + sha1,).intern() |
390 | 925 | bytes = ''.join(lines) | 925 | data = b''.join(lines) |
391 | 926 | if len(bytes) != self._current_size(): | 926 | if len(data) != self._current_size(): |
392 | 927 | raise AssertionError('Invalid _current_size') | 927 | raise AssertionError('Invalid _current_size') |
394 | 928 | _get_cache()[self._key] = bytes | 928 | _get_cache()[self._key] = data |
395 | 929 | return [self._key] | 929 | return [self._key] |
396 | 930 | 930 | ||
397 | 931 | def refs(self): | 931 | def refs(self): |
398 | @@ -1304,34 +1304,34 @@ | |||
399 | 1304 | continue | 1304 | continue |
400 | 1305 | for key in node.serialise(store): | 1305 | for key in node.serialise(store): |
401 | 1306 | yield key | 1306 | yield key |
406 | 1307 | lines = ["chknode:\n"] | 1307 | lines = [b"chknode:\n"] |
407 | 1308 | lines.append("%d\n" % self._maximum_size) | 1308 | lines.append(b"%d\n" % self._maximum_size) |
408 | 1309 | lines.append("%d\n" % self._key_width) | 1309 | lines.append(b"%d\n" % self._key_width) |
409 | 1310 | lines.append("%d\n" % self._len) | 1310 | lines.append(b"%d\n" % self._len) |
410 | 1311 | if self._search_prefix is None: | 1311 | if self._search_prefix is None: |
411 | 1312 | raise AssertionError("_search_prefix should not be None") | 1312 | raise AssertionError("_search_prefix should not be None") |
413 | 1313 | lines.append('%s\n' % (self._search_prefix,)) | 1313 | lines.append(b'%s\n' % (self._search_prefix,)) |
414 | 1314 | prefix_len = len(self._search_prefix) | 1314 | prefix_len = len(self._search_prefix) |
415 | 1315 | for prefix, node in sorted(viewitems(self._items)): | 1315 | for prefix, node in sorted(viewitems(self._items)): |
416 | 1316 | if isinstance(node, StaticTuple): | 1316 | if isinstance(node, StaticTuple): |
417 | 1317 | key = node[0] | 1317 | key = node[0] |
418 | 1318 | else: | 1318 | else: |
419 | 1319 | key = node._key[0] | 1319 | key = node._key[0] |
421 | 1320 | serialised = "%s\x00%s\n" % (prefix, key) | 1320 | serialised = b"%s\x00%s\n" % (prefix, key) |
422 | 1321 | if not serialised.startswith(self._search_prefix): | 1321 | if not serialised.startswith(self._search_prefix): |
423 | 1322 | raise AssertionError("prefixes mismatch: %s must start with %s" | 1322 | raise AssertionError("prefixes mismatch: %s must start with %s" |
424 | 1323 | % (serialised, self._search_prefix)) | 1323 | % (serialised, self._search_prefix)) |
425 | 1324 | lines.append(serialised[prefix_len:]) | 1324 | lines.append(serialised[prefix_len:]) |
426 | 1325 | sha1, _, _ = store.add_lines((None,), (), lines) | 1325 | sha1, _, _ = store.add_lines((None,), (), lines) |
429 | 1326 | self._key = StaticTuple("sha1:" + sha1,).intern() | 1326 | self._key = StaticTuple(b"sha1:" + sha1,).intern() |
430 | 1327 | _get_cache()[self._key] = ''.join(lines) | 1327 | _get_cache()[self._key] = b''.join(lines) |
431 | 1328 | yield self._key | 1328 | yield self._key |
432 | 1329 | 1329 | ||
433 | 1330 | def _search_key(self, key): | 1330 | def _search_key(self, key): |
434 | 1331 | """Return the serialised key for key in this node.""" | 1331 | """Return the serialised key for key in this node.""" |
435 | 1332 | # search keys are fixed width. All will be self._node_width wide, so we | 1332 | # search keys are fixed width. All will be self._node_width wide, so we |
436 | 1333 | # pad as necessary. | 1333 | # pad as necessary. |
438 | 1334 | return (self._search_key_func(key) + '\x00'*self._node_width)[:self._node_width] | 1334 | return (self._search_key_func(key) + b'\x00'*self._node_width)[:self._node_width] |
439 | 1335 | 1335 | ||
440 | 1336 | def _search_prefix_filter(self, key): | 1336 | def _search_prefix_filter(self, key): |
441 | 1337 | """Serialise key for use as a prefix filter in iteritems.""" | 1337 | """Serialise key for use as a prefix filter in iteritems.""" |
442 | @@ -1450,12 +1450,12 @@ | |||
443 | 1450 | return new_leaf | 1450 | return new_leaf |
444 | 1451 | 1451 | ||
445 | 1452 | 1452 | ||
447 | 1453 | def _deserialise(bytes, key, search_key_func): | 1453 | def _deserialise(data, key, search_key_func): |
448 | 1454 | """Helper for repositorydetails - convert bytes to a node.""" | 1454 | """Helper for repositorydetails - convert bytes to a node.""" |
453 | 1455 | if bytes.startswith("chkleaf:\n"): | 1455 | if data.startswith(b"chkleaf:\n"): |
454 | 1456 | node = LeafNode.deserialise(bytes, key, search_key_func=search_key_func) | 1456 | node = LeafNode.deserialise(data, key, search_key_func=search_key_func) |
455 | 1457 | elif bytes.startswith("chknode:\n"): | 1457 | elif data.startswith(b"chknode:\n"): |
456 | 1458 | node = InternalNode.deserialise(bytes, key, | 1458 | node = InternalNode.deserialise(data, key, |
457 | 1459 | search_key_func=search_key_func) | 1459 | search_key_func=search_key_func) |
458 | 1460 | else: | 1460 | else: |
459 | 1461 | raise AssertionError("Unknown node type.") | 1461 | raise AssertionError("Unknown node type.") |
460 | 1462 | 1462 | ||
461 | === modified file 'breezy/commit.py' | |||
462 | --- breezy/commit.py 2017-05-30 20:17:23 +0000 | |||
463 | +++ breezy/commit.py 2017-06-11 01:47:17 +0000 | |||
464 | @@ -280,7 +280,7 @@ | |||
465 | 280 | raise errors.RootNotRich() | 280 | raise errors.RootNotRich() |
466 | 281 | if message_callback is None: | 281 | if message_callback is None: |
467 | 282 | if message is not None: | 282 | if message is not None: |
469 | 283 | if isinstance(message, str): | 283 | if isinstance(message, bytes): |
470 | 284 | message = message.decode(get_user_encoding()) | 284 | message = message.decode(get_user_encoding()) |
471 | 285 | message_callback = lambda x: message | 285 | message_callback = lambda x: message |
472 | 286 | else: | 286 | else: |
473 | @@ -657,7 +657,7 @@ | |||
474 | 657 | """ | 657 | """ |
475 | 658 | exclude = self.exclude | 658 | exclude = self.exclude |
476 | 659 | specific_files = self.specific_files | 659 | specific_files = self.specific_files |
478 | 660 | mutter("Selecting files for commit with filter %s", specific_files) | 660 | mutter("Selecting files for commit with filter %r", specific_files) |
479 | 661 | 661 | ||
480 | 662 | self._check_strict() | 662 | self._check_strict() |
481 | 663 | if self.use_record_iter_changes: | 663 | if self.use_record_iter_changes: |
482 | 664 | 664 | ||
483 | === modified file 'breezy/config.py' | |||
484 | --- breezy/config.py 2017-06-10 12:56:18 +0000 | |||
485 | +++ breezy/config.py 2017-06-11 01:47:17 +0000 | |||
486 | @@ -477,10 +477,12 @@ | |||
487 | 477 | If no username can be found, errors.NoWhoami exception is raised. | 477 | If no username can be found, errors.NoWhoami exception is raised. |
488 | 478 | """ | 478 | """ |
489 | 479 | v = os.environ.get('BRZ_EMAIL') | 479 | v = os.environ.get('BRZ_EMAIL') |
492 | 480 | if v and not PY3: | 480 | if v: |
493 | 481 | return v.decode(osutils.get_user_encoding()) | 481 | if not PY3: |
494 | 482 | v = v.decode(osutils.get_user_encoding()) | ||
495 | 483 | return v | ||
496 | 482 | v = self._get_user_id() | 484 | v = self._get_user_id() |
498 | 483 | if v and not PY3: | 485 | if v: |
499 | 484 | return v | 486 | return v |
500 | 485 | return default_email() | 487 | return default_email() |
501 | 486 | 488 | ||
502 | @@ -1497,11 +1499,15 @@ | |||
503 | 1497 | 1499 | ||
504 | 1498 | def default_email(): | 1500 | def default_email(): |
505 | 1499 | v = os.environ.get('BRZ_EMAIL') | 1501 | v = os.environ.get('BRZ_EMAIL') |
508 | 1500 | if v and not PY3: | 1502 | if v: |
509 | 1501 | return v.decode(osutils.get_user_encoding()) | 1503 | if not PY3: |
510 | 1504 | v = v.decode(osutils.get_user_encoding()) | ||
511 | 1505 | return v | ||
512 | 1502 | v = os.environ.get('EMAIL') | 1506 | v = os.environ.get('EMAIL') |
515 | 1503 | if v and not PY3: | 1507 | if v: |
516 | 1504 | return v.decode(osutils.get_user_encoding()) | 1508 | if not PY3: |
517 | 1509 | v = v.decode(osutils.get_user_encoding()) | ||
518 | 1510 | return v | ||
519 | 1505 | name, email = _auto_user_id() | 1511 | name, email = _auto_user_id() |
520 | 1506 | if name and email: | 1512 | if name and email: |
521 | 1507 | return u'%s <%s>' % (name, email) | 1513 | return u'%s <%s>' % (name, email) |
522 | 1508 | 1514 | ||
523 | === modified file 'breezy/dirstate.py' | |||
524 | --- breezy/dirstate.py 2017-06-10 02:39:00 +0000 | |||
525 | +++ breezy/dirstate.py 2017-06-11 01:47:17 +0000 | |||
526 | @@ -3541,9 +3541,13 @@ | |||
527 | 3541 | source_details = DirState.NULL_PARENT_DETAILS | 3541 | source_details = DirState.NULL_PARENT_DETAILS |
528 | 3542 | else: | 3542 | else: |
529 | 3543 | source_details = entry[1][self.source_index] | 3543 | source_details = entry[1][self.source_index] |
530 | 3544 | # GZ 2017-06-09: Eck, more sets. | ||
531 | 3545 | _fdltr = {b'f', b'd', b'l', b't', b'r'} | ||
532 | 3546 | _fdlt = {b'f', b'd', b'l', b't'} | ||
533 | 3547 | _ra = (b'r', b'a') | ||
534 | 3544 | target_details = entry[1][self.target_index] | 3548 | target_details = entry[1][self.target_index] |
535 | 3545 | target_minikind = target_details[0] | 3549 | target_minikind = target_details[0] |
537 | 3546 | if path_info is not None and target_minikind in 'fdlt': | 3550 | if path_info is not None and target_minikind in _fdlt: |
538 | 3547 | if not (self.target_index == 0): | 3551 | if not (self.target_index == 0): |
539 | 3548 | raise AssertionError() | 3552 | raise AssertionError() |
540 | 3549 | link_or_sha1 = update_entry(self.state, entry, | 3553 | link_or_sha1 = update_entry(self.state, entry, |
541 | @@ -3555,10 +3559,6 @@ | |||
542 | 3555 | link_or_sha1 = None | 3559 | link_or_sha1 = None |
543 | 3556 | file_id = entry[0][2] | 3560 | file_id = entry[0][2] |
544 | 3557 | source_minikind = source_details[0] | 3561 | source_minikind = source_details[0] |
545 | 3558 | # GZ 2017-06-09: Eck, more sets. | ||
546 | 3559 | _fdltr = {b'f', b'd', b'l', b't', b'r'} | ||
547 | 3560 | _fdlt = {b'f', b'd', b'l', b't'} | ||
548 | 3561 | _ra = (b'r', b'a') | ||
549 | 3562 | if source_minikind in _fdltr and target_minikind in _fdlt: | 3562 | if source_minikind in _fdltr and target_minikind in _fdlt: |
550 | 3563 | # claimed content in both: diff | 3563 | # claimed content in both: diff |
551 | 3564 | # r | fdlt | | add source to search, add id path move and perform | 3564 | # r | fdlt | | add source to search, add id path move and perform |
552 | 3565 | 3565 | ||
553 | === modified file 'breezy/groupcompress.py' | |||
554 | --- breezy/groupcompress.py 2017-06-05 20:48:31 +0000 | |||
555 | +++ breezy/groupcompress.py 2017-06-11 01:47:17 +0000 | |||
556 | @@ -61,7 +61,7 @@ | |||
557 | 61 | BATCH_SIZE = 2**16 | 61 | BATCH_SIZE = 2**16 |
558 | 62 | 62 | ||
559 | 63 | # osutils.sha_string('') | 63 | # osutils.sha_string('') |
561 | 64 | _null_sha1 = 'da39a3ee5e6b4b0d3255bfef95601890afd80709' | 64 | _null_sha1 = b'da39a3ee5e6b4b0d3255bfef95601890afd80709' |
562 | 65 | 65 | ||
563 | 66 | def sort_gc_optimal(parent_map): | 66 | def sort_gc_optimal(parent_map): |
564 | 67 | """Sort and group the keys in parent_map into groupcompress order. | 67 | """Sort and group the keys in parent_map into groupcompress order. |
565 | @@ -75,8 +75,8 @@ | |||
566 | 75 | # properly grouped by file-id. | 75 | # properly grouped by file-id. |
567 | 76 | per_prefix_map = {} | 76 | per_prefix_map = {} |
568 | 77 | for key, value in viewitems(parent_map): | 77 | for key, value in viewitems(parent_map): |
571 | 78 | if isinstance(key, str) or len(key) == 1: | 78 | if isinstance(key, bytes) or len(key) == 1: |
572 | 79 | prefix = '' | 79 | prefix = b'' |
573 | 80 | else: | 80 | else: |
574 | 81 | prefix = key[0] | 81 | prefix = key[0] |
575 | 82 | try: | 82 | try: |
576 | @@ -102,9 +102,9 @@ | |||
577 | 102 | """ | 102 | """ |
578 | 103 | 103 | ||
579 | 104 | # Group Compress Block v1 Zlib | 104 | # Group Compress Block v1 Zlib |
581 | 105 | GCB_HEADER = 'gcb1z\n' | 105 | GCB_HEADER = b'gcb1z\n' |
582 | 106 | # Group Compress Block v1 Lzma | 106 | # Group Compress Block v1 Lzma |
584 | 107 | GCB_LZ_HEADER = 'gcb1l\n' | 107 | GCB_LZ_HEADER = b'gcb1l\n' |
585 | 108 | GCB_KNOWN_HEADERS = (GCB_HEADER, GCB_LZ_HEADER) | 108 | GCB_KNOWN_HEADERS = (GCB_HEADER, GCB_LZ_HEADER) |
586 | 109 | 109 | ||
587 | 110 | def __init__(self): | 110 | def __init__(self): |
588 | @@ -141,7 +141,7 @@ | |||
589 | 141 | # Expand the content if required | 141 | # Expand the content if required |
590 | 142 | if self._content is None: | 142 | if self._content is None: |
591 | 143 | if self._content_chunks is not None: | 143 | if self._content_chunks is not None: |
593 | 144 | self._content = ''.join(self._content_chunks) | 144 | self._content = b''.join(self._content_chunks) |
594 | 145 | self._content_chunks = None | 145 | self._content_chunks = None |
595 | 146 | if self._content is None: | 146 | if self._content is None: |
596 | 147 | # We join self._z_content_chunks here, because if we are | 147 | # We join self._z_content_chunks here, because if we are |
597 | @@ -149,9 +149,9 @@ | |||
598 | 149 | # chunk | 149 | # chunk |
599 | 150 | if self._z_content_chunks is None: | 150 | if self._z_content_chunks is None: |
600 | 151 | raise AssertionError('No content to decompress') | 151 | raise AssertionError('No content to decompress') |
604 | 152 | z_content = ''.join(self._z_content_chunks) | 152 | z_content = b''.join(self._z_content_chunks) |
605 | 153 | if z_content == '': | 153 | if z_content == b'': |
606 | 154 | self._content = '' | 154 | self._content = b'' |
607 | 155 | elif self._compressor_name == 'lzma': | 155 | elif self._compressor_name == 'lzma': |
608 | 156 | # We don't do partial lzma decomp yet | 156 | # We don't do partial lzma decomp yet |
609 | 157 | import pylzma | 157 | import pylzma |
610 | @@ -201,7 +201,7 @@ | |||
611 | 201 | # The stream is finished | 201 | # The stream is finished |
612 | 202 | self._z_content_decompressor = None | 202 | self._z_content_decompressor = None |
613 | 203 | 203 | ||
615 | 204 | def _parse_bytes(self, bytes, pos): | 204 | def _parse_bytes(self, data, pos): |
616 | 205 | """Read the various lengths from the header. | 205 | """Read the various lengths from the header. |
617 | 206 | 206 | ||
618 | 207 | This also populates the various 'compressed' buffers. | 207 | This also populates the various 'compressed' buffers. |
619 | @@ -211,17 +211,17 @@ | |||
620 | 211 | # At present, we have 2 integers for the compressed and uncompressed | 211 | # At present, we have 2 integers for the compressed and uncompressed |
621 | 212 | # content. In base10 (ascii) 14 bytes can represent > 1TB, so to avoid | 212 | # content. In base10 (ascii) 14 bytes can represent > 1TB, so to avoid |
622 | 213 | # checking too far, cap the search to 14 bytes. | 213 | # checking too far, cap the search to 14 bytes. |
630 | 214 | pos2 = bytes.index('\n', pos, pos + 14) | 214 | pos2 = data.index(b'\n', pos, pos + 14) |
631 | 215 | self._z_content_length = int(bytes[pos:pos2]) | 215 | self._z_content_length = int(data[pos:pos2]) |
632 | 216 | pos = pos2 + 1 | 216 | pos = pos2 + 1 |
633 | 217 | pos2 = bytes.index('\n', pos, pos + 14) | 217 | pos2 = data.index(b'\n', pos, pos + 14) |
634 | 218 | self._content_length = int(bytes[pos:pos2]) | 218 | self._content_length = int(data[pos:pos2]) |
635 | 219 | pos = pos2 + 1 | 219 | pos = pos2 + 1 |
636 | 220 | if len(bytes) != (pos + self._z_content_length): | 220 | if len(data) != (pos + self._z_content_length): |
637 | 221 | # XXX: Define some GCCorrupt error ? | 221 | # XXX: Define some GCCorrupt error ? |
638 | 222 | raise AssertionError('Invalid bytes: (%d) != %d + %d' % | 222 | raise AssertionError('Invalid bytes: (%d) != %d + %d' % |
641 | 223 | (len(bytes), pos, self._z_content_length)) | 223 | (len(data), pos, self._z_content_length)) |
642 | 224 | self._z_content_chunks = (bytes[pos:],) | 224 | self._z_content_chunks = (data[pos:],) |
643 | 225 | 225 | ||
644 | 226 | @property | 226 | @property |
645 | 227 | def _z_content(self): | 227 | def _z_content(self): |
646 | @@ -230,7 +230,7 @@ | |||
647 | 230 | Meant only to be used by the test suite. | 230 | Meant only to be used by the test suite. |
648 | 231 | """ | 231 | """ |
649 | 232 | if self._z_content_chunks is not None: | 232 | if self._z_content_chunks is not None: |
651 | 233 | return ''.join(self._z_content_chunks) | 233 | return b''.join(self._z_content_chunks) |
652 | 234 | return None | 234 | return None |
653 | 235 | 235 | ||
654 | 236 | @classmethod | 236 | @classmethod |
655 | @@ -257,17 +257,17 @@ | |||
656 | 257 | :return: The bytes for the content | 257 | :return: The bytes for the content |
657 | 258 | """ | 258 | """ |
658 | 259 | if start == end == 0: | 259 | if start == end == 0: |
660 | 260 | return '' | 260 | return b'' |
661 | 261 | self._ensure_content(end) | 261 | self._ensure_content(end) |
662 | 262 | # The bytes are 'f' or 'd' for the type, then a variable-length | 262 | # The bytes are 'f' or 'd' for the type, then a variable-length |
663 | 263 | # base128 integer for the content size, then the actual content | 263 | # base128 integer for the content size, then the actual content |
664 | 264 | # We know that the variable-length integer won't be longer than 5 | 264 | # We know that the variable-length integer won't be longer than 5 |
665 | 265 | # bytes (it takes 5 bytes to encode 2^32) | 265 | # bytes (it takes 5 bytes to encode 2^32) |
666 | 266 | c = self._content[start] | 266 | c = self._content[start] |
668 | 267 | if c == 'f': | 267 | if c == b'f': |
669 | 268 | type = 'fulltext' | 268 | type = 'fulltext' |
670 | 269 | else: | 269 | else: |
672 | 270 | if c != 'd': | 270 | if c != b'd': |
673 | 271 | raise ValueError('Unknown content control code: %s' | 271 | raise ValueError('Unknown content control code: %s' |
674 | 272 | % (c,)) | 272 | % (c,)) |
675 | 273 | type = 'delta' | 273 | type = 'delta' |
676 | @@ -277,11 +277,10 @@ | |||
677 | 277 | if end != content_start + content_len: | 277 | if end != content_start + content_len: |
678 | 278 | raise ValueError('end != len according to field header' | 278 | raise ValueError('end != len according to field header' |
679 | 279 | ' %s != %s' % (end, content_start + content_len)) | 279 | ' %s != %s' % (end, content_start + content_len)) |
685 | 280 | if c == 'f': | 280 | if c == b'f': |
686 | 281 | bytes = self._content[content_start:end] | 281 | return self._content[content_start:end] |
687 | 282 | elif c == 'd': | 282 | # Must be type delta as checked above |
688 | 283 | bytes = apply_delta_to_source(self._content, content_start, end) | 283 | return apply_delta_to_source(self._content, content_start, end) |
684 | 284 | return bytes | ||
689 | 285 | 284 | ||
690 | 286 | def set_chunked_content(self, content_chunks, length): | 285 | def set_chunked_content(self, content_chunks, length): |
691 | 287 | """Set the content of this block to the given chunks.""" | 286 | """Set the content of this block to the given chunks.""" |
692 | @@ -324,7 +323,7 @@ | |||
693 | 324 | """Create the byte stream as a series of 'chunks'""" | 323 | """Create the byte stream as a series of 'chunks'""" |
694 | 325 | self._create_z_content() | 324 | self._create_z_content() |
695 | 326 | header = self.GCB_HEADER | 325 | header = self.GCB_HEADER |
697 | 327 | chunks = ['%s%d\n%d\n' | 326 | chunks = [b'%s%d\n%d\n' |
698 | 328 | % (header, self._z_content_length, self._content_length), | 327 | % (header, self._z_content_length, self._content_length), |
699 | 329 | ] | 328 | ] |
700 | 330 | chunks.extend(self._z_content_chunks) | 329 | chunks.extend(self._z_content_chunks) |
701 | @@ -334,7 +333,7 @@ | |||
702 | 334 | def to_bytes(self): | 333 | def to_bytes(self): |
703 | 335 | """Encode the information into a byte stream.""" | 334 | """Encode the information into a byte stream.""" |
704 | 336 | total_len, chunks = self.to_chunks() | 335 | total_len, chunks = self.to_chunks() |
706 | 337 | return ''.join(chunks) | 336 | return b''.join(chunks) |
707 | 338 | 337 | ||
708 | 339 | def _dump(self, include_text=False): | 338 | def _dump(self, include_text=False): |
709 | 340 | """Take this block, and spit out a human-readable structure. | 339 | """Take this block, and spit out a human-readable structure. |
710 | @@ -352,7 +351,7 @@ | |||
711 | 352 | while pos < self._content_length: | 351 | while pos < self._content_length: |
712 | 353 | kind = self._content[pos] | 352 | kind = self._content[pos] |
713 | 354 | pos += 1 | 353 | pos += 1 |
715 | 355 | if kind not in ('f', 'd'): | 354 | if kind not in (b'f', b'd'): |
716 | 356 | raise ValueError('invalid kind character: %r' % (kind,)) | 355 | raise ValueError('invalid kind character: %r' % (kind,)) |
717 | 357 | content_len, len_len = decode_base128_int( | 356 | content_len, len_len = decode_base128_int( |
718 | 358 | self._content[pos:pos + 5]) | 357 | self._content[pos:pos + 5]) |
719 | @@ -360,18 +359,18 @@ | |||
720 | 360 | if content_len + pos > self._content_length: | 359 | if content_len + pos > self._content_length: |
721 | 361 | raise ValueError('invalid content_len %d for record @ pos %d' | 360 | raise ValueError('invalid content_len %d for record @ pos %d' |
722 | 362 | % (content_len, pos - len_len - 1)) | 361 | % (content_len, pos - len_len - 1)) |
724 | 363 | if kind == 'f': # Fulltext | 362 | if kind == b'f': # Fulltext |
725 | 364 | if include_text: | 363 | if include_text: |
726 | 365 | text = self._content[pos:pos+content_len] | 364 | text = self._content[pos:pos+content_len] |
728 | 366 | result.append(('f', content_len, text)) | 365 | result.append((b'f', content_len, text)) |
729 | 367 | else: | 366 | else: |
732 | 368 | result.append(('f', content_len)) | 367 | result.append((b'f', content_len)) |
733 | 369 | elif kind == 'd': # Delta | 368 | elif kind == b'd': # Delta |
734 | 370 | delta_content = self._content[pos:pos+content_len] | 369 | delta_content = self._content[pos:pos+content_len] |
735 | 371 | delta_info = [] | 370 | delta_info = [] |
736 | 372 | # The first entry in a delta is the decompressed length | 371 | # The first entry in a delta is the decompressed length |
737 | 373 | decomp_len, delta_pos = decode_base128_int(delta_content) | 372 | decomp_len, delta_pos = decode_base128_int(delta_content) |
739 | 374 | result.append(('d', content_len, decomp_len, delta_info)) | 373 | result.append((b'd', content_len, decomp_len, delta_info)) |
740 | 375 | measured_len = 0 | 374 | measured_len = 0 |
741 | 376 | while delta_pos < content_len: | 375 | while delta_pos < content_len: |
742 | 377 | c = ord(delta_content[delta_pos]) | 376 | c = ord(delta_content[delta_pos]) |
743 | @@ -382,16 +381,16 @@ | |||
744 | 382 | delta_pos) | 381 | delta_pos) |
745 | 383 | if include_text: | 382 | if include_text: |
746 | 384 | text = self._content[offset:offset+length] | 383 | text = self._content[offset:offset+length] |
748 | 385 | delta_info.append(('c', offset, length, text)) | 384 | delta_info.append((b'c', offset, length, text)) |
749 | 386 | else: | 385 | else: |
751 | 387 | delta_info.append(('c', offset, length)) | 386 | delta_info.append((b'c', offset, length)) |
752 | 388 | measured_len += length | 387 | measured_len += length |
753 | 389 | else: # Insert | 388 | else: # Insert |
754 | 390 | if include_text: | 389 | if include_text: |
755 | 391 | txt = delta_content[delta_pos:delta_pos+c] | 390 | txt = delta_content[delta_pos:delta_pos+c] |
756 | 392 | else: | 391 | else: |
757 | 393 | txt = '' | 392 | txt = '' |
759 | 394 | delta_info.append(('i', c, txt)) | 393 | delta_info.append((b'i', c, txt)) |
760 | 395 | measured_len += c | 394 | measured_len += c |
761 | 396 | delta_pos += c | 395 | delta_pos += c |
762 | 397 | if delta_pos != content_len: | 396 | if delta_pos != content_len: |
763 | @@ -447,7 +446,7 @@ | |||
764 | 447 | # wire bytes, something... | 446 | # wire bytes, something... |
765 | 448 | return self._manager._wire_bytes() | 447 | return self._manager._wire_bytes() |
766 | 449 | else: | 448 | else: |
768 | 450 | return '' | 449 | return b'' |
769 | 451 | if storage_kind in ('fulltext', 'chunked'): | 450 | if storage_kind in ('fulltext', 'chunked'): |
770 | 452 | if self._bytes is None: | 451 | if self._bytes is None: |
771 | 453 | # Grab and cache the raw bytes for this entry | 452 | # Grab and cache the raw bytes for this entry |
772 | @@ -842,7 +841,9 @@ | |||
773 | 842 | if sha1 == nostore_sha: | 841 | if sha1 == nostore_sha: |
774 | 843 | raise errors.ExistingContent() | 842 | raise errors.ExistingContent() |
775 | 844 | if key[-1] is None: | 843 | if key[-1] is None: |
777 | 845 | key = key[:-1] + ('sha1:' + sha1,) | 844 | # GZ 2017-06-10: Seems perverse to have to encode here. |
778 | 845 | sha1 = sha1.encode('ascii') | ||
779 | 846 | key = key[:-1] + (b'sha1:' + sha1,) | ||
780 | 846 | 847 | ||
781 | 847 | start, end, type = self._compress(key, bytes, len(bytes) / 2, soft) | 848 | start, end, type = self._compress(key, bytes, len(bytes) / 2, soft) |
782 | 848 | return sha1, start, end, type | 849 | return sha1, start, end, type |
783 | @@ -875,7 +876,7 @@ | |||
784 | 875 | (start_byte, start_chunk, end_byte, end_chunk) = self.labels_deltas[key] | 876 | (start_byte, start_chunk, end_byte, end_chunk) = self.labels_deltas[key] |
785 | 876 | delta_chunks = self.chunks[start_chunk:end_chunk] | 877 | delta_chunks = self.chunks[start_chunk:end_chunk] |
786 | 877 | stored_bytes = ''.join(delta_chunks) | 878 | stored_bytes = ''.join(delta_chunks) |
788 | 878 | if stored_bytes[0] == 'f': | 879 | if stored_bytes[0] == b'f': |
789 | 879 | fulltext_len, offset = decode_base128_int(stored_bytes[1:10]) | 880 | fulltext_len, offset = decode_base128_int(stored_bytes[1:10]) |
790 | 880 | data_len = fulltext_len + 1 + offset | 881 | data_len = fulltext_len + 1 + offset |
791 | 881 | if data_len != len(stored_bytes): | 882 | if data_len != len(stored_bytes): |
792 | @@ -947,14 +948,14 @@ | |||
793 | 947 | if delta_length > max_delta_size: | 948 | if delta_length > max_delta_size: |
794 | 948 | # The delta is longer than the fulltext, insert a fulltext | 949 | # The delta is longer than the fulltext, insert a fulltext |
795 | 949 | type = 'fulltext' | 950 | type = 'fulltext' |
797 | 950 | out_lines = ['f', encode_base128_int(input_len)] | 951 | out_lines = [b'f', encode_base128_int(input_len)] |
798 | 951 | out_lines.extend(new_lines) | 952 | out_lines.extend(new_lines) |
799 | 952 | index_lines = [False, False] | 953 | index_lines = [False, False] |
800 | 953 | index_lines.extend([True] * len(new_lines)) | 954 | index_lines.extend([True] * len(new_lines)) |
801 | 954 | else: | 955 | else: |
802 | 955 | # this is a worthy delta, output it | 956 | # this is a worthy delta, output it |
803 | 956 | type = 'delta' | 957 | type = 'delta' |
805 | 957 | out_lines[0] = 'd' | 958 | out_lines[0] = b'd' |
806 | 958 | # Update the delta_length to include those two encoded integers | 959 | # Update the delta_length to include those two encoded integers |
807 | 959 | out_lines[1] = encode_base128_int(delta_length) | 960 | out_lines[1] = encode_base128_int(delta_length) |
808 | 960 | # Before insertion | 961 | # Before insertion |
809 | @@ -1014,12 +1015,12 @@ | |||
810 | 1014 | enc_length = encode_base128_int(len(bytes)) | 1015 | enc_length = encode_base128_int(len(bytes)) |
811 | 1015 | len_mini_header = 1 + len(enc_length) | 1016 | len_mini_header = 1 + len(enc_length) |
812 | 1016 | self._delta_index.add_source(bytes, len_mini_header) | 1017 | self._delta_index.add_source(bytes, len_mini_header) |
814 | 1017 | new_chunks = ['f', enc_length, bytes] | 1018 | new_chunks = [b'f', enc_length, bytes] |
815 | 1018 | else: | 1019 | else: |
816 | 1019 | type = 'delta' | 1020 | type = 'delta' |
817 | 1020 | enc_length = encode_base128_int(len(delta)) | 1021 | enc_length = encode_base128_int(len(delta)) |
818 | 1021 | len_mini_header = 1 + len(enc_length) | 1022 | len_mini_header = 1 + len(enc_length) |
820 | 1022 | new_chunks = ['d', enc_length, delta] | 1023 | new_chunks = [b'd', enc_length, delta] |
821 | 1023 | self._delta_index.add_delta_source(delta, len_mini_header) | 1024 | self._delta_index.add_delta_source(delta, len_mini_header) |
822 | 1024 | # Before insertion | 1025 | # Before insertion |
823 | 1025 | start = self.endpoint | 1026 | start = self.endpoint |
824 | @@ -1715,13 +1716,13 @@ | |||
825 | 1715 | # the fulltext content at this point. Note that sometimes we | 1716 | # the fulltext content at this point. Note that sometimes we |
826 | 1716 | # will want it later (streaming CHK pages), but most of the | 1717 | # will want it later (streaming CHK pages), but most of the |
827 | 1717 | # time we won't (everything else) | 1718 | # time we won't (everything else) |
829 | 1718 | bytes = ''.join(chunks) | 1719 | data = b''.join(chunks) |
830 | 1719 | del chunks | 1720 | del chunks |
831 | 1720 | index, start, length = self._access.add_raw_records( | 1721 | index, start, length = self._access.add_raw_records( |
833 | 1721 | [(None, len(bytes))], bytes)[0] | 1722 | [(None, len(data))], data)[0] |
834 | 1722 | nodes = [] | 1723 | nodes = [] |
835 | 1723 | for key, reads, refs in keys_to_add: | 1724 | for key, reads, refs in keys_to_add: |
837 | 1724 | nodes.append((key, "%d %d %s" % (start, length, reads), refs)) | 1725 | nodes.append((key, b"%d %d %s" % (start, length, reads), refs)) |
838 | 1725 | self._index.add_records(nodes, random_id=random_id) | 1726 | self._index.add_records(nodes, random_id=random_id) |
839 | 1726 | self._unadded_refs = {} | 1727 | self._unadded_refs = {} |
840 | 1727 | del keys_to_add[:] | 1728 | del keys_to_add[:] |
841 | @@ -1777,7 +1778,7 @@ | |||
842 | 1777 | ' the current record, we cannot be positive' | 1778 | ' the current record, we cannot be positive' |
843 | 1778 | ' that the appropriate content was inserted.' | 1779 | ' that the appropriate content was inserted.' |
844 | 1779 | ) | 1780 | ) |
846 | 1780 | value = "%d %d %d %d" % (block_start, block_length, | 1781 | value = b"%d %d %d %d" % (block_start, block_length, |
847 | 1781 | record._start, record._end) | 1782 | record._start, record._end) |
848 | 1782 | nodes = [(record.key, value, (record.parents,))] | 1783 | nodes = [(record.key, value, (record.parents,))] |
849 | 1783 | # TODO: Consider buffering up many nodes to be added, not | 1784 | # TODO: Consider buffering up many nodes to be added, not |
850 | @@ -1827,7 +1828,7 @@ | |||
851 | 1827 | type) = self._compressor.compress(record.key, bytes, | 1828 | type) = self._compressor.compress(record.key, bytes, |
852 | 1828 | record.sha1) | 1829 | record.sha1) |
853 | 1829 | if record.key[-1] is None: | 1830 | if record.key[-1] is None: |
855 | 1830 | key = record.key[:-1] + ('sha1:' + found_sha1,) | 1831 | key = record.key[:-1] + (b'sha1:' + found_sha1,) |
856 | 1831 | else: | 1832 | else: |
857 | 1832 | key = record.key | 1833 | key = record.key |
858 | 1833 | self._unadded_refs[key] = record.parents | 1834 | self._unadded_refs[key] = record.parents |
859 | @@ -1838,7 +1839,8 @@ | |||
860 | 1838 | else: | 1839 | else: |
861 | 1839 | parents = None | 1840 | parents = None |
862 | 1840 | refs = static_tuple.StaticTuple(parents) | 1841 | refs = static_tuple.StaticTuple(parents) |
864 | 1841 | keys_to_add.append((key, '%d %d' % (start_point, end_point), refs)) | 1842 | keys_to_add.append( |
865 | 1843 | (key, b'%d %d' % (start_point, end_point), refs)) | ||
866 | 1842 | if len(keys_to_add): | 1844 | if len(keys_to_add): |
867 | 1843 | flush() | 1845 | flush() |
868 | 1844 | self._compressor = None | 1846 | self._compressor = None |
869 | 1845 | 1847 | ||
870 | === modified file 'breezy/index.py' | |||
871 | --- breezy/index.py 2017-06-10 01:57:00 +0000 | |||
872 | +++ breezy/index.py 2017-06-11 01:47:17 +0000 | |||
873 | @@ -56,8 +56,8 @@ | |||
874 | 56 | _SIGNATURE = b"Bazaar Graph Index 1\n" | 56 | _SIGNATURE = b"Bazaar Graph Index 1\n" |
875 | 57 | 57 | ||
876 | 58 | 58 | ||
879 | 59 | _whitespace_re = re.compile('[\t\n\x0b\x0c\r\x00 ]') | 59 | _whitespace_re = re.compile(b'[\t\n\x0b\x0c\r\x00 ]') |
880 | 60 | _newline_null_re = re.compile('[\n\0]') | 60 | _newline_null_re = re.compile(b'[\n\0]') |
881 | 61 | 61 | ||
882 | 62 | 62 | ||
883 | 63 | def _has_key_from_parent_map(self, key): | 63 | def _has_key_from_parent_map(self, key): |
884 | 64 | 64 | ||
885 | === modified file 'breezy/inventory.py' | |||
886 | --- breezy/inventory.py 2017-06-10 01:57:00 +0000 | |||
887 | +++ breezy/inventory.py 2017-06-11 01:47:17 +0000 | |||
888 | @@ -49,6 +49,8 @@ | |||
889 | 49 | trace, | 49 | trace, |
890 | 50 | ) | 50 | ) |
891 | 51 | from .sixish import ( | 51 | from .sixish import ( |
892 | 52 | bytesintern, | ||
893 | 53 | PY3, | ||
894 | 52 | viewitems, | 54 | viewitems, |
895 | 53 | viewvalues, | 55 | viewvalues, |
896 | 54 | ) | 56 | ) |
897 | @@ -217,7 +219,7 @@ | |||
898 | 217 | Traceback (most recent call last): | 219 | Traceback (most recent call last): |
899 | 218 | InvalidEntryName: Invalid entry name: src/hello.c | 220 | InvalidEntryName: Invalid entry name: src/hello.c |
900 | 219 | """ | 221 | """ |
902 | 220 | if '/' in name or '\\' in name: | 222 | if u'/' in name or u'\\' in name: |
903 | 221 | raise errors.InvalidEntryName(name=name) | 223 | raise errors.InvalidEntryName(name=name) |
904 | 222 | self.file_id = file_id | 224 | self.file_id = file_id |
905 | 223 | self.revision = None | 225 | self.revision = None |
906 | @@ -388,8 +390,8 @@ | |||
907 | 388 | # to provide a per-fileid log. The hash of every directory content is | 390 | # to provide a per-fileid log. The hash of every directory content is |
908 | 389 | # "da..." below (the sha1sum of ''). | 391 | # "da..." below (the sha1sum of ''). |
909 | 390 | checker.add_pending_item(rev_id, | 392 | checker.add_pending_item(rev_id, |
912 | 391 | ('texts', self.file_id, self.revision), 'text', | 393 | (b'texts', self.file_id, self.revision), b'text', |
913 | 392 | 'da39a3ee5e6b4b0d3255bfef95601890afd80709') | 394 | b'da39a3ee5e6b4b0d3255bfef95601890afd80709') |
914 | 393 | 395 | ||
915 | 394 | def copy(self): | 396 | def copy(self): |
916 | 395 | other = InventoryDirectory(self.file_id, self.name, self.parent_id) | 397 | other = InventoryDirectory(self.file_id, self.name, self.parent_id) |
917 | @@ -428,7 +430,7 @@ | |||
918 | 428 | """See InventoryEntry._check""" | 430 | """See InventoryEntry._check""" |
919 | 429 | # TODO: check size too. | 431 | # TODO: check size too. |
920 | 430 | checker.add_pending_item(tree_revision_id, | 432 | checker.add_pending_item(tree_revision_id, |
922 | 431 | ('texts', self.file_id, self.revision), 'text', | 433 | (b'texts', self.file_id, self.revision), b'text', |
923 | 432 | self.text_sha1) | 434 | self.text_sha1) |
924 | 433 | if self.text_size is None: | 435 | if self.text_size is None: |
925 | 434 | checker._report_items.append( | 436 | checker._report_items.append( |
926 | @@ -528,8 +530,8 @@ | |||
927 | 528 | % (self.file_id, tree_revision_id)) | 530 | % (self.file_id, tree_revision_id)) |
928 | 529 | # Symlinks are stored as '' | 531 | # Symlinks are stored as '' |
929 | 530 | checker.add_pending_item(tree_revision_id, | 532 | checker.add_pending_item(tree_revision_id, |
932 | 531 | ('texts', self.file_id, self.revision), 'text', | 533 | (b'texts', self.file_id, self.revision), b'text', |
933 | 532 | 'da39a3ee5e6b4b0d3255bfef95601890afd80709') | 534 | b'da39a3ee5e6b4b0d3255bfef95601890afd80709') |
934 | 533 | 535 | ||
935 | 534 | def copy(self): | 536 | def copy(self): |
936 | 535 | other = InventoryLink(self.file_id, self.name, self.parent_id) | 537 | other = InventoryLink(self.file_id, self.name, self.parent_id) |
937 | @@ -1398,25 +1400,25 @@ | |||
938 | 1398 | if entry.parent_id is not None: | 1400 | if entry.parent_id is not None: |
939 | 1399 | parent_str = entry.parent_id | 1401 | parent_str = entry.parent_id |
940 | 1400 | else: | 1402 | else: |
942 | 1401 | parent_str = '' | 1403 | parent_str = b'' |
943 | 1402 | name_str = entry.name.encode("utf8") | 1404 | name_str = entry.name.encode("utf8") |
944 | 1403 | if entry.kind == 'file': | 1405 | if entry.kind == 'file': |
945 | 1404 | if entry.executable: | 1406 | if entry.executable: |
947 | 1405 | exec_str = "Y" | 1407 | exec_str = b"Y" |
948 | 1406 | else: | 1408 | else: |
951 | 1407 | exec_str = "N" | 1409 | exec_str = b"N" |
952 | 1408 | return "file: %s\n%s\n%s\n%s\n%s\n%d\n%s" % ( | 1410 | return b"file: %s\n%s\n%s\n%s\n%s\n%d\n%s" % ( |
953 | 1409 | entry.file_id, parent_str, name_str, entry.revision, | 1411 | entry.file_id, parent_str, name_str, entry.revision, |
954 | 1410 | entry.text_sha1, entry.text_size, exec_str) | 1412 | entry.text_sha1, entry.text_size, exec_str) |
955 | 1411 | elif entry.kind == 'directory': | 1413 | elif entry.kind == 'directory': |
957 | 1412 | return "dir: %s\n%s\n%s\n%s" % ( | 1414 | return b"dir: %s\n%s\n%s\n%s" % ( |
958 | 1413 | entry.file_id, parent_str, name_str, entry.revision) | 1415 | entry.file_id, parent_str, name_str, entry.revision) |
959 | 1414 | elif entry.kind == 'symlink': | 1416 | elif entry.kind == 'symlink': |
961 | 1415 | return "symlink: %s\n%s\n%s\n%s\n%s" % ( | 1417 | return b"symlink: %s\n%s\n%s\n%s\n%s" % ( |
962 | 1416 | entry.file_id, parent_str, name_str, entry.revision, | 1418 | entry.file_id, parent_str, name_str, entry.revision, |
963 | 1417 | entry.symlink_target.encode("utf8")) | 1419 | entry.symlink_target.encode("utf8")) |
964 | 1418 | elif entry.kind == 'tree-reference': | 1420 | elif entry.kind == 'tree-reference': |
966 | 1419 | return "tree: %s\n%s\n%s\n%s\n%s" % ( | 1421 | return b"tree: %s\n%s\n%s\n%s\n%s" % ( |
967 | 1420 | entry.file_id, parent_str, name_str, entry.revision, | 1422 | entry.file_id, parent_str, name_str, entry.revision, |
968 | 1421 | entry.reference_revision) | 1423 | entry.reference_revision) |
969 | 1422 | else: | 1424 | else: |
970 | @@ -1534,43 +1536,43 @@ | |||
971 | 1534 | return other | 1536 | return other |
972 | 1535 | 1537 | ||
973 | 1536 | @staticmethod | 1538 | @staticmethod |
976 | 1537 | def _bytes_to_utf8name_key(bytes): | 1539 | def _bytes_to_utf8name_key(data): |
977 | 1538 | """Get the file_id, revision_id key out of bytes.""" | 1540 | """Get the file_id, revision_id key out of data.""" |
978 | 1539 | # We don't normally care about name, except for times when we want | 1541 | # We don't normally care about name, except for times when we want |
979 | 1540 | # to filter out empty names because of non rich-root... | 1542 | # to filter out empty names because of non rich-root... |
983 | 1541 | sections = bytes.split('\n') | 1543 | sections = data.split(b'\n') |
984 | 1542 | kind, file_id = sections[0].split(': ') | 1544 | kind, file_id = sections[0].split(b': ') |
985 | 1543 | return (sections[2], intern(file_id), intern(sections[3])) | 1545 | return (sections[2], bytesintern(file_id), bytesintern(sections[3])) |
986 | 1544 | 1546 | ||
987 | 1545 | def _bytes_to_entry(self, bytes): | 1547 | def _bytes_to_entry(self, bytes): |
988 | 1546 | """Deserialise a serialised entry.""" | 1548 | """Deserialise a serialised entry.""" |
991 | 1547 | sections = bytes.split('\n') | 1549 | sections = bytes.split(b'\n') |
992 | 1548 | if sections[0].startswith("file: "): | 1550 | if sections[0].startswith(b"file: "): |
993 | 1549 | result = InventoryFile(sections[0][6:], | 1551 | result = InventoryFile(sections[0][6:], |
994 | 1550 | sections[2].decode('utf8'), | 1552 | sections[2].decode('utf8'), |
995 | 1551 | sections[1]) | 1553 | sections[1]) |
996 | 1552 | result.text_sha1 = sections[4] | 1554 | result.text_sha1 = sections[4] |
997 | 1553 | result.text_size = int(sections[5]) | 1555 | result.text_size = int(sections[5]) |
1000 | 1554 | result.executable = sections[6] == "Y" | 1556 | result.executable = sections[6] == b"Y" |
1001 | 1555 | elif sections[0].startswith("dir: "): | 1557 | elif sections[0].startswith(b"dir: "): |
1002 | 1556 | result = CHKInventoryDirectory(sections[0][5:], | 1558 | result = CHKInventoryDirectory(sections[0][5:], |
1003 | 1557 | sections[2].decode('utf8'), | 1559 | sections[2].decode('utf8'), |
1004 | 1558 | sections[1], self) | 1560 | sections[1], self) |
1006 | 1559 | elif sections[0].startswith("symlink: "): | 1561 | elif sections[0].startswith(b"symlink: "): |
1007 | 1560 | result = InventoryLink(sections[0][9:], | 1562 | result = InventoryLink(sections[0][9:], |
1008 | 1561 | sections[2].decode('utf8'), | 1563 | sections[2].decode('utf8'), |
1009 | 1562 | sections[1]) | 1564 | sections[1]) |
1010 | 1563 | result.symlink_target = sections[4].decode('utf8') | 1565 | result.symlink_target = sections[4].decode('utf8') |
1012 | 1564 | elif sections[0].startswith("tree: "): | 1566 | elif sections[0].startswith(b"tree: "): |
1013 | 1565 | result = TreeReference(sections[0][6:], | 1567 | result = TreeReference(sections[0][6:], |
1014 | 1566 | sections[2].decode('utf8'), | 1568 | sections[2].decode('utf8'), |
1015 | 1567 | sections[1]) | 1569 | sections[1]) |
1016 | 1568 | result.reference_revision = sections[4] | 1570 | result.reference_revision = sections[4] |
1017 | 1569 | else: | 1571 | else: |
1018 | 1570 | raise ValueError("Not a serialised entry %r" % bytes) | 1572 | raise ValueError("Not a serialised entry %r" % bytes) |
1022 | 1571 | result.file_id = intern(result.file_id) | 1573 | result.file_id = bytesintern(result.file_id) |
1023 | 1572 | result.revision = intern(sections[3]) | 1574 | result.revision = bytesintern(sections[3]) |
1024 | 1573 | if result.parent_id == '': | 1575 | if result.parent_id == b'': |
1025 | 1574 | result.parent_id = None | 1576 | result.parent_id = None |
1026 | 1575 | self._fileid_to_entry_cache[result.file_id] = result | 1577 | self._fileid_to_entry_cache[result.file_id] = result |
1027 | 1576 | return result | 1578 | return result |
1028 | @@ -1754,18 +1756,18 @@ | |||
1029 | 1754 | for. | 1756 | for. |
1030 | 1755 | :return: A CHKInventory | 1757 | :return: A CHKInventory |
1031 | 1756 | """ | 1758 | """ |
1034 | 1757 | lines = bytes.split('\n') | 1759 | lines = bytes.split(b'\n') |
1035 | 1758 | if lines[-1] != '': | 1760 | if lines[-1] != b'': |
1036 | 1759 | raise AssertionError('bytes to deserialize must end with an eol') | 1761 | raise AssertionError('bytes to deserialize must end with an eol') |
1037 | 1760 | lines.pop() | 1762 | lines.pop() |
1039 | 1761 | if lines[0] != 'chkinventory:': | 1763 | if lines[0] != b'chkinventory:': |
1040 | 1762 | raise ValueError("not a serialised CHKInventory: %r" % bytes) | 1764 | raise ValueError("not a serialised CHKInventory: %r" % bytes) |
1041 | 1763 | info = {} | 1765 | info = {} |
1045 | 1764 | allowed_keys = frozenset(['root_id', 'revision_id', 'search_key_name', | 1766 | allowed_keys = frozenset((b'root_id', b'revision_id', |
1046 | 1765 | 'parent_id_basename_to_file_id', | 1767 | b'parent_id_basename_to_file_id', |
1047 | 1766 | 'id_to_entry']) | 1768 | b'search_key_name', b'id_to_entry')) |
1048 | 1767 | for line in lines[1:]: | 1769 | for line in lines[1:]: |
1050 | 1768 | key, value = line.split(': ', 1) | 1770 | key, value = line.split(b': ', 1) |
1051 | 1769 | if key not in allowed_keys: | 1771 | if key not in allowed_keys: |
1052 | 1770 | raise errors.BzrError('Unknown key in inventory: %r\n%r' | 1772 | raise errors.BzrError('Unknown key in inventory: %r\n%r' |
1053 | 1771 | % (key, bytes)) | 1773 | % (key, bytes)) |
1054 | @@ -1773,16 +1775,16 @@ | |||
1055 | 1773 | raise errors.BzrError('Duplicate key in inventory: %r\n%r' | 1775 | raise errors.BzrError('Duplicate key in inventory: %r\n%r' |
1056 | 1774 | % (key, bytes)) | 1776 | % (key, bytes)) |
1057 | 1775 | info[key] = value | 1777 | info[key] = value |
1064 | 1776 | revision_id = intern(info['revision_id']) | 1778 | revision_id = bytesintern(info[b'revision_id']) |
1065 | 1777 | root_id = intern(info['root_id']) | 1779 | root_id = bytesintern(info[b'root_id']) |
1066 | 1778 | search_key_name = intern(info.get('search_key_name', 'plain')) | 1780 | search_key_name = bytesintern(info.get(b'search_key_name', b'plain')) |
1067 | 1779 | parent_id_basename_to_file_id = intern(info.get( | 1781 | parent_id_basename_to_file_id = bytesintern(info.get( |
1068 | 1780 | 'parent_id_basename_to_file_id', None)) | 1782 | b'parent_id_basename_to_file_id', None)) |
1069 | 1781 | if not parent_id_basename_to_file_id.startswith('sha1:'): | 1783 | if not parent_id_basename_to_file_id.startswith(b'sha1:'): |
1070 | 1782 | raise ValueError('parent_id_basename_to_file_id should be a sha1' | 1784 | raise ValueError('parent_id_basename_to_file_id should be a sha1' |
1071 | 1783 | ' key not %r' % (parent_id_basename_to_file_id,)) | 1785 | ' key not %r' % (parent_id_basename_to_file_id,)) |
1074 | 1784 | id_to_entry = info['id_to_entry'] | 1786 | id_to_entry = info[b'id_to_entry'] |
1075 | 1785 | if not id_to_entry.startswith('sha1:'): | 1787 | if not id_to_entry.startswith(b'sha1:'): |
1076 | 1786 | raise ValueError('id_to_entry should be a sha1' | 1788 | raise ValueError('id_to_entry should be a sha1' |
1077 | 1787 | ' key not %r' % (id_to_entry,)) | 1789 | ' key not %r' % (id_to_entry,)) |
1078 | 1788 | 1790 | ||
1079 | @@ -1790,7 +1792,7 @@ | |||
1080 | 1790 | result.revision_id = revision_id | 1792 | result.revision_id = revision_id |
1081 | 1791 | result.root_id = root_id | 1793 | result.root_id = root_id |
1082 | 1792 | search_key_func = chk_map.search_key_registry.get( | 1794 | search_key_func = chk_map.search_key_registry.get( |
1084 | 1793 | result._search_key_name) | 1795 | result._search_key_name.decode("ascii")) |
1085 | 1794 | if parent_id_basename_to_file_id is not None: | 1796 | if parent_id_basename_to_file_id is not None: |
1086 | 1795 | result.parent_id_basename_to_file_id = chk_map.CHKMap( | 1797 | result.parent_id_basename_to_file_id = chk_map.CHKMap( |
1087 | 1796 | chk_store, StaticTuple(parent_id_basename_to_file_id,), | 1798 | chk_store, StaticTuple(parent_id_basename_to_file_id,), |
1088 | @@ -1856,7 +1858,7 @@ | |||
1089 | 1856 | if entry.parent_id is not None: | 1858 | if entry.parent_id is not None: |
1090 | 1857 | parent_id = entry.parent_id | 1859 | parent_id = entry.parent_id |
1091 | 1858 | else: | 1860 | else: |
1093 | 1859 | parent_id = '' | 1861 | parent_id = b'' |
1094 | 1860 | return StaticTuple(parent_id, entry.name.encode('utf8')).intern() | 1862 | return StaticTuple(parent_id, entry.name.encode('utf8')).intern() |
1095 | 1861 | 1863 | ||
1096 | 1862 | def __getitem__(self, file_id): | 1864 | def __getitem__(self, file_id): |
1097 | @@ -1868,7 +1870,7 @@ | |||
1098 | 1868 | return result | 1870 | return result |
1099 | 1869 | try: | 1871 | try: |
1100 | 1870 | return self._bytes_to_entry( | 1872 | return self._bytes_to_entry( |
1102 | 1871 | self.id_to_entry.iteritems([StaticTuple(file_id,)]).next()[1]) | 1873 | next(self.id_to_entry.iteritems([StaticTuple(file_id,)]))[1]) |
1103 | 1872 | except StopIteration: | 1874 | except StopIteration: |
1104 | 1873 | # really we're passing an inventory, not a tree... | 1875 | # really we're passing an inventory, not a tree... |
1105 | 1874 | raise errors.NoSuchId(self, file_id) | 1876 | raise errors.NoSuchId(self, file_id) |
1106 | @@ -1951,7 +1953,7 @@ | |||
1107 | 1951 | last_parent_id = last_parent_ie = None | 1953 | last_parent_id = last_parent_ie = None |
1108 | 1952 | pid_items = self.parent_id_basename_to_file_id.iteritems() | 1954 | pid_items = self.parent_id_basename_to_file_id.iteritems() |
1109 | 1953 | for key, child_file_id in pid_items: | 1955 | for key, child_file_id in pid_items: |
1111 | 1954 | if key == ('', ''): # This is the root | 1956 | if key == (b'', b''): # This is the root |
1112 | 1955 | if child_file_id != self.root_id: | 1957 | if child_file_id != self.root_id: |
1113 | 1956 | raise ValueError('Data inconsistency detected.' | 1958 | raise ValueError('Data inconsistency detected.' |
1114 | 1957 | ' We expected data with key ("","") to match' | 1959 | ' We expected data with key ("","") to match' |
1115 | @@ -2129,22 +2131,23 @@ | |||
1116 | 2129 | 2131 | ||
1117 | 2130 | def to_lines(self): | 2132 | def to_lines(self): |
1118 | 2131 | """Serialise the inventory to lines.""" | 2133 | """Serialise the inventory to lines.""" |
1120 | 2132 | lines = ["chkinventory:\n"] | 2134 | lines = [b"chkinventory:\n"] |
1121 | 2133 | if self._search_key_name != 'plain': | 2135 | if self._search_key_name != 'plain': |
1122 | 2134 | # custom ordering grouping things that don't change together | 2136 | # custom ordering grouping things that don't change together |
1126 | 2135 | lines.append('search_key_name: %s\n' % (self._search_key_name,)) | 2137 | lines.append(b'search_key_name: %s\n' % ( |
1127 | 2136 | lines.append("root_id: %s\n" % self.root_id) | 2138 | self._search_key_name.encode('ascii'))) |
1128 | 2137 | lines.append('parent_id_basename_to_file_id: %s\n' % | 2139 | lines.append(b"root_id: %s\n" % self.root_id) |
1129 | 2140 | lines.append(b'parent_id_basename_to_file_id: %s\n' % | ||
1130 | 2138 | (self.parent_id_basename_to_file_id.key()[0],)) | 2141 | (self.parent_id_basename_to_file_id.key()[0],)) |
1133 | 2139 | lines.append("revision_id: %s\n" % self.revision_id) | 2142 | lines.append(b"revision_id: %s\n" % self.revision_id) |
1134 | 2140 | lines.append("id_to_entry: %s\n" % (self.id_to_entry.key()[0],)) | 2143 | lines.append(b"id_to_entry: %s\n" % (self.id_to_entry.key()[0],)) |
1135 | 2141 | else: | 2144 | else: |
1138 | 2142 | lines.append("revision_id: %s\n" % self.revision_id) | 2145 | lines.append(b"revision_id: %s\n" % self.revision_id) |
1139 | 2143 | lines.append("root_id: %s\n" % self.root_id) | 2146 | lines.append(b"root_id: %s\n" % self.root_id) |
1140 | 2144 | if self.parent_id_basename_to_file_id is not None: | 2147 | if self.parent_id_basename_to_file_id is not None: |
1142 | 2145 | lines.append('parent_id_basename_to_file_id: %s\n' % | 2148 | lines.append(b'parent_id_basename_to_file_id: %s\n' % |
1143 | 2146 | (self.parent_id_basename_to_file_id.key()[0],)) | 2149 | (self.parent_id_basename_to_file_id.key()[0],)) |
1145 | 2147 | lines.append("id_to_entry: %s\n" % (self.id_to_entry.key()[0],)) | 2150 | lines.append(b"id_to_entry: %s\n" % (self.id_to_entry.key()[0],)) |
1146 | 2148 | return lines | 2151 | return lines |
1147 | 2149 | 2152 | ||
1148 | 2150 | @property | 2153 | @property |
1149 | 2151 | 2154 | ||
1150 | === modified file 'breezy/inventory_delta.py' | |||
1151 | --- breezy/inventory_delta.py 2017-06-09 16:31:49 +0000 | |||
1152 | +++ breezy/inventory_delta.py 2017-06-11 01:47:17 +0000 | |||
1153 | @@ -31,7 +31,7 @@ | |||
1154 | 31 | from . import inventory | 31 | from . import inventory |
1155 | 32 | from .revision import NULL_REVISION | 32 | from .revision import NULL_REVISION |
1156 | 33 | 33 | ||
1158 | 34 | FORMAT_1 = 'bzr inventory delta v1 (bzr 1.14)' | 34 | FORMAT_1 = b'bzr inventory delta v1 (bzr 1.14)' |
1159 | 35 | 35 | ||
1160 | 36 | 36 | ||
1161 | 37 | class InventoryDeltaError(errors.BzrError): | 37 | class InventoryDeltaError(errors.BzrError): |
1162 | @@ -61,7 +61,7 @@ | |||
1163 | 61 | 61 | ||
1164 | 62 | :param entry: An InventoryDirectory. | 62 | :param entry: An InventoryDirectory. |
1165 | 63 | """ | 63 | """ |
1167 | 64 | return "dir" | 64 | return b"dir" |
1168 | 65 | 65 | ||
1169 | 66 | 66 | ||
1170 | 67 | def _file_content(entry): | 67 | def _file_content(entry): |
1171 | @@ -70,14 +70,14 @@ | |||
1172 | 70 | :param entry: An InventoryFile. | 70 | :param entry: An InventoryFile. |
1173 | 71 | """ | 71 | """ |
1174 | 72 | if entry.executable: | 72 | if entry.executable: |
1176 | 73 | exec_bytes = 'Y' | 73 | exec_bytes = b'Y' |
1177 | 74 | else: | 74 | else: |
1180 | 75 | exec_bytes = '' | 75 | exec_bytes = b'' |
1181 | 76 | size_exec_sha = (entry.text_size, exec_bytes, entry.text_sha1) | 76 | size_exec_sha = entry.text_size, exec_bytes, entry.text_sha1 |
1182 | 77 | if None in size_exec_sha: | 77 | if None in size_exec_sha: |
1183 | 78 | raise InventoryDeltaError( | 78 | raise InventoryDeltaError( |
1184 | 79 | 'Missing size or sha for %(fileid)r', fileid=entry.file_id) | 79 | 'Missing size or sha for %(fileid)r', fileid=entry.file_id) |
1186 | 80 | return "file\x00%d\x00%s\x00%s" % size_exec_sha | 80 | return b"file\x00%d\x00%s\x00%s" % size_exec_sha |
1187 | 81 | 81 | ||
1188 | 82 | 82 | ||
1189 | 83 | def _link_content(entry): | 83 | def _link_content(entry): |
1190 | @@ -89,7 +89,7 @@ | |||
1191 | 89 | if target is None: | 89 | if target is None: |
1192 | 90 | raise InventoryDeltaError( | 90 | raise InventoryDeltaError( |
1193 | 91 | 'Missing target for %(fileid)r', fileid=entry.file_id) | 91 | 'Missing target for %(fileid)r', fileid=entry.file_id) |
1195 | 92 | return "link\x00%s" % target.encode('utf8') | 92 | return b"link\x00%s" % target.encode('utf8') |
1196 | 93 | 93 | ||
1197 | 94 | 94 | ||
1198 | 95 | def _reference_content(entry): | 95 | def _reference_content(entry): |
1199 | @@ -101,7 +101,7 @@ | |||
1200 | 101 | if tree_revision is None: | 101 | if tree_revision is None: |
1201 | 102 | raise InventoryDeltaError( | 102 | raise InventoryDeltaError( |
1202 | 103 | 'Missing reference revision for %(fileid)r', fileid=entry.file_id) | 103 | 'Missing reference revision for %(fileid)r', fileid=entry.file_id) |
1204 | 104 | return "tree\x00%s" % tree_revision | 104 | return b"tree\x00%s" % tree_revision |
1205 | 105 | 105 | ||
1206 | 106 | 106 | ||
1207 | 107 | def _dir_to_entry(content, name, parent_id, file_id, last_modified, | 107 | def _dir_to_entry(content, name, parent_id, file_id, last_modified, |
1208 | @@ -179,11 +179,11 @@ | |||
1209 | 179 | takes. | 179 | takes. |
1210 | 180 | :return: The serialized delta as lines. | 180 | :return: The serialized delta as lines. |
1211 | 181 | """ | 181 | """ |
1213 | 182 | if not isinstance(old_name, str): | 182 | if not isinstance(old_name, bytes): |
1214 | 183 | raise TypeError('old_name should be str, got %r' % (old_name,)) | 183 | raise TypeError('old_name should be str, got %r' % (old_name,)) |
1216 | 184 | if not isinstance(new_name, str): | 184 | if not isinstance(new_name, bytes): |
1217 | 185 | raise TypeError('new_name should be str, got %r' % (new_name,)) | 185 | raise TypeError('new_name should be str, got %r' % (new_name,)) |
1219 | 186 | lines = ['', '', '', '', ''] | 186 | lines = [b'', b'', b'', b'', b''] |
1220 | 187 | to_line = self._delta_item_to_line | 187 | to_line = self._delta_item_to_line |
1221 | 188 | for delta_item in delta_to_new: | 188 | for delta_item in delta_to_new: |
1222 | 189 | line = to_line(delta_item, new_name) | 189 | line = to_line(delta_item, new_name) |
1223 | @@ -193,48 +193,48 @@ | |||
1224 | 193 | 'to_line gave non-bytes output %(line)r', line=lines[-1]) | 193 | 'to_line gave non-bytes output %(line)r', line=lines[-1]) |
1225 | 194 | lines.append(line) | 194 | lines.append(line) |
1226 | 195 | lines.sort() | 195 | lines.sort() |
1231 | 196 | lines[0] = "format: %s\n" % FORMAT_1 | 196 | lines[0] = b"format: %s\n" % FORMAT_1 |
1232 | 197 | lines[1] = "parent: %s\n" % old_name | 197 | lines[1] = b"parent: %s\n" % old_name |
1233 | 198 | lines[2] = "version: %s\n" % new_name | 198 | lines[2] = b"version: %s\n" % new_name |
1234 | 199 | lines[3] = "versioned_root: %s\n" % self._serialize_bool( | 199 | lines[3] = b"versioned_root: %s\n" % self._serialize_bool( |
1235 | 200 | self._versioned_root) | 200 | self._versioned_root) |
1237 | 201 | lines[4] = "tree_references: %s\n" % self._serialize_bool( | 201 | lines[4] = b"tree_references: %s\n" % self._serialize_bool( |
1238 | 202 | self._tree_references) | 202 | self._tree_references) |
1239 | 203 | return lines | 203 | return lines |
1240 | 204 | 204 | ||
1241 | 205 | def _serialize_bool(self, value): | 205 | def _serialize_bool(self, value): |
1242 | 206 | if value: | 206 | if value: |
1244 | 207 | return "true" | 207 | return b"true" |
1245 | 208 | else: | 208 | else: |
1247 | 209 | return "false" | 209 | return b"false" |
1248 | 210 | 210 | ||
1249 | 211 | def _delta_item_to_line(self, delta_item, new_version): | 211 | def _delta_item_to_line(self, delta_item, new_version): |
1250 | 212 | """Convert delta_item to a line.""" | 212 | """Convert delta_item to a line.""" |
1251 | 213 | oldpath, newpath, file_id, entry = delta_item | 213 | oldpath, newpath, file_id, entry = delta_item |
1252 | 214 | if newpath is None: | 214 | if newpath is None: |
1253 | 215 | # delete | 215 | # delete |
1257 | 216 | oldpath_utf8 = '/' + oldpath.encode('utf8') | 216 | oldpath_utf8 = b'/' + oldpath.encode('utf8') |
1258 | 217 | newpath_utf8 = 'None' | 217 | newpath_utf8 = b'None' |
1259 | 218 | parent_id = '' | 218 | parent_id = b'' |
1260 | 219 | last_modified = NULL_REVISION | 219 | last_modified = NULL_REVISION |
1262 | 220 | content = 'deleted\x00\x00' | 220 | content = b'deleted\x00\x00' |
1263 | 221 | else: | 221 | else: |
1264 | 222 | if oldpath is None: | 222 | if oldpath is None: |
1266 | 223 | oldpath_utf8 = 'None' | 223 | oldpath_utf8 = b'None' |
1267 | 224 | else: | 224 | else: |
1269 | 225 | oldpath_utf8 = '/' + oldpath.encode('utf8') | 225 | oldpath_utf8 = b'/' + oldpath.encode('utf8') |
1270 | 226 | if newpath == '/': | 226 | if newpath == '/': |
1271 | 227 | raise AssertionError( | 227 | raise AssertionError( |
1272 | 228 | "Bad inventory delta: '/' is not a valid newpath " | 228 | "Bad inventory delta: '/' is not a valid newpath " |
1273 | 229 | "(should be '') in delta item %r" % (delta_item,)) | 229 | "(should be '') in delta item %r" % (delta_item,)) |
1274 | 230 | # TODO: Test real-world utf8 cache hit rate. It may be a win. | 230 | # TODO: Test real-world utf8 cache hit rate. It may be a win. |
1276 | 231 | newpath_utf8 = '/' + newpath.encode('utf8') | 231 | newpath_utf8 = b'/' + newpath.encode('utf8') |
1277 | 232 | # Serialize None as '' | 232 | # Serialize None as '' |
1279 | 233 | parent_id = entry.parent_id or '' | 233 | parent_id = entry.parent_id or b'' |
1280 | 234 | # Serialize unknown revisions as NULL_REVISION | 234 | # Serialize unknown revisions as NULL_REVISION |
1281 | 235 | last_modified = entry.revision | 235 | last_modified = entry.revision |
1282 | 236 | # special cases for / | 236 | # special cases for / |
1284 | 237 | if newpath_utf8 == '/' and not self._versioned_root: | 237 | if newpath_utf8 == b'/' and not self._versioned_root: |
1285 | 238 | # This is an entry for the root, this inventory does not | 238 | # This is an entry for the root, this inventory does not |
1286 | 239 | # support versioned roots. So this must be an unversioned | 239 | # support versioned roots. So this must be an unversioned |
1287 | 240 | # root, i.e. last_modified == new revision. Otherwise, this | 240 | # root, i.e. last_modified == new revision. Otherwise, this |
1288 | @@ -251,7 +251,7 @@ | |||
1289 | 251 | raise InventoryDeltaError( | 251 | raise InventoryDeltaError( |
1290 | 252 | "no version for fileid %(fileid)r", fileid=file_id) | 252 | "no version for fileid %(fileid)r", fileid=file_id) |
1291 | 253 | content = self._entry_to_content[entry.kind](entry) | 253 | content = self._entry_to_content[entry.kind](entry) |
1293 | 254 | return ("%s\x00%s\x00%s\x00%s\x00%s\x00%s\n" % | 254 | return (b"%s\x00%s\x00%s\x00%s\x00%s\x00%s\n" % |
1294 | 255 | (oldpath_utf8, newpath_utf8, file_id, parent_id, last_modified, | 255 | (oldpath_utf8, newpath_utf8, file_id, parent_id, last_modified, |
1295 | 256 | content)) | 256 | content)) |
1296 | 257 | 257 | ||
1297 | @@ -270,9 +270,9 @@ | |||
1298 | 270 | self._allow_tree_references = allow_tree_references | 270 | self._allow_tree_references = allow_tree_references |
1299 | 271 | 271 | ||
1300 | 272 | def _deserialize_bool(self, value): | 272 | def _deserialize_bool(self, value): |
1302 | 273 | if value == "true": | 273 | if value == b"true": |
1303 | 274 | return True | 274 | return True |
1305 | 275 | elif value == "false": | 275 | elif value == b"false": |
1306 | 276 | return False | 276 | return False |
1307 | 277 | else: | 277 | else: |
1308 | 278 | raise InventoryDeltaError("value %(val)r is not a bool", val=value) | 278 | raise InventoryDeltaError("value %(val)r is not a bool", val=value) |
1309 | @@ -289,24 +289,24 @@ | |||
1310 | 289 | :return: (parent_id, new_id, versioned_root, tree_references, | 289 | :return: (parent_id, new_id, versioned_root, tree_references, |
1311 | 290 | inventory_delta) | 290 | inventory_delta) |
1312 | 291 | """ | 291 | """ |
1315 | 292 | if bytes[-1:] != '\n': | 292 | if bytes[-1:] != b'\n': |
1316 | 293 | last_line = bytes.rsplit('\n', 1)[-1] | 293 | last_line = bytes.rsplit(b'\n', 1)[-1] |
1317 | 294 | raise InventoryDeltaError( | 294 | raise InventoryDeltaError( |
1318 | 295 | 'last line not empty: %(line)r', line=last_line) | 295 | 'last line not empty: %(line)r', line=last_line) |
1321 | 296 | lines = bytes.split('\n')[:-1] # discard the last empty line | 296 | lines = bytes.split(b'\n')[:-1] # discard the last empty line |
1322 | 297 | if not lines or lines[0] != 'format: %s' % FORMAT_1: | 297 | if not lines or lines[0] != b'format: %s' % FORMAT_1: |
1323 | 298 | raise InventoryDeltaError( | 298 | raise InventoryDeltaError( |
1324 | 299 | 'unknown format %(line)r', line=lines[0:1]) | 299 | 'unknown format %(line)r', line=lines[0:1]) |
1326 | 300 | if len(lines) < 2 or not lines[1].startswith('parent: '): | 300 | if len(lines) < 2 or not lines[1].startswith(b'parent: '): |
1327 | 301 | raise InventoryDeltaError('missing parent: marker') | 301 | raise InventoryDeltaError('missing parent: marker') |
1328 | 302 | delta_parent_id = lines[1][8:] | 302 | delta_parent_id = lines[1][8:] |
1330 | 303 | if len(lines) < 3 or not lines[2].startswith('version: '): | 303 | if len(lines) < 3 or not lines[2].startswith(b'version: '): |
1331 | 304 | raise InventoryDeltaError('missing version: marker') | 304 | raise InventoryDeltaError('missing version: marker') |
1332 | 305 | delta_version_id = lines[2][9:] | 305 | delta_version_id = lines[2][9:] |
1334 | 306 | if len(lines) < 4 or not lines[3].startswith('versioned_root: '): | 306 | if len(lines) < 4 or not lines[3].startswith(b'versioned_root: '): |
1335 | 307 | raise InventoryDeltaError('missing versioned_root: marker') | 307 | raise InventoryDeltaError('missing versioned_root: marker') |
1336 | 308 | delta_versioned_root = self._deserialize_bool(lines[3][16:]) | 308 | delta_versioned_root = self._deserialize_bool(lines[3][16:]) |
1338 | 309 | if len(lines) < 5 or not lines[4].startswith('tree_references: '): | 309 | if len(lines) < 5 or not lines[4].startswith(b'tree_references: '): |
1339 | 310 | raise InventoryDeltaError('missing tree_references: marker') | 310 | raise InventoryDeltaError('missing tree_references: marker') |
1340 | 311 | delta_tree_references = self._deserialize_bool(lines[4][17:]) | 311 | delta_tree_references = self._deserialize_bool(lines[4][17:]) |
1341 | 312 | if (not self._allow_versioned_root and delta_versioned_root): | 312 | if (not self._allow_versioned_root and delta_versioned_root): |
1342 | @@ -318,24 +318,24 @@ | |||
1343 | 318 | next(line_iter) | 318 | next(line_iter) |
1344 | 319 | for line in line_iter: | 319 | for line in line_iter: |
1345 | 320 | (oldpath_utf8, newpath_utf8, file_id, parent_id, last_modified, | 320 | (oldpath_utf8, newpath_utf8, file_id, parent_id, last_modified, |
1347 | 321 | content) = line.split('\x00', 5) | 321 | content) = line.split(b'\x00', 5) |
1348 | 322 | parent_id = parent_id or None | 322 | parent_id = parent_id or None |
1349 | 323 | if file_id in seen_ids: | 323 | if file_id in seen_ids: |
1350 | 324 | raise InventoryDeltaError( | 324 | raise InventoryDeltaError( |
1351 | 325 | "duplicate file id %(fileid)r", fileid=file_id) | 325 | "duplicate file id %(fileid)r", fileid=file_id) |
1352 | 326 | seen_ids.add(file_id) | 326 | seen_ids.add(file_id) |
1354 | 327 | if (newpath_utf8 == '/' and not delta_versioned_root and | 327 | if (newpath_utf8 == b'/' and not delta_versioned_root and |
1355 | 328 | last_modified != delta_version_id): | 328 | last_modified != delta_version_id): |
1356 | 329 | # Delta claims to be not have a versioned root, yet here's | 329 | # Delta claims to be not have a versioned root, yet here's |
1357 | 330 | # a root entry with a non-default version. | 330 | # a root entry with a non-default version. |
1358 | 331 | raise InventoryDeltaError( | 331 | raise InventoryDeltaError( |
1359 | 332 | "Versioned root found: %(line)r", line=line) | 332 | "Versioned root found: %(line)r", line=line) |
1361 | 333 | elif newpath_utf8 != 'None' and last_modified[-1] == ':': | 333 | elif newpath_utf8 != b'None' and last_modified[-1:] == b':': |
1362 | 334 | # Deletes have a last_modified of null:, but otherwise special | 334 | # Deletes have a last_modified of null:, but otherwise special |
1363 | 335 | # revision ids should not occur. | 335 | # revision ids should not occur. |
1364 | 336 | raise InventoryDeltaError( | 336 | raise InventoryDeltaError( |
1365 | 337 | 'special revisionid found: %(line)r', line=line) | 337 | 'special revisionid found: %(line)r', line=line) |
1367 | 338 | if content.startswith('tree\x00'): | 338 | if content.startswith(b'tree\x00'): |
1368 | 339 | if delta_tree_references is False: | 339 | if delta_tree_references is False: |
1369 | 340 | raise InventoryDeltaError( | 340 | raise InventoryDeltaError( |
1370 | 341 | "Tree reference found (but header said " | 341 | "Tree reference found (but header said " |
1371 | @@ -343,18 +343,18 @@ | |||
1372 | 343 | elif not self._allow_tree_references: | 343 | elif not self._allow_tree_references: |
1373 | 344 | raise IncompatibleInventoryDelta( | 344 | raise IncompatibleInventoryDelta( |
1374 | 345 | "Tree reference not allowed") | 345 | "Tree reference not allowed") |
1376 | 346 | if oldpath_utf8 == 'None': | 346 | if oldpath_utf8 == b'None': |
1377 | 347 | oldpath = None | 347 | oldpath = None |
1379 | 348 | elif oldpath_utf8[:1] != '/': | 348 | elif oldpath_utf8[:1] != b'/': |
1380 | 349 | raise InventoryDeltaError( | 349 | raise InventoryDeltaError( |
1381 | 350 | "oldpath invalid (does not start with /): %(path)r", | 350 | "oldpath invalid (does not start with /): %(path)r", |
1382 | 351 | path=oldpath_utf8) | 351 | path=oldpath_utf8) |
1383 | 352 | else: | 352 | else: |
1384 | 353 | oldpath_utf8 = oldpath_utf8[1:] | 353 | oldpath_utf8 = oldpath_utf8[1:] |
1385 | 354 | oldpath = oldpath_utf8.decode('utf8') | 354 | oldpath = oldpath_utf8.decode('utf8') |
1387 | 355 | if newpath_utf8 == 'None': | 355 | if newpath_utf8 == b'None': |
1388 | 356 | newpath = None | 356 | newpath = None |
1390 | 357 | elif newpath_utf8[:1] != '/': | 357 | elif newpath_utf8[:1] != b'/': |
1391 | 358 | raise InventoryDeltaError( | 358 | raise InventoryDeltaError( |
1392 | 359 | "newpath invalid (does not start with /): %(path)r", | 359 | "newpath invalid (does not start with /): %(path)r", |
1393 | 360 | path=newpath_utf8) | 360 | path=newpath_utf8) |
1394 | @@ -362,8 +362,8 @@ | |||
1395 | 362 | # Trim leading slash | 362 | # Trim leading slash |
1396 | 363 | newpath_utf8 = newpath_utf8[1:] | 363 | newpath_utf8 = newpath_utf8[1:] |
1397 | 364 | newpath = newpath_utf8.decode('utf8') | 364 | newpath = newpath_utf8.decode('utf8') |
1400 | 365 | content_tuple = tuple(content.split('\x00')) | 365 | content_tuple = tuple(content.split(b'\x00')) |
1401 | 366 | if content_tuple[0] == 'deleted': | 366 | if content_tuple[0] == b'deleted': |
1402 | 367 | entry = None | 367 | entry = None |
1403 | 368 | else: | 368 | else: |
1404 | 369 | entry = _parse_entry( | 369 | entry = _parse_entry( |
1405 | @@ -376,10 +376,10 @@ | |||
1406 | 376 | 376 | ||
1407 | 377 | def _parse_entry(path, file_id, parent_id, last_modified, content): | 377 | def _parse_entry(path, file_id, parent_id, last_modified, content): |
1408 | 378 | entry_factory = { | 378 | entry_factory = { |
1413 | 379 | 'dir': _dir_to_entry, | 379 | b'dir': _dir_to_entry, |
1414 | 380 | 'file': _file_to_entry, | 380 | b'file': _file_to_entry, |
1415 | 381 | 'link': _link_to_entry, | 381 | b'link': _link_to_entry, |
1416 | 382 | 'tree': _tree_to_entry, | 382 | b'tree': _tree_to_entry, |
1417 | 383 | } | 383 | } |
1418 | 384 | kind = content[0] | 384 | kind = content[0] |
1419 | 385 | if path.startswith('/'): | 385 | if path.startswith('/'): |
1420 | 386 | 386 | ||
1421 | === modified file 'breezy/osutils.py' | |||
1422 | --- breezy/osutils.py 2017-06-10 01:57:00 +0000 | |||
1423 | +++ breezy/osutils.py 2017-06-11 01:47:17 +0000 | |||
1424 | @@ -1209,11 +1209,11 @@ | |||
1425 | 1209 | # separators | 1209 | # separators |
1426 | 1210 | # 3) '\xa0' isn't unicode safe since it is >128. | 1210 | # 3) '\xa0' isn't unicode safe since it is >128. |
1427 | 1211 | 1211 | ||
1433 | 1212 | # This should *not* be a unicode set of characters in case the source | 1212 | if isinstance(s, str): |
1434 | 1213 | # string is not a Unicode string. We can auto-up-cast the characters since | 1213 | ws = ' \t\n\r\v\f' |
1435 | 1214 | # they are ascii, but we don't want to auto-up-cast the string in case it | 1214 | else: |
1436 | 1215 | # is utf-8 | 1215 | ws = (b' ', b'\t', b'\n', b'\r', b'\v', b'\f') |
1437 | 1216 | for ch in ' \t\n\r\v\f': | 1216 | for ch in ws: |
1438 | 1217 | if ch in s: | 1217 | if ch in s: |
1439 | 1218 | return True | 1218 | return True |
1440 | 1219 | else: | 1219 | else: |
1441 | @@ -1370,7 +1370,7 @@ | |||
1442 | 1370 | If it is a str, it is returned. | 1370 | If it is a str, it is returned. |
1443 | 1371 | If it is Unicode, it is encoded into a utf-8 string. | 1371 | If it is Unicode, it is encoded into a utf-8 string. |
1444 | 1372 | """ | 1372 | """ |
1446 | 1373 | if isinstance(unicode_or_utf8_string, str): | 1373 | if isinstance(unicode_or_utf8_string, bytes): |
1447 | 1374 | # TODO: jam 20070209 This is overkill, and probably has an impact on | 1374 | # TODO: jam 20070209 This is overkill, and probably has an impact on |
1448 | 1375 | # performance if we are dealing with lots of apis that want a | 1375 | # performance if we are dealing with lots of apis that want a |
1449 | 1376 | # utf-8 revision id | 1376 | # utf-8 revision id |
1450 | @@ -1443,13 +1443,13 @@ | |||
1451 | 1443 | can be accessed by that path. | 1443 | can be accessed by that path. |
1452 | 1444 | """ | 1444 | """ |
1453 | 1445 | 1445 | ||
1455 | 1446 | return unicodedata.normalize('NFC', unicode(path)), True | 1446 | return unicodedata.normalize('NFC', text_type(path)), True |
1456 | 1447 | 1447 | ||
1457 | 1448 | 1448 | ||
1458 | 1449 | def _inaccessible_normalized_filename(path): | 1449 | def _inaccessible_normalized_filename(path): |
1459 | 1450 | __doc__ = _accessible_normalized_filename.__doc__ | 1450 | __doc__ = _accessible_normalized_filename.__doc__ |
1460 | 1451 | 1451 | ||
1462 | 1452 | normalized = unicodedata.normalize('NFC', unicode(path)) | 1452 | normalized = unicodedata.normalize('NFC', text_type(path)) |
1463 | 1453 | return normalized, normalized == path | 1453 | return normalized, normalized == path |
1464 | 1454 | 1454 | ||
1465 | 1455 | 1455 | ||
1466 | @@ -1878,10 +1878,10 @@ | |||
1467 | 1878 | _kind_from_mode = file_kind_from_stat_mode | 1878 | _kind_from_mode = file_kind_from_stat_mode |
1468 | 1879 | 1879 | ||
1469 | 1880 | if prefix: | 1880 | if prefix: |
1471 | 1881 | relprefix = prefix + '/' | 1881 | relprefix = prefix + b'/' |
1472 | 1882 | else: | 1882 | else: |
1475 | 1883 | relprefix = '' | 1883 | relprefix = b'' |
1476 | 1884 | top_slash = top + u'/' | 1884 | top_slash = top + '/' |
1477 | 1885 | 1885 | ||
1478 | 1886 | dirblock = [] | 1886 | dirblock = [] |
1479 | 1887 | append = dirblock.append | 1887 | append = dirblock.append |
1480 | 1888 | 1888 | ||
1481 | === modified file 'breezy/pack.py' | |||
1482 | --- breezy/pack.py 2017-05-25 01:35:55 +0000 | |||
1483 | +++ breezy/pack.py 2017-06-11 01:47:17 +0000 | |||
1484 | @@ -73,28 +73,28 @@ | |||
1485 | 73 | 73 | ||
1486 | 74 | def begin(self): | 74 | def begin(self): |
1487 | 75 | """Return the bytes to begin a container.""" | 75 | """Return the bytes to begin a container.""" |
1489 | 76 | return FORMAT_ONE + "\n" | 76 | return FORMAT_ONE.encode("ascii") + b"\n" |
1490 | 77 | 77 | ||
1491 | 78 | def end(self): | 78 | def end(self): |
1492 | 79 | """Return the bytes to finish a container.""" | 79 | """Return the bytes to finish a container.""" |
1494 | 80 | return "E" | 80 | return b"E" |
1495 | 81 | 81 | ||
1496 | 82 | def bytes_header(self, length, names): | 82 | def bytes_header(self, length, names): |
1497 | 83 | """Return the header for a Bytes record.""" | 83 | """Return the header for a Bytes record.""" |
1498 | 84 | # Kind marker | 84 | # Kind marker |
1500 | 85 | byte_sections = ["B"] | 85 | byte_sections = [b"B"] |
1501 | 86 | # Length | 86 | # Length |
1503 | 87 | byte_sections.append(str(length) + "\n") | 87 | byte_sections.append(b"%d\n" % (length,)) |
1504 | 88 | # Names | 88 | # Names |
1505 | 89 | for name_tuple in names: | 89 | for name_tuple in names: |
1506 | 90 | # Make sure we're writing valid names. Note that we will leave a | 90 | # Make sure we're writing valid names. Note that we will leave a |
1507 | 91 | # half-written record if a name is bad! | 91 | # half-written record if a name is bad! |
1508 | 92 | for name in name_tuple: | 92 | for name in name_tuple: |
1509 | 93 | _check_name(name) | 93 | _check_name(name) |
1511 | 94 | byte_sections.append('\x00'.join(name_tuple) + "\n") | 94 | byte_sections.append(b'\x00'.join(name_tuple) + b"\n") |
1512 | 95 | # End of headers | 95 | # End of headers |
1515 | 96 | byte_sections.append("\n") | 96 | byte_sections.append(b"\n") |
1516 | 97 | return ''.join(byte_sections) | 97 | return b''.join(byte_sections) |
1517 | 98 | 98 | ||
1518 | 99 | def bytes_record(self, bytes, names): | 99 | def bytes_record(self, bytes, names): |
1519 | 100 | """Return the bytes for a Bytes record with the given name and | 100 | """Return the bytes for a Bytes record with the given name and |
1520 | 101 | 101 | ||
1521 | === modified file 'breezy/repofmt/groupcompress_repo.py' | |||
1522 | --- breezy/repofmt/groupcompress_repo.py 2017-06-10 00:52:37 +0000 | |||
1523 | +++ breezy/repofmt/groupcompress_repo.py 2017-06-11 01:47:17 +0000 | |||
1524 | @@ -147,15 +147,15 @@ | |||
1525 | 147 | # robertc says- this is a closure rather than a method on the object | 147 | # robertc says- this is a closure rather than a method on the object |
1526 | 148 | # so that the variables are locals, and faster than accessing object | 148 | # so that the variables are locals, and faster than accessing object |
1527 | 149 | # members. | 149 | # members. |
1529 | 150 | def _write_data(bytes, flush=False, _buffer=self._buffer, | 150 | def _write_data(data, flush=False, _buffer=self._buffer, |
1530 | 151 | _write=self.write_stream.write, _update=self._hash.update): | 151 | _write=self.write_stream.write, _update=self._hash.update): |
1533 | 152 | _buffer[0].append(bytes) | 152 | _buffer[0].append(data) |
1534 | 153 | _buffer[1] += len(bytes) | 153 | _buffer[1] += len(data) |
1535 | 154 | # buffer cap | 154 | # buffer cap |
1536 | 155 | if _buffer[1] > self._cache_limit or flush: | 155 | if _buffer[1] > self._cache_limit or flush: |
1540 | 156 | bytes = ''.join(_buffer[0]) | 156 | data = b''.join(_buffer[0]) |
1541 | 157 | _write(bytes) | 157 | _write(data) |
1542 | 158 | _update(bytes) | 158 | _update(data) |
1543 | 159 | _buffer[:] = [[], 0] | 159 | _buffer[:] = [[], 0] |
1544 | 160 | # expose this on self, for the occasion when clients want to add data. | 160 | # expose this on self, for the occasion when clients want to add data. |
1545 | 161 | self._write_data = _write_data | 161 | self._write_data = _write_data |
1546 | @@ -905,7 +905,7 @@ | |||
1547 | 905 | ' no new_path %r' % (file_id,)) | 905 | ' no new_path %r' % (file_id,)) |
1548 | 906 | if new_path == '': | 906 | if new_path == '': |
1549 | 907 | new_inv.root_id = file_id | 907 | new_inv.root_id = file_id |
1551 | 908 | parent_id_basename_key = StaticTuple('', '').intern() | 908 | parent_id_basename_key = StaticTuple(b'', b'').intern() |
1552 | 909 | else: | 909 | else: |
1553 | 910 | utf8_entry_name = entry.name.encode('utf-8') | 910 | utf8_entry_name = entry.name.encode('utf-8') |
1554 | 911 | parent_id_basename_key = StaticTuple(entry.parent_id, | 911 | parent_id_basename_key = StaticTuple(entry.parent_id, |
1555 | 912 | 912 | ||
1556 | === modified file 'breezy/repofmt/pack_repo.py' | |||
1557 | --- breezy/repofmt/pack_repo.py 2017-06-10 12:56:18 +0000 | |||
1558 | +++ breezy/repofmt/pack_repo.py 2017-06-11 01:47:17 +0000 | |||
1559 | @@ -419,7 +419,7 @@ | |||
1560 | 419 | _buffer[1] += len(bytes) | 419 | _buffer[1] += len(bytes) |
1561 | 420 | # buffer cap | 420 | # buffer cap |
1562 | 421 | if _buffer[1] > self._cache_limit or flush: | 421 | if _buffer[1] > self._cache_limit or flush: |
1564 | 422 | bytes = ''.join(_buffer[0]) | 422 | bytes = b''.join(_buffer[0]) |
1565 | 423 | _write(bytes) | 423 | _write(bytes) |
1566 | 424 | _update(bytes) | 424 | _update(bytes) |
1567 | 425 | _buffer[:] = [[], 0] | 425 | _buffer[:] = [[], 0] |
1568 | @@ -524,7 +524,7 @@ | |||
1569 | 524 | def flush(self): | 524 | def flush(self): |
1570 | 525 | """Flush any current data.""" | 525 | """Flush any current data.""" |
1571 | 526 | if self._buffer[1]: | 526 | if self._buffer[1]: |
1573 | 527 | bytes = ''.join(self._buffer[0]) | 527 | bytes = b''.join(self._buffer[0]) |
1574 | 528 | self.write_stream.write(bytes) | 528 | self.write_stream.write(bytes) |
1575 | 529 | self._hash.update(bytes) | 529 | self._hash.update(bytes) |
1576 | 530 | self._buffer[:] = [[], 0] | 530 | self._buffer[:] = [[], 0] |
1577 | @@ -1987,7 +1987,7 @@ | |||
1578 | 1987 | length), where the index field is the write_index object supplied | 1987 | length), where the index field is the write_index object supplied |
1579 | 1988 | to the PackAccess object. | 1988 | to the PackAccess object. |
1580 | 1989 | """ | 1989 | """ |
1582 | 1990 | if not isinstance(raw_data, str): | 1990 | if not isinstance(raw_data, bytes): |
1583 | 1991 | raise AssertionError( | 1991 | raise AssertionError( |
1584 | 1992 | 'data must be plain bytes was %s' % type(raw_data)) | 1992 | 'data must be plain bytes was %s' % type(raw_data)) |
1585 | 1993 | result = [] | 1993 | result = [] |
1586 | 1994 | 1994 | ||
1587 | === modified file 'breezy/repository.py' | |||
1588 | --- breezy/repository.py 2017-06-10 00:52:37 +0000 | |||
1589 | +++ breezy/repository.py 2017-06-11 01:47:17 +0000 | |||
1590 | @@ -46,6 +46,7 @@ | |||
1591 | 46 | from .inter import InterObject | 46 | from .inter import InterObject |
1592 | 47 | from .lock import _RelockDebugMixin, LogicalLockResult | 47 | from .lock import _RelockDebugMixin, LogicalLockResult |
1593 | 48 | from .sixish import ( | 48 | from .sixish import ( |
1594 | 49 | text_type, | ||
1595 | 49 | viewitems, | 50 | viewitems, |
1596 | 50 | viewvalues, | 51 | viewvalues, |
1597 | 51 | ) | 52 | ) |
1598 | @@ -145,7 +146,7 @@ | |||
1599 | 145 | for key, value in viewitems(revprops): | 146 | for key, value in viewitems(revprops): |
1600 | 146 | # We know that the XML serializers do not round trip '\r' | 147 | # We know that the XML serializers do not round trip '\r' |
1601 | 147 | # correctly, so refuse to accept them | 148 | # correctly, so refuse to accept them |
1603 | 148 | if not isinstance(value, basestring): | 149 | if not isinstance(value, (text_type, str)): |
1604 | 149 | raise ValueError('revision property (%s) is not a valid' | 150 | raise ValueError('revision property (%s) is not a valid' |
1605 | 150 | ' (unicode) string: %r' % (key, value)) | 151 | ' (unicode) string: %r' % (key, value)) |
1606 | 151 | self._validate_unicode_text(value, | 152 | self._validate_unicode_text(value, |
1607 | 152 | 153 | ||
1608 | === modified file 'breezy/revision.py' | |||
1609 | --- breezy/revision.py 2017-06-10 01:57:00 +0000 | |||
1610 | +++ breezy/revision.py 2017-06-11 01:47:17 +0000 | |||
1611 | @@ -26,8 +26,11 @@ | |||
1612 | 26 | """) | 26 | """) |
1613 | 27 | from . import ( | 27 | from . import ( |
1614 | 28 | errors, | 28 | errors, |
1617 | 29 | ) | 29 | osutils, |
1618 | 30 | from .osutils import contains_whitespace | 30 | ) |
1619 | 31 | from .sixish import ( | ||
1620 | 32 | text_type, | ||
1621 | 33 | ) | ||
1622 | 31 | 34 | ||
1623 | 32 | NULL_REVISION=b"null:" | 35 | NULL_REVISION=b"null:" |
1624 | 33 | CURRENT_REVISION=b"current:" | 36 | CURRENT_REVISION=b"current:" |
1625 | @@ -86,9 +89,11 @@ | |||
1626 | 86 | def _check_properties(self): | 89 | def _check_properties(self): |
1627 | 87 | """Verify that all revision properties are OK.""" | 90 | """Verify that all revision properties are OK.""" |
1628 | 88 | for name, value in self.properties.items(): | 91 | for name, value in self.properties.items(): |
1630 | 89 | if not isinstance(name, basestring) or contains_whitespace(name): | 92 | # GZ 2017-06-10: What sort of string are properties exactly? |
1631 | 93 | not_text = not isinstance(name, (text_type, str)) | ||
1632 | 94 | if not_text or osutils.contains_whitespace(name): | ||
1633 | 90 | raise ValueError("invalid property name %r" % name) | 95 | raise ValueError("invalid property name %r" % name) |
1635 | 91 | if not isinstance(value, basestring): | 96 | if not isinstance(value, (text_type, bytes)): |
1636 | 92 | raise ValueError("invalid property value %r for %r" % | 97 | raise ValueError("invalid property value %r for %r" % |
1637 | 93 | (value, name)) | 98 | (value, name)) |
1638 | 94 | 99 | ||
1639 | @@ -205,7 +210,7 @@ | |||
1640 | 205 | 210 | ||
1641 | 206 | :return: True if the revision is reserved, False otherwise | 211 | :return: True if the revision is reserved, False otherwise |
1642 | 207 | """ | 212 | """ |
1644 | 208 | return isinstance(revision_id, basestring) and revision_id.endswith(':') | 213 | return isinstance(revision_id, bytes) and revision_id.endswith(b':') |
1645 | 209 | 214 | ||
1646 | 210 | 215 | ||
1647 | 211 | def check_not_reserved_id(revision_id): | 216 | def check_not_reserved_id(revision_id): |
1648 | 212 | 217 | ||
1649 | === modified file 'breezy/sixish.py' | |||
1650 | --- breezy/sixish.py 2017-06-05 01:55:02 +0000 | |||
1651 | +++ breezy/sixish.py 2017-06-11 01:47:17 +0000 | |||
1652 | @@ -46,3 +46,13 @@ | |||
1653 | 46 | from StringIO import StringIO | 46 | from StringIO import StringIO |
1654 | 47 | from future_builtins import zip, map | 47 | from future_builtins import zip, map |
1655 | 48 | range = xrange | 48 | range = xrange |
1656 | 49 | |||
1657 | 50 | |||
1658 | 51 | # GZ 2017-06-10: Work out if interning bits of inventory is behaviour we want | ||
1659 | 52 | # to retain outside of StaticTuple, if so need to implement for Python 3. | ||
1660 | 53 | if PY3: | ||
1661 | 54 | def bytesintern(b): | ||
1662 | 55 | """Dummy intern() function.""" | ||
1663 | 56 | return b | ||
1664 | 57 | else: | ||
1665 | 58 | bytesintern = intern | ||
1666 | 49 | 59 | ||
1667 | === modified file 'breezy/tests/test__chk_map.py' | |||
1668 | --- breezy/tests/test__chk_map.py 2017-05-23 14:08:03 +0000 | |||
1669 | +++ breezy/tests/test__chk_map.py 2017-06-11 01:47:17 +0000 | |||
1670 | @@ -42,18 +42,18 @@ | |||
1671 | 42 | self.assertEqual(expected, actual, 'actual: %r' % (actual,)) | 42 | self.assertEqual(expected, actual, 'actual: %r' % (actual,)) |
1672 | 43 | 43 | ||
1673 | 44 | def test_simple_16(self): | 44 | def test_simple_16(self): |
1678 | 45 | self.assertSearchKey16('8C736521', stuple('foo',)) | 45 | self.assertSearchKey16(b'8C736521', stuple('foo',)) |
1679 | 46 | self.assertSearchKey16('8C736521\x008C736521', stuple('foo', 'foo')) | 46 | self.assertSearchKey16(b'8C736521\x008C736521', stuple('foo', 'foo')) |
1680 | 47 | self.assertSearchKey16('8C736521\x0076FF8CAA', stuple('foo', 'bar')) | 47 | self.assertSearchKey16(b'8C736521\x0076FF8CAA', stuple('foo', 'bar')) |
1681 | 48 | self.assertSearchKey16('ED82CD11', stuple('abcd',)) | 48 | self.assertSearchKey16(b'ED82CD11', stuple('abcd',)) |
1682 | 49 | 49 | ||
1683 | 50 | def test_simple_255(self): | 50 | def test_simple_255(self): |
1687 | 51 | self.assertSearchKey255('\x8cse!', stuple('foo',)) | 51 | self.assertSearchKey255(b'\x8cse!', stuple('foo',)) |
1688 | 52 | self.assertSearchKey255('\x8cse!\x00\x8cse!', stuple('foo', 'foo')) | 52 | self.assertSearchKey255(b'\x8cse!\x00\x8cse!', stuple('foo', 'foo')) |
1689 | 53 | self.assertSearchKey255('\x8cse!\x00v\xff\x8c\xaa', stuple('foo', 'bar')) | 53 | self.assertSearchKey255(b'\x8cse!\x00v\xff\x8c\xaa', stuple('foo', 'bar')) |
1690 | 54 | # The standard mapping for these would include '\n', so it should be | 54 | # The standard mapping for these would include '\n', so it should be |
1691 | 55 | # mapped to '_' | 55 | # mapped to '_' |
1693 | 56 | self.assertSearchKey255('\xfdm\x93_\x00P_\x1bL', stuple('<', 'V')) | 56 | self.assertSearchKey255(b'\xfdm\x93_\x00P_\x1bL', stuple('<', 'V')) |
1694 | 57 | 57 | ||
1695 | 58 | def test_255_does_not_include_newline(self): | 58 | def test_255_does_not_include_newline(self): |
1696 | 59 | # When mapping via _search_key_255, we should never have the '\n' | 59 | # When mapping via _search_key_255, we should never have the '\n' |
1697 | @@ -64,7 +64,7 @@ | |||
1698 | 64 | chars_used.update(search_key) | 64 | chars_used.update(search_key) |
1699 | 65 | all_chars = {chr(x) for x in range(256)} | 65 | all_chars = {chr(x) for x in range(256)} |
1700 | 66 | unused_chars = all_chars.symmetric_difference(chars_used) | 66 | unused_chars = all_chars.symmetric_difference(chars_used) |
1702 | 67 | self.assertEqual(set('\n'), unused_chars) | 67 | self.assertEqual(set(b'\n'), unused_chars) |
1703 | 68 | 68 | ||
1704 | 69 | 69 | ||
1705 | 70 | class TestDeserialiseLeafNode(tests.TestCase): | 70 | class TestDeserialiseLeafNode(tests.TestCase): |
1706 | @@ -73,94 +73,94 @@ | |||
1707 | 73 | 73 | ||
1708 | 74 | def assertDeserialiseErrors(self, text): | 74 | def assertDeserialiseErrors(self, text): |
1709 | 75 | self.assertRaises((ValueError, IndexError), | 75 | self.assertRaises((ValueError, IndexError), |
1711 | 76 | self.module._deserialise_leaf_node, text, 'not-a-real-sha') | 76 | self.module._deserialise_leaf_node, text, b'not-a-real-sha') |
1712 | 77 | 77 | ||
1713 | 78 | def test_raises_on_non_leaf(self): | 78 | def test_raises_on_non_leaf(self): |
1723 | 79 | self.assertDeserialiseErrors('') | 79 | self.assertDeserialiseErrors(b'') |
1724 | 80 | self.assertDeserialiseErrors('short\n') | 80 | self.assertDeserialiseErrors(b'short\n') |
1725 | 81 | self.assertDeserialiseErrors('chknotleaf:\n') | 81 | self.assertDeserialiseErrors(b'chknotleaf:\n') |
1726 | 82 | self.assertDeserialiseErrors('chkleaf:x\n') | 82 | self.assertDeserialiseErrors(b'chkleaf:x\n') |
1727 | 83 | self.assertDeserialiseErrors('chkleaf:\n') | 83 | self.assertDeserialiseErrors(b'chkleaf:\n') |
1728 | 84 | self.assertDeserialiseErrors('chkleaf:\nnotint\n') | 84 | self.assertDeserialiseErrors(b'chkleaf:\nnotint\n') |
1729 | 85 | self.assertDeserialiseErrors('chkleaf:\n10\n') | 85 | self.assertDeserialiseErrors(b'chkleaf:\n10\n') |
1730 | 86 | self.assertDeserialiseErrors('chkleaf:\n10\n256\n') | 86 | self.assertDeserialiseErrors(b'chkleaf:\n10\n256\n') |
1731 | 87 | self.assertDeserialiseErrors('chkleaf:\n10\n256\n10\n') | 87 | self.assertDeserialiseErrors(b'chkleaf:\n10\n256\n10\n') |
1732 | 88 | 88 | ||
1733 | 89 | def test_deserialise_empty(self): | 89 | def test_deserialise_empty(self): |
1734 | 90 | node = self.module._deserialise_leaf_node( | 90 | node = self.module._deserialise_leaf_node( |
1736 | 91 | "chkleaf:\n10\n1\n0\n\n", stuple("sha1:1234",)) | 91 | b"chkleaf:\n10\n1\n0\n\n", stuple(b"sha1:1234",)) |
1737 | 92 | self.assertEqual(0, len(node)) | 92 | self.assertEqual(0, len(node)) |
1738 | 93 | self.assertEqual(10, node.maximum_size) | 93 | self.assertEqual(10, node.maximum_size) |
1740 | 94 | self.assertEqual(("sha1:1234",), node.key()) | 94 | self.assertEqual((b"sha1:1234",), node.key()) |
1741 | 95 | self.assertIsInstance(node.key(), StaticTuple) | 95 | self.assertIsInstance(node.key(), StaticTuple) |
1742 | 96 | self.assertIs(None, node._search_prefix) | 96 | self.assertIs(None, node._search_prefix) |
1743 | 97 | self.assertIs(None, node._common_serialised_prefix) | 97 | self.assertIs(None, node._common_serialised_prefix) |
1744 | 98 | 98 | ||
1745 | 99 | def test_deserialise_items(self): | 99 | def test_deserialise_items(self): |
1746 | 100 | node = self.module._deserialise_leaf_node( | 100 | node = self.module._deserialise_leaf_node( |
1749 | 101 | "chkleaf:\n0\n1\n2\n\nfoo bar\x001\nbaz\nquux\x001\nblarh\n", | 101 | b"chkleaf:\n0\n1\n2\n\nfoo bar\x001\nbaz\nquux\x001\nblarh\n", |
1750 | 102 | ("sha1:1234",)) | 102 | (b"sha1:1234",)) |
1751 | 103 | self.assertEqual(2, len(node)) | 103 | self.assertEqual(2, len(node)) |
1753 | 104 | self.assertEqual([(("foo bar",), "baz"), (("quux",), "blarh")], | 104 | self.assertEqual([((b"foo bar",), b"baz"), ((b"quux",), b"blarh")], |
1754 | 105 | sorted(node.iteritems(None))) | 105 | sorted(node.iteritems(None))) |
1755 | 106 | 106 | ||
1756 | 107 | def test_deserialise_item_with_null_width_1(self): | 107 | def test_deserialise_item_with_null_width_1(self): |
1757 | 108 | node = self.module._deserialise_leaf_node( | 108 | node = self.module._deserialise_leaf_node( |
1760 | 109 | "chkleaf:\n0\n1\n2\n\nfoo\x001\nbar\x00baz\nquux\x001\nblarh\n", | 109 | b"chkleaf:\n0\n1\n2\n\nfoo\x001\nbar\x00baz\nquux\x001\nblarh\n", |
1761 | 110 | ("sha1:1234",)) | 110 | (b"sha1:1234",)) |
1762 | 111 | self.assertEqual(2, len(node)) | 111 | self.assertEqual(2, len(node)) |
1764 | 112 | self.assertEqual([(("foo",), "bar\x00baz"), (("quux",), "blarh")], | 112 | self.assertEqual([((b"foo",), b"bar\x00baz"), ((b"quux",), b"blarh")], |
1765 | 113 | sorted(node.iteritems(None))) | 113 | sorted(node.iteritems(None))) |
1766 | 114 | 114 | ||
1767 | 115 | def test_deserialise_item_with_null_width_2(self): | 115 | def test_deserialise_item_with_null_width_2(self): |
1768 | 116 | node = self.module._deserialise_leaf_node( | 116 | node = self.module._deserialise_leaf_node( |
1772 | 117 | "chkleaf:\n0\n2\n2\n\nfoo\x001\x001\nbar\x00baz\n" | 117 | b"chkleaf:\n0\n2\n2\n\nfoo\x001\x001\nbar\x00baz\n" |
1773 | 118 | "quux\x00\x001\nblarh\n", | 118 | b"quux\x00\x001\nblarh\n", |
1774 | 119 | ("sha1:1234",)) | 119 | (b"sha1:1234",)) |
1775 | 120 | self.assertEqual(2, len(node)) | 120 | self.assertEqual(2, len(node)) |
1777 | 121 | self.assertEqual([(("foo", "1"), "bar\x00baz"), (("quux", ""), "blarh")], | 121 | self.assertEqual([((b"foo", "1"), b"bar\x00baz"), ((b"quux", ""), b"blarh")], |
1778 | 122 | sorted(node.iteritems(None))) | 122 | sorted(node.iteritems(None))) |
1779 | 123 | 123 | ||
1780 | 124 | def test_iteritems_selected_one_of_two_items(self): | 124 | def test_iteritems_selected_one_of_two_items(self): |
1781 | 125 | node = self.module._deserialise_leaf_node( | 125 | node = self.module._deserialise_leaf_node( |
1784 | 126 | "chkleaf:\n0\n1\n2\n\nfoo bar\x001\nbaz\nquux\x001\nblarh\n", | 126 | b"chkleaf:\n0\n1\n2\n\nfoo bar\x001\nbaz\nquux\x001\nblarh\n", |
1785 | 127 | ("sha1:1234",)) | 127 | (b"sha1:1234",)) |
1786 | 128 | self.assertEqual(2, len(node)) | 128 | self.assertEqual(2, len(node)) |
1789 | 129 | self.assertEqual([(("quux",), "blarh")], | 129 | self.assertEqual([((b"quux",), b"blarh")], |
1790 | 130 | sorted(node.iteritems(None, [("quux",), ("qaz",)]))) | 130 | sorted(node.iteritems(None, [(b"quux",), (b"qaz",)]))) |
1791 | 131 | 131 | ||
1792 | 132 | def test_deserialise_item_with_common_prefix(self): | 132 | def test_deserialise_item_with_common_prefix(self): |
1793 | 133 | node = self.module._deserialise_leaf_node( | 133 | node = self.module._deserialise_leaf_node( |
1796 | 134 | "chkleaf:\n0\n2\n2\nfoo\x00\n1\x001\nbar\x00baz\n2\x001\nblarh\n", | 134 | b"chkleaf:\n0\n2\n2\nfoo\x00\n1\x001\nbar\x00baz\n2\x001\nblarh\n", |
1797 | 135 | ("sha1:1234",)) | 135 | (b"sha1:1234",)) |
1798 | 136 | self.assertEqual(2, len(node)) | 136 | self.assertEqual(2, len(node)) |
1800 | 137 | self.assertEqual([(("foo", "1"), "bar\x00baz"), (("foo", "2"), "blarh")], | 137 | self.assertEqual([((b"foo", b"1"), b"bar\x00baz"), ((b"foo", b"2"), b"blarh")], |
1801 | 138 | sorted(node.iteritems(None))) | 138 | sorted(node.iteritems(None))) |
1802 | 139 | self.assertIs(chk_map._unknown, node._search_prefix) | 139 | self.assertIs(chk_map._unknown, node._search_prefix) |
1804 | 140 | self.assertEqual('foo\x00', node._common_serialised_prefix) | 140 | self.assertEqual(b'foo\x00', node._common_serialised_prefix) |
1805 | 141 | 141 | ||
1806 | 142 | def test_deserialise_multi_line(self): | 142 | def test_deserialise_multi_line(self): |
1807 | 143 | node = self.module._deserialise_leaf_node( | 143 | node = self.module._deserialise_leaf_node( |
1810 | 144 | "chkleaf:\n0\n2\n2\nfoo\x00\n1\x002\nbar\nbaz\n2\x002\nblarh\n\n", | 144 | b"chkleaf:\n0\n2\n2\nfoo\x00\n1\x002\nbar\nbaz\n2\x002\nblarh\n\n", |
1811 | 145 | ("sha1:1234",)) | 145 | (b"sha1:1234",)) |
1812 | 146 | self.assertEqual(2, len(node)) | 146 | self.assertEqual(2, len(node)) |
1815 | 147 | self.assertEqual([(("foo", "1"), "bar\nbaz"), | 147 | self.assertEqual([((b"foo", b"1"), b"bar\nbaz"), |
1816 | 148 | (("foo", "2"), "blarh\n"), | 148 | ((b"foo", b"2"), b"blarh\n"), |
1817 | 149 | ], sorted(node.iteritems(None))) | 149 | ], sorted(node.iteritems(None))) |
1818 | 150 | self.assertIs(chk_map._unknown, node._search_prefix) | 150 | self.assertIs(chk_map._unknown, node._search_prefix) |
1820 | 151 | self.assertEqual('foo\x00', node._common_serialised_prefix) | 151 | self.assertEqual(b'foo\x00', node._common_serialised_prefix) |
1821 | 152 | 152 | ||
1822 | 153 | def test_key_after_map(self): | 153 | def test_key_after_map(self): |
1823 | 154 | node = self.module._deserialise_leaf_node( | 154 | node = self.module._deserialise_leaf_node( |
1826 | 155 | "chkleaf:\n10\n1\n0\n\n", ("sha1:1234",)) | 155 | b"chkleaf:\n10\n1\n0\n\n", (b"sha1:1234",)) |
1827 | 156 | node.map(None, ("foo bar",), "baz quux") | 156 | node.map(None, (b"foo bar",), b"baz quux") |
1828 | 157 | self.assertEqual(None, node.key()) | 157 | self.assertEqual(None, node.key()) |
1829 | 158 | 158 | ||
1830 | 159 | def test_key_after_unmap(self): | 159 | def test_key_after_unmap(self): |
1831 | 160 | node = self.module._deserialise_leaf_node( | 160 | node = self.module._deserialise_leaf_node( |
1835 | 161 | "chkleaf:\n0\n1\n2\n\nfoo bar\x001\nbaz\nquux\x001\nblarh\n", | 161 | b"chkleaf:\n0\n1\n2\n\nfoo bar\x001\nbaz\nquux\x001\nblarh\n", |
1836 | 162 | ("sha1:1234",)) | 162 | (b"sha1:1234",)) |
1837 | 163 | node.unmap(None, ("foo bar",)) | 163 | node.unmap(None, (b"foo bar",)) |
1838 | 164 | self.assertEqual(None, node.key()) | 164 | self.assertEqual(None, node.key()) |
1839 | 165 | 165 | ||
1840 | 166 | 166 | ||
1841 | @@ -171,71 +171,73 @@ | |||
1842 | 171 | def assertDeserialiseErrors(self, text): | 171 | def assertDeserialiseErrors(self, text): |
1843 | 172 | self.assertRaises((ValueError, IndexError), | 172 | self.assertRaises((ValueError, IndexError), |
1844 | 173 | self.module._deserialise_internal_node, text, | 173 | self.module._deserialise_internal_node, text, |
1846 | 174 | stuple('not-a-real-sha',)) | 174 | stuple(b'not-a-real-sha',)) |
1847 | 175 | 175 | ||
1848 | 176 | def test_raises_on_non_internal(self): | 176 | def test_raises_on_non_internal(self): |
1858 | 177 | self.assertDeserialiseErrors('') | 177 | self.assertDeserialiseErrors(b'') |
1859 | 178 | self.assertDeserialiseErrors('short\n') | 178 | self.assertDeserialiseErrors(b'short\n') |
1860 | 179 | self.assertDeserialiseErrors('chknotnode:\n') | 179 | self.assertDeserialiseErrors(b'chknotnode:\n') |
1861 | 180 | self.assertDeserialiseErrors('chknode:x\n') | 180 | self.assertDeserialiseErrors(b'chknode:x\n') |
1862 | 181 | self.assertDeserialiseErrors('chknode:\n') | 181 | self.assertDeserialiseErrors(b'chknode:\n') |
1863 | 182 | self.assertDeserialiseErrors('chknode:\nnotint\n') | 182 | self.assertDeserialiseErrors(b'chknode:\nnotint\n') |
1864 | 183 | self.assertDeserialiseErrors('chknode:\n10\n') | 183 | self.assertDeserialiseErrors(b'chknode:\n10\n') |
1865 | 184 | self.assertDeserialiseErrors('chknode:\n10\n256\n') | 184 | self.assertDeserialiseErrors(b'chknode:\n10\n256\n') |
1866 | 185 | self.assertDeserialiseErrors('chknode:\n10\n256\n10\n') | 185 | self.assertDeserialiseErrors(b'chknode:\n10\n256\n10\n') |
1867 | 186 | # no trailing newline | 186 | # no trailing newline |
1869 | 187 | self.assertDeserialiseErrors('chknode:\n10\n256\n0\n1\nfo') | 187 | self.assertDeserialiseErrors(b'chknode:\n10\n256\n0\n1\nfo') |
1870 | 188 | 188 | ||
1871 | 189 | def test_deserialise_one(self): | 189 | def test_deserialise_one(self): |
1872 | 190 | node = self.module._deserialise_internal_node( | 190 | node = self.module._deserialise_internal_node( |
1874 | 191 | "chknode:\n10\n1\n1\n\na\x00sha1:abcd\n", stuple('sha1:1234',)) | 191 | b"chknode:\n10\n1\n1\n\na\x00sha1:abcd\n", stuple(b'sha1:1234',)) |
1875 | 192 | self.assertIsInstance(node, chk_map.InternalNode) | 192 | self.assertIsInstance(node, chk_map.InternalNode) |
1876 | 193 | self.assertEqual(1, len(node)) | 193 | self.assertEqual(1, len(node)) |
1877 | 194 | self.assertEqual(10, node.maximum_size) | 194 | self.assertEqual(10, node.maximum_size) |
1881 | 195 | self.assertEqual(("sha1:1234",), node.key()) | 195 | self.assertEqual((b"sha1:1234",), node.key()) |
1882 | 196 | self.assertEqual('', node._search_prefix) | 196 | self.assertEqual(b'', node._search_prefix) |
1883 | 197 | self.assertEqual({'a': ('sha1:abcd',)}, node._items) | 197 | self.assertEqual({b'a': (b'sha1:abcd',)}, node._items) |
1884 | 198 | 198 | ||
1885 | 199 | def test_deserialise_with_prefix(self): | 199 | def test_deserialise_with_prefix(self): |
1886 | 200 | node = self.module._deserialise_internal_node( | 200 | node = self.module._deserialise_internal_node( |
1888 | 201 | "chknode:\n10\n1\n1\npref\na\x00sha1:abcd\n", stuple('sha1:1234',)) | 201 | b"chknode:\n10\n1\n1\npref\na\x00sha1:abcd\n", |
1889 | 202 | stuple(b'sha1:1234',)) | ||
1890 | 202 | self.assertIsInstance(node, chk_map.InternalNode) | 203 | self.assertIsInstance(node, chk_map.InternalNode) |
1891 | 203 | self.assertEqual(1, len(node)) | 204 | self.assertEqual(1, len(node)) |
1892 | 204 | self.assertEqual(10, node.maximum_size) | 205 | self.assertEqual(10, node.maximum_size) |
1896 | 205 | self.assertEqual(("sha1:1234",), node.key()) | 206 | self.assertEqual((b"sha1:1234",), node.key()) |
1897 | 206 | self.assertEqual('pref', node._search_prefix) | 207 | self.assertEqual(b'pref', node._search_prefix) |
1898 | 207 | self.assertEqual({'prefa': ('sha1:abcd',)}, node._items) | 208 | self.assertEqual({b'prefa': (b'sha1:abcd',)}, node._items) |
1899 | 208 | 209 | ||
1900 | 209 | node = self.module._deserialise_internal_node( | 210 | node = self.module._deserialise_internal_node( |
1902 | 210 | "chknode:\n10\n1\n1\npref\n\x00sha1:abcd\n", stuple('sha1:1234',)) | 211 | b"chknode:\n10\n1\n1\npref\n\x00sha1:abcd\n", |
1903 | 212 | stuple(b'sha1:1234',)) | ||
1904 | 211 | self.assertIsInstance(node, chk_map.InternalNode) | 213 | self.assertIsInstance(node, chk_map.InternalNode) |
1905 | 212 | self.assertEqual(1, len(node)) | 214 | self.assertEqual(1, len(node)) |
1906 | 213 | self.assertEqual(10, node.maximum_size) | 215 | self.assertEqual(10, node.maximum_size) |
1910 | 214 | self.assertEqual(("sha1:1234",), node.key()) | 216 | self.assertEqual((b"sha1:1234",), node.key()) |
1911 | 215 | self.assertEqual('pref', node._search_prefix) | 217 | self.assertEqual(b'pref', node._search_prefix) |
1912 | 216 | self.assertEqual({'pref': ('sha1:abcd',)}, node._items) | 218 | self.assertEqual({b'pref': (b'sha1:abcd',)}, node._items) |
1913 | 217 | 219 | ||
1914 | 218 | def test_deserialise_pref_with_null(self): | 220 | def test_deserialise_pref_with_null(self): |
1915 | 219 | node = self.module._deserialise_internal_node( | 221 | node = self.module._deserialise_internal_node( |
1918 | 220 | "chknode:\n10\n1\n1\npref\x00fo\n\x00sha1:abcd\n", | 222 | b"chknode:\n10\n1\n1\npref\x00fo\n\x00sha1:abcd\n", |
1919 | 221 | stuple('sha1:1234',)) | 223 | stuple(b'sha1:1234',)) |
1920 | 222 | self.assertIsInstance(node, chk_map.InternalNode) | 224 | self.assertIsInstance(node, chk_map.InternalNode) |
1921 | 223 | self.assertEqual(1, len(node)) | 225 | self.assertEqual(1, len(node)) |
1922 | 224 | self.assertEqual(10, node.maximum_size) | 226 | self.assertEqual(10, node.maximum_size) |
1926 | 225 | self.assertEqual(("sha1:1234",), node.key()) | 227 | self.assertEqual((b"sha1:1234",), node.key()) |
1927 | 226 | self.assertEqual('pref\x00fo', node._search_prefix) | 228 | self.assertEqual(b'pref\x00fo', node._search_prefix) |
1928 | 227 | self.assertEqual({'pref\x00fo': ('sha1:abcd',)}, node._items) | 229 | self.assertEqual({b'pref\x00fo': (b'sha1:abcd',)}, node._items) |
1929 | 228 | 230 | ||
1930 | 229 | def test_deserialise_with_null_pref(self): | 231 | def test_deserialise_with_null_pref(self): |
1931 | 230 | node = self.module._deserialise_internal_node( | 232 | node = self.module._deserialise_internal_node( |
1934 | 231 | "chknode:\n10\n1\n1\npref\x00fo\n\x00\x00sha1:abcd\n", | 233 | b"chknode:\n10\n1\n1\npref\x00fo\n\x00\x00sha1:abcd\n", |
1935 | 232 | stuple('sha1:1234',)) | 234 | stuple(b'sha1:1234',)) |
1936 | 233 | self.assertIsInstance(node, chk_map.InternalNode) | 235 | self.assertIsInstance(node, chk_map.InternalNode) |
1937 | 234 | self.assertEqual(1, len(node)) | 236 | self.assertEqual(1, len(node)) |
1938 | 235 | self.assertEqual(10, node.maximum_size) | 237 | self.assertEqual(10, node.maximum_size) |
1942 | 236 | self.assertEqual(("sha1:1234",), node.key()) | 238 | self.assertEqual((b"sha1:1234",), node.key()) |
1943 | 237 | self.assertEqual('pref\x00fo', node._search_prefix) | 239 | self.assertEqual(b'pref\x00fo', node._search_prefix) |
1944 | 238 | self.assertEqual({'pref\x00fo\x00': ('sha1:abcd',)}, node._items) | 240 | self.assertEqual({b'pref\x00fo\x00': (b'sha1:abcd',)}, node._items) |
1945 | 239 | 241 | ||
1946 | 240 | 242 | ||
1947 | 241 | class Test_BytesToTextKey(tests.TestCase): | 243 | class Test_BytesToTextKey(tests.TestCase): |
1948 | @@ -251,29 +253,29 @@ | |||
1949 | 251 | self.assertRaises(Exception, self.module._bytes_to_text_key, bytes) | 253 | self.assertRaises(Exception, self.module._bytes_to_text_key, bytes) |
1950 | 252 | 254 | ||
1951 | 253 | def test_file(self): | 255 | def test_file(self): |
1955 | 254 | self.assertBytesToTextKey(('file-id', 'revision-id'), | 256 | self.assertBytesToTextKey((b'file-id', b'revision-id'), |
1956 | 255 | 'file: file-id\nparent-id\nname\nrevision-id\n' | 257 | b'file: file-id\nparent-id\nname\nrevision-id\n' |
1957 | 256 | 'da39a3ee5e6b4b0d3255bfef95601890afd80709\n100\nN') | 258 | b'da39a3ee5e6b4b0d3255bfef95601890afd80709\n100\nN') |
1958 | 257 | 259 | ||
1959 | 258 | def test_invalid_no_kind(self): | 260 | def test_invalid_no_kind(self): |
1960 | 259 | self.assertBytesToTextKeyRaises( | 261 | self.assertBytesToTextKeyRaises( |
1963 | 260 | 'file file-id\nparent-id\nname\nrevision-id\n' | 262 | b'file file-id\nparent-id\nname\nrevision-id\n' |
1964 | 261 | 'da39a3ee5e6b4b0d3255bfef95601890afd80709\n100\nN') | 263 | b'da39a3ee5e6b4b0d3255bfef95601890afd80709\n100\nN') |
1965 | 262 | 264 | ||
1966 | 263 | def test_invalid_no_space(self): | 265 | def test_invalid_no_space(self): |
1967 | 264 | self.assertBytesToTextKeyRaises( | 266 | self.assertBytesToTextKeyRaises( |
1970 | 265 | 'file:file-id\nparent-id\nname\nrevision-id\n' | 267 | b'file:file-id\nparent-id\nname\nrevision-id\n' |
1971 | 266 | 'da39a3ee5e6b4b0d3255bfef95601890afd80709\n100\nN') | 268 | b'da39a3ee5e6b4b0d3255bfef95601890afd80709\n100\nN') |
1972 | 267 | 269 | ||
1973 | 268 | def test_invalid_too_short_file_id(self): | 270 | def test_invalid_too_short_file_id(self): |
1975 | 269 | self.assertBytesToTextKeyRaises('file:file-id') | 271 | self.assertBytesToTextKeyRaises(b'file:file-id') |
1976 | 270 | 272 | ||
1977 | 271 | def test_invalid_too_short_parent_id(self): | 273 | def test_invalid_too_short_parent_id(self): |
1979 | 272 | self.assertBytesToTextKeyRaises('file:file-id\nparent-id') | 274 | self.assertBytesToTextKeyRaises(b'file:file-id\nparent-id') |
1980 | 273 | 275 | ||
1981 | 274 | def test_invalid_too_short_name(self): | 276 | def test_invalid_too_short_name(self): |
1983 | 275 | self.assertBytesToTextKeyRaises('file:file-id\nparent-id\nname') | 277 | self.assertBytesToTextKeyRaises(b'file:file-id\nparent-id\nname') |
1984 | 276 | 278 | ||
1985 | 277 | def test_dir(self): | 279 | def test_dir(self): |
1988 | 278 | self.assertBytesToTextKey(('dir-id', 'revision-id'), | 280 | self.assertBytesToTextKey((b'dir-id', b'revision-id'), |
1989 | 279 | 'dir: dir-id\nparent-id\nname\nrevision-id') | 281 | b'dir: dir-id\nparent-id\nname\nrevision-id') |
1990 | 280 | 282 | ||
1991 | === modified file 'breezy/tests/test__chunks_to_lines.py' | |||
1992 | --- breezy/tests/test__chunks_to_lines.py 2017-05-23 14:08:03 +0000 | |||
1993 | +++ breezy/tests/test__chunks_to_lines.py 2017-06-11 01:47:17 +0000 | |||
1994 | @@ -47,58 +47,60 @@ | |||
1995 | 47 | self.assertIs(chunks, result) | 47 | self.assertIs(chunks, result) |
1996 | 48 | 48 | ||
1997 | 49 | def test_fulltext_chunk_to_lines(self): | 49 | def test_fulltext_chunk_to_lines(self): |
2007 | 50 | self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz\n'], | 50 | self.assertChunksToLines( |
2008 | 51 | ['foo\nbar\r\nba\rz\n']) | 51 | [b'foo\n', b'bar\r\n', b'ba\rz\n'], |
2009 | 52 | self.assertChunksToLines(['foobarbaz\n'], ['foobarbaz\n'], | 52 | [b'foo\nbar\r\nba\rz\n']) |
2010 | 53 | alreadly_lines=True) | 53 | self.assertChunksToLines( |
2011 | 54 | self.assertChunksToLines(['foo\n', 'bar\n', '\n', 'baz\n', '\n', '\n'], | 54 | [b'foobarbaz\n'], [b'foobarbaz\n'], alreadly_lines=True) |
2012 | 55 | ['foo\nbar\n\nbaz\n\n\n']) | 55 | self.assertChunksToLines( |
2013 | 56 | self.assertChunksToLines(['foobarbaz'], ['foobarbaz'], | 56 | [b'foo\n', b'bar\n', b'\n', b'baz\n', b'\n', b'\n'], |
2014 | 57 | alreadly_lines=True) | 57 | [b'foo\nbar\n\nbaz\n\n\n']) |
2015 | 58 | self.assertChunksToLines(['foobarbaz'], ['foo', 'bar', 'baz']) | 58 | self.assertChunksToLines( |
2016 | 59 | [b'foobarbaz'], [b'foobarbaz'], alreadly_lines=True) | ||
2017 | 60 | self.assertChunksToLines([b'foobarbaz'], [b'foo', b'bar', b'baz']) | ||
2018 | 59 | 61 | ||
2019 | 60 | def test_newlines(self): | 62 | def test_newlines(self): |
2026 | 61 | self.assertChunksToLines(['\n'], ['\n'], alreadly_lines=True) | 63 | self.assertChunksToLines([b'\n'], [b'\n'], alreadly_lines=True) |
2027 | 62 | self.assertChunksToLines(['\n'], ['', '\n', '']) | 64 | self.assertChunksToLines([b'\n'], [b'', b'\n', b'']) |
2028 | 63 | self.assertChunksToLines(['\n'], ['\n', '']) | 65 | self.assertChunksToLines([b'\n'], [b'\n', b'']) |
2029 | 64 | self.assertChunksToLines(['\n'], ['', '\n']) | 66 | self.assertChunksToLines([b'\n'], [b'', b'\n']) |
2030 | 65 | self.assertChunksToLines(['\n', '\n', '\n'], ['\n\n\n']) | 67 | self.assertChunksToLines([b'\n', b'\n', b'\n'], [b'\n\n\n']) |
2031 | 66 | self.assertChunksToLines(['\n', '\n', '\n'], ['\n', '\n', '\n'], | 68 | self.assertChunksToLines([b'\n', b'\n', b'\n'], [b'\n', b'\n', b'\n'], |
2032 | 67 | alreadly_lines=True) | 69 | alreadly_lines=True) |
2033 | 68 | 70 | ||
2034 | 69 | def test_lines_to_lines(self): | 71 | def test_lines_to_lines(self): |
2037 | 70 | self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz\n'], | 72 | self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz\n'], |
2038 | 71 | ['foo\n', 'bar\r\n', 'ba\rz\n'], | 73 | [b'foo\n', b'bar\r\n', b'ba\rz\n'], |
2039 | 72 | alreadly_lines=True) | 74 | alreadly_lines=True) |
2040 | 73 | 75 | ||
2041 | 74 | def test_no_final_newline(self): | 76 | def test_no_final_newline(self): |
2046 | 75 | self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz'], | 77 | self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz'], |
2047 | 76 | ['foo\nbar\r\nba\rz']) | 78 | [b'foo\nbar\r\nba\rz']) |
2048 | 77 | self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz'], | 79 | self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz'], |
2049 | 78 | ['foo\n', 'bar\r\n', 'ba\rz'], | 80 | [b'foo\n', b'bar\r\n', b'ba\rz'], |
2050 | 79 | alreadly_lines=True) | 81 | alreadly_lines=True) |
2053 | 80 | self.assertChunksToLines(('foo\n', 'bar\r\n', 'ba\rz'), | 82 | self.assertChunksToLines((b'foo\n', b'bar\r\n', b'ba\rz'), |
2054 | 81 | ('foo\n', 'bar\r\n', 'ba\rz'), | 83 | (b'foo\n', b'bar\r\n', b'ba\rz'), |
2055 | 82 | alreadly_lines=True) | 84 | alreadly_lines=True) |
2056 | 83 | self.assertChunksToLines([], [], alreadly_lines=True) | 85 | self.assertChunksToLines([], [], alreadly_lines=True) |
2058 | 84 | self.assertChunksToLines(['foobarbaz'], ['foobarbaz'], | 86 | self.assertChunksToLines([b'foobarbaz'], [b'foobarbaz'], |
2059 | 85 | alreadly_lines=True) | 87 | alreadly_lines=True) |
2061 | 86 | self.assertChunksToLines([], ['']) | 88 | self.assertChunksToLines([], [b'']) |
2062 | 87 | 89 | ||
2063 | 88 | def test_mixed(self): | 90 | def test_mixed(self): |
2070 | 89 | self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz'], | 91 | self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz'], |
2071 | 90 | ['foo\n', 'bar\r\nba\r', 'z']) | 92 | [b'foo\n', b'bar\r\nba\r', b'z']) |
2072 | 91 | self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz'], | 93 | self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz'], |
2073 | 92 | ['foo\nb', 'a', 'r\r\nba\r', 'z']) | 94 | [b'foo\nb', b'a', b'r\r\nba\r', b'z']) |
2074 | 93 | self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz'], | 95 | self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz'], |
2075 | 94 | ['foo\nbar\r\nba', '\r', 'z']) | 96 | [b'foo\nbar\r\nba', b'\r', b'z']) |
2076 | 95 | 97 | ||
2083 | 96 | self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz'], | 98 | self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz'], |
2084 | 97 | ['foo\n', '', 'bar\r\nba', '\r', 'z']) | 99 | [b'foo\n', b'', b'bar\r\nba', b'\r', b'z']) |
2085 | 98 | self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz\n'], | 100 | self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz\n'], |
2086 | 99 | ['foo\n', 'bar\r\n', 'ba\rz\n', '']) | 101 | [b'foo\n', b'bar\r\n', b'ba\rz\n', b'']) |
2087 | 100 | self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz\n'], | 102 | self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz\n'], |
2088 | 101 | ['foo\n', 'bar', '\r\n', 'ba\rz\n']) | 103 | [b'foo\n', b'bar', b'\r\n', b'ba\rz\n']) |
2089 | 102 | 104 | ||
2090 | 103 | def test_not_lines(self): | 105 | def test_not_lines(self): |
2091 | 104 | # We should raise a TypeError, not crash | 106 | # We should raise a TypeError, not crash |
2092 | @@ -107,4 +109,4 @@ | |||
2093 | 107 | self.assertRaises(TypeError, self.module.chunks_to_lines, | 109 | self.assertRaises(TypeError, self.module.chunks_to_lines, |
2094 | 108 | [object()]) | 110 | [object()]) |
2095 | 109 | self.assertRaises(TypeError, self.module.chunks_to_lines, | 111 | self.assertRaises(TypeError, self.module.chunks_to_lines, |
2097 | 110 | ['foo', object()]) | 112 | [b'foo', object()]) |
2098 | 111 | 113 | ||
2099 | === modified file 'breezy/tests/test_inv.py' | |||
2100 | --- breezy/tests/test_inv.py 2017-06-10 00:52:37 +0000 | |||
2101 | +++ breezy/tests/test_inv.py 2017-06-11 01:47:17 +0000 | |||
2102 | @@ -284,35 +284,35 @@ | |||
2103 | 284 | 284 | ||
2104 | 285 | def test_creation_from_root_id(self): | 285 | def test_creation_from_root_id(self): |
2105 | 286 | # iff a root id is passed to the constructor, a root directory is made | 286 | # iff a root id is passed to the constructor, a root directory is made |
2107 | 287 | inv = inventory.Inventory(root_id='tree-root') | 287 | inv = inventory.Inventory(root_id=b'tree-root') |
2108 | 288 | self.assertNotEqual(None, inv.root) | 288 | self.assertNotEqual(None, inv.root) |
2110 | 289 | self.assertEqual('tree-root', inv.root.file_id) | 289 | self.assertEqual(b'tree-root', inv.root.file_id) |
2111 | 290 | 290 | ||
2112 | 291 | def test_add_path_of_root(self): | 291 | def test_add_path_of_root(self): |
2113 | 292 | # if no root id is given at creation time, there is no root directory | 292 | # if no root id is given at creation time, there is no root directory |
2114 | 293 | inv = inventory.Inventory(root_id=None) | 293 | inv = inventory.Inventory(root_id=None) |
2115 | 294 | self.assertIs(None, inv.root) | 294 | self.assertIs(None, inv.root) |
2116 | 295 | # add a root entry by adding its path | 295 | # add a root entry by adding its path |
2120 | 296 | ie = inv.add_path("", "directory", "my-root") | 296 | ie = inv.add_path(u"", "directory", b"my-root") |
2121 | 297 | ie.revision = 'test-rev' | 297 | ie.revision = b'test-rev' |
2122 | 298 | self.assertEqual("my-root", ie.file_id) | 298 | self.assertEqual(b"my-root", ie.file_id) |
2123 | 299 | self.assertIs(ie, inv.root) | 299 | self.assertIs(ie, inv.root) |
2124 | 300 | 300 | ||
2125 | 301 | def test_add_path(self): | 301 | def test_add_path(self): |
2129 | 302 | inv = inventory.Inventory(root_id='tree_root') | 302 | inv = inventory.Inventory(root_id=b'tree_root') |
2130 | 303 | ie = inv.add_path('hello', 'file', 'hello-id') | 303 | ie = inv.add_path(u'hello', 'file', b'hello-id') |
2131 | 304 | self.assertEqual('hello-id', ie.file_id) | 304 | self.assertEqual(b'hello-id', ie.file_id) |
2132 | 305 | self.assertEqual('file', ie.kind) | 305 | self.assertEqual('file', ie.kind) |
2133 | 306 | 306 | ||
2134 | 307 | def test_copy(self): | 307 | def test_copy(self): |
2135 | 308 | """Make sure copy() works and creates a deep copy.""" | 308 | """Make sure copy() works and creates a deep copy.""" |
2138 | 309 | inv = inventory.Inventory(root_id='some-tree-root') | 309 | inv = inventory.Inventory(root_id=b'some-tree-root') |
2139 | 310 | ie = inv.add_path('hello', 'file', 'hello-id') | 310 | ie = inv.add_path(u'hello', 'file', b'hello-id') |
2140 | 311 | inv2 = inv.copy() | 311 | inv2 = inv.copy() |
2145 | 312 | inv.root.file_id = 'some-new-root' | 312 | inv.root.file_id = b'some-new-root' |
2146 | 313 | ie.name = 'file2' | 313 | ie.name = u'file2' |
2147 | 314 | self.assertEqual('some-tree-root', inv2.root.file_id) | 314 | self.assertEqual(b'some-tree-root', inv2.root.file_id) |
2148 | 315 | self.assertEqual('hello', inv2['hello-id'].name) | 315 | self.assertEqual(u'hello', inv2[b'hello-id'].name) |
2149 | 316 | 316 | ||
2150 | 317 | def test_copy_empty(self): | 317 | def test_copy_empty(self): |
2151 | 318 | """Make sure an empty inventory can be copied.""" | 318 | """Make sure an empty inventory can be copied.""" |
2152 | @@ -322,16 +322,17 @@ | |||
2153 | 322 | 322 | ||
2154 | 323 | def test_copy_copies_root_revision(self): | 323 | def test_copy_copies_root_revision(self): |
2155 | 324 | """Make sure the revision of the root gets copied.""" | 324 | """Make sure the revision of the root gets copied.""" |
2158 | 325 | inv = inventory.Inventory(root_id='someroot') | 325 | inv = inventory.Inventory(root_id=b'someroot') |
2159 | 326 | inv.root.revision = 'therev' | 326 | inv.root.revision = b'therev' |
2160 | 327 | inv2 = inv.copy() | 327 | inv2 = inv.copy() |
2163 | 328 | self.assertEqual('someroot', inv2.root.file_id) | 328 | self.assertEqual(b'someroot', inv2.root.file_id) |
2164 | 329 | self.assertEqual('therev', inv2.root.revision) | 329 | self.assertEqual(b'therev', inv2.root.revision) |
2165 | 330 | 330 | ||
2166 | 331 | def test_create_tree_reference(self): | 331 | def test_create_tree_reference(self): |
2170 | 332 | inv = inventory.Inventory('tree-root-123') | 332 | inv = inventory.Inventory(b'tree-root-123') |
2171 | 333 | inv.add(TreeReference('nested-id', 'nested', parent_id='tree-root-123', | 333 | inv.add(TreeReference( |
2172 | 334 | revision='rev', reference_revision='rev2')) | 334 | b'nested-id', 'nested', parent_id=b'tree-root-123', |
2173 | 335 | revision=b'rev', reference_revision=b'rev2')) | ||
2174 | 335 | 336 | ||
2175 | 336 | def test_error_encoding(self): | 337 | def test_error_encoding(self): |
2176 | 337 | inv = inventory.Inventory('tree-root') | 338 | inv = inventory.Inventory('tree-root') |
2177 | @@ -997,30 +998,30 @@ | |||
2178 | 997 | 998 | ||
2179 | 998 | def test___getitem__(self): | 999 | def test___getitem__(self): |
2180 | 999 | inv = Inventory() | 1000 | inv = Inventory() |
2188 | 1000 | inv.revision_id = "revid" | 1001 | inv.revision_id = b"revid" |
2189 | 1001 | inv.root.revision = "rootrev" | 1002 | inv.root.revision = b"rootrev" |
2190 | 1002 | inv.add(InventoryFile("fileid", "file", inv.root.file_id)) | 1003 | inv.add(InventoryFile(b"fileid", u"file", inv.root.file_id)) |
2191 | 1003 | inv["fileid"].revision = "filerev" | 1004 | inv[b"fileid"].revision = b"filerev" |
2192 | 1004 | inv["fileid"].executable = True | 1005 | inv[b"fileid"].executable = True |
2193 | 1005 | inv["fileid"].text_sha1 = "ffff" | 1006 | inv[b"fileid"].text_sha1 = b"ffff" |
2194 | 1006 | inv["fileid"].text_size = 1 | 1007 | inv[b"fileid"].text_size = 1 |
2195 | 1007 | chk_bytes = self.get_chk_bytes() | 1008 | chk_bytes = self.get_chk_bytes() |
2196 | 1008 | chk_inv = CHKInventory.from_inventory(chk_bytes, inv) | 1009 | chk_inv = CHKInventory.from_inventory(chk_bytes, inv) |
2199 | 1009 | bytes = ''.join(chk_inv.to_lines()) | 1010 | data = b''.join(chk_inv.to_lines()) |
2200 | 1010 | new_inv = CHKInventory.deserialise(chk_bytes, bytes, ("revid",)) | 1011 | new_inv = CHKInventory.deserialise(chk_bytes, data, (b"revid",)) |
2201 | 1011 | root_entry = new_inv[inv.root.file_id] | 1012 | root_entry = new_inv[inv.root.file_id] |
2203 | 1012 | file_entry = new_inv["fileid"] | 1013 | file_entry = new_inv[b"fileid"] |
2204 | 1013 | self.assertEqual("directory", root_entry.kind) | 1014 | self.assertEqual("directory", root_entry.kind) |
2205 | 1014 | self.assertEqual(inv.root.file_id, root_entry.file_id) | 1015 | self.assertEqual(inv.root.file_id, root_entry.file_id) |
2206 | 1015 | self.assertEqual(inv.root.parent_id, root_entry.parent_id) | 1016 | self.assertEqual(inv.root.parent_id, root_entry.parent_id) |
2207 | 1016 | self.assertEqual(inv.root.name, root_entry.name) | 1017 | self.assertEqual(inv.root.name, root_entry.name) |
2209 | 1017 | self.assertEqual("rootrev", root_entry.revision) | 1018 | self.assertEqual(b"rootrev", root_entry.revision) |
2210 | 1018 | self.assertEqual("file", file_entry.kind) | 1019 | self.assertEqual("file", file_entry.kind) |
2212 | 1019 | self.assertEqual("fileid", file_entry.file_id) | 1020 | self.assertEqual(b"fileid", file_entry.file_id) |
2213 | 1020 | self.assertEqual(inv.root.file_id, file_entry.parent_id) | 1021 | self.assertEqual(inv.root.file_id, file_entry.parent_id) |
2217 | 1021 | self.assertEqual("file", file_entry.name) | 1022 | self.assertEqual(u"file", file_entry.name) |
2218 | 1022 | self.assertEqual("filerev", file_entry.revision) | 1023 | self.assertEqual(b"filerev", file_entry.revision) |
2219 | 1023 | self.assertEqual("ffff", file_entry.text_sha1) | 1024 | self.assertEqual(b"ffff", file_entry.text_sha1) |
2220 | 1024 | self.assertEqual(1, file_entry.text_size) | 1025 | self.assertEqual(1, file_entry.text_size) |
2221 | 1025 | self.assertEqual(True, file_entry.executable) | 1026 | self.assertEqual(True, file_entry.executable) |
2222 | 1026 | self.assertRaises(errors.NoSuchId, new_inv.__getitem__, 'missing') | 1027 | self.assertRaises(errors.NoSuchId, new_inv.__getitem__, 'missing') |
2223 | 1027 | 1028 | ||
2224 | === modified file 'breezy/tests/test_inventory_delta.py' | |||
2225 | --- breezy/tests/test_inventory_delta.py 2017-06-09 16:31:49 +0000 | |||
2226 | +++ breezy/tests/test_inventory_delta.py 2017-06-11 01:47:17 +0000 | |||
2227 | @@ -32,14 +32,14 @@ | |||
2228 | 32 | from . import TestCase | 32 | from . import TestCase |
2229 | 33 | 33 | ||
2230 | 34 | ### DO NOT REFLOW THESE TEXTS. NEW LINES ARE SIGNIFICANT. ### | 34 | ### DO NOT REFLOW THESE TEXTS. NEW LINES ARE SIGNIFICANT. ### |
2232 | 35 | empty_lines = """format: bzr inventory delta v1 (bzr 1.14) | 35 | empty_lines = b"""format: bzr inventory delta v1 (bzr 1.14) |
2233 | 36 | parent: null: | 36 | parent: null: |
2234 | 37 | version: null: | 37 | version: null: |
2235 | 38 | versioned_root: true | 38 | versioned_root: true |
2236 | 39 | tree_references: true | 39 | tree_references: true |
2237 | 40 | """ | 40 | """ |
2238 | 41 | 41 | ||
2240 | 42 | root_only_lines = """format: bzr inventory delta v1 (bzr 1.14) | 42 | root_only_lines = b"""format: bzr inventory delta v1 (bzr 1.14) |
2241 | 43 | parent: null: | 43 | parent: null: |
2242 | 44 | version: entry-version | 44 | version: entry-version |
2243 | 45 | versioned_root: true | 45 | versioned_root: true |
2244 | @@ -48,7 +48,7 @@ | |||
2245 | 48 | """ | 48 | """ |
2246 | 49 | 49 | ||
2247 | 50 | 50 | ||
2249 | 51 | root_change_lines = """format: bzr inventory delta v1 (bzr 1.14) | 51 | root_change_lines = b"""format: bzr inventory delta v1 (bzr 1.14) |
2250 | 52 | parent: entry-version | 52 | parent: entry-version |
2251 | 53 | version: changed-root | 53 | version: changed-root |
2252 | 54 | versioned_root: true | 54 | versioned_root: true |
2253 | @@ -56,7 +56,7 @@ | |||
2254 | 56 | /\x00an-id\x00\x00different-version\x00dir | 56 | /\x00an-id\x00\x00different-version\x00dir |
2255 | 57 | """ | 57 | """ |
2256 | 58 | 58 | ||
2258 | 59 | corrupt_parent_lines = """format: bzr inventory delta v1 (bzr 1.14) | 59 | corrupt_parent_lines = b"""format: bzr inventory delta v1 (bzr 1.14) |
2259 | 60 | parent: entry-version | 60 | parent: entry-version |
2260 | 61 | version: changed-root | 61 | version: changed-root |
2261 | 62 | versioned_root: false | 62 | versioned_root: false |
2262 | @@ -64,7 +64,7 @@ | |||
2263 | 64 | /\x00an-id\x00\x00different-version\x00dir | 64 | /\x00an-id\x00\x00different-version\x00dir |
2264 | 65 | """ | 65 | """ |
2265 | 66 | 66 | ||
2267 | 67 | root_only_unversioned = """format: bzr inventory delta v1 (bzr 1.14) | 67 | root_only_unversioned = b"""format: bzr inventory delta v1 (bzr 1.14) |
2268 | 68 | parent: null: | 68 | parent: null: |
2269 | 69 | version: entry-version | 69 | version: entry-version |
2270 | 70 | versioned_root: false | 70 | versioned_root: false |
2271 | @@ -72,7 +72,7 @@ | |||
2272 | 72 | None\x00/\x00TREE_ROOT\x00\x00entry-version\x00dir | 72 | None\x00/\x00TREE_ROOT\x00\x00entry-version\x00dir |
2273 | 73 | """ | 73 | """ |
2274 | 74 | 74 | ||
2276 | 75 | reference_lines = """format: bzr inventory delta v1 (bzr 1.14) | 75 | reference_lines = b"""format: bzr inventory delta v1 (bzr 1.14) |
2277 | 76 | parent: null: | 76 | parent: null: |
2278 | 77 | version: entry-version | 77 | version: entry-version |
2279 | 78 | versioned_root: true | 78 | versioned_root: true |
2280 | @@ -81,7 +81,7 @@ | |||
2281 | 81 | None\x00/foo\x00id\x00TREE_ROOT\x00changed\x00tree\x00subtree-version | 81 | None\x00/foo\x00id\x00TREE_ROOT\x00changed\x00tree\x00subtree-version |
2282 | 82 | """ | 82 | """ |
2283 | 83 | 83 | ||
2285 | 84 | change_tree_lines = """format: bzr inventory delta v1 (bzr 1.14) | 84 | change_tree_lines = b"""format: bzr inventory delta v1 (bzr 1.14) |
2286 | 85 | parent: entry-version | 85 | parent: entry-version |
2287 | 86 | version: change-tree | 86 | version: change-tree |
2288 | 87 | versioned_root: false | 87 | versioned_root: false |
2289 | @@ -96,34 +96,34 @@ | |||
2290 | 96 | def test_parse_no_bytes(self): | 96 | def test_parse_no_bytes(self): |
2291 | 97 | deserializer = inventory_delta.InventoryDeltaDeserializer() | 97 | deserializer = inventory_delta.InventoryDeltaDeserializer() |
2292 | 98 | err = self.assertRaises( | 98 | err = self.assertRaises( |
2294 | 99 | InventoryDeltaError, deserializer.parse_text_bytes, '') | 99 | InventoryDeltaError, deserializer.parse_text_bytes, b'') |
2295 | 100 | self.assertContainsRe(str(err), 'last line not empty') | 100 | self.assertContainsRe(str(err), 'last line not empty') |
2296 | 101 | 101 | ||
2297 | 102 | def test_parse_bad_format(self): | 102 | def test_parse_bad_format(self): |
2298 | 103 | deserializer = inventory_delta.InventoryDeltaDeserializer() | 103 | deserializer = inventory_delta.InventoryDeltaDeserializer() |
2299 | 104 | err = self.assertRaises(InventoryDeltaError, | 104 | err = self.assertRaises(InventoryDeltaError, |
2301 | 105 | deserializer.parse_text_bytes, 'format: foo\n') | 105 | deserializer.parse_text_bytes, b'format: foo\n') |
2302 | 106 | self.assertContainsRe(str(err), 'unknown format') | 106 | self.assertContainsRe(str(err), 'unknown format') |
2303 | 107 | 107 | ||
2304 | 108 | def test_parse_no_parent(self): | 108 | def test_parse_no_parent(self): |
2305 | 109 | deserializer = inventory_delta.InventoryDeltaDeserializer() | 109 | deserializer = inventory_delta.InventoryDeltaDeserializer() |
2306 | 110 | err = self.assertRaises(InventoryDeltaError, | 110 | err = self.assertRaises(InventoryDeltaError, |
2307 | 111 | deserializer.parse_text_bytes, | 111 | deserializer.parse_text_bytes, |
2309 | 112 | 'format: bzr inventory delta v1 (bzr 1.14)\n') | 112 | b'format: bzr inventory delta v1 (bzr 1.14)\n') |
2310 | 113 | self.assertContainsRe(str(err), 'missing parent: marker') | 113 | self.assertContainsRe(str(err), 'missing parent: marker') |
2311 | 114 | 114 | ||
2312 | 115 | def test_parse_no_version(self): | 115 | def test_parse_no_version(self): |
2313 | 116 | deserializer = inventory_delta.InventoryDeltaDeserializer() | 116 | deserializer = inventory_delta.InventoryDeltaDeserializer() |
2314 | 117 | err = self.assertRaises(InventoryDeltaError, | 117 | err = self.assertRaises(InventoryDeltaError, |
2315 | 118 | deserializer.parse_text_bytes, | 118 | deserializer.parse_text_bytes, |
2318 | 119 | 'format: bzr inventory delta v1 (bzr 1.14)\n' | 119 | b'format: bzr inventory delta v1 (bzr 1.14)\n' |
2319 | 120 | 'parent: null:\n') | 120 | b'parent: null:\n') |
2320 | 121 | self.assertContainsRe(str(err), 'missing version: marker') | 121 | self.assertContainsRe(str(err), 'missing version: marker') |
2322 | 122 | 122 | ||
2323 | 123 | def test_parse_duplicate_key_errors(self): | 123 | def test_parse_duplicate_key_errors(self): |
2324 | 124 | deserializer = inventory_delta.InventoryDeltaDeserializer() | 124 | deserializer = inventory_delta.InventoryDeltaDeserializer() |
2325 | 125 | double_root_lines = \ | 125 | double_root_lines = \ |
2327 | 126 | """format: bzr inventory delta v1 (bzr 1.14) | 126 | b"""format: bzr inventory delta v1 (bzr 1.14) |
2328 | 127 | parent: null: | 127 | parent: null: |
2329 | 128 | version: null: | 128 | version: null: |
2330 | 129 | versioned_root: true | 129 | versioned_root: true |
2331 | @@ -139,16 +139,16 @@ | |||
2332 | 139 | deserializer = inventory_delta.InventoryDeltaDeserializer() | 139 | deserializer = inventory_delta.InventoryDeltaDeserializer() |
2333 | 140 | parse_result = deserializer.parse_text_bytes(root_only_lines) | 140 | parse_result = deserializer.parse_text_bytes(root_only_lines) |
2334 | 141 | expected_entry = inventory.make_entry( | 141 | expected_entry = inventory.make_entry( |
2337 | 142 | 'directory', u'', None, 'an-id') | 142 | 'directory', u'', None, b'an-id') |
2338 | 143 | expected_entry.revision = 'a@e\xc3\xa5ample.com--2004' | 143 | expected_entry.revision = b'a@e\xc3\xa5ample.com--2004' |
2339 | 144 | self.assertEqual( | 144 | self.assertEqual( |
2342 | 145 | ('null:', 'entry-version', True, True, | 145 | (b'null:', b'entry-version', True, True, |
2343 | 146 | [(None, '', 'an-id', expected_entry)]), | 146 | [(None, u'', b'an-id', expected_entry)]), |
2344 | 147 | parse_result) | 147 | parse_result) |
2345 | 148 | 148 | ||
2346 | 149 | def test_parse_special_revid_not_valid_last_mod(self): | 149 | def test_parse_special_revid_not_valid_last_mod(self): |
2347 | 150 | deserializer = inventory_delta.InventoryDeltaDeserializer() | 150 | deserializer = inventory_delta.InventoryDeltaDeserializer() |
2349 | 151 | root_only_lines = """format: bzr inventory delta v1 (bzr 1.14) | 151 | root_only_lines = b"""format: bzr inventory delta v1 (bzr 1.14) |
2350 | 152 | parent: null: | 152 | parent: null: |
2351 | 153 | version: null: | 153 | version: null: |
2352 | 154 | versioned_root: false | 154 | versioned_root: false |
2353 | @@ -161,7 +161,7 @@ | |||
2354 | 161 | 161 | ||
2355 | 162 | def test_parse_versioned_root_versioned_disabled(self): | 162 | def test_parse_versioned_root_versioned_disabled(self): |
2356 | 163 | deserializer = inventory_delta.InventoryDeltaDeserializer() | 163 | deserializer = inventory_delta.InventoryDeltaDeserializer() |
2358 | 164 | root_only_lines = """format: bzr inventory delta v1 (bzr 1.14) | 164 | root_only_lines = b"""format: bzr inventory delta v1 (bzr 1.14) |
2359 | 165 | parent: null: | 165 | parent: null: |
2360 | 166 | version: null: | 166 | version: null: |
2361 | 167 | versioned_root: false | 167 | versioned_root: false |
2362 | @@ -174,7 +174,7 @@ | |||
2363 | 174 | 174 | ||
2364 | 175 | def test_parse_unique_root_id_root_versioned_disabled(self): | 175 | def test_parse_unique_root_id_root_versioned_disabled(self): |
2365 | 176 | deserializer = inventory_delta.InventoryDeltaDeserializer() | 176 | deserializer = inventory_delta.InventoryDeltaDeserializer() |
2367 | 177 | root_only_lines = """format: bzr inventory delta v1 (bzr 1.14) | 177 | root_only_lines = b"""format: bzr inventory delta v1 (bzr 1.14) |
2368 | 178 | parent: parent-id | 178 | parent: parent-id |
2369 | 179 | version: a@e\xc3\xa5ample.com--2004 | 179 | version: a@e\xc3\xa5ample.com--2004 |
2370 | 180 | versioned_root: false | 180 | versioned_root: false |
2371 | @@ -189,11 +189,11 @@ | |||
2372 | 189 | deserializer = inventory_delta.InventoryDeltaDeserializer() | 189 | deserializer = inventory_delta.InventoryDeltaDeserializer() |
2373 | 190 | parse_result = deserializer.parse_text_bytes(root_only_unversioned) | 190 | parse_result = deserializer.parse_text_bytes(root_only_unversioned) |
2374 | 191 | expected_entry = inventory.make_entry( | 191 | expected_entry = inventory.make_entry( |
2377 | 192 | 'directory', u'', None, 'TREE_ROOT') | 192 | 'directory', u'', None, b'TREE_ROOT') |
2378 | 193 | expected_entry.revision = 'entry-version' | 193 | expected_entry.revision = b'entry-version' |
2379 | 194 | self.assertEqual( | 194 | self.assertEqual( |
2382 | 195 | ('null:', 'entry-version', False, False, | 195 | (b'null:', b'entry-version', False, False, |
2383 | 196 | [(None, u'', 'TREE_ROOT', expected_entry)]), | 196 | [(None, u'', b'TREE_ROOT', expected_entry)]), |
2384 | 197 | parse_result) | 197 | parse_result) |
2385 | 198 | 198 | ||
2386 | 199 | def test_parse_versioned_root_when_disabled(self): | 199 | def test_parse_versioned_root_when_disabled(self): |
2387 | @@ -215,7 +215,7 @@ | |||
2388 | 215 | deserializer = inventory_delta.InventoryDeltaDeserializer() | 215 | deserializer = inventory_delta.InventoryDeltaDeserializer() |
2389 | 216 | # A serialised inventory delta with a header saying no tree refs, but | 216 | # A serialised inventory delta with a header saying no tree refs, but |
2390 | 217 | # that has a tree ref in its content. | 217 | # that has a tree ref in its content. |
2392 | 218 | lines = """format: bzr inventory delta v1 (bzr 1.14) | 218 | lines = b"""format: bzr inventory delta v1 (bzr 1.14) |
2393 | 219 | parent: null: | 219 | parent: null: |
2394 | 220 | version: entry-version | 220 | version: entry-version |
2395 | 221 | versioned_root: false | 221 | versioned_root: false |
2396 | @@ -231,7 +231,7 @@ | |||
2397 | 231 | deserializer = inventory_delta.InventoryDeltaDeserializer() | 231 | deserializer = inventory_delta.InventoryDeltaDeserializer() |
2398 | 232 | # A serialised inventory delta with a header saying no tree refs, but | 232 | # A serialised inventory delta with a header saying no tree refs, but |
2399 | 233 | # that has a tree ref in its content. | 233 | # that has a tree ref in its content. |
2401 | 234 | lines = """format: bzr inventory delta v1 (bzr 1.14) | 234 | lines = b"""format: bzr inventory delta v1 (bzr 1.14) |
2402 | 235 | parent: null: | 235 | parent: null: |
2403 | 236 | version: entry-version | 236 | version: entry-version |
2404 | 237 | versioned_root: false | 237 | versioned_root: false |
2405 | @@ -254,7 +254,7 @@ | |||
2406 | 254 | def test_parse_invalid_newpath(self): | 254 | def test_parse_invalid_newpath(self): |
2407 | 255 | """newpath must start with / if it is not None.""" | 255 | """newpath must start with / if it is not None.""" |
2408 | 256 | lines = empty_lines | 256 | lines = empty_lines |
2410 | 257 | lines += "None\x00bad\x00TREE_ROOT\x00\x00version\x00dir\n" | 257 | lines += b"None\x00bad\x00TREE_ROOT\x00\x00version\x00dir\n" |
2411 | 258 | deserializer = inventory_delta.InventoryDeltaDeserializer() | 258 | deserializer = inventory_delta.InventoryDeltaDeserializer() |
2412 | 259 | err = self.assertRaises(InventoryDeltaError, | 259 | err = self.assertRaises(InventoryDeltaError, |
2413 | 260 | deserializer.parse_text_bytes, lines) | 260 | deserializer.parse_text_bytes, lines) |
2414 | @@ -263,39 +263,39 @@ | |||
2415 | 263 | def test_parse_invalid_oldpath(self): | 263 | def test_parse_invalid_oldpath(self): |
2416 | 264 | """oldpath must start with / if it is not None.""" | 264 | """oldpath must start with / if it is not None.""" |
2417 | 265 | lines = root_only_lines | 265 | lines = root_only_lines |
2419 | 266 | lines += "bad\x00/new\x00file-id\x00\x00version\x00dir\n" | 266 | lines += b"bad\x00/new\x00file-id\x00\x00version\x00dir\n" |
2420 | 267 | deserializer = inventory_delta.InventoryDeltaDeserializer() | 267 | deserializer = inventory_delta.InventoryDeltaDeserializer() |
2421 | 268 | err = self.assertRaises(InventoryDeltaError, | 268 | err = self.assertRaises(InventoryDeltaError, |
2422 | 269 | deserializer.parse_text_bytes, lines) | 269 | deserializer.parse_text_bytes, lines) |
2423 | 270 | self.assertContainsRe(str(err), 'oldpath invalid') | 270 | self.assertContainsRe(str(err), 'oldpath invalid') |
2425 | 271 | 271 | ||
2426 | 272 | def test_parse_new_file(self): | 272 | def test_parse_new_file(self): |
2427 | 273 | """a new file is parsed correctly""" | 273 | """a new file is parsed correctly""" |
2428 | 274 | lines = root_only_lines | 274 | lines = root_only_lines |
2430 | 275 | fake_sha = "deadbeef" * 5 | 275 | fake_sha = b"deadbeef" * 5 |
2431 | 276 | lines += ( | 276 | lines += ( |
2434 | 277 | "None\x00/new\x00file-id\x00an-id\x00version\x00file\x00123\x00" + | 277 | b"None\x00/new\x00file-id\x00an-id\x00version\x00file\x00123\x00" + |
2435 | 278 | "\x00" + fake_sha + "\n") | 278 | b"\x00" + fake_sha + b"\n") |
2436 | 279 | deserializer = inventory_delta.InventoryDeltaDeserializer() | 279 | deserializer = inventory_delta.InventoryDeltaDeserializer() |
2437 | 280 | parse_result = deserializer.parse_text_bytes(lines) | 280 | parse_result = deserializer.parse_text_bytes(lines) |
2438 | 281 | expected_entry = inventory.make_entry( | 281 | expected_entry = inventory.make_entry( |
2441 | 282 | 'file', u'new', 'an-id', 'file-id') | 282 | 'file', u'new', b'an-id', b'file-id') |
2442 | 283 | expected_entry.revision = 'version' | 283 | expected_entry.revision = b'version' |
2443 | 284 | expected_entry.text_size = 123 | 284 | expected_entry.text_size = 123 |
2444 | 285 | expected_entry.text_sha1 = fake_sha | 285 | expected_entry.text_sha1 = fake_sha |
2445 | 286 | delta = parse_result[4] | 286 | delta = parse_result[4] |
2446 | 287 | self.assertEqual( | 287 | self.assertEqual( |
2448 | 288 | (None, u'new', 'file-id', expected_entry), delta[-1]) | 288 | (None, u'new', b'file-id', expected_entry), delta[-1]) |
2449 | 289 | 289 | ||
2450 | 290 | def test_parse_delete(self): | 290 | def test_parse_delete(self): |
2451 | 291 | lines = root_only_lines | 291 | lines = root_only_lines |
2452 | 292 | lines += ( | 292 | lines += ( |
2454 | 293 | "/old-file\x00None\x00deleted-id\x00\x00null:\x00deleted\x00\x00\n") | 293 | b"/old-file\x00None\x00deleted-id\x00\x00null:\x00deleted\x00\x00\n") |
2455 | 294 | deserializer = inventory_delta.InventoryDeltaDeserializer() | 294 | deserializer = inventory_delta.InventoryDeltaDeserializer() |
2456 | 295 | parse_result = deserializer.parse_text_bytes(lines) | 295 | parse_result = deserializer.parse_text_bytes(lines) |
2457 | 296 | delta = parse_result[4] | 296 | delta = parse_result[4] |
2458 | 297 | self.assertEqual( | 297 | self.assertEqual( |
2460 | 298 | (u'old-file', None, 'deleted-id', None), delta[-1]) | 298 | (u'old-file', None, b'deleted-id', None), delta[-1]) |
2461 | 299 | 299 | ||
2462 | 300 | 300 | ||
2463 | 301 | class TestSerialization(TestCase): | 301 | class TestSerialization(TestCase): |
2464 | @@ -313,86 +313,86 @@ | |||
2465 | 313 | def test_root_only_to_lines(self): | 313 | def test_root_only_to_lines(self): |
2466 | 314 | old_inv = Inventory(None) | 314 | old_inv = Inventory(None) |
2467 | 315 | new_inv = Inventory(None) | 315 | new_inv = Inventory(None) |
2470 | 316 | root = new_inv.make_entry('directory', '', None, 'an-id') | 316 | root = new_inv.make_entry('directory', u'', None, b'an-id') |
2471 | 317 | root.revision = 'a@e\xc3\xa5ample.com--2004' | 317 | root.revision = b'a@e\xc3\xa5ample.com--2004' |
2472 | 318 | new_inv.add(root) | 318 | new_inv.add(root) |
2473 | 319 | delta = new_inv._make_delta(old_inv) | 319 | delta = new_inv._make_delta(old_inv) |
2474 | 320 | serializer = inventory_delta.InventoryDeltaSerializer( | 320 | serializer = inventory_delta.InventoryDeltaSerializer( |
2475 | 321 | versioned_root=True, tree_references=True) | 321 | versioned_root=True, tree_references=True) |
2476 | 322 | self.assertEqual(BytesIO(root_only_lines).readlines(), | 322 | self.assertEqual(BytesIO(root_only_lines).readlines(), |
2478 | 323 | serializer.delta_to_lines(NULL_REVISION, 'entry-version', delta)) | 323 | serializer.delta_to_lines(NULL_REVISION, b'entry-version', delta)) |
2479 | 324 | 324 | ||
2480 | 325 | def test_unversioned_root(self): | 325 | def test_unversioned_root(self): |
2481 | 326 | old_inv = Inventory(None) | 326 | old_inv = Inventory(None) |
2482 | 327 | new_inv = Inventory(None) | 327 | new_inv = Inventory(None) |
2484 | 328 | root = new_inv.make_entry('directory', '', None, 'TREE_ROOT') | 328 | root = new_inv.make_entry('directory', u'', None, b'TREE_ROOT') |
2485 | 329 | # Implicit roots are considered modified in every revision. | 329 | # Implicit roots are considered modified in every revision. |
2487 | 330 | root.revision = 'entry-version' | 330 | root.revision = b'entry-version' |
2488 | 331 | new_inv.add(root) | 331 | new_inv.add(root) |
2489 | 332 | delta = new_inv._make_delta(old_inv) | 332 | delta = new_inv._make_delta(old_inv) |
2490 | 333 | serializer = inventory_delta.InventoryDeltaSerializer( | 333 | serializer = inventory_delta.InventoryDeltaSerializer( |
2491 | 334 | versioned_root=False, tree_references=False) | 334 | versioned_root=False, tree_references=False) |
2492 | 335 | serialized_lines = serializer.delta_to_lines( | 335 | serialized_lines = serializer.delta_to_lines( |
2494 | 336 | NULL_REVISION, 'entry-version', delta) | 336 | NULL_REVISION, b'entry-version', delta) |
2495 | 337 | self.assertEqual(BytesIO(root_only_unversioned).readlines(), | 337 | self.assertEqual(BytesIO(root_only_unversioned).readlines(), |
2496 | 338 | serialized_lines) | 338 | serialized_lines) |
2497 | 339 | deserializer = inventory_delta.InventoryDeltaDeserializer() | 339 | deserializer = inventory_delta.InventoryDeltaDeserializer() |
2498 | 340 | self.assertEqual( | 340 | self.assertEqual( |
2501 | 341 | (NULL_REVISION, 'entry-version', False, False, delta), | 341 | (NULL_REVISION, b'entry-version', False, False, delta), |
2502 | 342 | deserializer.parse_text_bytes(''.join(serialized_lines))) | 342 | deserializer.parse_text_bytes(b''.join(serialized_lines))) |
2503 | 343 | 343 | ||
2504 | 344 | def test_unversioned_non_root_errors(self): | 344 | def test_unversioned_non_root_errors(self): |
2505 | 345 | old_inv = Inventory(None) | 345 | old_inv = Inventory(None) |
2506 | 346 | new_inv = Inventory(None) | 346 | new_inv = Inventory(None) |
2509 | 347 | root = new_inv.make_entry('directory', '', None, 'TREE_ROOT') | 347 | root = new_inv.make_entry('directory', u'', None, b'TREE_ROOT') |
2510 | 348 | root.revision = 'a@e\xc3\xa5ample.com--2004' | 348 | root.revision = b'a@e\xc3\xa5ample.com--2004' |
2511 | 349 | new_inv.add(root) | 349 | new_inv.add(root) |
2513 | 350 | non_root = new_inv.make_entry('directory', 'foo', root.file_id, 'id') | 350 | non_root = new_inv.make_entry('directory', u'foo', root.file_id, b'id') |
2514 | 351 | new_inv.add(non_root) | 351 | new_inv.add(non_root) |
2515 | 352 | delta = new_inv._make_delta(old_inv) | 352 | delta = new_inv._make_delta(old_inv) |
2516 | 353 | serializer = inventory_delta.InventoryDeltaSerializer( | 353 | serializer = inventory_delta.InventoryDeltaSerializer( |
2517 | 354 | versioned_root=True, tree_references=True) | 354 | versioned_root=True, tree_references=True) |
2518 | 355 | err = self.assertRaises(InventoryDeltaError, | 355 | err = self.assertRaises(InventoryDeltaError, |
2520 | 356 | serializer.delta_to_lines, NULL_REVISION, 'entry-version', delta) | 356 | serializer.delta_to_lines, NULL_REVISION, b'entry-version', delta) |
2521 | 357 | self.assertContainsRe(str(err), "^no version for fileid b?'id'$") | 357 | self.assertContainsRe(str(err), "^no version for fileid b?'id'$") |
2522 | 358 | 358 | ||
2523 | 359 | def test_richroot_unversioned_root_errors(self): | 359 | def test_richroot_unversioned_root_errors(self): |
2524 | 360 | old_inv = Inventory(None) | 360 | old_inv = Inventory(None) |
2525 | 361 | new_inv = Inventory(None) | 361 | new_inv = Inventory(None) |
2527 | 362 | root = new_inv.make_entry('directory', '', None, 'TREE_ROOT') | 362 | root = new_inv.make_entry('directory', '', None, b'TREE_ROOT') |
2528 | 363 | new_inv.add(root) | 363 | new_inv.add(root) |
2529 | 364 | delta = new_inv._make_delta(old_inv) | 364 | delta = new_inv._make_delta(old_inv) |
2530 | 365 | serializer = inventory_delta.InventoryDeltaSerializer( | 365 | serializer = inventory_delta.InventoryDeltaSerializer( |
2531 | 366 | versioned_root=True, tree_references=True) | 366 | versioned_root=True, tree_references=True) |
2532 | 367 | err = self.assertRaises(InventoryDeltaError, | 367 | err = self.assertRaises(InventoryDeltaError, |
2534 | 368 | serializer.delta_to_lines, NULL_REVISION, 'entry-version', delta) | 368 | serializer.delta_to_lines, NULL_REVISION, b'entry-version', delta) |
2535 | 369 | self.assertContainsRe( | 369 | self.assertContainsRe( |
2536 | 370 | str(err), "no version for fileid b?'TREE_ROOT'$") | 370 | str(err), "no version for fileid b?'TREE_ROOT'$") |
2537 | 371 | 371 | ||
2538 | 372 | def test_nonrichroot_versioned_root_errors(self): | 372 | def test_nonrichroot_versioned_root_errors(self): |
2539 | 373 | old_inv = Inventory(None) | 373 | old_inv = Inventory(None) |
2540 | 374 | new_inv = Inventory(None) | 374 | new_inv = Inventory(None) |
2543 | 375 | root = new_inv.make_entry('directory', '', None, 'TREE_ROOT') | 375 | root = new_inv.make_entry('directory', u'', None, b'TREE_ROOT') |
2544 | 376 | root.revision = 'a@e\xc3\xa5ample.com--2004' | 376 | root.revision = b'a@e\xc3\xa5ample.com--2004' |
2545 | 377 | new_inv.add(root) | 377 | new_inv.add(root) |
2546 | 378 | delta = new_inv._make_delta(old_inv) | 378 | delta = new_inv._make_delta(old_inv) |
2547 | 379 | serializer = inventory_delta.InventoryDeltaSerializer( | 379 | serializer = inventory_delta.InventoryDeltaSerializer( |
2548 | 380 | versioned_root=False, tree_references=True) | 380 | versioned_root=False, tree_references=True) |
2549 | 381 | err = self.assertRaises(InventoryDeltaError, | 381 | err = self.assertRaises(InventoryDeltaError, |
2551 | 382 | serializer.delta_to_lines, NULL_REVISION, 'entry-version', delta) | 382 | serializer.delta_to_lines, NULL_REVISION, b'entry-version', delta) |
2552 | 383 | self.assertContainsRe( | 383 | self.assertContainsRe( |
2553 | 384 | str(err), "^Version present for / in b?'TREE_ROOT'") | 384 | str(err), "^Version present for / in b?'TREE_ROOT'") |
2554 | 385 | 385 | ||
2555 | 386 | def test_unknown_kind_errors(self): | 386 | def test_unknown_kind_errors(self): |
2556 | 387 | old_inv = Inventory(None) | 387 | old_inv = Inventory(None) |
2557 | 388 | new_inv = Inventory(None) | 388 | new_inv = Inventory(None) |
2560 | 389 | root = new_inv.make_entry('directory', '', None, 'my-rich-root-id') | 389 | root = new_inv.make_entry('directory', u'', None, b'my-rich-root-id') |
2561 | 390 | root.revision = 'changed' | 390 | root.revision = b'changed' |
2562 | 391 | new_inv.add(root) | 391 | new_inv.add(root) |
2563 | 392 | class StrangeInventoryEntry(inventory.InventoryEntry): | 392 | class StrangeInventoryEntry(inventory.InventoryEntry): |
2564 | 393 | kind = 'strange' | 393 | kind = 'strange' |
2567 | 394 | non_root = StrangeInventoryEntry('id', 'foo', root.file_id) | 394 | non_root = StrangeInventoryEntry('id', u'foo', root.file_id) |
2568 | 395 | non_root.revision = 'changed' | 395 | non_root.revision = b'changed' |
2569 | 396 | new_inv.add(non_root) | 396 | new_inv.add(non_root) |
2570 | 397 | delta = new_inv._make_delta(old_inv) | 397 | delta = new_inv._make_delta(old_inv) |
2571 | 398 | serializer = inventory_delta.InventoryDeltaSerializer( | 398 | serializer = inventory_delta.InventoryDeltaSerializer( |
2572 | @@ -400,19 +400,19 @@ | |||
2573 | 400 | # we expect keyerror because there is little value wrapping this. | 400 | # we expect keyerror because there is little value wrapping this. |
2574 | 401 | # This test aims to prove that it errors more than how it errors. | 401 | # This test aims to prove that it errors more than how it errors. |
2575 | 402 | err = self.assertRaises(KeyError, | 402 | err = self.assertRaises(KeyError, |
2577 | 403 | serializer.delta_to_lines, NULL_REVISION, 'entry-version', delta) | 403 | serializer.delta_to_lines, NULL_REVISION, b'entry-version', delta) |
2578 | 404 | self.assertEqual(('strange',), err.args) | 404 | self.assertEqual(('strange',), err.args) |
2579 | 405 | 405 | ||
2580 | 406 | def test_tree_reference_disabled(self): | 406 | def test_tree_reference_disabled(self): |
2581 | 407 | old_inv = Inventory(None) | 407 | old_inv = Inventory(None) |
2582 | 408 | new_inv = Inventory(None) | 408 | new_inv = Inventory(None) |
2585 | 409 | root = new_inv.make_entry('directory', '', None, 'TREE_ROOT') | 409 | root = new_inv.make_entry('directory', u'', None, b'TREE_ROOT') |
2586 | 410 | root.revision = 'a@e\xc3\xa5ample.com--2004' | 410 | root.revision = b'a@e\xc3\xa5ample.com--2004' |
2587 | 411 | new_inv.add(root) | 411 | new_inv.add(root) |
2588 | 412 | non_root = new_inv.make_entry( | 412 | non_root = new_inv.make_entry( |
2592 | 413 | 'tree-reference', 'foo', root.file_id, 'id') | 413 | 'tree-reference', u'foo', root.file_id, b'id') |
2593 | 414 | non_root.revision = 'changed' | 414 | non_root.revision = b'changed' |
2594 | 415 | non_root.reference_revision = 'subtree-version' | 415 | non_root.reference_revision = b'subtree-version' |
2595 | 416 | new_inv.add(non_root) | 416 | new_inv.add(non_root) |
2596 | 417 | delta = new_inv._make_delta(old_inv) | 417 | delta = new_inv._make_delta(old_inv) |
2597 | 418 | serializer = inventory_delta.InventoryDeltaSerializer( | 418 | serializer = inventory_delta.InventoryDeltaSerializer( |
2598 | @@ -420,59 +420,60 @@ | |||
2599 | 420 | # we expect keyerror because there is little value wrapping this. | 420 | # we expect keyerror because there is little value wrapping this. |
2600 | 421 | # This test aims to prove that it errors more than how it errors. | 421 | # This test aims to prove that it errors more than how it errors. |
2601 | 422 | err = self.assertRaises(KeyError, | 422 | err = self.assertRaises(KeyError, |
2603 | 423 | serializer.delta_to_lines, NULL_REVISION, 'entry-version', delta) | 423 | serializer.delta_to_lines, NULL_REVISION, b'entry-version', delta) |
2604 | 424 | self.assertEqual(('tree-reference',), err.args) | 424 | self.assertEqual(('tree-reference',), err.args) |
2605 | 425 | 425 | ||
2606 | 426 | def test_tree_reference_enabled(self): | 426 | def test_tree_reference_enabled(self): |
2607 | 427 | old_inv = Inventory(None) | 427 | old_inv = Inventory(None) |
2608 | 428 | new_inv = Inventory(None) | 428 | new_inv = Inventory(None) |
2611 | 429 | root = new_inv.make_entry('directory', '', None, 'TREE_ROOT') | 429 | root = new_inv.make_entry('directory', u'', None, b'TREE_ROOT') |
2612 | 430 | root.revision = 'a@e\xc3\xa5ample.com--2004' | 430 | root.revision = b'a@e\xc3\xa5ample.com--2004' |
2613 | 431 | new_inv.add(root) | 431 | new_inv.add(root) |
2614 | 432 | non_root = new_inv.make_entry( | 432 | non_root = new_inv.make_entry( |
2618 | 433 | 'tree-reference', 'foo', root.file_id, 'id') | 433 | 'tree-reference', u'foo', root.file_id, b'id') |
2619 | 434 | non_root.revision = 'changed' | 434 | non_root.revision = b'changed' |
2620 | 435 | non_root.reference_revision = 'subtree-version' | 435 | non_root.reference_revision = b'subtree-version' |
2621 | 436 | new_inv.add(non_root) | 436 | new_inv.add(non_root) |
2622 | 437 | delta = new_inv._make_delta(old_inv) | 437 | delta = new_inv._make_delta(old_inv) |
2623 | 438 | serializer = inventory_delta.InventoryDeltaSerializer( | 438 | serializer = inventory_delta.InventoryDeltaSerializer( |
2624 | 439 | versioned_root=True, tree_references=True) | 439 | versioned_root=True, tree_references=True) |
2625 | 440 | self.assertEqual(BytesIO(reference_lines).readlines(), | 440 | self.assertEqual(BytesIO(reference_lines).readlines(), |
2627 | 441 | serializer.delta_to_lines(NULL_REVISION, 'entry-version', delta)) | 441 | serializer.delta_to_lines(NULL_REVISION, b'entry-version', delta)) |
2628 | 442 | 442 | ||
2629 | 443 | def test_to_inventory_root_id_versioned_not_permitted(self): | 443 | def test_to_inventory_root_id_versioned_not_permitted(self): |
2633 | 444 | root_entry = inventory.make_entry('directory', '', None, 'TREE_ROOT') | 444 | root_entry = inventory.make_entry('directory', u'', None, b'TREE_ROOT') |
2634 | 445 | root_entry.revision = 'some-version' | 445 | root_entry.revision = b'some-version' |
2635 | 446 | delta = [(None, '', 'TREE_ROOT', root_entry)] | 446 | delta = [(None, u'', b'TREE_ROOT', root_entry)] |
2636 | 447 | serializer = inventory_delta.InventoryDeltaSerializer( | 447 | serializer = inventory_delta.InventoryDeltaSerializer( |
2637 | 448 | versioned_root=False, tree_references=True) | 448 | versioned_root=False, tree_references=True) |
2638 | 449 | self.assertRaises( | 449 | self.assertRaises( |
2641 | 450 | InventoryDeltaError, serializer.delta_to_lines, 'old-version', | 450 | InventoryDeltaError, serializer.delta_to_lines, b'old-version', |
2642 | 451 | 'new-version', delta) | 451 | b'new-version', delta) |
2643 | 452 | 452 | ||
2644 | 453 | def test_to_inventory_root_id_not_versioned(self): | 453 | def test_to_inventory_root_id_not_versioned(self): |
2647 | 454 | delta = [(None, '', 'an-id', inventory.make_entry( | 454 | delta = [(None, u'', b'an-id', inventory.make_entry( |
2648 | 455 | 'directory', '', None, 'an-id'))] | 455 | 'directory', u'', None, b'an-id'))] |
2649 | 456 | serializer = inventory_delta.InventoryDeltaSerializer( | 456 | serializer = inventory_delta.InventoryDeltaSerializer( |
2650 | 457 | versioned_root=True, tree_references=True) | 457 | versioned_root=True, tree_references=True) |
2651 | 458 | self.assertRaises( | 458 | self.assertRaises( |
2654 | 459 | InventoryDeltaError, serializer.delta_to_lines, 'old-version', | 459 | InventoryDeltaError, serializer.delta_to_lines, b'old-version', |
2655 | 460 | 'new-version', delta) | 460 | b'new-version', delta) |
2656 | 461 | 461 | ||
2657 | 462 | def test_to_inventory_has_tree_not_meant_to(self): | 462 | def test_to_inventory_has_tree_not_meant_to(self): |
2658 | 463 | make_entry = inventory.make_entry | 463 | make_entry = inventory.make_entry |
2661 | 464 | tree_ref = make_entry('tree-reference', 'foo', 'changed-in', 'ref-id') | 464 | tree_ref = make_entry( |
2662 | 465 | tree_ref.reference_revision = 'ref-revision' | 465 | 'tree-reference', u'foo', b'changed-in', b'ref-id') |
2663 | 466 | tree_ref.reference_revision = b'ref-revision' | ||
2664 | 466 | delta = [ | 467 | delta = [ |
2668 | 467 | (None, '', 'an-id', | 468 | (None, u'', b'an-id', |
2669 | 468 | make_entry('directory', '', 'changed-in', 'an-id')), | 469 | make_entry('directory', u'', b'changed-in', b'an-id')), |
2670 | 469 | (None, 'foo', 'ref-id', tree_ref) | 470 | (None, u'foo', b'ref-id', tree_ref) |
2671 | 470 | # a file that followed the root move | 471 | # a file that followed the root move |
2672 | 471 | ] | 472 | ] |
2673 | 472 | serializer = inventory_delta.InventoryDeltaSerializer( | 473 | serializer = inventory_delta.InventoryDeltaSerializer( |
2674 | 473 | versioned_root=True, tree_references=True) | 474 | versioned_root=True, tree_references=True) |
2675 | 474 | self.assertRaises(InventoryDeltaError, serializer.delta_to_lines, | 475 | self.assertRaises(InventoryDeltaError, serializer.delta_to_lines, |
2677 | 475 | 'old-version', 'new-version', delta) | 476 | b'old-version', b'new-version', delta) |
2678 | 476 | 477 | ||
2679 | 477 | def test_to_inventory_torture(self): | 478 | def test_to_inventory_torture(self): |
2680 | 478 | def make_entry(kind, name, parent_id, file_id, **attrs): | 479 | def make_entry(kind, name, parent_id, file_id, **attrs): |
2681 | @@ -488,43 +489,43 @@ | |||
2682 | 488 | # - files with and without exec bit | 489 | # - files with and without exec bit |
2683 | 489 | delta = [ | 490 | delta = [ |
2684 | 490 | # new root: | 491 | # new root: |
2688 | 491 | (None, '', 'new-root-id', | 492 | (None, u'', b'new-root-id', |
2689 | 492 | make_entry('directory', '', None, 'new-root-id', | 493 | make_entry('directory', u'', None, b'new-root-id', |
2690 | 493 | revision='changed-in')), | 494 | revision=b'changed-in')), |
2691 | 494 | # an old root: | 495 | # an old root: |
2695 | 495 | ('', 'old-root', 'TREE_ROOT', | 496 | (u'', u'old-root', b'TREE_ROOT', |
2696 | 496 | make_entry('directory', 'subdir-now', 'new-root-id', | 497 | make_entry('directory', u'subdir-now', b'new-root-id', |
2697 | 497 | 'TREE_ROOT', revision='moved-root')), | 498 | b'TREE_ROOT', revision=b'moved-root')), |
2698 | 498 | # a file that followed the root move | 499 | # a file that followed the root move |
2703 | 499 | ('under-old-root', 'old-root/under-old-root', 'moved-id', | 500 | (u'under-old-root', u'old-root/under-old-root', b'moved-id', |
2704 | 500 | make_entry('file', 'under-old-root', 'TREE_ROOT', 'moved-id', | 501 | make_entry('file', u'under-old-root', b'TREE_ROOT', |
2705 | 501 | revision='old-rev', executable=False, text_size=30, | 502 | b'moved-id', revision=b'old-rev', executable=False, |
2706 | 502 | text_sha1='some-sha')), | 503 | text_size=30, text_sha1=b'some-sha')), |
2707 | 503 | # a deleted path | 504 | # a deleted path |
2709 | 504 | ('old-file', None, 'deleted-id', None), | 505 | (u'old-file', None, b'deleted-id', None), |
2710 | 505 | # a tree reference moved to the new root | 506 | # a tree reference moved to the new root |
2715 | 506 | ('ref', 'ref', 'ref-id', | 507 | (u'ref', u'ref', b'ref-id', |
2716 | 507 | make_entry('tree-reference', 'ref', 'new-root-id', 'ref-id', | 508 | make_entry('tree-reference', u'ref', b'new-root-id', b'ref-id', |
2717 | 508 | reference_revision='tree-reference-id', | 509 | reference_revision=b'tree-reference-id', |
2718 | 509 | revision='new-rev')), | 510 | revision=b'new-rev')), |
2719 | 510 | # a symlink now in a deep dir | 511 | # a symlink now in a deep dir |
2723 | 511 | ('dir/link', 'old-root/dir/link', 'link-id', | 512 | (u'dir/link', u'old-root/dir/link', b'link-id', |
2724 | 512 | make_entry('symlink', 'link', 'deep-id', 'link-id', | 513 | make_entry('symlink', u'link', b'deep-id', b'link-id', |
2725 | 513 | symlink_target='target', revision='new-rev')), | 514 | symlink_target=u'target', revision=b'new-rev')), |
2726 | 514 | # a deep dir | 515 | # a deep dir |
2730 | 515 | ('dir', 'old-root/dir', 'deep-id', | 516 | (u'dir', u'old-root/dir', b'deep-id', |
2731 | 516 | make_entry('directory', 'dir', 'TREE_ROOT', 'deep-id', | 517 | make_entry('directory', u'dir', b'TREE_ROOT', b'deep-id', |
2732 | 517 | revision='new-rev')), | 518 | revision=b'new-rev')), |
2733 | 518 | # a file with an exec bit set | 519 | # a file with an exec bit set |
2738 | 519 | (None, 'configure', 'exec-id', | 520 | (None, u'configure', b'exec-id', |
2739 | 520 | make_entry('file', 'configure', 'new-root-id', 'exec-id', | 521 | make_entry('file', u'configure', b'new-root-id', b'exec-id', |
2740 | 521 | executable=True, text_size=30, text_sha1='some-sha', | 522 | executable=True, text_size=30, text_sha1=b'some-sha', |
2741 | 522 | revision='old-rev')), | 523 | revision=b'old-rev')), |
2742 | 523 | ] | 524 | ] |
2743 | 524 | serializer = inventory_delta.InventoryDeltaSerializer( | 525 | serializer = inventory_delta.InventoryDeltaSerializer( |
2744 | 525 | versioned_root=True, tree_references=True) | 526 | versioned_root=True, tree_references=True) |
2747 | 526 | lines = serializer.delta_to_lines(NULL_REVISION, 'something', delta) | 527 | lines = serializer.delta_to_lines(NULL_REVISION, b'something', delta) |
2748 | 527 | expected = """format: bzr inventory delta v1 (bzr 1.14) | 528 | expected = b"""format: bzr inventory delta v1 (bzr 1.14) |
2749 | 528 | parent: null: | 529 | parent: null: |
2750 | 529 | version: something | 530 | version: something |
2751 | 530 | versioned_root: true | 531 | versioned_root: true |
2752 | @@ -538,8 +539,8 @@ | |||
2753 | 538 | None\x00/\x00new-root-id\x00\x00changed-in\x00dir | 539 | None\x00/\x00new-root-id\x00\x00changed-in\x00dir |
2754 | 539 | None\x00/configure\x00exec-id\x00new-root-id\x00old-rev\x00file\x0030\x00Y\x00some-sha | 540 | None\x00/configure\x00exec-id\x00new-root-id\x00old-rev\x00file\x0030\x00Y\x00some-sha |
2755 | 540 | """ | 541 | """ |
2758 | 541 | serialized = ''.join(lines) | 542 | serialized = b''.join(lines) |
2759 | 542 | self.assertIsInstance(serialized, str) | 543 | self.assertIsInstance(serialized, bytes) |
2760 | 543 | self.assertEqual(expected, serialized) | 544 | self.assertEqual(expected, serialized) |
2761 | 544 | 545 | ||
2762 | 545 | 546 | ||
2763 | @@ -547,79 +548,79 @@ | |||
2764 | 547 | """Test serialization of the content part of a line.""" | 548 | """Test serialization of the content part of a line.""" |
2765 | 548 | 549 | ||
2766 | 549 | def test_dir(self): | 550 | def test_dir(self): |
2769 | 550 | entry = inventory.make_entry('directory', 'a dir', None) | 551 | entry = inventory.make_entry('directory', u'a dir', None) |
2770 | 551 | self.assertEqual('dir', inventory_delta._directory_content(entry)) | 552 | self.assertEqual(b'dir', inventory_delta._directory_content(entry)) |
2771 | 552 | 553 | ||
2772 | 553 | def test_file_0_short_sha(self): | 554 | def test_file_0_short_sha(self): |
2775 | 554 | file_entry = inventory.make_entry('file', 'a file', None, 'file-id') | 555 | file_entry = inventory.make_entry('file', u'a file', None, b'file-id') |
2776 | 555 | file_entry.text_sha1 = '' | 556 | file_entry.text_sha1 = b'' |
2777 | 556 | file_entry.text_size = 0 | 557 | file_entry.text_size = 0 |
2779 | 557 | self.assertEqual('file\x000\x00\x00', | 558 | self.assertEqual(b'file\x000\x00\x00', |
2780 | 558 | inventory_delta._file_content(file_entry)) | 559 | inventory_delta._file_content(file_entry)) |
2781 | 559 | 560 | ||
2782 | 560 | def test_file_10_foo(self): | 561 | def test_file_10_foo(self): |
2785 | 561 | file_entry = inventory.make_entry('file', 'a file', None, 'file-id') | 562 | file_entry = inventory.make_entry('file', u'a file', None, b'file-id') |
2786 | 562 | file_entry.text_sha1 = 'foo' | 563 | file_entry.text_sha1 = b'foo' |
2787 | 563 | file_entry.text_size = 10 | 564 | file_entry.text_size = 10 |
2789 | 564 | self.assertEqual('file\x0010\x00\x00foo', | 565 | self.assertEqual(b'file\x0010\x00\x00foo', |
2790 | 565 | inventory_delta._file_content(file_entry)) | 566 | inventory_delta._file_content(file_entry)) |
2791 | 566 | 567 | ||
2792 | 567 | def test_file_executable(self): | 568 | def test_file_executable(self): |
2794 | 568 | file_entry = inventory.make_entry('file', 'a file', None, 'file-id') | 569 | file_entry = inventory.make_entry('file', u'a file', None, b'file-id') |
2795 | 569 | file_entry.executable = True | 570 | file_entry.executable = True |
2797 | 570 | file_entry.text_sha1 = 'foo' | 571 | file_entry.text_sha1 = b'foo' |
2798 | 571 | file_entry.text_size = 10 | 572 | file_entry.text_size = 10 |
2800 | 572 | self.assertEqual('file\x0010\x00Y\x00foo', | 573 | self.assertEqual(b'file\x0010\x00Y\x00foo', |
2801 | 573 | inventory_delta._file_content(file_entry)) | 574 | inventory_delta._file_content(file_entry)) |
2802 | 574 | 575 | ||
2803 | 575 | def test_file_without_size(self): | 576 | def test_file_without_size(self): |
2806 | 576 | file_entry = inventory.make_entry('file', 'a file', None, 'file-id') | 577 | file_entry = inventory.make_entry('file', u'a file', None, b'file-id') |
2807 | 577 | file_entry.text_sha1 = 'foo' | 578 | file_entry.text_sha1 = b'foo' |
2808 | 578 | self.assertRaises(InventoryDeltaError, | 579 | self.assertRaises(InventoryDeltaError, |
2809 | 579 | inventory_delta._file_content, file_entry) | 580 | inventory_delta._file_content, file_entry) |
2810 | 580 | 581 | ||
2811 | 581 | def test_file_without_sha1(self): | 582 | def test_file_without_sha1(self): |
2813 | 582 | file_entry = inventory.make_entry('file', 'a file', None, 'file-id') | 583 | file_entry = inventory.make_entry('file', u'a file', None, b'file-id') |
2814 | 583 | file_entry.text_size = 10 | 584 | file_entry.text_size = 10 |
2815 | 584 | self.assertRaises(InventoryDeltaError, | 585 | self.assertRaises(InventoryDeltaError, |
2816 | 585 | inventory_delta._file_content, file_entry) | 586 | inventory_delta._file_content, file_entry) |
2817 | 586 | 587 | ||
2818 | 587 | def test_link_empty_target(self): | 588 | def test_link_empty_target(self): |
2822 | 588 | entry = inventory.make_entry('symlink', 'a link', None) | 589 | entry = inventory.make_entry('symlink', u'a link', None) |
2823 | 589 | entry.symlink_target = '' | 590 | entry.symlink_target = u'' |
2824 | 590 | self.assertEqual('link\x00', | 591 | self.assertEqual(b'link\x00', |
2825 | 591 | inventory_delta._link_content(entry)) | 592 | inventory_delta._link_content(entry)) |
2826 | 592 | 593 | ||
2827 | 593 | def test_link_unicode_target(self): | 594 | def test_link_unicode_target(self): |
2831 | 594 | entry = inventory.make_entry('symlink', 'a link', None) | 595 | entry = inventory.make_entry('symlink', u'a link', None) |
2832 | 595 | entry.symlink_target = ' \xc3\xa5'.decode('utf8') | 596 | entry.symlink_target = b' \xc3\xa5'.decode('utf8') |
2833 | 596 | self.assertEqual('link\x00 \xc3\xa5', | 597 | self.assertEqual(b'link\x00 \xc3\xa5', |
2834 | 597 | inventory_delta._link_content(entry)) | 598 | inventory_delta._link_content(entry)) |
2835 | 598 | 599 | ||
2836 | 599 | def test_link_space_target(self): | 600 | def test_link_space_target(self): |
2840 | 600 | entry = inventory.make_entry('symlink', 'a link', None) | 601 | entry = inventory.make_entry('symlink', u'a link', None) |
2841 | 601 | entry.symlink_target = ' ' | 602 | entry.symlink_target = u' ' |
2842 | 602 | self.assertEqual('link\x00 ', | 603 | self.assertEqual(b'link\x00 ', |
2843 | 603 | inventory_delta._link_content(entry)) | 604 | inventory_delta._link_content(entry)) |
2844 | 604 | 605 | ||
2845 | 605 | def test_link_no_target(self): | 606 | def test_link_no_target(self): |
2847 | 606 | entry = inventory.make_entry('symlink', 'a link', None) | 607 | entry = inventory.make_entry('symlink', u'a link', None) |
2848 | 607 | self.assertRaises(InventoryDeltaError, | 608 | self.assertRaises(InventoryDeltaError, |
2849 | 608 | inventory_delta._link_content, entry) | 609 | inventory_delta._link_content, entry) |
2850 | 609 | 610 | ||
2851 | 610 | def test_reference_null(self): | 611 | def test_reference_null(self): |
2853 | 611 | entry = inventory.make_entry('tree-reference', 'a tree', None) | 612 | entry = inventory.make_entry('tree-reference', u'a tree', None) |
2854 | 612 | entry.reference_revision = NULL_REVISION | 613 | entry.reference_revision = NULL_REVISION |
2856 | 613 | self.assertEqual('tree\x00null:', | 614 | self.assertEqual(b'tree\x00null:', |
2857 | 614 | inventory_delta._reference_content(entry)) | 615 | inventory_delta._reference_content(entry)) |
2858 | 615 | 616 | ||
2859 | 616 | def test_reference_revision(self): | 617 | def test_reference_revision(self): |
2863 | 617 | entry = inventory.make_entry('tree-reference', 'a tree', None) | 618 | entry = inventory.make_entry('tree-reference', u'a tree', None) |
2864 | 618 | entry.reference_revision = 'foo@\xc3\xa5b-lah' | 619 | entry.reference_revision = b'foo@\xc3\xa5b-lah' |
2865 | 619 | self.assertEqual('tree\x00foo@\xc3\xa5b-lah', | 620 | self.assertEqual(b'tree\x00foo@\xc3\xa5b-lah', |
2866 | 620 | inventory_delta._reference_content(entry)) | 621 | inventory_delta._reference_content(entry)) |
2867 | 621 | 622 | ||
2868 | 622 | def test_reference_no_reference(self): | 623 | def test_reference_no_reference(self): |
2870 | 623 | entry = inventory.make_entry('tree-reference', 'a tree', None) | 624 | entry = inventory.make_entry('tree-reference', u'a tree', None) |
2871 | 624 | self.assertRaises(InventoryDeltaError, | 625 | self.assertRaises(InventoryDeltaError, |
2872 | 625 | inventory_delta._reference_content, entry) | 626 | inventory_delta._reference_content, entry) |
2873 | 626 | 627 | ||
2874 | === modified file 'breezy/transport/memory.py' | |||
2875 | --- breezy/transport/memory.py 2017-05-24 19:44:00 +0000 | |||
2876 | +++ breezy/transport/memory.py 2017-06-11 01:47:17 +0000 | |||
2877 | @@ -164,7 +164,7 @@ | |||
2878 | 164 | 164 | ||
2879 | 165 | def open_write_stream(self, relpath, mode=None): | 165 | def open_write_stream(self, relpath, mode=None): |
2880 | 166 | """See Transport.open_write_stream.""" | 166 | """See Transport.open_write_stream.""" |
2882 | 167 | self.put_bytes(relpath, "", mode) | 167 | self.put_bytes(relpath, b"", mode) |
2883 | 168 | result = AppendBasedFileStream(self, relpath) | 168 | result = AppendBasedFileStream(self, relpath) |
2884 | 169 | _file_streams[self.abspath(relpath)] = result | 169 | _file_streams[self.abspath(relpath)] = result |
2885 | 170 | return result | 170 | return result |
2886 | 171 | 171 | ||
2887 | === modified file 'breezy/versionedfile.py' | |||
2888 | --- breezy/versionedfile.py 2017-06-05 20:48:31 +0000 | |||
2889 | +++ breezy/versionedfile.py 2017-06-11 01:47:17 +0000 | |||
2890 | @@ -120,7 +120,7 @@ | |||
2891 | 120 | if storage_kind == 'chunked': | 120 | if storage_kind == 'chunked': |
2892 | 121 | return self._chunks | 121 | return self._chunks |
2893 | 122 | elif storage_kind == 'fulltext': | 122 | elif storage_kind == 'fulltext': |
2895 | 123 | return ''.join(self._chunks) | 123 | return b''.join(self._chunks) |
2896 | 124 | raise errors.UnavailableRepresentation(self.key, storage_kind, | 124 | raise errors.UnavailableRepresentation(self.key, storage_kind, |
2897 | 125 | self.storage_kind) | 125 | self.storage_kind) |
2898 | 126 | 126 | ||
2899 | @@ -1079,13 +1079,13 @@ | |||
2900 | 1079 | def _check_lines_not_unicode(self, lines): | 1079 | def _check_lines_not_unicode(self, lines): |
2901 | 1080 | """Check that lines being added to a versioned file are not unicode.""" | 1080 | """Check that lines being added to a versioned file are not unicode.""" |
2902 | 1081 | for line in lines: | 1081 | for line in lines: |
2904 | 1082 | if line.__class__ is not str: | 1082 | if line.__class__ is not bytes: |
2905 | 1083 | raise errors.BzrBadParameterUnicode("lines") | 1083 | raise errors.BzrBadParameterUnicode("lines") |
2906 | 1084 | 1084 | ||
2907 | 1085 | def _check_lines_are_lines(self, lines): | 1085 | def _check_lines_are_lines(self, lines): |
2908 | 1086 | """Check that the lines really are full lines without inline EOL.""" | 1086 | """Check that the lines really are full lines without inline EOL.""" |
2909 | 1087 | for line in lines: | 1087 | for line in lines: |
2911 | 1088 | if '\n' in line[:-1]: | 1088 | if b'\n' in line[:-1]: |
2912 | 1089 | raise errors.BzrBadParameterContainsNewline("lines") | 1089 | raise errors.BzrBadParameterContainsNewline("lines") |
2913 | 1090 | 1090 | ||
2914 | 1091 | def get_known_graph_ancestry(self, keys): | 1091 | def get_known_graph_ancestry(self, keys): |
2915 | @@ -1792,7 +1792,7 @@ | |||
2916 | 1792 | "nostore_sha behaviour.") | 1792 | "nostore_sha behaviour.") |
2917 | 1793 | if key[-1] is None: | 1793 | if key[-1] is None: |
2918 | 1794 | sha1 = osutils.sha_strings(lines) | 1794 | sha1 = osutils.sha_strings(lines) |
2920 | 1795 | key = ("sha1:" + sha1,) | 1795 | key = (b"sha1:" + sha1,) |
2921 | 1796 | else: | 1796 | else: |
2922 | 1797 | sha1 = None | 1797 | sha1 = None |
2923 | 1798 | if key in self._store.get_parent_map([key]): | 1798 | if key in self._store.get_parent_map([key]): |
2924 | @@ -1816,7 +1816,7 @@ | |||
2925 | 1816 | :param network_bytes: The bytes of a record. | 1816 | :param network_bytes: The bytes of a record. |
2926 | 1817 | :return: A tuple (storage_kind, offset_of_remaining_bytes) | 1817 | :return: A tuple (storage_kind, offset_of_remaining_bytes) |
2927 | 1818 | """ | 1818 | """ |
2929 | 1819 | line_end = network_bytes.find('\n') | 1819 | line_end = network_bytes.find(b'\n') |
2930 | 1820 | storage_kind = network_bytes[:line_end] | 1820 | storage_kind = network_bytes[:line_end] |
2931 | 1821 | return storage_kind, line_end + 1 | 1821 | return storage_kind, line_end + 1 |
2932 | 1822 | 1822 | ||
2933 | @@ -1859,7 +1859,7 @@ | |||
2934 | 1859 | meta_len, = struct.unpack('!L', bytes[line_end:line_end+4]) | 1859 | meta_len, = struct.unpack('!L', bytes[line_end:line_end+4]) |
2935 | 1860 | record_meta = bytes[line_end+4:line_end+4+meta_len] | 1860 | record_meta = bytes[line_end+4:line_end+4+meta_len] |
2936 | 1861 | key, parents = bencode.bdecode_as_tuple(record_meta) | 1861 | key, parents = bencode.bdecode_as_tuple(record_meta) |
2938 | 1862 | if parents == 'nil': | 1862 | if parents == b'nil': |
2939 | 1863 | parents = None | 1863 | parents = None |
2940 | 1864 | fulltext = bytes[line_end+4+meta_len:] | 1864 | fulltext = bytes[line_end+4+meta_len:] |
2941 | 1865 | return [FulltextContentFactory(key, parents, None, fulltext)] | 1865 | return [FulltextContentFactory(key, parents, None, fulltext)] |
2942 | @@ -1871,12 +1871,12 @@ | |||
2943 | 1871 | 1871 | ||
2944 | 1872 | def record_to_fulltext_bytes(record): | 1872 | def record_to_fulltext_bytes(record): |
2945 | 1873 | if record.parents is None: | 1873 | if record.parents is None: |
2947 | 1874 | parents = 'nil' | 1874 | parents = b'nil' |
2948 | 1875 | else: | 1875 | else: |
2949 | 1876 | parents = record.parents | 1876 | parents = record.parents |
2950 | 1877 | record_meta = bencode.bencode((record.key, parents)) | 1877 | record_meta = bencode.bencode((record.key, parents)) |
2951 | 1878 | record_content = record.get_bytes_as('fulltext') | 1878 | record_content = record.get_bytes_as('fulltext') |
2953 | 1879 | return "fulltext\n%s%s%s" % ( | 1879 | return b"fulltext\n%s%s%s" % ( |
2954 | 1880 | _length_prefix(record_meta), record_meta, record_content) | 1880 | _length_prefix(record_meta), record_meta, record_content) |
2955 | 1881 | 1881 | ||
2956 | 1882 | 1882 | ||
2957 | @@ -1893,8 +1893,8 @@ | |||
2958 | 1893 | per_prefix_map = {} | 1893 | per_prefix_map = {} |
2959 | 1894 | for item in viewitems(parent_map): | 1894 | for item in viewitems(parent_map): |
2960 | 1895 | key = item[0] | 1895 | key = item[0] |
2963 | 1896 | if isinstance(key, str) or len(key) == 1: | 1896 | if isinstance(key, bytes) or len(key) == 1: |
2964 | 1897 | prefix = '' | 1897 | prefix = b'' |
2965 | 1898 | else: | 1898 | else: |
2966 | 1899 | prefix = key[0] | 1899 | prefix = key[0] |
2967 | 1900 | try: | 1900 | try: |
2968 | 1901 | 1901 | ||
2969 | === modified file 'breezy/xml_serializer.py' | |||
2970 | --- breezy/xml_serializer.py 2017-05-25 01:35:55 +0000 | |||
2971 | +++ breezy/xml_serializer.py 2017-06-11 01:47:17 +0000 | |||
2972 | @@ -37,9 +37,9 @@ | |||
2973 | 37 | except ImportError: | 37 | except ImportError: |
2974 | 38 | from xml.parsers.expat import ExpatError as ParseError | 38 | from xml.parsers.expat import ExpatError as ParseError |
2975 | 39 | 39 | ||
2977 | 40 | (ElementTree, SubElement, Element, XMLTreeBuilder, fromstring, tostring) = ( | 40 | (ElementTree, SubElement, Element, fromstring, tostring) = ( |
2978 | 41 | elementtree.ElementTree, elementtree.SubElement, elementtree.Element, | 41 | elementtree.ElementTree, elementtree.SubElement, elementtree.Element, |
2980 | 42 | elementtree.XMLTreeBuilder, elementtree.fromstring, elementtree.tostring) | 42 | elementtree.fromstring, elementtree.tostring) |
2981 | 43 | 43 | ||
2982 | 44 | 44 | ||
2983 | 45 | from . import ( | 45 | from . import ( |
Running landing tests failed 10.242. 247.184: 8080/job/ brz-dev/ 119/
http://