Merge lp:~gz/brz/py3_bootstrap2 into lp:brz

Proposed by Martin Packman
Status: Merged
Approved by: Martin Packman
Approved revision: no longer in the source branch.
Merge reported by: The Breezy Bot
Merged at revision: not available
Proposed branch: lp:~gz/brz/py3_bootstrap2
Merge into: lp:brz
Diff against target: 2983 lines (+650/-616)
27 files modified
breezy/_chk_map_py.py (+22/-21)
breezy/_chunks_to_lines_py.py (+3/-3)
breezy/_dirstate_helpers_py.py (+2/-2)
breezy/_groupcompress_py.py (+10/-10)
breezy/bzrworkingtree.py (+5/-5)
breezy/chk_map.py (+38/-38)
breezy/commit.py (+2/-2)
breezy/config.py (+13/-7)
breezy/dirstate.py (+5/-5)
breezy/groupcompress.py (+54/-52)
breezy/index.py (+2/-2)
breezy/inventory.py (+59/-56)
breezy/inventory_delta.py (+52/-52)
breezy/osutils.py (+11/-11)
breezy/pack.py (+7/-7)
breezy/repofmt/groupcompress_repo.py (+7/-7)
breezy/repofmt/pack_repo.py (+3/-3)
breezy/repository.py (+2/-1)
breezy/revision.py (+10/-5)
breezy/sixish.py (+10/-0)
breezy/tests/test__chk_map.py (+96/-94)
breezy/tests/test__chunks_to_lines.py (+40/-38)
breezy/tests/test_inv.py (+37/-36)
breezy/tests/test_inventory_delta.py (+147/-146)
breezy/transport/memory.py (+1/-1)
breezy/versionedfile.py (+10/-10)
breezy/xml_serializer.py (+2/-2)
To merge this branch: bzr merge lp:~gz/brz/py3_bootstrap2
Reviewer: Jelmer Vernooij
Review status: Approve
Review via email: mp+325452@code.launchpad.net

Commit message

More progress towards Python 3 support

Description of the change

Another somewhat large branch, mostly sorting out bytes versus unicode string semantics across a range of modules.
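
The same pattern repeats throughout the diff; as a rough sketch of the Python 3 bytes semantics being accommodated (illustrative example only, not code from the branch):

    # Indexing bytes yields an int on Python 3, so single-byte checks
    # become one-byte slices (compare chunk[-1:] != b'\n' in
    # _chunks_to_lines_py.py).
    data = b'chkleaf:\n'
    assert data[0] == 99      # int on Python 3, str on Python 2
    assert data[0:1] == b'c'  # bytes on both

    # Literals passed to join() and split() must be bytes as well.
    assert b'\n'.join(data.split(b'\n')) == data

    # printf-style formatting of bytes (PEP 461, Python 3.5 and later)
    # keeps the serialisers byte-oriented.
    assert b'%d\n' % 42 == b'42\n'

    # Byte strings built one value at a time use bytearray instead of
    # chr() plus ''.join(), as in the new encode_base128_int.
    buf = bytearray()
    buf.append((1 | 0x80) & 0xFF)
    assert bytes(buf) == b'\x81'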

Also includes some test fixes so that more of the suite passes under Python 3, in particular bt.test_inventory_delta and the start of bt.test_inv.
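
Those modules should be runnable on their own with the usual selftest selector, e.g.:

    brz selftest -s bt.test_inventory_delta -s bt.test_inv

(bt. being the standard alias for breezy.tests., as with bzr selftest.)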

Revision history for this message
Jelmer Vernooij (jelmer):
review: Approve
Revision history for this message
The Breezy Bot (the-breezy-bot) wrote :

Running landing tests failed
http://10.242.247.184:8080/job/brz-dev/119/

Preview Diff

1=== modified file 'breezy/_chk_map_py.py'
2--- breezy/_chk_map_py.py 2017-05-22 00:56:52 +0000
3+++ breezy/_chk_map_py.py 2017-06-11 01:47:17 +0000
4@@ -21,6 +21,7 @@
5 import zlib
6 import struct
7
8+from .sixish import bytesintern
9 from .static_tuple import StaticTuple
10
11 _LeafNode = None
12@@ -44,7 +45,7 @@
13
14 def _search_key_16(key):
15 """Map the key tuple into a search key string which has 16-way fan out."""
16- return '\x00'.join(['%08X' % _crc32(bit) for bit in key])
17+ return b'\x00'.join([b'%08X' % _crc32(bit) for bit in key])
18
19
20 def _search_key_255(key):
21@@ -53,11 +54,11 @@
22 We use 255-way because '\n' is used as a delimiter, and causes problems
23 while parsing.
24 """
25- bytes = '\x00'.join([struct.pack('>L', _crc32(bit)) for bit in key])
26- return bytes.replace('\n', '_')
27-
28-
29-def _deserialise_leaf_node(bytes, key, search_key_func=None):
30+ data = b'\x00'.join([struct.pack('>L', _crc32(bit)) for bit in key])
31+ return data.replace(b'\n', b'_')
32+
33+
34+def _deserialise_leaf_node(data, key, search_key_func=None):
35 """Deserialise bytes, with key key, into a LeafNode.
36
37 :param bytes: The bytes of the node.
38@@ -72,13 +73,13 @@
39 result = _LeafNode(search_key_func=search_key_func)
40 # Splitlines can split on '\r' so don't use it, split('\n') adds an
41 # extra '' if the bytes ends in a final newline.
42- lines = bytes.split('\n')
43+ lines = data.split(b'\n')
44 trailing = lines.pop()
45- if trailing != '':
46+ if trailing != b'':
47 raise AssertionError('We did not have a final newline for %s'
48 % (key,))
49 items = {}
50- if lines[0] != 'chkleaf:':
51+ if lines[0] != b'chkleaf:':
52 raise ValueError("not a serialised leaf node: %r" % bytes)
53 maximum_size = int(lines[1])
54 width = int(lines[2])
55@@ -87,7 +88,7 @@
56 pos = 5
57 while pos < len(lines):
58 line = prefix + lines[pos]
59- elements = line.split('\x00')
60+ elements = line.split(b'\x00')
61 pos += 1
62 if len(elements) != width + 1:
63 raise AssertionError(
64@@ -96,7 +97,7 @@
65 num_value_lines = int(elements[-1])
66 value_lines = lines[pos:pos+num_value_lines]
67 pos += num_value_lines
68- value = '\n'.join(value_lines)
69+ value = b'\n'.join(value_lines)
70 items[StaticTuple.from_sequence(elements[:-1])] = value
71 if len(items) != length:
72 raise AssertionError("item count (%d) mismatch for key %s,"
73@@ -115,12 +116,12 @@
74 else:
75 result._search_prefix = _unknown
76 result._common_serialised_prefix = prefix
77- if len(bytes) != result._current_size():
78+ if len(data) != result._current_size():
79 raise AssertionError('_current_size computed incorrectly')
80 return result
81
82
83-def _deserialise_internal_node(bytes, key, search_key_func=None):
84+def _deserialise_internal_node(data, key, search_key_func=None):
85 global _unknown, _LeafNode, _InternalNode
86 if _InternalNode is None:
87 from breezy import chk_map
88@@ -131,12 +132,12 @@
89 # Splitlines can split on '\r' so don't use it, remove the extra ''
90 # from the result of split('\n') because we should have a trailing
91 # newline
92- lines = bytes.split('\n')
93- if lines[-1] != '':
94+ lines = data.split(b'\n')
95+ if lines[-1] != b'':
96 raise ValueError("last line must be ''")
97 lines.pop(-1)
98 items = {}
99- if lines[0] != 'chknode:':
100+ if lines[0] != b'chknode:':
101 raise ValueError("not a serialised internal node: %r" % bytes)
102 maximum_size = int(lines[1])
103 width = int(lines[2])
104@@ -144,7 +145,7 @@
105 common_prefix = lines[4]
106 for line in lines[5:]:
107 line = common_prefix + line
108- prefix, flat_key = line.rsplit('\x00', 1)
109+ prefix, flat_key = line.rsplit(b'\x00', 1)
110 items[prefix] = StaticTuple(flat_key,)
111 if len(items) == 0:
112 raise AssertionError("We didn't find any item for %s" % key)
113@@ -161,9 +162,9 @@
114 return result
115
116
117-def _bytes_to_text_key(bytes):
118+def _bytes_to_text_key(data):
119 """Take a CHKInventory value string and return a (file_id, rev_id) tuple"""
120- sections = bytes.split('\n')
121- kind, file_id = sections[0].split(': ')
122- return (intern(file_id), intern(sections[3]))
123+ sections = data.split(b'\n')
124+ kind, file_id = sections[0].split(b': ')
125+ return (bytesintern(file_id), bytesintern(sections[3]))
126
127
128=== modified file 'breezy/_chunks_to_lines_py.py'
129--- breezy/_chunks_to_lines_py.py 2017-05-21 18:10:28 +0000
130+++ breezy/_chunks_to_lines_py.py 2017-06-11 01:47:17 +0000
131@@ -41,10 +41,10 @@
132 if not chunk:
133 # Empty strings are never valid lines
134 break
135- elif '\n' in chunk[:-1]:
136+ elif b'\n' in chunk[:-1]:
137 # This chunk has an extra '\n', so we will have to split it
138 break
139- elif chunk[-1] != '\n':
140+ elif chunk[-1:] != b'\n':
141 # This chunk does not have a trailing newline
142 last_no_newline = True
143 else:
144@@ -56,4 +56,4 @@
145
146 # These aren't simple lines, just join and split again.
147 from breezy import osutils
148- return osutils._split_lines(''.join(chunks))
149+ return osutils._split_lines(b''.join(chunks))
150
151=== modified file 'breezy/_dirstate_helpers_py.py'
152--- breezy/_dirstate_helpers_py.py 2017-06-10 01:57:23 +0000
153+++ breezy/_dirstate_helpers_py.py 2017-06-11 01:47:17 +0000
154@@ -184,7 +184,7 @@
155 if not isinstance(path2, bytes):
156 raise TypeError("'path2' must be a plain string, not %s: %r"
157 % (type(path2), path2))
158- return path1.split('/') < path2.split('/')
159+ return path1.split(b'/') < path2.split(b'/')
160
161
162 def _lt_path_by_dirblock(path1, path2):
163@@ -207,7 +207,7 @@
164 dirname1, basename1 = os.path.split(path1)
165 key1 = (dirname1.split(b'/'), basename1)
166 dirname2, basename2 = os.path.split(path2)
167- key2 = (dirname2.split('/'), basename2)
168+ key2 = (dirname2.split(b'/'), basename2)
169 return key1 < key2
170
171
172
173=== modified file 'breezy/_groupcompress_py.py'
174--- breezy/_groupcompress_py.py 2017-06-04 18:09:30 +0000
175+++ breezy/_groupcompress_py.py 2017-06-11 01:47:17 +0000
176@@ -290,7 +290,7 @@
177 if bytes_length is None:
178 bytes_length = sum(map(len, new_lines))
179 # reserved for content type, content length
180- out_lines = ['', '', encode_base128_int(bytes_length)]
181+ out_lines = [b'', b'', encode_base128_int(bytes_length)]
182 index_lines = [False, False, False]
183 output_handler = _OutputHandler(out_lines, index_lines,
184 self._MIN_MATCH_BYTES)
185@@ -316,26 +316,26 @@
186
187 def encode_base128_int(val):
188 """Convert an integer into a 7-bit lsb encoding."""
189- bytes = []
190+ data = bytearray()
191 count = 0
192 while val >= 0x80:
193- bytes.append(chr((val | 0x80) & 0xFF))
194+ data.append((val | 0x80) & 0xFF)
195 val >>= 7
196- bytes.append(chr(val))
197- return ''.join(bytes)
198-
199-
200-def decode_base128_int(bytes):
201+ data.append(val)
202+ return bytes(data)
203+
204+
205+def decode_base128_int(data):
206 """Decode an integer from a 7-bit lsb encoding."""
207 offset = 0
208 val = 0
209 shift = 0
210- bval = ord(bytes[offset])
211+ bval = ord(data[offset])
212 while bval >= 0x80:
213 val |= (bval & 0x7F) << shift
214 shift += 7
215 offset += 1
216- bval = ord(bytes[offset])
217+ bval = ord(data[offset])
218 val |= bval << shift
219 offset += 1
220 return val, offset
221
222=== modified file 'breezy/bzrworkingtree.py'
223--- breezy/bzrworkingtree.py 2017-06-10 18:44:23 +0000
224+++ breezy/bzrworkingtree.py 2017-06-11 01:47:17 +0000
225@@ -78,11 +78,11 @@
226 )
227
228
229-MERGE_MODIFIED_HEADER_1 = "BZR merge-modified list format 1"
230+MERGE_MODIFIED_HEADER_1 = b"BZR merge-modified list format 1"
231 # TODO: Modifying the conflict objects or their type is currently nearly
232 # impossible as there is no clear relationship between the working tree format
233 # and the conflict list file format.
234-CONFLICT_HEADER_1 = "BZR conflict list format 1"
235+CONFLICT_HEADER_1 = b"BZR conflict list format 1"
236
237
238 class InventoryWorkingTree(WorkingTree, MutableInventoryTree):
239@@ -382,7 +382,7 @@
240 return _mod_conflicts.ConflictList()
241 try:
242 try:
243- if next(confile) != CONFLICT_HEADER_1 + '\n':
244+ if next(confile) != CONFLICT_HEADER_1 + b'\n':
245 raise errors.ConflictFormatError()
246 except StopIteration:
247 raise errors.ConflictFormatError()
248@@ -650,7 +650,7 @@
249
250 def _put_rio(self, filename, stanzas, header):
251 self._must_be_locked()
252- my_file = _mod_rio.rio_file(stanzas, header.encode('ascii'))
253+ my_file = _mod_rio.rio_file(stanzas, header)
254 self._transport.put_file(filename, my_file,
255 mode=self.controldir._get_file_mode())
256
257@@ -680,7 +680,7 @@
258 try:
259 merge_hashes = {}
260 try:
261- if next(hashfile) != MERGE_MODIFIED_HEADER_1 + '\n':
262+ if next(hashfile) != MERGE_MODIFIED_HEADER_1 + b'\n':
263 raise errors.MergeModifiedFormatError()
264 except StopIteration:
265 raise errors.MergeModifiedFormatError()
266
267=== modified file 'breezy/chk_map.py'
268--- breezy/chk_map.py 2017-06-05 20:48:31 +0000
269+++ breezy/chk_map.py 2017-06-11 01:47:17 +0000
270@@ -100,7 +100,7 @@
271
272 def _search_key_plain(key):
273 """Map the key tuple into a search string that just uses the key bytes."""
274- return '\x00'.join(key)
275+ return b'\x00'.join(key)
276
277
278 search_key_registry = registry.Registry()
279@@ -197,8 +197,8 @@
280 self._ensure_root()
281 res = self._dump_tree_node(self._root_node, prefix='', indent='',
282 include_keys=include_keys)
283- res.append('') # Give a trailing '\n'
284- return '\n'.join(res)
285+ res.append(b'') # Give a trailing '\n'
286+ return b'\n'.join(res)
287
288 def _dump_tree_node(self, node, prefix, indent, include_keys=True):
289 """For this node and all children, generate a string representation."""
290@@ -208,11 +208,11 @@
291 else:
292 node_key = node.key()
293 if node_key is not None:
294- key_str = ' %s' % (node_key[0],)
295+ key_str = b' %s' % (node_key[0],)
296 else:
297- key_str = ' None'
298- result.append('%s%r %s%s' % (indent, prefix, node.__class__.__name__,
299- key_str))
300+ key_str = b' None'
301+ result.append(b'%s%r %s%s' % (indent, prefix, node.__class__.__name__,
302+ key_str))
303 if isinstance(node, InternalNode):
304 # Trigger all child nodes to get loaded
305 list(node._iter_nodes(self._store))
306@@ -223,7 +223,7 @@
307 for key, value in sorted(viewitems(node._items)):
308 # Don't use prefix nor indent here to line up when used in
309 # tests in conjunction with assertEqualDiff
310- result.append(' %r %r' % (tuple(key), value))
311+ result.append(b' %r %r' % (tuple(key), value))
312 return result
313
314 @classmethod
315@@ -680,7 +680,7 @@
316 if not common_prefix:
317 # if common_prefix is the empty string, then we know it won't
318 # change further
319- return ''
320+ return b''
321 return common_prefix
322
323
324@@ -786,7 +786,7 @@
325 # TODO: Should probably be done without actually joining the key, but
326 # then that can be done via the C extension
327 return (len(self._serialise_key(key)) + 1
328- + len(str(value.count('\n'))) + 1
329+ + len(str(value.count(b'\n'))) + 1
330 + len(value) + 1)
331
332 def _search_key(self, key):
333@@ -853,7 +853,7 @@
334 # may get a '\00' node anywhere, but won't have keys of
335 # different lengths.
336 if len(prefix) < split_at:
337- prefix += '\x00'*(split_at - len(prefix))
338+ prefix += b'\x00'*(split_at - len(prefix))
339 if prefix not in result:
340 node = LeafNode(search_key_func=self._search_key_func)
341 node.set_maximum_size(self._maximum_size)
342@@ -889,7 +889,7 @@
343 raise AssertionError('%r must be known' % self._search_prefix)
344 return self._search_prefix, [("", self)]
345
346- _serialise_key = '\x00'.join
347+ _serialise_key = b'\x00'.join
348
349 def serialise(self, store):
350 """Serialise the LeafNode to store.
351@@ -897,22 +897,22 @@
352 :param store: A VersionedFiles honouring the CHK extensions.
353 :return: An iterable of the keys inserted by this operation.
354 """
355- lines = ["chkleaf:\n"]
356- lines.append("%d\n" % self._maximum_size)
357- lines.append("%d\n" % self._key_width)
358- lines.append("%d\n" % self._len)
359+ lines = [b"chkleaf:\n"]
360+ lines.append(b"%d\n" % self._maximum_size)
361+ lines.append(b"%d\n" % self._key_width)
362+ lines.append(b"%d\n" % self._len)
363 if self._common_serialised_prefix is None:
364- lines.append('\n')
365+ lines.append(b'\n')
366 if len(self._items) != 0:
367 raise AssertionError('If _common_serialised_prefix is None'
368 ' we should have no items')
369 else:
370- lines.append('%s\n' % (self._common_serialised_prefix,))
371+ lines.append(b'%s\n' % (self._common_serialised_prefix,))
372 prefix_len = len(self._common_serialised_prefix)
373 for key, value in sorted(viewitems(self._items)):
374 # Always add a final newline
375- value_lines = osutils.chunks_to_lines([value + '\n'])
376- serialized = "%s\x00%s\n" % (self._serialise_key(key),
377+ value_lines = osutils.chunks_to_lines([value + b'\n'])
378+ serialized = b"%s\x00%d\n" % (self._serialise_key(key),
379 len(value_lines))
380 if not serialized.startswith(self._common_serialised_prefix):
381 raise AssertionError('We thought the common prefix was %r'
382@@ -921,11 +921,11 @@
383 lines.append(serialized[prefix_len:])
384 lines.extend(value_lines)
385 sha1, _, _ = store.add_lines((None,), (), lines)
386- self._key = StaticTuple("sha1:" + sha1,).intern()
387- bytes = ''.join(lines)
388- if len(bytes) != self._current_size():
389+ self._key = StaticTuple(b"sha1:" + sha1,).intern()
390+ data = b''.join(lines)
391+ if len(data) != self._current_size():
392 raise AssertionError('Invalid _current_size')
393- _get_cache()[self._key] = bytes
394+ _get_cache()[self._key] = data
395 return [self._key]
396
397 def refs(self):
398@@ -1304,34 +1304,34 @@
399 continue
400 for key in node.serialise(store):
401 yield key
402- lines = ["chknode:\n"]
403- lines.append("%d\n" % self._maximum_size)
404- lines.append("%d\n" % self._key_width)
405- lines.append("%d\n" % self._len)
406+ lines = [b"chknode:\n"]
407+ lines.append(b"%d\n" % self._maximum_size)
408+ lines.append(b"%d\n" % self._key_width)
409+ lines.append(b"%d\n" % self._len)
410 if self._search_prefix is None:
411 raise AssertionError("_search_prefix should not be None")
412- lines.append('%s\n' % (self._search_prefix,))
413+ lines.append(b'%s\n' % (self._search_prefix,))
414 prefix_len = len(self._search_prefix)
415 for prefix, node in sorted(viewitems(self._items)):
416 if isinstance(node, StaticTuple):
417 key = node[0]
418 else:
419 key = node._key[0]
420- serialised = "%s\x00%s\n" % (prefix, key)
421+ serialised = b"%s\x00%s\n" % (prefix, key)
422 if not serialised.startswith(self._search_prefix):
423 raise AssertionError("prefixes mismatch: %s must start with %s"
424 % (serialised, self._search_prefix))
425 lines.append(serialised[prefix_len:])
426 sha1, _, _ = store.add_lines((None,), (), lines)
427- self._key = StaticTuple("sha1:" + sha1,).intern()
428- _get_cache()[self._key] = ''.join(lines)
429+ self._key = StaticTuple(b"sha1:" + sha1,).intern()
430+ _get_cache()[self._key] = b''.join(lines)
431 yield self._key
432
433 def _search_key(self, key):
434 """Return the serialised key for key in this node."""
435 # search keys are fixed width. All will be self._node_width wide, so we
436 # pad as necessary.
437- return (self._search_key_func(key) + '\x00'*self._node_width)[:self._node_width]
438+ return (self._search_key_func(key) + b'\x00'*self._node_width)[:self._node_width]
439
440 def _search_prefix_filter(self, key):
441 """Serialise key for use as a prefix filter in iteritems."""
442@@ -1450,12 +1450,12 @@
443 return new_leaf
444
445
446-def _deserialise(bytes, key, search_key_func):
447+def _deserialise(data, key, search_key_func):
448 """Helper for repositorydetails - convert bytes to a node."""
449- if bytes.startswith("chkleaf:\n"):
450- node = LeafNode.deserialise(bytes, key, search_key_func=search_key_func)
451- elif bytes.startswith("chknode:\n"):
452- node = InternalNode.deserialise(bytes, key,
453+ if data.startswith(b"chkleaf:\n"):
454+ node = LeafNode.deserialise(data, key, search_key_func=search_key_func)
455+ elif data.startswith(b"chknode:\n"):
456+ node = InternalNode.deserialise(data, key,
457 search_key_func=search_key_func)
458 else:
459 raise AssertionError("Unknown node type.")
460
461=== modified file 'breezy/commit.py'
462--- breezy/commit.py 2017-05-30 20:17:23 +0000
463+++ breezy/commit.py 2017-06-11 01:47:17 +0000
464@@ -280,7 +280,7 @@
465 raise errors.RootNotRich()
466 if message_callback is None:
467 if message is not None:
468- if isinstance(message, str):
469+ if isinstance(message, bytes):
470 message = message.decode(get_user_encoding())
471 message_callback = lambda x: message
472 else:
473@@ -657,7 +657,7 @@
474 """
475 exclude = self.exclude
476 specific_files = self.specific_files
477- mutter("Selecting files for commit with filter %s", specific_files)
478+ mutter("Selecting files for commit with filter %r", specific_files)
479
480 self._check_strict()
481 if self.use_record_iter_changes:
482
483=== modified file 'breezy/config.py'
484--- breezy/config.py 2017-06-10 12:56:18 +0000
485+++ breezy/config.py 2017-06-11 01:47:17 +0000
486@@ -477,10 +477,12 @@
487 If no username can be found, errors.NoWhoami exception is raised.
488 """
489 v = os.environ.get('BRZ_EMAIL')
490- if v and not PY3:
491- return v.decode(osutils.get_user_encoding())
492+ if v:
493+ if not PY3:
494+ v = v.decode(osutils.get_user_encoding())
495+ return v
496 v = self._get_user_id()
497- if v and not PY3:
498+ if v:
499 return v
500 return default_email()
501
502@@ -1497,11 +1499,15 @@
503
504 def default_email():
505 v = os.environ.get('BRZ_EMAIL')
506- if v and not PY3:
507- return v.decode(osutils.get_user_encoding())
508+ if v:
509+ if not PY3:
510+ v = v.decode(osutils.get_user_encoding())
511+ return v
512 v = os.environ.get('EMAIL')
513- if v and not PY3:
514- return v.decode(osutils.get_user_encoding())
515+ if v:
516+ if not PY3:
517+ v = v.decode(osutils.get_user_encoding())
518+ return v
519 name, email = _auto_user_id()
520 if name and email:
521 return u'%s <%s>' % (name, email)
522
523=== modified file 'breezy/dirstate.py'
524--- breezy/dirstate.py 2017-06-10 02:39:00 +0000
525+++ breezy/dirstate.py 2017-06-11 01:47:17 +0000
526@@ -3541,9 +3541,13 @@
527 source_details = DirState.NULL_PARENT_DETAILS
528 else:
529 source_details = entry[1][self.source_index]
530+ # GZ 2017-06-09: Eck, more sets.
531+ _fdltr = {b'f', b'd', b'l', b't', b'r'}
532+ _fdlt = {b'f', b'd', b'l', b't'}
533+ _ra = (b'r', b'a')
534 target_details = entry[1][self.target_index]
535 target_minikind = target_details[0]
536- if path_info is not None and target_minikind in 'fdlt':
537+ if path_info is not None and target_minikind in _fdlt:
538 if not (self.target_index == 0):
539 raise AssertionError()
540 link_or_sha1 = update_entry(self.state, entry,
541@@ -3555,10 +3559,6 @@
542 link_or_sha1 = None
543 file_id = entry[0][2]
544 source_minikind = source_details[0]
545- # GZ 2017-06-09: Eck, more sets.
546- _fdltr = {b'f', b'd', b'l', b't', b'r'}
547- _fdlt = {b'f', b'd', b'l', b't'}
548- _ra = (b'r', b'a')
549 if source_minikind in _fdltr and target_minikind in _fdlt:
550 # claimed content in both: diff
551 # r | fdlt | | add source to search, add id path move and perform
552
553=== modified file 'breezy/groupcompress.py'
554--- breezy/groupcompress.py 2017-06-05 20:48:31 +0000
555+++ breezy/groupcompress.py 2017-06-11 01:47:17 +0000
556@@ -61,7 +61,7 @@
557 BATCH_SIZE = 2**16
558
559 # osutils.sha_string('')
560-_null_sha1 = 'da39a3ee5e6b4b0d3255bfef95601890afd80709'
561+_null_sha1 = b'da39a3ee5e6b4b0d3255bfef95601890afd80709'
562
563 def sort_gc_optimal(parent_map):
564 """Sort and group the keys in parent_map into groupcompress order.
565@@ -75,8 +75,8 @@
566 # properly grouped by file-id.
567 per_prefix_map = {}
568 for key, value in viewitems(parent_map):
569- if isinstance(key, str) or len(key) == 1:
570- prefix = ''
571+ if isinstance(key, bytes) or len(key) == 1:
572+ prefix = b''
573 else:
574 prefix = key[0]
575 try:
576@@ -102,9 +102,9 @@
577 """
578
579 # Group Compress Block v1 Zlib
580- GCB_HEADER = 'gcb1z\n'
581+ GCB_HEADER = b'gcb1z\n'
582 # Group Compress Block v1 Lzma
583- GCB_LZ_HEADER = 'gcb1l\n'
584+ GCB_LZ_HEADER = b'gcb1l\n'
585 GCB_KNOWN_HEADERS = (GCB_HEADER, GCB_LZ_HEADER)
586
587 def __init__(self):
588@@ -141,7 +141,7 @@
589 # Expand the content if required
590 if self._content is None:
591 if self._content_chunks is not None:
592- self._content = ''.join(self._content_chunks)
593+ self._content = b''.join(self._content_chunks)
594 self._content_chunks = None
595 if self._content is None:
596 # We join self._z_content_chunks here, because if we are
597@@ -149,9 +149,9 @@
598 # chunk
599 if self._z_content_chunks is None:
600 raise AssertionError('No content to decompress')
601- z_content = ''.join(self._z_content_chunks)
602- if z_content == '':
603- self._content = ''
604+ z_content = b''.join(self._z_content_chunks)
605+ if z_content == b'':
606+ self._content = b''
607 elif self._compressor_name == 'lzma':
608 # We don't do partial lzma decomp yet
609 import pylzma
610@@ -201,7 +201,7 @@
611 # The stream is finished
612 self._z_content_decompressor = None
613
614- def _parse_bytes(self, bytes, pos):
615+ def _parse_bytes(self, data, pos):
616 """Read the various lengths from the header.
617
618 This also populates the various 'compressed' buffers.
619@@ -211,17 +211,17 @@
620 # At present, we have 2 integers for the compressed and uncompressed
621 # content. In base10 (ascii) 14 bytes can represent > 1TB, so to avoid
622 # checking too far, cap the search to 14 bytes.
623- pos2 = bytes.index('\n', pos, pos + 14)
624- self._z_content_length = int(bytes[pos:pos2])
625- pos = pos2 + 1
626- pos2 = bytes.index('\n', pos, pos + 14)
627- self._content_length = int(bytes[pos:pos2])
628- pos = pos2 + 1
629- if len(bytes) != (pos + self._z_content_length):
630+ pos2 = data.index(b'\n', pos, pos + 14)
631+ self._z_content_length = int(data[pos:pos2])
632+ pos = pos2 + 1
633+ pos2 = data.index(b'\n', pos, pos + 14)
634+ self._content_length = int(data[pos:pos2])
635+ pos = pos2 + 1
636+ if len(data) != (pos + self._z_content_length):
637 # XXX: Define some GCCorrupt error ?
638 raise AssertionError('Invalid bytes: (%d) != %d + %d' %
639- (len(bytes), pos, self._z_content_length))
640- self._z_content_chunks = (bytes[pos:],)
641+ (len(data), pos, self._z_content_length))
642+ self._z_content_chunks = (data[pos:],)
643
644 @property
645 def _z_content(self):
646@@ -230,7 +230,7 @@
647 Meant only to be used by the test suite.
648 """
649 if self._z_content_chunks is not None:
650- return ''.join(self._z_content_chunks)
651+ return b''.join(self._z_content_chunks)
652 return None
653
654 @classmethod
655@@ -257,17 +257,17 @@
656 :return: The bytes for the content
657 """
658 if start == end == 0:
659- return ''
660+ return b''
661 self._ensure_content(end)
662 # The bytes are 'f' or 'd' for the type, then a variable-length
663 # base128 integer for the content size, then the actual content
664 # We know that the variable-length integer won't be longer than 5
665 # bytes (it takes 5 bytes to encode 2^32)
666 c = self._content[start]
667- if c == 'f':
668+ if c == b'f':
669 type = 'fulltext'
670 else:
671- if c != 'd':
672+ if c != b'd':
673 raise ValueError('Unknown content control code: %s'
674 % (c,))
675 type = 'delta'
676@@ -277,11 +277,10 @@
677 if end != content_start + content_len:
678 raise ValueError('end != len according to field header'
679 ' %s != %s' % (end, content_start + content_len))
680- if c == 'f':
681- bytes = self._content[content_start:end]
682- elif c == 'd':
683- bytes = apply_delta_to_source(self._content, content_start, end)
684- return bytes
685+ if c == b'f':
686+ return self._content[content_start:end]
687+ # Must be type delta as checked above
688+ return apply_delta_to_source(self._content, content_start, end)
689
690 def set_chunked_content(self, content_chunks, length):
691 """Set the content of this block to the given chunks."""
692@@ -324,7 +323,7 @@
693 """Create the byte stream as a series of 'chunks'"""
694 self._create_z_content()
695 header = self.GCB_HEADER
696- chunks = ['%s%d\n%d\n'
697+ chunks = [b'%s%d\n%d\n'
698 % (header, self._z_content_length, self._content_length),
699 ]
700 chunks.extend(self._z_content_chunks)
701@@ -334,7 +333,7 @@
702 def to_bytes(self):
703 """Encode the information into a byte stream."""
704 total_len, chunks = self.to_chunks()
705- return ''.join(chunks)
706+ return b''.join(chunks)
707
708 def _dump(self, include_text=False):
709 """Take this block, and spit out a human-readable structure.
710@@ -352,7 +351,7 @@
711 while pos < self._content_length:
712 kind = self._content[pos]
713 pos += 1
714- if kind not in ('f', 'd'):
715+ if kind not in (b'f', b'd'):
716 raise ValueError('invalid kind character: %r' % (kind,))
717 content_len, len_len = decode_base128_int(
718 self._content[pos:pos + 5])
719@@ -360,18 +359,18 @@
720 if content_len + pos > self._content_length:
721 raise ValueError('invalid content_len %d for record @ pos %d'
722 % (content_len, pos - len_len - 1))
723- if kind == 'f': # Fulltext
724+ if kind == b'f': # Fulltext
725 if include_text:
726 text = self._content[pos:pos+content_len]
727- result.append(('f', content_len, text))
728+ result.append((b'f', content_len, text))
729 else:
730- result.append(('f', content_len))
731- elif kind == 'd': # Delta
732+ result.append((b'f', content_len))
733+ elif kind == b'd': # Delta
734 delta_content = self._content[pos:pos+content_len]
735 delta_info = []
736 # The first entry in a delta is the decompressed length
737 decomp_len, delta_pos = decode_base128_int(delta_content)
738- result.append(('d', content_len, decomp_len, delta_info))
739+ result.append((b'd', content_len, decomp_len, delta_info))
740 measured_len = 0
741 while delta_pos < content_len:
742 c = ord(delta_content[delta_pos])
743@@ -382,16 +381,16 @@
744 delta_pos)
745 if include_text:
746 text = self._content[offset:offset+length]
747- delta_info.append(('c', offset, length, text))
748+ delta_info.append((b'c', offset, length, text))
749 else:
750- delta_info.append(('c', offset, length))
751+ delta_info.append((b'c', offset, length))
752 measured_len += length
753 else: # Insert
754 if include_text:
755 txt = delta_content[delta_pos:delta_pos+c]
756 else:
757 txt = ''
758- delta_info.append(('i', c, txt))
759+ delta_info.append((b'i', c, txt))
760 measured_len += c
761 delta_pos += c
762 if delta_pos != content_len:
763@@ -447,7 +446,7 @@
764 # wire bytes, something...
765 return self._manager._wire_bytes()
766 else:
767- return ''
768+ return b''
769 if storage_kind in ('fulltext', 'chunked'):
770 if self._bytes is None:
771 # Grab and cache the raw bytes for this entry
772@@ -842,7 +841,9 @@
773 if sha1 == nostore_sha:
774 raise errors.ExistingContent()
775 if key[-1] is None:
776- key = key[:-1] + ('sha1:' + sha1,)
777+ # GZ 2017-06-10: Seems perverse to have to encode here.
778+ sha1 = sha1.encode('ascii')
779+ key = key[:-1] + (b'sha1:' + sha1,)
780
781 start, end, type = self._compress(key, bytes, len(bytes) / 2, soft)
782 return sha1, start, end, type
783@@ -875,7 +876,7 @@
784 (start_byte, start_chunk, end_byte, end_chunk) = self.labels_deltas[key]
785 delta_chunks = self.chunks[start_chunk:end_chunk]
786 stored_bytes = ''.join(delta_chunks)
787- if stored_bytes[0] == 'f':
788+ if stored_bytes[0] == b'f':
789 fulltext_len, offset = decode_base128_int(stored_bytes[1:10])
790 data_len = fulltext_len + 1 + offset
791 if data_len != len(stored_bytes):
792@@ -947,14 +948,14 @@
793 if delta_length > max_delta_size:
794 # The delta is longer than the fulltext, insert a fulltext
795 type = 'fulltext'
796- out_lines = ['f', encode_base128_int(input_len)]
797+ out_lines = [b'f', encode_base128_int(input_len)]
798 out_lines.extend(new_lines)
799 index_lines = [False, False]
800 index_lines.extend([True] * len(new_lines))
801 else:
802 # this is a worthy delta, output it
803 type = 'delta'
804- out_lines[0] = 'd'
805+ out_lines[0] = b'd'
806 # Update the delta_length to include those two encoded integers
807 out_lines[1] = encode_base128_int(delta_length)
808 # Before insertion
809@@ -1014,12 +1015,12 @@
810 enc_length = encode_base128_int(len(bytes))
811 len_mini_header = 1 + len(enc_length)
812 self._delta_index.add_source(bytes, len_mini_header)
813- new_chunks = ['f', enc_length, bytes]
814+ new_chunks = [b'f', enc_length, bytes]
815 else:
816 type = 'delta'
817 enc_length = encode_base128_int(len(delta))
818 len_mini_header = 1 + len(enc_length)
819- new_chunks = ['d', enc_length, delta]
820+ new_chunks = [b'd', enc_length, delta]
821 self._delta_index.add_delta_source(delta, len_mini_header)
822 # Before insertion
823 start = self.endpoint
824@@ -1715,13 +1716,13 @@
825 # the fulltext content at this point. Note that sometimes we
826 # will want it later (streaming CHK pages), but most of the
827 # time we won't (everything else)
828- bytes = ''.join(chunks)
829+ data = b''.join(chunks)
830 del chunks
831 index, start, length = self._access.add_raw_records(
832- [(None, len(bytes))], bytes)[0]
833+ [(None, len(data))], data)[0]
834 nodes = []
835 for key, reads, refs in keys_to_add:
836- nodes.append((key, "%d %d %s" % (start, length, reads), refs))
837+ nodes.append((key, b"%d %d %s" % (start, length, reads), refs))
838 self._index.add_records(nodes, random_id=random_id)
839 self._unadded_refs = {}
840 del keys_to_add[:]
841@@ -1777,7 +1778,7 @@
842 ' the current record, we cannot be positive'
843 ' that the appropriate content was inserted.'
844 )
845- value = "%d %d %d %d" % (block_start, block_length,
846+ value = b"%d %d %d %d" % (block_start, block_length,
847 record._start, record._end)
848 nodes = [(record.key, value, (record.parents,))]
849 # TODO: Consider buffering up many nodes to be added, not
850@@ -1827,7 +1828,7 @@
851 type) = self._compressor.compress(record.key, bytes,
852 record.sha1)
853 if record.key[-1] is None:
854- key = record.key[:-1] + ('sha1:' + found_sha1,)
855+ key = record.key[:-1] + (b'sha1:' + found_sha1,)
856 else:
857 key = record.key
858 self._unadded_refs[key] = record.parents
859@@ -1838,7 +1839,8 @@
860 else:
861 parents = None
862 refs = static_tuple.StaticTuple(parents)
863- keys_to_add.append((key, '%d %d' % (start_point, end_point), refs))
864+ keys_to_add.append(
865+ (key, b'%d %d' % (start_point, end_point), refs))
866 if len(keys_to_add):
867 flush()
868 self._compressor = None
869
870=== modified file 'breezy/index.py'
871--- breezy/index.py 2017-06-10 01:57:00 +0000
872+++ breezy/index.py 2017-06-11 01:47:17 +0000
873@@ -56,8 +56,8 @@
874 _SIGNATURE = b"Bazaar Graph Index 1\n"
875
876
877-_whitespace_re = re.compile('[\t\n\x0b\x0c\r\x00 ]')
878-_newline_null_re = re.compile('[\n\0]')
879+_whitespace_re = re.compile(b'[\t\n\x0b\x0c\r\x00 ]')
880+_newline_null_re = re.compile(b'[\n\0]')
881
882
883 def _has_key_from_parent_map(self, key):
884
885=== modified file 'breezy/inventory.py'
886--- breezy/inventory.py 2017-06-10 01:57:00 +0000
887+++ breezy/inventory.py 2017-06-11 01:47:17 +0000
888@@ -49,6 +49,8 @@
889 trace,
890 )
891 from .sixish import (
892+ bytesintern,
893+ PY3,
894 viewitems,
895 viewvalues,
896 )
897@@ -217,7 +219,7 @@
898 Traceback (most recent call last):
899 InvalidEntryName: Invalid entry name: src/hello.c
900 """
901- if '/' in name or '\\' in name:
902+ if u'/' in name or u'\\' in name:
903 raise errors.InvalidEntryName(name=name)
904 self.file_id = file_id
905 self.revision = None
906@@ -388,8 +390,8 @@
907 # to provide a per-fileid log. The hash of every directory content is
908 # "da..." below (the sha1sum of '').
909 checker.add_pending_item(rev_id,
910- ('texts', self.file_id, self.revision), 'text',
911- 'da39a3ee5e6b4b0d3255bfef95601890afd80709')
912+ (b'texts', self.file_id, self.revision), b'text',
913+ b'da39a3ee5e6b4b0d3255bfef95601890afd80709')
914
915 def copy(self):
916 other = InventoryDirectory(self.file_id, self.name, self.parent_id)
917@@ -428,7 +430,7 @@
918 """See InventoryEntry._check"""
919 # TODO: check size too.
920 checker.add_pending_item(tree_revision_id,
921- ('texts', self.file_id, self.revision), 'text',
922+ (b'texts', self.file_id, self.revision), b'text',
923 self.text_sha1)
924 if self.text_size is None:
925 checker._report_items.append(
926@@ -528,8 +530,8 @@
927 % (self.file_id, tree_revision_id))
928 # Symlinks are stored as ''
929 checker.add_pending_item(tree_revision_id,
930- ('texts', self.file_id, self.revision), 'text',
931- 'da39a3ee5e6b4b0d3255bfef95601890afd80709')
932+ (b'texts', self.file_id, self.revision), b'text',
933+ b'da39a3ee5e6b4b0d3255bfef95601890afd80709')
934
935 def copy(self):
936 other = InventoryLink(self.file_id, self.name, self.parent_id)
937@@ -1398,25 +1400,25 @@
938 if entry.parent_id is not None:
939 parent_str = entry.parent_id
940 else:
941- parent_str = ''
942+ parent_str = b''
943 name_str = entry.name.encode("utf8")
944 if entry.kind == 'file':
945 if entry.executable:
946- exec_str = "Y"
947+ exec_str = b"Y"
948 else:
949- exec_str = "N"
950- return "file: %s\n%s\n%s\n%s\n%s\n%d\n%s" % (
951+ exec_str = b"N"
952+ return b"file: %s\n%s\n%s\n%s\n%s\n%d\n%s" % (
953 entry.file_id, parent_str, name_str, entry.revision,
954 entry.text_sha1, entry.text_size, exec_str)
955 elif entry.kind == 'directory':
956- return "dir: %s\n%s\n%s\n%s" % (
957+ return b"dir: %s\n%s\n%s\n%s" % (
958 entry.file_id, parent_str, name_str, entry.revision)
959 elif entry.kind == 'symlink':
960- return "symlink: %s\n%s\n%s\n%s\n%s" % (
961+ return b"symlink: %s\n%s\n%s\n%s\n%s" % (
962 entry.file_id, parent_str, name_str, entry.revision,
963 entry.symlink_target.encode("utf8"))
964 elif entry.kind == 'tree-reference':
965- return "tree: %s\n%s\n%s\n%s\n%s" % (
966+ return b"tree: %s\n%s\n%s\n%s\n%s" % (
967 entry.file_id, parent_str, name_str, entry.revision,
968 entry.reference_revision)
969 else:
970@@ -1534,43 +1536,43 @@
971 return other
972
973 @staticmethod
974- def _bytes_to_utf8name_key(bytes):
975- """Get the file_id, revision_id key out of bytes."""
976+ def _bytes_to_utf8name_key(data):
977+ """Get the file_id, revision_id key out of data."""
978 # We don't normally care about name, except for times when we want
979 # to filter out empty names because of non rich-root...
980- sections = bytes.split('\n')
981- kind, file_id = sections[0].split(': ')
982- return (sections[2], intern(file_id), intern(sections[3]))
983+ sections = data.split(b'\n')
984+ kind, file_id = sections[0].split(b': ')
985+ return (sections[2], bytesintern(file_id), bytesintern(sections[3]))
986
987 def _bytes_to_entry(self, bytes):
988 """Deserialise a serialised entry."""
989- sections = bytes.split('\n')
990- if sections[0].startswith("file: "):
991+ sections = bytes.split(b'\n')
992+ if sections[0].startswith(b"file: "):
993 result = InventoryFile(sections[0][6:],
994 sections[2].decode('utf8'),
995 sections[1])
996 result.text_sha1 = sections[4]
997 result.text_size = int(sections[5])
998- result.executable = sections[6] == "Y"
999- elif sections[0].startswith("dir: "):
1000+ result.executable = sections[6] == b"Y"
1001+ elif sections[0].startswith(b"dir: "):
1002 result = CHKInventoryDirectory(sections[0][5:],
1003 sections[2].decode('utf8'),
1004 sections[1], self)
1005- elif sections[0].startswith("symlink: "):
1006+ elif sections[0].startswith(b"symlink: "):
1007 result = InventoryLink(sections[0][9:],
1008 sections[2].decode('utf8'),
1009 sections[1])
1010 result.symlink_target = sections[4].decode('utf8')
1011- elif sections[0].startswith("tree: "):
1012+ elif sections[0].startswith(b"tree: "):
1013 result = TreeReference(sections[0][6:],
1014 sections[2].decode('utf8'),
1015 sections[1])
1016 result.reference_revision = sections[4]
1017 else:
1018 raise ValueError("Not a serialised entry %r" % bytes)
1019- result.file_id = intern(result.file_id)
1020- result.revision = intern(sections[3])
1021- if result.parent_id == '':
1022+ result.file_id = bytesintern(result.file_id)
1023+ result.revision = bytesintern(sections[3])
1024+ if result.parent_id == b'':
1025 result.parent_id = None
1026 self._fileid_to_entry_cache[result.file_id] = result
1027 return result
1028@@ -1754,18 +1756,18 @@
1029 for.
1030 :return: A CHKInventory
1031 """
1032- lines = bytes.split('\n')
1033- if lines[-1] != '':
1034+ lines = bytes.split(b'\n')
1035+ if lines[-1] != b'':
1036 raise AssertionError('bytes to deserialize must end with an eol')
1037 lines.pop()
1038- if lines[0] != 'chkinventory:':
1039+ if lines[0] != b'chkinventory:':
1040 raise ValueError("not a serialised CHKInventory: %r" % bytes)
1041 info = {}
1042- allowed_keys = frozenset(['root_id', 'revision_id', 'search_key_name',
1043- 'parent_id_basename_to_file_id',
1044- 'id_to_entry'])
1045+ allowed_keys = frozenset((b'root_id', b'revision_id',
1046+ b'parent_id_basename_to_file_id',
1047+ b'search_key_name', b'id_to_entry'))
1048 for line in lines[1:]:
1049- key, value = line.split(': ', 1)
1050+ key, value = line.split(b': ', 1)
1051 if key not in allowed_keys:
1052 raise errors.BzrError('Unknown key in inventory: %r\n%r'
1053 % (key, bytes))
1054@@ -1773,16 +1775,16 @@
1055 raise errors.BzrError('Duplicate key in inventory: %r\n%r'
1056 % (key, bytes))
1057 info[key] = value
1058- revision_id = intern(info['revision_id'])
1059- root_id = intern(info['root_id'])
1060- search_key_name = intern(info.get('search_key_name', 'plain'))
1061- parent_id_basename_to_file_id = intern(info.get(
1062- 'parent_id_basename_to_file_id', None))
1063- if not parent_id_basename_to_file_id.startswith('sha1:'):
1064+ revision_id = bytesintern(info[b'revision_id'])
1065+ root_id = bytesintern(info[b'root_id'])
1066+ search_key_name = bytesintern(info.get(b'search_key_name', b'plain'))
1067+ parent_id_basename_to_file_id = bytesintern(info.get(
1068+ b'parent_id_basename_to_file_id', None))
1069+ if not parent_id_basename_to_file_id.startswith(b'sha1:'):
1070 raise ValueError('parent_id_basename_to_file_id should be a sha1'
1071 ' key not %r' % (parent_id_basename_to_file_id,))
1072- id_to_entry = info['id_to_entry']
1073- if not id_to_entry.startswith('sha1:'):
1074+ id_to_entry = info[b'id_to_entry']
1075+ if not id_to_entry.startswith(b'sha1:'):
1076 raise ValueError('id_to_entry should be a sha1'
1077 ' key not %r' % (id_to_entry,))
1078
1079@@ -1790,7 +1792,7 @@
1080 result.revision_id = revision_id
1081 result.root_id = root_id
1082 search_key_func = chk_map.search_key_registry.get(
1083- result._search_key_name)
1084+ result._search_key_name.decode("ascii"))
1085 if parent_id_basename_to_file_id is not None:
1086 result.parent_id_basename_to_file_id = chk_map.CHKMap(
1087 chk_store, StaticTuple(parent_id_basename_to_file_id,),
1088@@ -1856,7 +1858,7 @@
1089 if entry.parent_id is not None:
1090 parent_id = entry.parent_id
1091 else:
1092- parent_id = ''
1093+ parent_id = b''
1094 return StaticTuple(parent_id, entry.name.encode('utf8')).intern()
1095
1096 def __getitem__(self, file_id):
1097@@ -1868,7 +1870,7 @@
1098 return result
1099 try:
1100 return self._bytes_to_entry(
1101- self.id_to_entry.iteritems([StaticTuple(file_id,)]).next()[1])
1102+ next(self.id_to_entry.iteritems([StaticTuple(file_id,)]))[1])
1103 except StopIteration:
1104 # really we're passing an inventory, not a tree...
1105 raise errors.NoSuchId(self, file_id)
1106@@ -1951,7 +1953,7 @@
1107 last_parent_id = last_parent_ie = None
1108 pid_items = self.parent_id_basename_to_file_id.iteritems()
1109 for key, child_file_id in pid_items:
1110- if key == ('', ''): # This is the root
1111+ if key == (b'', b''): # This is the root
1112 if child_file_id != self.root_id:
1113 raise ValueError('Data inconsistency detected.'
1114 ' We expected data with key ("","") to match'
1115@@ -2129,22 +2131,23 @@
1116
1117 def to_lines(self):
1118 """Serialise the inventory to lines."""
1119- lines = ["chkinventory:\n"]
1120+ lines = [b"chkinventory:\n"]
1121 if self._search_key_name != 'plain':
1122 # custom ordering grouping things that don't change together
1123- lines.append('search_key_name: %s\n' % (self._search_key_name,))
1124- lines.append("root_id: %s\n" % self.root_id)
1125- lines.append('parent_id_basename_to_file_id: %s\n' %
1126+ lines.append(b'search_key_name: %s\n' % (
1127+ self._search_key_name.encode('ascii')))
1128+ lines.append(b"root_id: %s\n" % self.root_id)
1129+ lines.append(b'parent_id_basename_to_file_id: %s\n' %
1130 (self.parent_id_basename_to_file_id.key()[0],))
1131- lines.append("revision_id: %s\n" % self.revision_id)
1132- lines.append("id_to_entry: %s\n" % (self.id_to_entry.key()[0],))
1133+ lines.append(b"revision_id: %s\n" % self.revision_id)
1134+ lines.append(b"id_to_entry: %s\n" % (self.id_to_entry.key()[0],))
1135 else:
1136- lines.append("revision_id: %s\n" % self.revision_id)
1137- lines.append("root_id: %s\n" % self.root_id)
1138+ lines.append(b"revision_id: %s\n" % self.revision_id)
1139+ lines.append(b"root_id: %s\n" % self.root_id)
1140 if self.parent_id_basename_to_file_id is not None:
1141- lines.append('parent_id_basename_to_file_id: %s\n' %
1142+ lines.append(b'parent_id_basename_to_file_id: %s\n' %
1143 (self.parent_id_basename_to_file_id.key()[0],))
1144- lines.append("id_to_entry: %s\n" % (self.id_to_entry.key()[0],))
1145+ lines.append(b"id_to_entry: %s\n" % (self.id_to_entry.key()[0],))
1146 return lines
1147
1148 @property
1149
1150=== modified file 'breezy/inventory_delta.py'
1151--- breezy/inventory_delta.py 2017-06-09 16:31:49 +0000
1152+++ breezy/inventory_delta.py 2017-06-11 01:47:17 +0000
1153@@ -31,7 +31,7 @@
1154 from . import inventory
1155 from .revision import NULL_REVISION
1156
1157-FORMAT_1 = 'bzr inventory delta v1 (bzr 1.14)'
1158+FORMAT_1 = b'bzr inventory delta v1 (bzr 1.14)'
1159
1160
1161 class InventoryDeltaError(errors.BzrError):
1162@@ -61,7 +61,7 @@
1163
1164 :param entry: An InventoryDirectory.
1165 """
1166- return "dir"
1167+ return b"dir"
1168
1169
1170 def _file_content(entry):
1171@@ -70,14 +70,14 @@
1172 :param entry: An InventoryFile.
1173 """
1174 if entry.executable:
1175- exec_bytes = 'Y'
1176+ exec_bytes = b'Y'
1177 else:
1178- exec_bytes = ''
1179- size_exec_sha = (entry.text_size, exec_bytes, entry.text_sha1)
1180+ exec_bytes = b''
1181+ size_exec_sha = entry.text_size, exec_bytes, entry.text_sha1
1182 if None in size_exec_sha:
1183 raise InventoryDeltaError(
1184 'Missing size or sha for %(fileid)r', fileid=entry.file_id)
1185- return "file\x00%d\x00%s\x00%s" % size_exec_sha
1186+ return b"file\x00%d\x00%s\x00%s" % size_exec_sha
1187
1188
1189 def _link_content(entry):
1190@@ -89,7 +89,7 @@
1191 if target is None:
1192 raise InventoryDeltaError(
1193 'Missing target for %(fileid)r', fileid=entry.file_id)
1194- return "link\x00%s" % target.encode('utf8')
1195+ return b"link\x00%s" % target.encode('utf8')
1196
1197
1198 def _reference_content(entry):
1199@@ -101,7 +101,7 @@
1200 if tree_revision is None:
1201 raise InventoryDeltaError(
1202 'Missing reference revision for %(fileid)r', fileid=entry.file_id)
1203- return "tree\x00%s" % tree_revision
1204+ return b"tree\x00%s" % tree_revision
1205
1206
1207 def _dir_to_entry(content, name, parent_id, file_id, last_modified,
1208@@ -179,11 +179,11 @@
1209 takes.
1210 :return: The serialized delta as lines.
1211 """
1212- if not isinstance(old_name, str):
1213+ if not isinstance(old_name, bytes):
1214 raise TypeError('old_name should be str, got %r' % (old_name,))
1215- if not isinstance(new_name, str):
1216+ if not isinstance(new_name, bytes):
1217 raise TypeError('new_name should be str, got %r' % (new_name,))
1218- lines = ['', '', '', '', '']
1219+ lines = [b'', b'', b'', b'', b'']
1220 to_line = self._delta_item_to_line
1221 for delta_item in delta_to_new:
1222 line = to_line(delta_item, new_name)
1223@@ -193,48 +193,48 @@
1224 'to_line gave non-bytes output %(line)r', line=lines[-1])
1225 lines.append(line)
1226 lines.sort()
1227- lines[0] = "format: %s\n" % FORMAT_1
1228- lines[1] = "parent: %s\n" % old_name
1229- lines[2] = "version: %s\n" % new_name
1230- lines[3] = "versioned_root: %s\n" % self._serialize_bool(
1231+ lines[0] = b"format: %s\n" % FORMAT_1
1232+ lines[1] = b"parent: %s\n" % old_name
1233+ lines[2] = b"version: %s\n" % new_name
1234+ lines[3] = b"versioned_root: %s\n" % self._serialize_bool(
1235 self._versioned_root)
1236- lines[4] = "tree_references: %s\n" % self._serialize_bool(
1237+ lines[4] = b"tree_references: %s\n" % self._serialize_bool(
1238 self._tree_references)
1239 return lines
1240
1241 def _serialize_bool(self, value):
1242 if value:
1243- return "true"
1244+ return b"true"
1245 else:
1246- return "false"
1247+ return b"false"
1248
1249 def _delta_item_to_line(self, delta_item, new_version):
1250 """Convert delta_item to a line."""
1251 oldpath, newpath, file_id, entry = delta_item
1252 if newpath is None:
1253 # delete
1254- oldpath_utf8 = '/' + oldpath.encode('utf8')
1255- newpath_utf8 = 'None'
1256- parent_id = ''
1257+ oldpath_utf8 = b'/' + oldpath.encode('utf8')
1258+ newpath_utf8 = b'None'
1259+ parent_id = b''
1260 last_modified = NULL_REVISION
1261- content = 'deleted\x00\x00'
1262+ content = b'deleted\x00\x00'
1263 else:
1264 if oldpath is None:
1265- oldpath_utf8 = 'None'
1266+ oldpath_utf8 = b'None'
1267 else:
1268- oldpath_utf8 = '/' + oldpath.encode('utf8')
1269+ oldpath_utf8 = b'/' + oldpath.encode('utf8')
1270 if newpath == '/':
1271 raise AssertionError(
1272 "Bad inventory delta: '/' is not a valid newpath "
1273 "(should be '') in delta item %r" % (delta_item,))
1274 # TODO: Test real-world utf8 cache hit rate. It may be a win.
1275- newpath_utf8 = '/' + newpath.encode('utf8')
1276+ newpath_utf8 = b'/' + newpath.encode('utf8')
1277 # Serialize None as ''
1278- parent_id = entry.parent_id or ''
1279+ parent_id = entry.parent_id or b''
1280 # Serialize unknown revisions as NULL_REVISION
1281 last_modified = entry.revision
1282 # special cases for /
1283- if newpath_utf8 == '/' and not self._versioned_root:
1284+ if newpath_utf8 == b'/' and not self._versioned_root:
1285 # This is an entry for the root, this inventory does not
1286 # support versioned roots. So this must be an unversioned
1287 # root, i.e. last_modified == new revision. Otherwise, this
1288@@ -251,7 +251,7 @@
1289 raise InventoryDeltaError(
1290 "no version for fileid %(fileid)r", fileid=file_id)
1291 content = self._entry_to_content[entry.kind](entry)
1292- return ("%s\x00%s\x00%s\x00%s\x00%s\x00%s\n" %
1293+ return (b"%s\x00%s\x00%s\x00%s\x00%s\x00%s\n" %
1294 (oldpath_utf8, newpath_utf8, file_id, parent_id, last_modified,
1295 content))
1296
1297@@ -270,9 +270,9 @@
1298 self._allow_tree_references = allow_tree_references
1299
1300 def _deserialize_bool(self, value):
1301- if value == "true":
1302+ if value == b"true":
1303 return True
1304- elif value == "false":
1305+ elif value == b"false":
1306 return False
1307 else:
1308 raise InventoryDeltaError("value %(val)r is not a bool", val=value)
1309@@ -289,24 +289,24 @@
1310 :return: (parent_id, new_id, versioned_root, tree_references,
1311 inventory_delta)
1312 """
1313- if bytes[-1:] != '\n':
1314- last_line = bytes.rsplit('\n', 1)[-1]
1315+ if bytes[-1:] != b'\n':
1316+ last_line = bytes.rsplit(b'\n', 1)[-1]
1317 raise InventoryDeltaError(
1318 'last line not empty: %(line)r', line=last_line)
1319- lines = bytes.split('\n')[:-1] # discard the last empty line
1320- if not lines or lines[0] != 'format: %s' % FORMAT_1:
1321+ lines = bytes.split(b'\n')[:-1] # discard the last empty line
1322+ if not lines or lines[0] != b'format: %s' % FORMAT_1:
1323 raise InventoryDeltaError(
1324 'unknown format %(line)r', line=lines[0:1])
1325- if len(lines) < 2 or not lines[1].startswith('parent: '):
1326+ if len(lines) < 2 or not lines[1].startswith(b'parent: '):
1327 raise InventoryDeltaError('missing parent: marker')
1328 delta_parent_id = lines[1][8:]
1329- if len(lines) < 3 or not lines[2].startswith('version: '):
1330+ if len(lines) < 3 or not lines[2].startswith(b'version: '):
1331 raise InventoryDeltaError('missing version: marker')
1332 delta_version_id = lines[2][9:]
1333- if len(lines) < 4 or not lines[3].startswith('versioned_root: '):
1334+ if len(lines) < 4 or not lines[3].startswith(b'versioned_root: '):
1335 raise InventoryDeltaError('missing versioned_root: marker')
1336 delta_versioned_root = self._deserialize_bool(lines[3][16:])
1337- if len(lines) < 5 or not lines[4].startswith('tree_references: '):
1338+ if len(lines) < 5 or not lines[4].startswith(b'tree_references: '):
1339 raise InventoryDeltaError('missing tree_references: marker')
1340 delta_tree_references = self._deserialize_bool(lines[4][17:])
1341 if (not self._allow_versioned_root and delta_versioned_root):
1342@@ -318,24 +318,24 @@
1343 next(line_iter)
1344 for line in line_iter:
1345 (oldpath_utf8, newpath_utf8, file_id, parent_id, last_modified,
1346- content) = line.split('\x00', 5)
1347+ content) = line.split(b'\x00', 5)
1348 parent_id = parent_id or None
1349 if file_id in seen_ids:
1350 raise InventoryDeltaError(
1351 "duplicate file id %(fileid)r", fileid=file_id)
1352 seen_ids.add(file_id)
1353- if (newpath_utf8 == '/' and not delta_versioned_root and
1354+ if (newpath_utf8 == b'/' and not delta_versioned_root and
1355 last_modified != delta_version_id):
1356 # Delta claims to be not have a versioned root, yet here's
1357 # a root entry with a non-default version.
1358 raise InventoryDeltaError(
1359 "Versioned root found: %(line)r", line=line)
1360- elif newpath_utf8 != 'None' and last_modified[-1] == ':':
1361+ elif newpath_utf8 != b'None' and last_modified[-1:] == b':':
1362 # Deletes have a last_modified of null:, but otherwise special
1363 # revision ids should not occur.
1364 raise InventoryDeltaError(
1365 'special revisionid found: %(line)r', line=line)
1366- if content.startswith('tree\x00'):
1367+ if content.startswith(b'tree\x00'):
1368 if delta_tree_references is False:
1369 raise InventoryDeltaError(
1370 "Tree reference found (but header said "
1371@@ -343,18 +343,18 @@
1372 elif not self._allow_tree_references:
1373 raise IncompatibleInventoryDelta(
1374 "Tree reference not allowed")
1375- if oldpath_utf8 == 'None':
1376+ if oldpath_utf8 == b'None':
1377 oldpath = None
1378- elif oldpath_utf8[:1] != '/':
1379+ elif oldpath_utf8[:1] != b'/':
1380 raise InventoryDeltaError(
1381 "oldpath invalid (does not start with /): %(path)r",
1382 path=oldpath_utf8)
1383 else:
1384 oldpath_utf8 = oldpath_utf8[1:]
1385 oldpath = oldpath_utf8.decode('utf8')
1386- if newpath_utf8 == 'None':
1387+ if newpath_utf8 == b'None':
1388 newpath = None
1389- elif newpath_utf8[:1] != '/':
1390+ elif newpath_utf8[:1] != b'/':
1391 raise InventoryDeltaError(
1392 "newpath invalid (does not start with /): %(path)r",
1393 path=newpath_utf8)
1394@@ -362,8 +362,8 @@
1395 # Trim leading slash
1396 newpath_utf8 = newpath_utf8[1:]
1397 newpath = newpath_utf8.decode('utf8')
1398- content_tuple = tuple(content.split('\x00'))
1399- if content_tuple[0] == 'deleted':
1400+ content_tuple = tuple(content.split(b'\x00'))
1401+ if content_tuple[0] == b'deleted':
1402 entry = None
1403 else:
1404 entry = _parse_entry(
1405@@ -376,10 +376,10 @@
1406
1407 def _parse_entry(path, file_id, parent_id, last_modified, content):
1408 entry_factory = {
1409- 'dir': _dir_to_entry,
1410- 'file': _file_to_entry,
1411- 'link': _link_to_entry,
1412- 'tree': _tree_to_entry,
1413+ b'dir': _dir_to_entry,
1414+ b'file': _file_to_entry,
1415+ b'link': _link_to_entry,
1416+ b'tree': _tree_to_entry,
1417 }
1418 kind = content[0]
1419 if path.startswith('/'):
1420
1421=== modified file 'breezy/osutils.py'
1422--- breezy/osutils.py 2017-06-10 01:57:00 +0000
1423+++ breezy/osutils.py 2017-06-11 01:47:17 +0000
1424@@ -1209,11 +1209,11 @@
1425 # separators
1426 # 3) '\xa0' isn't unicode safe since it is >128.
1427
1428- # This should *not* be a unicode set of characters in case the source
1429- # string is not a Unicode string. We can auto-up-cast the characters since
1430- # they are ascii, but we don't want to auto-up-cast the string in case it
1431- # is utf-8
1432- for ch in ' \t\n\r\v\f':
1433+ if isinstance(s, str):
1434+ ws = ' \t\n\r\v\f'
1435+ else:
1436+ ws = (b' ', b'\t', b'\n', b'\r', b'\v', b'\f')
1437+ for ch in ws:
1438 if ch in s:
1439 return True
1440 else:
1441@@ -1370,7 +1370,7 @@
1442 If it is a str, it is returned.
1443 If it is Unicode, it is encoded into a utf-8 string.
1444 """
1445- if isinstance(unicode_or_utf8_string, str):
1446+ if isinstance(unicode_or_utf8_string, bytes):
1447 # TODO: jam 20070209 This is overkill, and probably has an impact on
1448 # performance if we are dealing with lots of apis that want a
1449 # utf-8 revision id
1450@@ -1443,13 +1443,13 @@
1451 can be accessed by that path.
1452 """
1453
1454- return unicodedata.normalize('NFC', unicode(path)), True
1455+ return unicodedata.normalize('NFC', text_type(path)), True
1456
1457
1458 def _inaccessible_normalized_filename(path):
1459 __doc__ = _accessible_normalized_filename.__doc__
1460
1461- normalized = unicodedata.normalize('NFC', unicode(path))
1462+ normalized = unicodedata.normalize('NFC', text_type(path))
1463 return normalized, normalized == path
1464
1465
1466@@ -1878,10 +1878,10 @@
1467 _kind_from_mode = file_kind_from_stat_mode
1468
1469 if prefix:
1470- relprefix = prefix + '/'
1471+ relprefix = prefix + b'/'
1472 else:
1473- relprefix = ''
1474- top_slash = top + u'/'
1475+ relprefix = b''
1476+ top_slash = top + '/'
1477
1478 dirblock = []
1479 append = dirblock.append
1480
1481=== modified file 'breezy/pack.py'
1482--- breezy/pack.py 2017-05-25 01:35:55 +0000
1483+++ breezy/pack.py 2017-06-11 01:47:17 +0000
1484@@ -73,28 +73,28 @@
1485
1486 def begin(self):
1487 """Return the bytes to begin a container."""
1488- return FORMAT_ONE + "\n"
1489+ return FORMAT_ONE.encode("ascii") + b"\n"
1490
1491 def end(self):
1492 """Return the bytes to finish a container."""
1493- return "E"
1494+ return b"E"
1495
1496 def bytes_header(self, length, names):
1497 """Return the header for a Bytes record."""
1498 # Kind marker
1499- byte_sections = ["B"]
1500+ byte_sections = [b"B"]
1501 # Length
1502- byte_sections.append(str(length) + "\n")
1503+ byte_sections.append(b"%d\n" % (length,))
1504 # Names
1505 for name_tuple in names:
1506 # Make sure we're writing valid names. Note that we will leave a
1507 # half-written record if a name is bad!
1508 for name in name_tuple:
1509 _check_name(name)
1510- byte_sections.append('\x00'.join(name_tuple) + "\n")
1511+ byte_sections.append(b'\x00'.join(name_tuple) + b"\n")
1512 # End of headers
1513- byte_sections.append("\n")
1514- return ''.join(byte_sections)
1515+ byte_sections.append(b"\n")
1516+ return b''.join(byte_sections)
1517
1518 def bytes_record(self, bytes, names):
1519 """Return the bytes for a Bytes record with the given name and
1520
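Note on the bytes_header hunk: b"%d\n" % (length,) relies on bytes %-interpolation (PEP 461), available from Python 3.5 and on Python 2 str, so the length line can be built without a str()/encode round trip. A one-line check:

    length = 1234
    header = b"B" + (b"%d\n" % (length,))   # kind marker plus length line
    assert header == b"B1234\n"
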
1521=== modified file 'breezy/repofmt/groupcompress_repo.py'
1522--- breezy/repofmt/groupcompress_repo.py 2017-06-10 00:52:37 +0000
1523+++ breezy/repofmt/groupcompress_repo.py 2017-06-11 01:47:17 +0000
1524@@ -147,15 +147,15 @@
1525 # robertc says- this is a closure rather than a method on the object
1526 # so that the variables are locals, and faster than accessing object
1527 # members.
1528- def _write_data(bytes, flush=False, _buffer=self._buffer,
1529+ def _write_data(data, flush=False, _buffer=self._buffer,
1530 _write=self.write_stream.write, _update=self._hash.update):
1531- _buffer[0].append(bytes)
1532- _buffer[1] += len(bytes)
1533+ _buffer[0].append(data)
1534+ _buffer[1] += len(data)
1535 # buffer cap
1536 if _buffer[1] > self._cache_limit or flush:
1537- bytes = ''.join(_buffer[0])
1538- _write(bytes)
1539- _update(bytes)
1540+ data = b''.join(_buffer[0])
1541+ _write(data)
1542+ _update(data)
1543 _buffer[:] = [[], 0]
1544 # expose this on self, for the occasion when clients want to add data.
1545 self._write_data = _write_data
1546@@ -905,7 +905,7 @@
1547 ' no new_path %r' % (file_id,))
1548 if new_path == '':
1549 new_inv.root_id = file_id
1550- parent_id_basename_key = StaticTuple('', '').intern()
1551+ parent_id_basename_key = StaticTuple(b'', b'').intern()
1552 else:
1553 utf8_entry_name = entry.name.encode('utf-8')
1554 parent_id_basename_key = StaticTuple(entry.parent_id,
1555
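Note on the _write_data hunk: besides switching the join to b'', renaming the argument from bytes to data stops it shadowing the builtin type, which other hunks in this branch now rely on for isinstance() checks. A self-contained sketch of the buffering pattern (tiny cache limit and demo sinks, not the pack writer itself):

    import hashlib

    _buffer = [[], 0]
    _hash = hashlib.sha1()
    written = []

    def _write_data(data, flush=False, _buffer=_buffer,
                    _write=written.append, _update=_hash.update):
        _buffer[0].append(data)
        _buffer[1] += len(data)
        if _buffer[1] > 4 or flush:        # buffer cap, 4 bytes for the demo
            data = b''.join(_buffer[0])    # chunks are bytes, so join on b''
            _write(data)
            _update(data)
            _buffer[:] = [[], 0]

    _write_data(b'abc')
    _write_data(b'def', flush=True)
    assert written == [b'abcdef']
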
1556=== modified file 'breezy/repofmt/pack_repo.py'
1557--- breezy/repofmt/pack_repo.py 2017-06-10 12:56:18 +0000
1558+++ breezy/repofmt/pack_repo.py 2017-06-11 01:47:17 +0000
1559@@ -419,7 +419,7 @@
1560 _buffer[1] += len(bytes)
1561 # buffer cap
1562 if _buffer[1] > self._cache_limit or flush:
1563- bytes = ''.join(_buffer[0])
1564+ bytes = b''.join(_buffer[0])
1565 _write(bytes)
1566 _update(bytes)
1567 _buffer[:] = [[], 0]
1568@@ -524,7 +524,7 @@
1569 def flush(self):
1570 """Flush any current data."""
1571 if self._buffer[1]:
1572- bytes = ''.join(self._buffer[0])
1573+ bytes = b''.join(self._buffer[0])
1574 self.write_stream.write(bytes)
1575 self._hash.update(bytes)
1576 self._buffer[:] = [[], 0]
1577@@ -1987,7 +1987,7 @@
1578 length), where the index field is the write_index object supplied
1579 to the PackAccess object.
1580 """
1581- if not isinstance(raw_data, str):
1582+ if not isinstance(raw_data, bytes):
1583 raise AssertionError(
1584 'data must be plain bytes was %s' % type(raw_data))
1585 result = []
1586
1587=== modified file 'breezy/repository.py'
1588--- breezy/repository.py 2017-06-10 00:52:37 +0000
1589+++ breezy/repository.py 2017-06-11 01:47:17 +0000
1590@@ -46,6 +46,7 @@
1591 from .inter import InterObject
1592 from .lock import _RelockDebugMixin, LogicalLockResult
1593 from .sixish import (
1594+ text_type,
1595 viewitems,
1596 viewvalues,
1597 )
1598@@ -145,7 +146,7 @@
1599 for key, value in viewitems(revprops):
1600 # We know that the XML serializers do not round trip '\r'
1601 # correctly, so refuse to accept them
1602- if not isinstance(value, basestring):
1603+ if not isinstance(value, (text_type, str)):
1604 raise ValueError('revision property (%s) is not a valid'
1605 ' (unicode) string: %r' % (key, value))
1606 self._validate_unicode_text(value,
1607
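Note on the revprops hunk: basestring does not exist on Python 3, so the check is rebuilt from text_type. A sketch of the alias this assumes, mirroring what a six-style shim provides (the exact sixish definition is not shown in this diff):

    import sys

    if sys.version_info[0] >= 3:
        text_type = str                  # (text_type, str) is just str twice
    else:
        text_type = unicode  # noqa: F821 -- Python 2 only; pair ~= basestring

    value = u'a revision property'
    if not isinstance(value, (text_type, str)):
        raise ValueError('revision property is not a valid (unicode) string')
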
1608=== modified file 'breezy/revision.py'
1609--- breezy/revision.py 2017-06-10 01:57:00 +0000
1610+++ breezy/revision.py 2017-06-11 01:47:17 +0000
1611@@ -26,8 +26,11 @@
1612 """)
1613 from . import (
1614 errors,
1615- )
1616-from .osutils import contains_whitespace
1617+ osutils,
1618+ )
1619+from .sixish import (
1620+ text_type,
1621+ )
1622
1623 NULL_REVISION=b"null:"
1624 CURRENT_REVISION=b"current:"
1625@@ -86,9 +89,11 @@
1626 def _check_properties(self):
1627 """Verify that all revision properties are OK."""
1628 for name, value in self.properties.items():
1629- if not isinstance(name, basestring) or contains_whitespace(name):
1630+ # GZ 2017-06-10: What sort of string are properties exactly?
1631+ not_text = not isinstance(name, (text_type, str))
1632+ if not_text or osutils.contains_whitespace(name):
1633 raise ValueError("invalid property name %r" % name)
1634- if not isinstance(value, basestring):
1635+ if not isinstance(value, (text_type, bytes)):
1636 raise ValueError("invalid property value %r for %r" %
1637 (value, name))
1638
1639@@ -205,7 +210,7 @@
1640
1641 :return: True if the revision is reserved, False otherwise
1642 """
1643- return isinstance(revision_id, basestring) and revision_id.endswith(':')
1644+ return isinstance(revision_id, bytes) and revision_id.endswith(b':')
1645
1646
1647 def check_not_reserved_id(revision_id):
1648
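Note on the is_reserved_id hunk: reserved revision ids stay bytes end to end, so both the isinstance check and the suffix test move to bytes. A behavioural sketch:

    NULL_REVISION = b'null:'

    def is_reserved_id_demo(revision_id):
        # reserved ids are byte strings ending in a colon
        return isinstance(revision_id, bytes) and revision_id.endswith(b':')

    assert is_reserved_id_demo(NULL_REVISION)
    assert not is_reserved_id_demo(b'rev-1')
    assert not is_reserved_id_demo(u'null:')   # text is never a reserved id
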
1649=== modified file 'breezy/sixish.py'
1650--- breezy/sixish.py 2017-06-05 01:55:02 +0000
1651+++ breezy/sixish.py 2017-06-11 01:47:17 +0000
1652@@ -46,3 +46,13 @@
1653 from StringIO import StringIO
1654 from future_builtins import zip, map
1655 range = xrange
1656+
1657+
1658+# GZ 2017-06-10: Work out if interning bits of inventory is behaviour we want
1659+# to retain outside of StaticTuple; if so, it needs implementing for Python 3.
1660+if PY3:
1661+ def bytesintern(b):
1662+ """Dummy intern() function."""
1663+ return b
1664+else:
1665+ bytesintern = intern
1666
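Note on the bytesintern shim: Python 3 moved interning to sys.intern, which only accepts str, so for bytes the shim is deliberately a no-op until the question in the comment above is settled. A sketch of the same dispatch:

    import sys

    if sys.version_info[0] >= 3:
        def bytesintern(b):
            return b          # sys.intern(b'...') raises TypeError
    else:
        bytesintern = intern  # noqa: F821 -- builtin on Python 2

    assert bytesintern(b'file-id') == b'file-id'
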
1667=== modified file 'breezy/tests/test__chk_map.py'
1668--- breezy/tests/test__chk_map.py 2017-05-23 14:08:03 +0000
1669+++ breezy/tests/test__chk_map.py 2017-06-11 01:47:17 +0000
1670@@ -42,18 +42,18 @@
1671 self.assertEqual(expected, actual, 'actual: %r' % (actual,))
1672
1673 def test_simple_16(self):
1674- self.assertSearchKey16('8C736521', stuple('foo',))
1675- self.assertSearchKey16('8C736521\x008C736521', stuple('foo', 'foo'))
1676- self.assertSearchKey16('8C736521\x0076FF8CAA', stuple('foo', 'bar'))
1677- self.assertSearchKey16('ED82CD11', stuple('abcd',))
1678+ self.assertSearchKey16(b'8C736521', stuple('foo',))
1679+ self.assertSearchKey16(b'8C736521\x008C736521', stuple('foo', 'foo'))
1680+ self.assertSearchKey16(b'8C736521\x0076FF8CAA', stuple('foo', 'bar'))
1681+ self.assertSearchKey16(b'ED82CD11', stuple('abcd',))
1682
1683 def test_simple_255(self):
1684- self.assertSearchKey255('\x8cse!', stuple('foo',))
1685- self.assertSearchKey255('\x8cse!\x00\x8cse!', stuple('foo', 'foo'))
1686- self.assertSearchKey255('\x8cse!\x00v\xff\x8c\xaa', stuple('foo', 'bar'))
1687+ self.assertSearchKey255(b'\x8cse!', stuple('foo',))
1688+ self.assertSearchKey255(b'\x8cse!\x00\x8cse!', stuple('foo', 'foo'))
1689+ self.assertSearchKey255(b'\x8cse!\x00v\xff\x8c\xaa', stuple('foo', 'bar'))
1690 # The standard mapping for these would include '\n', so it should be
1691 # mapped to '_'
1692- self.assertSearchKey255('\xfdm\x93_\x00P_\x1bL', stuple('<', 'V'))
1693+ self.assertSearchKey255(b'\xfdm\x93_\x00P_\x1bL', stuple('<', 'V'))
1694
1695 def test_255_does_not_include_newline(self):
1696 # When mapping via _search_key_255, we should never have the '\n'
1697@@ -64,7 +64,7 @@
1698 chars_used.update(search_key)
1699 all_chars = {chr(x) for x in range(256)}
1700 unused_chars = all_chars.symmetric_difference(chars_used)
1701- self.assertEqual(set('\n'), unused_chars)
1702+ self.assertEqual(set(b'\n'), unused_chars)
1703
1704
1705 class TestDeserialiseLeafNode(tests.TestCase):
1706@@ -73,94 +73,94 @@
1707
1708 def assertDeserialiseErrors(self, text):
1709 self.assertRaises((ValueError, IndexError),
1710- self.module._deserialise_leaf_node, text, 'not-a-real-sha')
1711+ self.module._deserialise_leaf_node, text, b'not-a-real-sha')
1712
1713 def test_raises_on_non_leaf(self):
1714- self.assertDeserialiseErrors('')
1715- self.assertDeserialiseErrors('short\n')
1716- self.assertDeserialiseErrors('chknotleaf:\n')
1717- self.assertDeserialiseErrors('chkleaf:x\n')
1718- self.assertDeserialiseErrors('chkleaf:\n')
1719- self.assertDeserialiseErrors('chkleaf:\nnotint\n')
1720- self.assertDeserialiseErrors('chkleaf:\n10\n')
1721- self.assertDeserialiseErrors('chkleaf:\n10\n256\n')
1722- self.assertDeserialiseErrors('chkleaf:\n10\n256\n10\n')
1723+ self.assertDeserialiseErrors(b'')
1724+ self.assertDeserialiseErrors(b'short\n')
1725+ self.assertDeserialiseErrors(b'chknotleaf:\n')
1726+ self.assertDeserialiseErrors(b'chkleaf:x\n')
1727+ self.assertDeserialiseErrors(b'chkleaf:\n')
1728+ self.assertDeserialiseErrors(b'chkleaf:\nnotint\n')
1729+ self.assertDeserialiseErrors(b'chkleaf:\n10\n')
1730+ self.assertDeserialiseErrors(b'chkleaf:\n10\n256\n')
1731+ self.assertDeserialiseErrors(b'chkleaf:\n10\n256\n10\n')
1732
1733 def test_deserialise_empty(self):
1734 node = self.module._deserialise_leaf_node(
1735- "chkleaf:\n10\n1\n0\n\n", stuple("sha1:1234",))
1736+ b"chkleaf:\n10\n1\n0\n\n", stuple(b"sha1:1234",))
1737 self.assertEqual(0, len(node))
1738 self.assertEqual(10, node.maximum_size)
1739- self.assertEqual(("sha1:1234",), node.key())
1740+ self.assertEqual((b"sha1:1234",), node.key())
1741 self.assertIsInstance(node.key(), StaticTuple)
1742 self.assertIs(None, node._search_prefix)
1743 self.assertIs(None, node._common_serialised_prefix)
1744
1745 def test_deserialise_items(self):
1746 node = self.module._deserialise_leaf_node(
1747- "chkleaf:\n0\n1\n2\n\nfoo bar\x001\nbaz\nquux\x001\nblarh\n",
1748- ("sha1:1234",))
1749+ b"chkleaf:\n0\n1\n2\n\nfoo bar\x001\nbaz\nquux\x001\nblarh\n",
1750+ (b"sha1:1234",))
1751 self.assertEqual(2, len(node))
1752- self.assertEqual([(("foo bar",), "baz"), (("quux",), "blarh")],
1753+ self.assertEqual([((b"foo bar",), b"baz"), ((b"quux",), b"blarh")],
1754 sorted(node.iteritems(None)))
1755
1756 def test_deserialise_item_with_null_width_1(self):
1757 node = self.module._deserialise_leaf_node(
1758- "chkleaf:\n0\n1\n2\n\nfoo\x001\nbar\x00baz\nquux\x001\nblarh\n",
1759- ("sha1:1234",))
1760+ b"chkleaf:\n0\n1\n2\n\nfoo\x001\nbar\x00baz\nquux\x001\nblarh\n",
1761+ (b"sha1:1234",))
1762 self.assertEqual(2, len(node))
1763- self.assertEqual([(("foo",), "bar\x00baz"), (("quux",), "blarh")],
1764+ self.assertEqual([((b"foo",), b"bar\x00baz"), ((b"quux",), b"blarh")],
1765 sorted(node.iteritems(None)))
1766
1767 def test_deserialise_item_with_null_width_2(self):
1768 node = self.module._deserialise_leaf_node(
1769- "chkleaf:\n0\n2\n2\n\nfoo\x001\x001\nbar\x00baz\n"
1770- "quux\x00\x001\nblarh\n",
1771- ("sha1:1234",))
1772+ b"chkleaf:\n0\n2\n2\n\nfoo\x001\x001\nbar\x00baz\n"
1773+ b"quux\x00\x001\nblarh\n",
1774+ (b"sha1:1234",))
1775 self.assertEqual(2, len(node))
1776- self.assertEqual([(("foo", "1"), "bar\x00baz"), (("quux", ""), "blarh")],
1777+ self.assertEqual([((b"foo", "1"), b"bar\x00baz"), ((b"quux", ""), b"blarh")],
1778 sorted(node.iteritems(None)))
1779
1780 def test_iteritems_selected_one_of_two_items(self):
1781 node = self.module._deserialise_leaf_node(
1782- "chkleaf:\n0\n1\n2\n\nfoo bar\x001\nbaz\nquux\x001\nblarh\n",
1783- ("sha1:1234",))
1784+ b"chkleaf:\n0\n1\n2\n\nfoo bar\x001\nbaz\nquux\x001\nblarh\n",
1785+ (b"sha1:1234",))
1786 self.assertEqual(2, len(node))
1787- self.assertEqual([(("quux",), "blarh")],
1788- sorted(node.iteritems(None, [("quux",), ("qaz",)])))
1789+ self.assertEqual([((b"quux",), b"blarh")],
1790+ sorted(node.iteritems(None, [(b"quux",), (b"qaz",)])))
1791
1792 def test_deserialise_item_with_common_prefix(self):
1793 node = self.module._deserialise_leaf_node(
1794- "chkleaf:\n0\n2\n2\nfoo\x00\n1\x001\nbar\x00baz\n2\x001\nblarh\n",
1795- ("sha1:1234",))
1796+ b"chkleaf:\n0\n2\n2\nfoo\x00\n1\x001\nbar\x00baz\n2\x001\nblarh\n",
1797+ (b"sha1:1234",))
1798 self.assertEqual(2, len(node))
1799- self.assertEqual([(("foo", "1"), "bar\x00baz"), (("foo", "2"), "blarh")],
1800+ self.assertEqual([((b"foo", b"1"), b"bar\x00baz"), ((b"foo", b"2"), b"blarh")],
1801 sorted(node.iteritems(None)))
1802 self.assertIs(chk_map._unknown, node._search_prefix)
1803- self.assertEqual('foo\x00', node._common_serialised_prefix)
1804+ self.assertEqual(b'foo\x00', node._common_serialised_prefix)
1805
1806 def test_deserialise_multi_line(self):
1807 node = self.module._deserialise_leaf_node(
1808- "chkleaf:\n0\n2\n2\nfoo\x00\n1\x002\nbar\nbaz\n2\x002\nblarh\n\n",
1809- ("sha1:1234",))
1810+ b"chkleaf:\n0\n2\n2\nfoo\x00\n1\x002\nbar\nbaz\n2\x002\nblarh\n\n",
1811+ (b"sha1:1234",))
1812 self.assertEqual(2, len(node))
1813- self.assertEqual([(("foo", "1"), "bar\nbaz"),
1814- (("foo", "2"), "blarh\n"),
1815+ self.assertEqual([((b"foo", b"1"), b"bar\nbaz"),
1816+ ((b"foo", b"2"), b"blarh\n"),
1817 ], sorted(node.iteritems(None)))
1818 self.assertIs(chk_map._unknown, node._search_prefix)
1819- self.assertEqual('foo\x00', node._common_serialised_prefix)
1820+ self.assertEqual(b'foo\x00', node._common_serialised_prefix)
1821
1822 def test_key_after_map(self):
1823 node = self.module._deserialise_leaf_node(
1824- "chkleaf:\n10\n1\n0\n\n", ("sha1:1234",))
1825- node.map(None, ("foo bar",), "baz quux")
1826+ b"chkleaf:\n10\n1\n0\n\n", (b"sha1:1234",))
1827+ node.map(None, (b"foo bar",), b"baz quux")
1828 self.assertEqual(None, node.key())
1829
1830 def test_key_after_unmap(self):
1831 node = self.module._deserialise_leaf_node(
1832- "chkleaf:\n0\n1\n2\n\nfoo bar\x001\nbaz\nquux\x001\nblarh\n",
1833- ("sha1:1234",))
1834- node.unmap(None, ("foo bar",))
1835+ b"chkleaf:\n0\n1\n2\n\nfoo bar\x001\nbaz\nquux\x001\nblarh\n",
1836+ (b"sha1:1234",))
1837+ node.unmap(None, (b"foo bar",))
1838 self.assertEqual(None, node.key())
1839
1840
1841@@ -171,71 +171,73 @@
1842 def assertDeserialiseErrors(self, text):
1843 self.assertRaises((ValueError, IndexError),
1844 self.module._deserialise_internal_node, text,
1845- stuple('not-a-real-sha',))
1846+ stuple(b'not-a-real-sha',))
1847
1848 def test_raises_on_non_internal(self):
1849- self.assertDeserialiseErrors('')
1850- self.assertDeserialiseErrors('short\n')
1851- self.assertDeserialiseErrors('chknotnode:\n')
1852- self.assertDeserialiseErrors('chknode:x\n')
1853- self.assertDeserialiseErrors('chknode:\n')
1854- self.assertDeserialiseErrors('chknode:\nnotint\n')
1855- self.assertDeserialiseErrors('chknode:\n10\n')
1856- self.assertDeserialiseErrors('chknode:\n10\n256\n')
1857- self.assertDeserialiseErrors('chknode:\n10\n256\n10\n')
1858+ self.assertDeserialiseErrors(b'')
1859+ self.assertDeserialiseErrors(b'short\n')
1860+ self.assertDeserialiseErrors(b'chknotnode:\n')
1861+ self.assertDeserialiseErrors(b'chknode:x\n')
1862+ self.assertDeserialiseErrors(b'chknode:\n')
1863+ self.assertDeserialiseErrors(b'chknode:\nnotint\n')
1864+ self.assertDeserialiseErrors(b'chknode:\n10\n')
1865+ self.assertDeserialiseErrors(b'chknode:\n10\n256\n')
1866+ self.assertDeserialiseErrors(b'chknode:\n10\n256\n10\n')
1867 # no trailing newline
1868- self.assertDeserialiseErrors('chknode:\n10\n256\n0\n1\nfo')
1869+ self.assertDeserialiseErrors(b'chknode:\n10\n256\n0\n1\nfo')
1870
1871 def test_deserialise_one(self):
1872 node = self.module._deserialise_internal_node(
1873- "chknode:\n10\n1\n1\n\na\x00sha1:abcd\n", stuple('sha1:1234',))
1874+ b"chknode:\n10\n1\n1\n\na\x00sha1:abcd\n", stuple(b'sha1:1234',))
1875 self.assertIsInstance(node, chk_map.InternalNode)
1876 self.assertEqual(1, len(node))
1877 self.assertEqual(10, node.maximum_size)
1878- self.assertEqual(("sha1:1234",), node.key())
1879- self.assertEqual('', node._search_prefix)
1880- self.assertEqual({'a': ('sha1:abcd',)}, node._items)
1881+ self.assertEqual((b"sha1:1234",), node.key())
1882+ self.assertEqual(b'', node._search_prefix)
1883+ self.assertEqual({b'a': (b'sha1:abcd',)}, node._items)
1884
1885 def test_deserialise_with_prefix(self):
1886 node = self.module._deserialise_internal_node(
1887- "chknode:\n10\n1\n1\npref\na\x00sha1:abcd\n", stuple('sha1:1234',))
1888+ b"chknode:\n10\n1\n1\npref\na\x00sha1:abcd\n",
1889+ stuple(b'sha1:1234',))
1890 self.assertIsInstance(node, chk_map.InternalNode)
1891 self.assertEqual(1, len(node))
1892 self.assertEqual(10, node.maximum_size)
1893- self.assertEqual(("sha1:1234",), node.key())
1894- self.assertEqual('pref', node._search_prefix)
1895- self.assertEqual({'prefa': ('sha1:abcd',)}, node._items)
1896+ self.assertEqual((b"sha1:1234",), node.key())
1897+ self.assertEqual(b'pref', node._search_prefix)
1898+ self.assertEqual({b'prefa': (b'sha1:abcd',)}, node._items)
1899
1900 node = self.module._deserialise_internal_node(
1901- "chknode:\n10\n1\n1\npref\n\x00sha1:abcd\n", stuple('sha1:1234',))
1902+ b"chknode:\n10\n1\n1\npref\n\x00sha1:abcd\n",
1903+ stuple(b'sha1:1234',))
1904 self.assertIsInstance(node, chk_map.InternalNode)
1905 self.assertEqual(1, len(node))
1906 self.assertEqual(10, node.maximum_size)
1907- self.assertEqual(("sha1:1234",), node.key())
1908- self.assertEqual('pref', node._search_prefix)
1909- self.assertEqual({'pref': ('sha1:abcd',)}, node._items)
1910+ self.assertEqual((b"sha1:1234",), node.key())
1911+ self.assertEqual(b'pref', node._search_prefix)
1912+ self.assertEqual({b'pref': (b'sha1:abcd',)}, node._items)
1913
1914 def test_deserialise_pref_with_null(self):
1915 node = self.module._deserialise_internal_node(
1916- "chknode:\n10\n1\n1\npref\x00fo\n\x00sha1:abcd\n",
1917- stuple('sha1:1234',))
1918+ b"chknode:\n10\n1\n1\npref\x00fo\n\x00sha1:abcd\n",
1919+ stuple(b'sha1:1234',))
1920 self.assertIsInstance(node, chk_map.InternalNode)
1921 self.assertEqual(1, len(node))
1922 self.assertEqual(10, node.maximum_size)
1923- self.assertEqual(("sha1:1234",), node.key())
1924- self.assertEqual('pref\x00fo', node._search_prefix)
1925- self.assertEqual({'pref\x00fo': ('sha1:abcd',)}, node._items)
1926+ self.assertEqual((b"sha1:1234",), node.key())
1927+ self.assertEqual(b'pref\x00fo', node._search_prefix)
1928+ self.assertEqual({b'pref\x00fo': (b'sha1:abcd',)}, node._items)
1929
1930 def test_deserialise_with_null_pref(self):
1931 node = self.module._deserialise_internal_node(
1932- "chknode:\n10\n1\n1\npref\x00fo\n\x00\x00sha1:abcd\n",
1933- stuple('sha1:1234',))
1934+ b"chknode:\n10\n1\n1\npref\x00fo\n\x00\x00sha1:abcd\n",
1935+ stuple(b'sha1:1234',))
1936 self.assertIsInstance(node, chk_map.InternalNode)
1937 self.assertEqual(1, len(node))
1938 self.assertEqual(10, node.maximum_size)
1939- self.assertEqual(("sha1:1234",), node.key())
1940- self.assertEqual('pref\x00fo', node._search_prefix)
1941- self.assertEqual({'pref\x00fo\x00': ('sha1:abcd',)}, node._items)
1942+ self.assertEqual((b"sha1:1234",), node.key())
1943+ self.assertEqual(b'pref\x00fo', node._search_prefix)
1944+ self.assertEqual({b'pref\x00fo\x00': (b'sha1:abcd',)}, node._items)
1945
1946
1947 class Test_BytesToTextKey(tests.TestCase):
1948@@ -251,29 +253,29 @@
1949 self.assertRaises(Exception, self.module._bytes_to_text_key, bytes)
1950
1951 def test_file(self):
1952- self.assertBytesToTextKey(('file-id', 'revision-id'),
1953- 'file: file-id\nparent-id\nname\nrevision-id\n'
1954- 'da39a3ee5e6b4b0d3255bfef95601890afd80709\n100\nN')
1955+ self.assertBytesToTextKey((b'file-id', b'revision-id'),
1956+ b'file: file-id\nparent-id\nname\nrevision-id\n'
1957+ b'da39a3ee5e6b4b0d3255bfef95601890afd80709\n100\nN')
1958
1959 def test_invalid_no_kind(self):
1960 self.assertBytesToTextKeyRaises(
1961- 'file file-id\nparent-id\nname\nrevision-id\n'
1962- 'da39a3ee5e6b4b0d3255bfef95601890afd80709\n100\nN')
1963+ b'file file-id\nparent-id\nname\nrevision-id\n'
1964+ b'da39a3ee5e6b4b0d3255bfef95601890afd80709\n100\nN')
1965
1966 def test_invalid_no_space(self):
1967 self.assertBytesToTextKeyRaises(
1968- 'file:file-id\nparent-id\nname\nrevision-id\n'
1969- 'da39a3ee5e6b4b0d3255bfef95601890afd80709\n100\nN')
1970+ b'file:file-id\nparent-id\nname\nrevision-id\n'
1971+ b'da39a3ee5e6b4b0d3255bfef95601890afd80709\n100\nN')
1972
1973 def test_invalid_too_short_file_id(self):
1974- self.assertBytesToTextKeyRaises('file:file-id')
1975+ self.assertBytesToTextKeyRaises(b'file:file-id')
1976
1977 def test_invalid_too_short_parent_id(self):
1978- self.assertBytesToTextKeyRaises('file:file-id\nparent-id')
1979+ self.assertBytesToTextKeyRaises(b'file:file-id\nparent-id')
1980
1981 def test_invalid_too_short_name(self):
1982- self.assertBytesToTextKeyRaises('file:file-id\nparent-id\nname')
1983+ self.assertBytesToTextKeyRaises(b'file:file-id\nparent-id\nname')
1984
1985 def test_dir(self):
1986- self.assertBytesToTextKey(('dir-id', 'revision-id'),
1987- 'dir: dir-id\nparent-id\nname\nrevision-id')
1988+ self.assertBytesToTextKey((b'dir-id', b'revision-id'),
1989+ b'dir: dir-id\nparent-id\nname\nrevision-id')
1990
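Note on the search-key tests above: the expected values are now bytes because the 16-way search key is built from the crc32 of each key element, rendered as eight uppercase hex bytes and NUL-joined. A sketch that reproduces the asserted values (bytes %-formatting again needs Python 3.5+):

    import zlib

    def search_key_16_demo(key):
        # the mask keeps crc32 unsigned on Python 2 as well
        return b'\x00'.join(
            [b'%08X' % (zlib.crc32(bit) & 0xFFFFFFFF) for bit in key])

    assert search_key_16_demo((b'foo',)) == b'8C736521'
    assert search_key_16_demo((b'foo', b'bar')) == b'8C736521\x0076FF8CAA'
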
1991=== modified file 'breezy/tests/test__chunks_to_lines.py'
1992--- breezy/tests/test__chunks_to_lines.py 2017-05-23 14:08:03 +0000
1993+++ breezy/tests/test__chunks_to_lines.py 2017-06-11 01:47:17 +0000
1994@@ -47,58 +47,60 @@
1995 self.assertIs(chunks, result)
1996
1997 def test_fulltext_chunk_to_lines(self):
1998- self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz\n'],
1999- ['foo\nbar\r\nba\rz\n'])
2000- self.assertChunksToLines(['foobarbaz\n'], ['foobarbaz\n'],
2001- alreadly_lines=True)
2002- self.assertChunksToLines(['foo\n', 'bar\n', '\n', 'baz\n', '\n', '\n'],
2003- ['foo\nbar\n\nbaz\n\n\n'])
2004- self.assertChunksToLines(['foobarbaz'], ['foobarbaz'],
2005- alreadly_lines=True)
2006- self.assertChunksToLines(['foobarbaz'], ['foo', 'bar', 'baz'])
2007+ self.assertChunksToLines(
2008+ [b'foo\n', b'bar\r\n', b'ba\rz\n'],
2009+ [b'foo\nbar\r\nba\rz\n'])
2010+ self.assertChunksToLines(
2011+ [b'foobarbaz\n'], [b'foobarbaz\n'], alreadly_lines=True)
2012+ self.assertChunksToLines(
2013+ [b'foo\n', b'bar\n', b'\n', b'baz\n', b'\n', b'\n'],
2014+ [b'foo\nbar\n\nbaz\n\n\n'])
2015+ self.assertChunksToLines(
2016+ [b'foobarbaz'], [b'foobarbaz'], alreadly_lines=True)
2017+ self.assertChunksToLines([b'foobarbaz'], [b'foo', b'bar', b'baz'])
2018
2019 def test_newlines(self):
2020- self.assertChunksToLines(['\n'], ['\n'], alreadly_lines=True)
2021- self.assertChunksToLines(['\n'], ['', '\n', ''])
2022- self.assertChunksToLines(['\n'], ['\n', ''])
2023- self.assertChunksToLines(['\n'], ['', '\n'])
2024- self.assertChunksToLines(['\n', '\n', '\n'], ['\n\n\n'])
2025- self.assertChunksToLines(['\n', '\n', '\n'], ['\n', '\n', '\n'],
2026+ self.assertChunksToLines([b'\n'], [b'\n'], alreadly_lines=True)
2027+ self.assertChunksToLines([b'\n'], [b'', b'\n', b''])
2028+ self.assertChunksToLines([b'\n'], [b'\n', b''])
2029+ self.assertChunksToLines([b'\n'], [b'', b'\n'])
2030+ self.assertChunksToLines([b'\n', b'\n', b'\n'], [b'\n\n\n'])
2031+ self.assertChunksToLines([b'\n', b'\n', b'\n'], [b'\n', b'\n', b'\n'],
2032 alreadly_lines=True)
2033
2034 def test_lines_to_lines(self):
2035- self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz\n'],
2036- ['foo\n', 'bar\r\n', 'ba\rz\n'],
2037+ self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz\n'],
2038+ [b'foo\n', b'bar\r\n', b'ba\rz\n'],
2039 alreadly_lines=True)
2040
2041 def test_no_final_newline(self):
2042- self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz'],
2043- ['foo\nbar\r\nba\rz'])
2044- self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz'],
2045- ['foo\n', 'bar\r\n', 'ba\rz'],
2046+ self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz'],
2047+ [b'foo\nbar\r\nba\rz'])
2048+ self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz'],
2049+ [b'foo\n', b'bar\r\n', b'ba\rz'],
2050 alreadly_lines=True)
2051- self.assertChunksToLines(('foo\n', 'bar\r\n', 'ba\rz'),
2052- ('foo\n', 'bar\r\n', 'ba\rz'),
2053+ self.assertChunksToLines((b'foo\n', b'bar\r\n', b'ba\rz'),
2054+ (b'foo\n', b'bar\r\n', b'ba\rz'),
2055 alreadly_lines=True)
2056 self.assertChunksToLines([], [], alreadly_lines=True)
2057- self.assertChunksToLines(['foobarbaz'], ['foobarbaz'],
2058+ self.assertChunksToLines([b'foobarbaz'], [b'foobarbaz'],
2059 alreadly_lines=True)
2060- self.assertChunksToLines([], [''])
2061+ self.assertChunksToLines([], [b''])
2062
2063 def test_mixed(self):
2064- self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz'],
2065- ['foo\n', 'bar\r\nba\r', 'z'])
2066- self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz'],
2067- ['foo\nb', 'a', 'r\r\nba\r', 'z'])
2068- self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz'],
2069- ['foo\nbar\r\nba', '\r', 'z'])
2070+ self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz'],
2071+ [b'foo\n', b'bar\r\nba\r', b'z'])
2072+ self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz'],
2073+ [b'foo\nb', b'a', b'r\r\nba\r', b'z'])
2074+ self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz'],
2075+ [b'foo\nbar\r\nba', b'\r', b'z'])
2076
2077- self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz'],
2078- ['foo\n', '', 'bar\r\nba', '\r', 'z'])
2079- self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz\n'],
2080- ['foo\n', 'bar\r\n', 'ba\rz\n', ''])
2081- self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz\n'],
2082- ['foo\n', 'bar', '\r\n', 'ba\rz\n'])
2083+ self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz'],
2084+ [b'foo\n', b'', b'bar\r\nba', b'\r', b'z'])
2085+ self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz\n'],
2086+ [b'foo\n', b'bar\r\n', b'ba\rz\n', b''])
2087+ self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz\n'],
2088+ [b'foo\n', b'bar', b'\r\n', b'ba\rz\n'])
2089
2090 def test_not_lines(self):
2091 # We should raise a TypeError, not crash
2092@@ -107,4 +109,4 @@
2093 self.assertRaises(TypeError, self.module.chunks_to_lines,
2094 [object()])
2095 self.assertRaises(TypeError, self.module.chunks_to_lines,
2096- ['foo', object()])
2097+ [b'foo', object()])
2098
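Note on the chunks-to-lines tests: the behaviour they pin down, now in bytes, is that arbitrary chunk boundaries are re-split into lines ending in b'\n' (a lone b'\r' never ends a line), with an unterminated tail kept as-is. A pure-Python behavioural sketch, not the library implementation (which also avoids copying when the input is already a list of lines):

    def chunks_to_lines_demo(chunks):
        text = b''.join(chunks)
        lines = [l + b'\n' for l in text.split(b'\n')]
        lines[-1] = lines[-1][:-1]   # drop the newline added to the tail
        if not lines[-1]:
            lines.pop()              # input ended with b'\n' or was empty
        return lines

    assert chunks_to_lines_demo([b'foo\nb', b'a', b'r\r\nba\r', b'z']) == \
        [b'foo\n', b'bar\r\n', b'ba\rz']
    assert chunks_to_lines_demo([b'', b'\n']) == [b'\n']
    assert chunks_to_lines_demo([b'']) == []
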
2099=== modified file 'breezy/tests/test_inv.py'
2100--- breezy/tests/test_inv.py 2017-06-10 00:52:37 +0000
2101+++ breezy/tests/test_inv.py 2017-06-11 01:47:17 +0000
2102@@ -284,35 +284,35 @@
2103
2104 def test_creation_from_root_id(self):
2105 # iff a root id is passed to the constructor, a root directory is made
2106- inv = inventory.Inventory(root_id='tree-root')
2107+ inv = inventory.Inventory(root_id=b'tree-root')
2108 self.assertNotEqual(None, inv.root)
2109- self.assertEqual('tree-root', inv.root.file_id)
2110+ self.assertEqual(b'tree-root', inv.root.file_id)
2111
2112 def test_add_path_of_root(self):
2113 # if no root id is given at creation time, there is no root directory
2114 inv = inventory.Inventory(root_id=None)
2115 self.assertIs(None, inv.root)
2116 # add a root entry by adding its path
2117- ie = inv.add_path("", "directory", "my-root")
2118- ie.revision = 'test-rev'
2119- self.assertEqual("my-root", ie.file_id)
2120+ ie = inv.add_path(u"", "directory", b"my-root")
2121+ ie.revision = b'test-rev'
2122+ self.assertEqual(b"my-root", ie.file_id)
2123 self.assertIs(ie, inv.root)
2124
2125 def test_add_path(self):
2126- inv = inventory.Inventory(root_id='tree_root')
2127- ie = inv.add_path('hello', 'file', 'hello-id')
2128- self.assertEqual('hello-id', ie.file_id)
2129+ inv = inventory.Inventory(root_id=b'tree_root')
2130+ ie = inv.add_path(u'hello', 'file', b'hello-id')
2131+ self.assertEqual(b'hello-id', ie.file_id)
2132 self.assertEqual('file', ie.kind)
2133
2134 def test_copy(self):
2135 """Make sure copy() works and creates a deep copy."""
2136- inv = inventory.Inventory(root_id='some-tree-root')
2137- ie = inv.add_path('hello', 'file', 'hello-id')
2138+ inv = inventory.Inventory(root_id=b'some-tree-root')
2139+ ie = inv.add_path(u'hello', 'file', b'hello-id')
2140 inv2 = inv.copy()
2141- inv.root.file_id = 'some-new-root'
2142- ie.name = 'file2'
2143- self.assertEqual('some-tree-root', inv2.root.file_id)
2144- self.assertEqual('hello', inv2['hello-id'].name)
2145+ inv.root.file_id = b'some-new-root'
2146+ ie.name = u'file2'
2147+ self.assertEqual(b'some-tree-root', inv2.root.file_id)
2148+ self.assertEqual(u'hello', inv2[b'hello-id'].name)
2149
2150 def test_copy_empty(self):
2151 """Make sure an empty inventory can be copied."""
2152@@ -322,16 +322,17 @@
2153
2154 def test_copy_copies_root_revision(self):
2155 """Make sure the revision of the root gets copied."""
2156- inv = inventory.Inventory(root_id='someroot')
2157- inv.root.revision = 'therev'
2158+ inv = inventory.Inventory(root_id=b'someroot')
2159+ inv.root.revision = b'therev'
2160 inv2 = inv.copy()
2161- self.assertEqual('someroot', inv2.root.file_id)
2162- self.assertEqual('therev', inv2.root.revision)
2163+ self.assertEqual(b'someroot', inv2.root.file_id)
2164+ self.assertEqual(b'therev', inv2.root.revision)
2165
2166 def test_create_tree_reference(self):
2167- inv = inventory.Inventory('tree-root-123')
2168- inv.add(TreeReference('nested-id', 'nested', parent_id='tree-root-123',
2169- revision='rev', reference_revision='rev2'))
2170+ inv = inventory.Inventory(b'tree-root-123')
2171+ inv.add(TreeReference(
2172+ b'nested-id', 'nested', parent_id=b'tree-root-123',
2173+ revision=b'rev', reference_revision=b'rev2'))
2174
2175 def test_error_encoding(self):
2176 inv = inventory.Inventory('tree-root')
2177@@ -997,30 +998,30 @@
2178
2179 def test___getitem__(self):
2180 inv = Inventory()
2181- inv.revision_id = "revid"
2182- inv.root.revision = "rootrev"
2183- inv.add(InventoryFile("fileid", "file", inv.root.file_id))
2184- inv["fileid"].revision = "filerev"
2185- inv["fileid"].executable = True
2186- inv["fileid"].text_sha1 = "ffff"
2187- inv["fileid"].text_size = 1
2188+ inv.revision_id = b"revid"
2189+ inv.root.revision = b"rootrev"
2190+ inv.add(InventoryFile(b"fileid", u"file", inv.root.file_id))
2191+ inv[b"fileid"].revision = b"filerev"
2192+ inv[b"fileid"].executable = True
2193+ inv[b"fileid"].text_sha1 = b"ffff"
2194+ inv[b"fileid"].text_size = 1
2195 chk_bytes = self.get_chk_bytes()
2196 chk_inv = CHKInventory.from_inventory(chk_bytes, inv)
2197- bytes = ''.join(chk_inv.to_lines())
2198- new_inv = CHKInventory.deserialise(chk_bytes, bytes, ("revid",))
2199+ data = b''.join(chk_inv.to_lines())
2200+ new_inv = CHKInventory.deserialise(chk_bytes, data, (b"revid",))
2201 root_entry = new_inv[inv.root.file_id]
2202- file_entry = new_inv["fileid"]
2203+ file_entry = new_inv[b"fileid"]
2204 self.assertEqual("directory", root_entry.kind)
2205 self.assertEqual(inv.root.file_id, root_entry.file_id)
2206 self.assertEqual(inv.root.parent_id, root_entry.parent_id)
2207 self.assertEqual(inv.root.name, root_entry.name)
2208- self.assertEqual("rootrev", root_entry.revision)
2209+ self.assertEqual(b"rootrev", root_entry.revision)
2210 self.assertEqual("file", file_entry.kind)
2211- self.assertEqual("fileid", file_entry.file_id)
2212+ self.assertEqual(b"fileid", file_entry.file_id)
2213 self.assertEqual(inv.root.file_id, file_entry.parent_id)
2214- self.assertEqual("file", file_entry.name)
2215- self.assertEqual("filerev", file_entry.revision)
2216- self.assertEqual("ffff", file_entry.text_sha1)
2217+ self.assertEqual(u"file", file_entry.name)
2218+ self.assertEqual(b"filerev", file_entry.revision)
2219+ self.assertEqual(b"ffff", file_entry.text_sha1)
2220 self.assertEqual(1, file_entry.text_size)
2221 self.assertEqual(True, file_entry.executable)
2222 self.assertRaises(errors.NoSuchId, new_inv.__getitem__, 'missing')
2223
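Note on the test_inv hunks: the split these tests settle on is that identifiers (file ids, revision ids, sha1s) become bytes while human-readable names stay text; the u''/b'' prefixes above make that explicit on both Python versions. A short restatement:

    file_id, revision, sha1 = b'hello-id', b'filerev', b'ffff'  # identifiers
    name = u'hello'                                             # display text
    assert all(isinstance(x, bytes) for x in (file_id, revision, sha1))
    assert not isinstance(name, bytes)
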
2224=== modified file 'breezy/tests/test_inventory_delta.py'
2225--- breezy/tests/test_inventory_delta.py 2017-06-09 16:31:49 +0000
2226+++ breezy/tests/test_inventory_delta.py 2017-06-11 01:47:17 +0000
2227@@ -32,14 +32,14 @@
2228 from . import TestCase
2229
2230 ### DO NOT REFLOW THESE TEXTS. NEW LINES ARE SIGNIFICANT. ###
2231-empty_lines = """format: bzr inventory delta v1 (bzr 1.14)
2232+empty_lines = b"""format: bzr inventory delta v1 (bzr 1.14)
2233 parent: null:
2234 version: null:
2235 versioned_root: true
2236 tree_references: true
2237 """
2238
2239-root_only_lines = """format: bzr inventory delta v1 (bzr 1.14)
2240+root_only_lines = b"""format: bzr inventory delta v1 (bzr 1.14)
2241 parent: null:
2242 version: entry-version
2243 versioned_root: true
2244@@ -48,7 +48,7 @@
2245 """
2246
2247
2248-root_change_lines = """format: bzr inventory delta v1 (bzr 1.14)
2249+root_change_lines = b"""format: bzr inventory delta v1 (bzr 1.14)
2250 parent: entry-version
2251 version: changed-root
2252 versioned_root: true
2253@@ -56,7 +56,7 @@
2254 /\x00an-id\x00\x00different-version\x00dir
2255 """
2256
2257-corrupt_parent_lines = """format: bzr inventory delta v1 (bzr 1.14)
2258+corrupt_parent_lines = b"""format: bzr inventory delta v1 (bzr 1.14)
2259 parent: entry-version
2260 version: changed-root
2261 versioned_root: false
2262@@ -64,7 +64,7 @@
2263 /\x00an-id\x00\x00different-version\x00dir
2264 """
2265
2266-root_only_unversioned = """format: bzr inventory delta v1 (bzr 1.14)
2267+root_only_unversioned = b"""format: bzr inventory delta v1 (bzr 1.14)
2268 parent: null:
2269 version: entry-version
2270 versioned_root: false
2271@@ -72,7 +72,7 @@
2272 None\x00/\x00TREE_ROOT\x00\x00entry-version\x00dir
2273 """
2274
2275-reference_lines = """format: bzr inventory delta v1 (bzr 1.14)
2276+reference_lines = b"""format: bzr inventory delta v1 (bzr 1.14)
2277 parent: null:
2278 version: entry-version
2279 versioned_root: true
2280@@ -81,7 +81,7 @@
2281 None\x00/foo\x00id\x00TREE_ROOT\x00changed\x00tree\x00subtree-version
2282 """
2283
2284-change_tree_lines = """format: bzr inventory delta v1 (bzr 1.14)
2285+change_tree_lines = b"""format: bzr inventory delta v1 (bzr 1.14)
2286 parent: entry-version
2287 version: change-tree
2288 versioned_root: false
2289@@ -96,34 +96,34 @@
2290 def test_parse_no_bytes(self):
2291 deserializer = inventory_delta.InventoryDeltaDeserializer()
2292 err = self.assertRaises(
2293- InventoryDeltaError, deserializer.parse_text_bytes, '')
2294+ InventoryDeltaError, deserializer.parse_text_bytes, b'')
2295 self.assertContainsRe(str(err), 'last line not empty')
2296
2297 def test_parse_bad_format(self):
2298 deserializer = inventory_delta.InventoryDeltaDeserializer()
2299 err = self.assertRaises(InventoryDeltaError,
2300- deserializer.parse_text_bytes, 'format: foo\n')
2301+ deserializer.parse_text_bytes, b'format: foo\n')
2302 self.assertContainsRe(str(err), 'unknown format')
2303
2304 def test_parse_no_parent(self):
2305 deserializer = inventory_delta.InventoryDeltaDeserializer()
2306 err = self.assertRaises(InventoryDeltaError,
2307 deserializer.parse_text_bytes,
2308- 'format: bzr inventory delta v1 (bzr 1.14)\n')
2309+ b'format: bzr inventory delta v1 (bzr 1.14)\n')
2310 self.assertContainsRe(str(err), 'missing parent: marker')
2311
2312 def test_parse_no_version(self):
2313 deserializer = inventory_delta.InventoryDeltaDeserializer()
2314 err = self.assertRaises(InventoryDeltaError,
2315 deserializer.parse_text_bytes,
2316- 'format: bzr inventory delta v1 (bzr 1.14)\n'
2317- 'parent: null:\n')
2318+ b'format: bzr inventory delta v1 (bzr 1.14)\n'
2319+ b'parent: null:\n')
2320 self.assertContainsRe(str(err), 'missing version: marker')
2321-
2322+
2323 def test_parse_duplicate_key_errors(self):
2324 deserializer = inventory_delta.InventoryDeltaDeserializer()
2325 double_root_lines = \
2326-"""format: bzr inventory delta v1 (bzr 1.14)
2327+b"""format: bzr inventory delta v1 (bzr 1.14)
2328 parent: null:
2329 version: null:
2330 versioned_root: true
2331@@ -139,16 +139,16 @@
2332 deserializer = inventory_delta.InventoryDeltaDeserializer()
2333 parse_result = deserializer.parse_text_bytes(root_only_lines)
2334 expected_entry = inventory.make_entry(
2335- 'directory', u'', None, 'an-id')
2336- expected_entry.revision = 'a@e\xc3\xa5ample.com--2004'
2337+ 'directory', u'', None, b'an-id')
2338+ expected_entry.revision = b'a@e\xc3\xa5ample.com--2004'
2339 self.assertEqual(
2340- ('null:', 'entry-version', True, True,
2341- [(None, '', 'an-id', expected_entry)]),
2342+ (b'null:', b'entry-version', True, True,
2343+ [(None, u'', b'an-id', expected_entry)]),
2344 parse_result)
2345
2346 def test_parse_special_revid_not_valid_last_mod(self):
2347 deserializer = inventory_delta.InventoryDeltaDeserializer()
2348- root_only_lines = """format: bzr inventory delta v1 (bzr 1.14)
2349+ root_only_lines = b"""format: bzr inventory delta v1 (bzr 1.14)
2350 parent: null:
2351 version: null:
2352 versioned_root: false
2353@@ -161,7 +161,7 @@
2354
2355 def test_parse_versioned_root_versioned_disabled(self):
2356 deserializer = inventory_delta.InventoryDeltaDeserializer()
2357- root_only_lines = """format: bzr inventory delta v1 (bzr 1.14)
2358+ root_only_lines = b"""format: bzr inventory delta v1 (bzr 1.14)
2359 parent: null:
2360 version: null:
2361 versioned_root: false
2362@@ -174,7 +174,7 @@
2363
2364 def test_parse_unique_root_id_root_versioned_disabled(self):
2365 deserializer = inventory_delta.InventoryDeltaDeserializer()
2366- root_only_lines = """format: bzr inventory delta v1 (bzr 1.14)
2367+ root_only_lines = b"""format: bzr inventory delta v1 (bzr 1.14)
2368 parent: parent-id
2369 version: a@e\xc3\xa5ample.com--2004
2370 versioned_root: false
2371@@ -189,11 +189,11 @@
2372 deserializer = inventory_delta.InventoryDeltaDeserializer()
2373 parse_result = deserializer.parse_text_bytes(root_only_unversioned)
2374 expected_entry = inventory.make_entry(
2375- 'directory', u'', None, 'TREE_ROOT')
2376- expected_entry.revision = 'entry-version'
2377+ 'directory', u'', None, b'TREE_ROOT')
2378+ expected_entry.revision = b'entry-version'
2379 self.assertEqual(
2380- ('null:', 'entry-version', False, False,
2381- [(None, u'', 'TREE_ROOT', expected_entry)]),
2382+ (b'null:', b'entry-version', False, False,
2383+ [(None, u'', b'TREE_ROOT', expected_entry)]),
2384 parse_result)
2385
2386 def test_parse_versioned_root_when_disabled(self):
2387@@ -215,7 +215,7 @@
2388 deserializer = inventory_delta.InventoryDeltaDeserializer()
2389 # A serialised inventory delta with a header saying no tree refs, but
2390 # that has a tree ref in its content.
2391- lines = """format: bzr inventory delta v1 (bzr 1.14)
2392+ lines = b"""format: bzr inventory delta v1 (bzr 1.14)
2393 parent: null:
2394 version: entry-version
2395 versioned_root: false
2396@@ -231,7 +231,7 @@
2397 deserializer = inventory_delta.InventoryDeltaDeserializer()
2398 # A serialised inventory delta with a header saying no tree refs, but
2399 # that has a tree ref in its content.
2400- lines = """format: bzr inventory delta v1 (bzr 1.14)
2401+ lines = b"""format: bzr inventory delta v1 (bzr 1.14)
2402 parent: null:
2403 version: entry-version
2404 versioned_root: false
2405@@ -254,7 +254,7 @@
2406 def test_parse_invalid_newpath(self):
2407 """newpath must start with / if it is not None."""
2408 lines = empty_lines
2409- lines += "None\x00bad\x00TREE_ROOT\x00\x00version\x00dir\n"
2410+ lines += b"None\x00bad\x00TREE_ROOT\x00\x00version\x00dir\n"
2411 deserializer = inventory_delta.InventoryDeltaDeserializer()
2412 err = self.assertRaises(InventoryDeltaError,
2413 deserializer.parse_text_bytes, lines)
2414@@ -263,39 +263,39 @@
2415 def test_parse_invalid_oldpath(self):
2416 """oldpath must start with / if it is not None."""
2417 lines = root_only_lines
2418- lines += "bad\x00/new\x00file-id\x00\x00version\x00dir\n"
2419+ lines += b"bad\x00/new\x00file-id\x00\x00version\x00dir\n"
2420 deserializer = inventory_delta.InventoryDeltaDeserializer()
2421 err = self.assertRaises(InventoryDeltaError,
2422 deserializer.parse_text_bytes, lines)
2423 self.assertContainsRe(str(err), 'oldpath invalid')
2424-
2425+
2426 def test_parse_new_file(self):
2427 """a new file is parsed correctly"""
2428 lines = root_only_lines
2429- fake_sha = "deadbeef" * 5
2430+ fake_sha = b"deadbeef" * 5
2431 lines += (
2432- "None\x00/new\x00file-id\x00an-id\x00version\x00file\x00123\x00" +
2433- "\x00" + fake_sha + "\n")
2434+ b"None\x00/new\x00file-id\x00an-id\x00version\x00file\x00123\x00" +
2435+ b"\x00" + fake_sha + b"\n")
2436 deserializer = inventory_delta.InventoryDeltaDeserializer()
2437 parse_result = deserializer.parse_text_bytes(lines)
2438 expected_entry = inventory.make_entry(
2439- 'file', u'new', 'an-id', 'file-id')
2440- expected_entry.revision = 'version'
2441+ 'file', u'new', b'an-id', b'file-id')
2442+ expected_entry.revision = b'version'
2443 expected_entry.text_size = 123
2444 expected_entry.text_sha1 = fake_sha
2445 delta = parse_result[4]
2446 self.assertEqual(
2447- (None, u'new', 'file-id', expected_entry), delta[-1])
2448+ (None, u'new', b'file-id', expected_entry), delta[-1])
2449
2450 def test_parse_delete(self):
2451 lines = root_only_lines
2452 lines += (
2453- "/old-file\x00None\x00deleted-id\x00\x00null:\x00deleted\x00\x00\n")
2454+ b"/old-file\x00None\x00deleted-id\x00\x00null:\x00deleted\x00\x00\n")
2455 deserializer = inventory_delta.InventoryDeltaDeserializer()
2456 parse_result = deserializer.parse_text_bytes(lines)
2457 delta = parse_result[4]
2458 self.assertEqual(
2459- (u'old-file', None, 'deleted-id', None), delta[-1])
2460+ (u'old-file', None, b'deleted-id', None), delta[-1])
2461
2462
2463 class TestSerialization(TestCase):
2464@@ -313,86 +313,86 @@
2465 def test_root_only_to_lines(self):
2466 old_inv = Inventory(None)
2467 new_inv = Inventory(None)
2468- root = new_inv.make_entry('directory', '', None, 'an-id')
2469- root.revision = 'a@e\xc3\xa5ample.com--2004'
2470+ root = new_inv.make_entry('directory', u'', None, b'an-id')
2471+ root.revision = b'a@e\xc3\xa5ample.com--2004'
2472 new_inv.add(root)
2473 delta = new_inv._make_delta(old_inv)
2474 serializer = inventory_delta.InventoryDeltaSerializer(
2475 versioned_root=True, tree_references=True)
2476 self.assertEqual(BytesIO(root_only_lines).readlines(),
2477- serializer.delta_to_lines(NULL_REVISION, 'entry-version', delta))
2478+ serializer.delta_to_lines(NULL_REVISION, b'entry-version', delta))
2479
2480 def test_unversioned_root(self):
2481 old_inv = Inventory(None)
2482 new_inv = Inventory(None)
2483- root = new_inv.make_entry('directory', '', None, 'TREE_ROOT')
2484+ root = new_inv.make_entry('directory', u'', None, b'TREE_ROOT')
2485 # Implicit roots are considered modified in every revision.
2486- root.revision = 'entry-version'
2487+ root.revision = b'entry-version'
2488 new_inv.add(root)
2489 delta = new_inv._make_delta(old_inv)
2490 serializer = inventory_delta.InventoryDeltaSerializer(
2491 versioned_root=False, tree_references=False)
2492 serialized_lines = serializer.delta_to_lines(
2493- NULL_REVISION, 'entry-version', delta)
2494+ NULL_REVISION, b'entry-version', delta)
2495 self.assertEqual(BytesIO(root_only_unversioned).readlines(),
2496 serialized_lines)
2497 deserializer = inventory_delta.InventoryDeltaDeserializer()
2498 self.assertEqual(
2499- (NULL_REVISION, 'entry-version', False, False, delta),
2500- deserializer.parse_text_bytes(''.join(serialized_lines)))
2501+ (NULL_REVISION, b'entry-version', False, False, delta),
2502+ deserializer.parse_text_bytes(b''.join(serialized_lines)))
2503
2504 def test_unversioned_non_root_errors(self):
2505 old_inv = Inventory(None)
2506 new_inv = Inventory(None)
2507- root = new_inv.make_entry('directory', '', None, 'TREE_ROOT')
2508- root.revision = 'a@e\xc3\xa5ample.com--2004'
2509+ root = new_inv.make_entry('directory', u'', None, b'TREE_ROOT')
2510+ root.revision = b'a@e\xc3\xa5ample.com--2004'
2511 new_inv.add(root)
2512- non_root = new_inv.make_entry('directory', 'foo', root.file_id, 'id')
2513+ non_root = new_inv.make_entry('directory', u'foo', root.file_id, b'id')
2514 new_inv.add(non_root)
2515 delta = new_inv._make_delta(old_inv)
2516 serializer = inventory_delta.InventoryDeltaSerializer(
2517 versioned_root=True, tree_references=True)
2518 err = self.assertRaises(InventoryDeltaError,
2519- serializer.delta_to_lines, NULL_REVISION, 'entry-version', delta)
2520+ serializer.delta_to_lines, NULL_REVISION, b'entry-version', delta)
2521 self.assertContainsRe(str(err), "^no version for fileid b?'id'$")
2522
2523 def test_richroot_unversioned_root_errors(self):
2524 old_inv = Inventory(None)
2525 new_inv = Inventory(None)
2526- root = new_inv.make_entry('directory', '', None, 'TREE_ROOT')
2527+ root = new_inv.make_entry('directory', '', None, b'TREE_ROOT')
2528 new_inv.add(root)
2529 delta = new_inv._make_delta(old_inv)
2530 serializer = inventory_delta.InventoryDeltaSerializer(
2531 versioned_root=True, tree_references=True)
2532 err = self.assertRaises(InventoryDeltaError,
2533- serializer.delta_to_lines, NULL_REVISION, 'entry-version', delta)
2534+ serializer.delta_to_lines, NULL_REVISION, b'entry-version', delta)
2535 self.assertContainsRe(
2536 str(err), "no version for fileid b?'TREE_ROOT'$")
2537
2538 def test_nonrichroot_versioned_root_errors(self):
2539 old_inv = Inventory(None)
2540 new_inv = Inventory(None)
2541- root = new_inv.make_entry('directory', '', None, 'TREE_ROOT')
2542- root.revision = 'a@e\xc3\xa5ample.com--2004'
2543+ root = new_inv.make_entry('directory', u'', None, b'TREE_ROOT')
2544+ root.revision = b'a@e\xc3\xa5ample.com--2004'
2545 new_inv.add(root)
2546 delta = new_inv._make_delta(old_inv)
2547 serializer = inventory_delta.InventoryDeltaSerializer(
2548 versioned_root=False, tree_references=True)
2549 err = self.assertRaises(InventoryDeltaError,
2550- serializer.delta_to_lines, NULL_REVISION, 'entry-version', delta)
2551+ serializer.delta_to_lines, NULL_REVISION, b'entry-version', delta)
2552 self.assertContainsRe(
2553 str(err), "^Version present for / in b?'TREE_ROOT'")
2554
2555 def test_unknown_kind_errors(self):
2556 old_inv = Inventory(None)
2557 new_inv = Inventory(None)
2558- root = new_inv.make_entry('directory', '', None, 'my-rich-root-id')
2559- root.revision = 'changed'
2560+ root = new_inv.make_entry('directory', u'', None, b'my-rich-root-id')
2561+ root.revision = b'changed'
2562 new_inv.add(root)
2563 class StrangeInventoryEntry(inventory.InventoryEntry):
2564 kind = 'strange'
2565- non_root = StrangeInventoryEntry('id', 'foo', root.file_id)
2566- non_root.revision = 'changed'
2567+ non_root = StrangeInventoryEntry('id', u'foo', root.file_id)
2568+ non_root.revision = b'changed'
2569 new_inv.add(non_root)
2570 delta = new_inv._make_delta(old_inv)
2571 serializer = inventory_delta.InventoryDeltaSerializer(
2572@@ -400,19 +400,19 @@
2573 # we expect keyerror because there is little value wrapping this.
2574 # This test aims to prove that it errors more than how it errors.
2575 err = self.assertRaises(KeyError,
2576- serializer.delta_to_lines, NULL_REVISION, 'entry-version', delta)
2577+ serializer.delta_to_lines, NULL_REVISION, b'entry-version', delta)
2578 self.assertEqual(('strange',), err.args)
2579
2580 def test_tree_reference_disabled(self):
2581 old_inv = Inventory(None)
2582 new_inv = Inventory(None)
2583- root = new_inv.make_entry('directory', '', None, 'TREE_ROOT')
2584- root.revision = 'a@e\xc3\xa5ample.com--2004'
2585+ root = new_inv.make_entry('directory', u'', None, b'TREE_ROOT')
2586+ root.revision = b'a@e\xc3\xa5ample.com--2004'
2587 new_inv.add(root)
2588 non_root = new_inv.make_entry(
2589- 'tree-reference', 'foo', root.file_id, 'id')
2590- non_root.revision = 'changed'
2591- non_root.reference_revision = 'subtree-version'
2592+ 'tree-reference', u'foo', root.file_id, b'id')
2593+ non_root.revision = b'changed'
2594+ non_root.reference_revision = b'subtree-version'
2595 new_inv.add(non_root)
2596 delta = new_inv._make_delta(old_inv)
2597 serializer = inventory_delta.InventoryDeltaSerializer(
2598@@ -420,59 +420,60 @@
2599 # we expect keyerror because there is little value wrapping this.
2600 # This test aims to prove that it errors more than how it errors.
2601 err = self.assertRaises(KeyError,
2602- serializer.delta_to_lines, NULL_REVISION, 'entry-version', delta)
2603+ serializer.delta_to_lines, NULL_REVISION, b'entry-version', delta)
2604 self.assertEqual(('tree-reference',), err.args)
2605
2606 def test_tree_reference_enabled(self):
2607 old_inv = Inventory(None)
2608 new_inv = Inventory(None)
2609- root = new_inv.make_entry('directory', '', None, 'TREE_ROOT')
2610- root.revision = 'a@e\xc3\xa5ample.com--2004'
2611+ root = new_inv.make_entry('directory', u'', None, b'TREE_ROOT')
2612+ root.revision = b'a@e\xc3\xa5ample.com--2004'
2613 new_inv.add(root)
2614 non_root = new_inv.make_entry(
2615- 'tree-reference', 'foo', root.file_id, 'id')
2616- non_root.revision = 'changed'
2617- non_root.reference_revision = 'subtree-version'
2618+ 'tree-reference', u'foo', root.file_id, b'id')
2619+ non_root.revision = b'changed'
2620+ non_root.reference_revision = b'subtree-version'
2621 new_inv.add(non_root)
2622 delta = new_inv._make_delta(old_inv)
2623 serializer = inventory_delta.InventoryDeltaSerializer(
2624 versioned_root=True, tree_references=True)
2625 self.assertEqual(BytesIO(reference_lines).readlines(),
2626- serializer.delta_to_lines(NULL_REVISION, 'entry-version', delta))
2627+ serializer.delta_to_lines(NULL_REVISION, b'entry-version', delta))
2628
2629 def test_to_inventory_root_id_versioned_not_permitted(self):
2630- root_entry = inventory.make_entry('directory', '', None, 'TREE_ROOT')
2631- root_entry.revision = 'some-version'
2632- delta = [(None, '', 'TREE_ROOT', root_entry)]
2633+ root_entry = inventory.make_entry('directory', u'', None, b'TREE_ROOT')
2634+ root_entry.revision = b'some-version'
2635+ delta = [(None, u'', b'TREE_ROOT', root_entry)]
2636 serializer = inventory_delta.InventoryDeltaSerializer(
2637 versioned_root=False, tree_references=True)
2638 self.assertRaises(
2639- InventoryDeltaError, serializer.delta_to_lines, 'old-version',
2640- 'new-version', delta)
2641+ InventoryDeltaError, serializer.delta_to_lines, b'old-version',
2642+ b'new-version', delta)
2643
2644 def test_to_inventory_root_id_not_versioned(self):
2645- delta = [(None, '', 'an-id', inventory.make_entry(
2646- 'directory', '', None, 'an-id'))]
2647+ delta = [(None, u'', b'an-id', inventory.make_entry(
2648+ 'directory', u'', None, b'an-id'))]
2649 serializer = inventory_delta.InventoryDeltaSerializer(
2650 versioned_root=True, tree_references=True)
2651 self.assertRaises(
2652- InventoryDeltaError, serializer.delta_to_lines, 'old-version',
2653- 'new-version', delta)
2654+ InventoryDeltaError, serializer.delta_to_lines, b'old-version',
2655+ b'new-version', delta)
2656
2657 def test_to_inventory_has_tree_not_meant_to(self):
2658 make_entry = inventory.make_entry
2659- tree_ref = make_entry('tree-reference', 'foo', 'changed-in', 'ref-id')
2660- tree_ref.reference_revision = 'ref-revision'
2661+ tree_ref = make_entry(
2662+ 'tree-reference', u'foo', b'changed-in', b'ref-id')
2663+ tree_ref.reference_revision = b'ref-revision'
2664 delta = [
2665- (None, '', 'an-id',
2666- make_entry('directory', '', 'changed-in', 'an-id')),
2667- (None, 'foo', 'ref-id', tree_ref)
2668+ (None, u'', b'an-id',
2669+ make_entry('directory', u'', b'changed-in', b'an-id')),
2670+ (None, u'foo', b'ref-id', tree_ref)
2671 # a file that followed the root move
2672 ]
2673 serializer = inventory_delta.InventoryDeltaSerializer(
2674 versioned_root=True, tree_references=True)
2675 self.assertRaises(InventoryDeltaError, serializer.delta_to_lines,
2676- 'old-version', 'new-version', delta)
2677+ b'old-version', b'new-version', delta)
2678
2679 def test_to_inventory_torture(self):
2680 def make_entry(kind, name, parent_id, file_id, **attrs):
2681@@ -488,43 +489,43 @@
2682 # - files with and without exec bit
2683 delta = [
2684 # new root:
2685- (None, '', 'new-root-id',
2686- make_entry('directory', '', None, 'new-root-id',
2687- revision='changed-in')),
2688+ (None, u'', b'new-root-id',
2689+ make_entry('directory', u'', None, b'new-root-id',
2690+ revision=b'changed-in')),
2691 # an old root:
2692- ('', 'old-root', 'TREE_ROOT',
2693- make_entry('directory', 'subdir-now', 'new-root-id',
2694- 'TREE_ROOT', revision='moved-root')),
2695+ (u'', u'old-root', b'TREE_ROOT',
2696+ make_entry('directory', u'subdir-now', b'new-root-id',
2697+ b'TREE_ROOT', revision=b'moved-root')),
2698 # a file that followed the root move
2699- ('under-old-root', 'old-root/under-old-root', 'moved-id',
2700- make_entry('file', 'under-old-root', 'TREE_ROOT', 'moved-id',
2701- revision='old-rev', executable=False, text_size=30,
2702- text_sha1='some-sha')),
2703+ (u'under-old-root', u'old-root/under-old-root', b'moved-id',
2704+ make_entry('file', u'under-old-root', b'TREE_ROOT',
2705+ b'moved-id', revision=b'old-rev', executable=False,
2706+ text_size=30, text_sha1=b'some-sha')),
2707 # a deleted path
2708- ('old-file', None, 'deleted-id', None),
2709+ (u'old-file', None, b'deleted-id', None),
2710 # a tree reference moved to the new root
2711- ('ref', 'ref', 'ref-id',
2712- make_entry('tree-reference', 'ref', 'new-root-id', 'ref-id',
2713- reference_revision='tree-reference-id',
2714- revision='new-rev')),
2715+ (u'ref', u'ref', b'ref-id',
2716+ make_entry('tree-reference', u'ref', b'new-root-id', b'ref-id',
2717+ reference_revision=b'tree-reference-id',
2718+ revision=b'new-rev')),
2719 # a symlink now in a deep dir
2720- ('dir/link', 'old-root/dir/link', 'link-id',
2721- make_entry('symlink', 'link', 'deep-id', 'link-id',
2722- symlink_target='target', revision='new-rev')),
2723+ (u'dir/link', u'old-root/dir/link', b'link-id',
2724+ make_entry('symlink', u'link', b'deep-id', b'link-id',
2725+ symlink_target=u'target', revision=b'new-rev')),
2726 # a deep dir
2727- ('dir', 'old-root/dir', 'deep-id',
2728- make_entry('directory', 'dir', 'TREE_ROOT', 'deep-id',
2729- revision='new-rev')),
2730+ (u'dir', u'old-root/dir', b'deep-id',
2731+ make_entry('directory', u'dir', b'TREE_ROOT', b'deep-id',
2732+ revision=b'new-rev')),
2733 # a file with an exec bit set
2734- (None, 'configure', 'exec-id',
2735- make_entry('file', 'configure', 'new-root-id', 'exec-id',
2736- executable=True, text_size=30, text_sha1='some-sha',
2737- revision='old-rev')),
2738+ (None, u'configure', b'exec-id',
2739+ make_entry('file', u'configure', b'new-root-id', b'exec-id',
2740+ executable=True, text_size=30, text_sha1=b'some-sha',
2741+ revision=b'old-rev')),
2742 ]
2743 serializer = inventory_delta.InventoryDeltaSerializer(
2744 versioned_root=True, tree_references=True)
2745- lines = serializer.delta_to_lines(NULL_REVISION, 'something', delta)
2746- expected = """format: bzr inventory delta v1 (bzr 1.14)
2747+ lines = serializer.delta_to_lines(NULL_REVISION, b'something', delta)
2748+ expected = b"""format: bzr inventory delta v1 (bzr 1.14)
2749 parent: null:
2750 version: something
2751 versioned_root: true
2752@@ -538,8 +539,8 @@
2753 None\x00/\x00new-root-id\x00\x00changed-in\x00dir
2754 None\x00/configure\x00exec-id\x00new-root-id\x00old-rev\x00file\x0030\x00Y\x00some-sha
2755 """
2756- serialized = ''.join(lines)
2757- self.assertIsInstance(serialized, str)
2758+ serialized = b''.join(lines)
2759+ self.assertIsInstance(serialized, bytes)
2760 self.assertEqual(expected, serialized)
2761
2762
2763@@ -547,79 +548,79 @@
2764 """Test serialization of the content part of a line."""
2765
2766 def test_dir(self):
2767- entry = inventory.make_entry('directory', 'a dir', None)
2768- self.assertEqual('dir', inventory_delta._directory_content(entry))
2769+ entry = inventory.make_entry('directory', u'a dir', None)
2770+ self.assertEqual(b'dir', inventory_delta._directory_content(entry))
2771
2772 def test_file_0_short_sha(self):
2773- file_entry = inventory.make_entry('file', 'a file', None, 'file-id')
2774- file_entry.text_sha1 = ''
2775+ file_entry = inventory.make_entry('file', u'a file', None, b'file-id')
2776+ file_entry.text_sha1 = b''
2777 file_entry.text_size = 0
2778- self.assertEqual('file\x000\x00\x00',
2779+ self.assertEqual(b'file\x000\x00\x00',
2780 inventory_delta._file_content(file_entry))
2781
2782 def test_file_10_foo(self):
2783- file_entry = inventory.make_entry('file', 'a file', None, 'file-id')
2784- file_entry.text_sha1 = 'foo'
2785+ file_entry = inventory.make_entry('file', u'a file', None, b'file-id')
2786+ file_entry.text_sha1 = b'foo'
2787 file_entry.text_size = 10
2788- self.assertEqual('file\x0010\x00\x00foo',
2789+ self.assertEqual(b'file\x0010\x00\x00foo',
2790 inventory_delta._file_content(file_entry))
2791
2792 def test_file_executable(self):
2793- file_entry = inventory.make_entry('file', 'a file', None, 'file-id')
2794+ file_entry = inventory.make_entry('file', u'a file', None, b'file-id')
2795 file_entry.executable = True
2796- file_entry.text_sha1 = 'foo'
2797+ file_entry.text_sha1 = b'foo'
2798 file_entry.text_size = 10
2799- self.assertEqual('file\x0010\x00Y\x00foo',
2800+ self.assertEqual(b'file\x0010\x00Y\x00foo',
2801 inventory_delta._file_content(file_entry))
2802
2803 def test_file_without_size(self):
2804- file_entry = inventory.make_entry('file', 'a file', None, 'file-id')
2805- file_entry.text_sha1 = 'foo'
2806+ file_entry = inventory.make_entry('file', u'a file', None, b'file-id')
2807+ file_entry.text_sha1 = b'foo'
2808 self.assertRaises(InventoryDeltaError,
2809 inventory_delta._file_content, file_entry)
2810
2811 def test_file_without_sha1(self):
2812- file_entry = inventory.make_entry('file', 'a file', None, 'file-id')
2813+ file_entry = inventory.make_entry('file', u'a file', None, b'file-id')
2814 file_entry.text_size = 10
2815 self.assertRaises(InventoryDeltaError,
2816 inventory_delta._file_content, file_entry)
2817
2818 def test_link_empty_target(self):
2819- entry = inventory.make_entry('symlink', 'a link', None)
2820- entry.symlink_target = ''
2821- self.assertEqual('link\x00',
2822+ entry = inventory.make_entry('symlink', u'a link', None)
2823+ entry.symlink_target = u''
2824+ self.assertEqual(b'link\x00',
2825 inventory_delta._link_content(entry))
2826
2827 def test_link_unicode_target(self):
2828- entry = inventory.make_entry('symlink', 'a link', None)
2829- entry.symlink_target = ' \xc3\xa5'.decode('utf8')
2830- self.assertEqual('link\x00 \xc3\xa5',
2831+ entry = inventory.make_entry('symlink', u'a link', None)
2832+ entry.symlink_target = b' \xc3\xa5'.decode('utf8')
2833+ self.assertEqual(b'link\x00 \xc3\xa5',
2834 inventory_delta._link_content(entry))
2835
2836 def test_link_space_target(self):
2837- entry = inventory.make_entry('symlink', 'a link', None)
2838- entry.symlink_target = ' '
2839- self.assertEqual('link\x00 ',
2840+ entry = inventory.make_entry('symlink', u'a link', None)
2841+ entry.symlink_target = u' '
2842+ self.assertEqual(b'link\x00 ',
2843 inventory_delta._link_content(entry))
2844
2845 def test_link_no_target(self):
2846- entry = inventory.make_entry('symlink', 'a link', None)
2847+ entry = inventory.make_entry('symlink', u'a link', None)
2848 self.assertRaises(InventoryDeltaError,
2849 inventory_delta._link_content, entry)
2850
2851 def test_reference_null(self):
2852- entry = inventory.make_entry('tree-reference', 'a tree', None)
2853+ entry = inventory.make_entry('tree-reference', u'a tree', None)
2854 entry.reference_revision = NULL_REVISION
2855- self.assertEqual('tree\x00null:',
2856+ self.assertEqual(b'tree\x00null:',
2857 inventory_delta._reference_content(entry))
2858
2859 def test_reference_revision(self):
2860- entry = inventory.make_entry('tree-reference', 'a tree', None)
2861- entry.reference_revision = 'foo@\xc3\xa5b-lah'
2862- self.assertEqual('tree\x00foo@\xc3\xa5b-lah',
2863+ entry = inventory.make_entry('tree-reference', u'a tree', None)
2864+ entry.reference_revision = b'foo@\xc3\xa5b-lah'
2865+ self.assertEqual(b'tree\x00foo@\xc3\xa5b-lah',
2866 inventory_delta._reference_content(entry))
2867
2868 def test_reference_no_reference(self):
2869- entry = inventory.make_entry('tree-reference', 'a tree', None)
2870+ entry = inventory.make_entry('tree-reference', u'a tree', None)
2871 self.assertRaises(InventoryDeltaError,
2872 inventory_delta._reference_content, entry)
2873
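A minimal sketch of the text/bytes convention the ported tests above settle on: path names and symlink targets are unicode text, while file ids, revision ids, sha1s and serialized delta content are bytes. The values are copied from test_file_10_foo; the snippet assumes this branch's breezy is importable.

    from breezy import inventory, inventory_delta

    # Names are text (u''); ids, sha1s and serialized output are bytes (b'').
    entry = inventory.make_entry('file', u'a file', None, b'file-id')
    entry.text_sha1 = b'foo'
    entry.text_size = 10
    # _file_content builds the bytes payload that goes into a delta line.
    assert inventory_delta._file_content(entry) == b'file\x0010\x00\x00foo'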
2874=== modified file 'breezy/transport/memory.py'
2875--- breezy/transport/memory.py 2017-05-24 19:44:00 +0000
2876+++ breezy/transport/memory.py 2017-06-11 01:47:17 +0000
2877@@ -164,7 +164,7 @@
2878
2879 def open_write_stream(self, relpath, mode=None):
2880 """See Transport.open_write_stream."""
2881- self.put_bytes(relpath, "", mode)
2882+ self.put_bytes(relpath, b"", mode)
2883 result = AppendBasedFileStream(self, relpath)
2884 _file_streams[self.abspath(relpath)] = result
2885 return result
2886
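The one-line memory.py fix matters because put_bytes takes raw bytes, and '' is text on Python 3. A tiny illustration, assuming a directly constructed MemoryTransport (the construction is not part of the change):

    from breezy.transport.memory import MemoryTransport

    t = MemoryTransport()
    t.put_bytes('foo', b'', None)   # mirrors the fixed bootstrap write
    assert t.get_bytes('foo') == b''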
2887=== modified file 'breezy/versionedfile.py'
2888--- breezy/versionedfile.py 2017-06-05 20:48:31 +0000
2889+++ breezy/versionedfile.py 2017-06-11 01:47:17 +0000
2890@@ -120,7 +120,7 @@
2891 if storage_kind == 'chunked':
2892 return self._chunks
2893 elif storage_kind == 'fulltext':
2894- return ''.join(self._chunks)
2895+ return b''.join(self._chunks)
2896 raise errors.UnavailableRepresentation(self.key, storage_kind,
2897 self.storage_kind)
2898
2899@@ -1079,13 +1079,13 @@
2900 def _check_lines_not_unicode(self, lines):
2901 """Check that lines being added to a versioned file are not unicode."""
2902 for line in lines:
2903- if line.__class__ is not str:
2904+ if line.__class__ is not bytes:
2905 raise errors.BzrBadParameterUnicode("lines")
2906
2907 def _check_lines_are_lines(self, lines):
2908 """Check that the lines really are full lines without inline EOL."""
2909 for line in lines:
2910- if '\n' in line[:-1]:
2911+ if b'\n' in line[:-1]:
2912 raise errors.BzrBadParameterContainsNewline("lines")
2913
2914 def get_known_graph_ancestry(self, keys):
2915@@ -1792,7 +1792,7 @@
2916 "nostore_sha behaviour.")
2917 if key[-1] is None:
2918 sha1 = osutils.sha_strings(lines)
2919- key = ("sha1:" + sha1,)
2920+ key = (b"sha1:" + sha1,)
2921 else:
2922 sha1 = None
2923 if key in self._store.get_parent_map([key]):
2924@@ -1816,7 +1816,7 @@
2925 :param network_bytes: The bytes of a record.
2926 :return: A tuple (storage_kind, offset_of_remaining_bytes)
2927 """
2928- line_end = network_bytes.find('\n')
2929+ line_end = network_bytes.find(b'\n')
2930 storage_kind = network_bytes[:line_end]
2931 return storage_kind, line_end + 1
2932
2933@@ -1859,7 +1859,7 @@
2934 meta_len, = struct.unpack('!L', bytes[line_end:line_end+4])
2935 record_meta = bytes[line_end+4:line_end+4+meta_len]
2936 key, parents = bencode.bdecode_as_tuple(record_meta)
2937- if parents == 'nil':
2938+ if parents == b'nil':
2939 parents = None
2940 fulltext = bytes[line_end+4+meta_len:]
2941 return [FulltextContentFactory(key, parents, None, fulltext)]
2942@@ -1871,12 +1871,12 @@
2943
2944 def record_to_fulltext_bytes(record):
2945 if record.parents is None:
2946- parents = 'nil'
2947+ parents = b'nil'
2948 else:
2949 parents = record.parents
2950 record_meta = bencode.bencode((record.key, parents))
2951 record_content = record.get_bytes_as('fulltext')
2952- return "fulltext\n%s%s%s" % (
2953+ return b"fulltext\n%s%s%s" % (
2954 _length_prefix(record_meta), record_meta, record_content)
2955
2956
2957@@ -1893,8 +1893,8 @@
2958 per_prefix_map = {}
2959 for item in viewitems(parent_map):
2960 key = item[0]
2961- if isinstance(key, str) or len(key) == 1:
2962- prefix = ''
2963+ if isinstance(key, bytes) or len(key) == 1:
2964+ prefix = b''
2965 else:
2966 prefix = key[0]
2967 try:
2968
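Note that the record_to_fulltext_bytes hunk leans on bytes %-interpolation, which Python 3 only gained in 3.5 (PEP 461), so this sets a floor on supported Python 3 versions. A rough sketch of the resulting wire framing follows; frame_fulltext is a hypothetical stand-in for the real helper pair, and the 4-byte prefix is inferred from the struct.unpack('!L', ...) read in the parsing hunk above:

    import struct

    def frame_fulltext(record_meta, record_content):
        # b"fulltext\n", then a 4-byte big-endian length of the bencoded
        # metadata, the metadata itself, then the fulltext content.
        length_prefix = struct.pack('!L', len(record_meta))
        return b"fulltext\n%s%s%s" % (length_prefix, record_meta, record_content)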
2969=== modified file 'breezy/xml_serializer.py'
2970--- breezy/xml_serializer.py 2017-05-25 01:35:55 +0000
2971+++ breezy/xml_serializer.py 2017-06-11 01:47:17 +0000
2972@@ -37,9 +37,9 @@
2973 except ImportError:
2974 from xml.parsers.expat import ExpatError as ParseError
2975
2976-(ElementTree, SubElement, Element, XMLTreeBuilder, fromstring, tostring) = (
2977+(ElementTree, SubElement, Element, fromstring, tostring) = (
2978 elementtree.ElementTree, elementtree.SubElement, elementtree.Element,
2979- elementtree.XMLTreeBuilder, elementtree.fromstring, elementtree.tostring)
2980+ elementtree.fromstring, elementtree.tostring)
2981
2982
2983 from . import (

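On the xml_serializer.py hunk: XMLTreeBuilder is the long-deprecated alias for XMLParser and is absent from newer Python 3 releases of ElementTree, so dropping it from the re-export tuple is what keeps the import working. For code that still needs the class, the modern spelling below is a reasonable assumption rather than part of this change:

    from xml.etree.ElementTree import XMLParser

    parser = XMLParser()  # the class XMLTreeBuilder used to alias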