Merge lp:~gz/brz/py3_bootstrap2 into lp:brz

Proposed by Martin Packman
Status: Merged
Approved by: Martin Packman
Approved revision: no longer in the source branch.
Merge reported by: The Breezy Bot
Merged at revision: not available
Proposed branch: lp:~gz/brz/py3_bootstrap2
Merge into: lp:brz
Diff against target: 2983 lines (+650/-616)
27 files modified
breezy/_chk_map_py.py (+22/-21)
breezy/_chunks_to_lines_py.py (+3/-3)
breezy/_dirstate_helpers_py.py (+2/-2)
breezy/_groupcompress_py.py (+10/-10)
breezy/bzrworkingtree.py (+5/-5)
breezy/chk_map.py (+38/-38)
breezy/commit.py (+2/-2)
breezy/config.py (+13/-7)
breezy/dirstate.py (+5/-5)
breezy/groupcompress.py (+54/-52)
breezy/index.py (+2/-2)
breezy/inventory.py (+59/-56)
breezy/inventory_delta.py (+52/-52)
breezy/osutils.py (+11/-11)
breezy/pack.py (+7/-7)
breezy/repofmt/groupcompress_repo.py (+7/-7)
breezy/repofmt/pack_repo.py (+3/-3)
breezy/repository.py (+2/-1)
breezy/revision.py (+10/-5)
breezy/sixish.py (+10/-0)
breezy/tests/test__chk_map.py (+96/-94)
breezy/tests/test__chunks_to_lines.py (+40/-38)
breezy/tests/test_inv.py (+37/-36)
breezy/tests/test_inventory_delta.py (+147/-146)
breezy/transport/memory.py (+1/-1)
breezy/versionedfile.py (+10/-10)
breezy/xml_serializer.py (+2/-2)
To merge this branch: bzr merge lp:~gz/brz/py3_bootstrap2
Reviewer         Review Type  Date Requested  Status
Jelmer Vernooij                               Approve
Review via email: mp+325452@code.launchpad.net

Commit message

More progress towards Python 3 support

Description of the change

Another somewhat large branch mostly sorting out string semantics across a range of modules.

Also includes some test fixes so that tests start passing under Python 3, in particular bt.test_inventory_delta and the start of bt.test_inv.
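
By way of illustration (not code from this branch; the variable names are made up), the typical shape of these fixes is making byte-string operations explicit, so they keep working once str means unicode:

    data = b'file: an-id\nparent-id\nname\nrev-id\n'
    sections = data.split(b'\n')        # was data.split('\n')
    rejoined = b'\x00'.join(sections)   # was '\x00'.join(sections)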

Jelmer Vernooij (jelmer):
review: Approve
The Breezy Bot (the-breezy-bot) wrote:

Running landing tests failed
http://10.242.247.184:8080/job/brz-dev/119/

Preview Diff

=== modified file 'breezy/_chk_map_py.py'
--- breezy/_chk_map_py.py 2017-05-22 00:56:52 +0000
+++ breezy/_chk_map_py.py 2017-06-11 01:47:17 +0000
@@ -21,6 +21,7 @@
 import zlib
 import struct
 
+from .sixish import bytesintern
 from .static_tuple import StaticTuple
 
 _LeafNode = None
@@ -44,7 +45,7 @@
 
 def _search_key_16(key):
     """Map the key tuple into a search key string which has 16-way fan out."""
-    return '\x00'.join(['%08X' % _crc32(bit) for bit in key])
+    return b'\x00'.join([b'%08X' % _crc32(bit) for bit in key])
 
 
 def _search_key_255(key):
@@ -53,11 +54,11 @@
     We use 255-way because '\n' is used as a delimiter, and causes problems
     while parsing.
     """
-    bytes = '\x00'.join([struct.pack('>L', _crc32(bit)) for bit in key])
-    return bytes.replace('\n', '_')
+    data = b'\x00'.join([struct.pack('>L', _crc32(bit)) for bit in key])
+    return data.replace(b'\n', b'_')
 
 
-def _deserialise_leaf_node(bytes, key, search_key_func=None):
+def _deserialise_leaf_node(data, key, search_key_func=None):
     """Deserialise bytes, with key key, into a LeafNode.
 
     :param bytes: The bytes of the node.
@@ -72,13 +73,13 @@
     result = _LeafNode(search_key_func=search_key_func)
     # Splitlines can split on '\r' so don't use it, split('\n') adds an
     # extra '' if the bytes ends in a final newline.
-    lines = bytes.split('\n')
+    lines = data.split(b'\n')
     trailing = lines.pop()
-    if trailing != '':
+    if trailing != b'':
         raise AssertionError('We did not have a final newline for %s'
                              % (key,))
     items = {}
-    if lines[0] != 'chkleaf:':
+    if lines[0] != b'chkleaf:':
         raise ValueError("not a serialised leaf node: %r" % bytes)
     maximum_size = int(lines[1])
     width = int(lines[2])
@@ -87,7 +88,7 @@
     pos = 5
     while pos < len(lines):
         line = prefix + lines[pos]
-        elements = line.split('\x00')
+        elements = line.split(b'\x00')
         pos += 1
         if len(elements) != width + 1:
             raise AssertionError(
@@ -96,7 +97,7 @@
         num_value_lines = int(elements[-1])
         value_lines = lines[pos:pos+num_value_lines]
         pos += num_value_lines
-        value = '\n'.join(value_lines)
+        value = b'\n'.join(value_lines)
         items[StaticTuple.from_sequence(elements[:-1])] = value
     if len(items) != length:
         raise AssertionError("item count (%d) mismatch for key %s,"
@@ -115,12 +116,12 @@
     else:
         result._search_prefix = _unknown
     result._common_serialised_prefix = prefix
-    if len(bytes) != result._current_size():
+    if len(data) != result._current_size():
         raise AssertionError('_current_size computed incorrectly')
     return result
 
 
-def _deserialise_internal_node(bytes, key, search_key_func=None):
+def _deserialise_internal_node(data, key, search_key_func=None):
     global _unknown, _LeafNode, _InternalNode
     if _InternalNode is None:
         from breezy import chk_map
@@ -131,12 +132,12 @@
     # Splitlines can split on '\r' so don't use it, remove the extra ''
     # from the result of split('\n') because we should have a trailing
     # newline
-    lines = bytes.split('\n')
-    if lines[-1] != '':
+    lines = data.split(b'\n')
+    if lines[-1] != b'':
         raise ValueError("last line must be ''")
     lines.pop(-1)
     items = {}
-    if lines[0] != 'chknode:':
+    if lines[0] != b'chknode:':
         raise ValueError("not a serialised internal node: %r" % bytes)
     maximum_size = int(lines[1])
     width = int(lines[2])
@@ -144,7 +145,7 @@
     common_prefix = lines[4]
     for line in lines[5:]:
         line = common_prefix + line
-        prefix, flat_key = line.rsplit('\x00', 1)
+        prefix, flat_key = line.rsplit(b'\x00', 1)
         items[prefix] = StaticTuple(flat_key,)
     if len(items) == 0:
         raise AssertionError("We didn't find any item for %s" % key)
@@ -161,9 +162,9 @@
     return result
 
 
-def _bytes_to_text_key(bytes):
+def _bytes_to_text_key(data):
     """Take a CHKInventory value string and return a (file_id, rev_id) tuple"""
-    sections = bytes.split('\n')
-    kind, file_id = sections[0].split(': ')
-    return (intern(file_id), intern(sections[3]))
+    sections = data.split(b'\n')
+    kind, file_id = sections[0].split(b': ')
+    return (bytesintern(file_id), bytesintern(sections[3]))
 
 
=== modified file 'breezy/_chunks_to_lines_py.py'
--- breezy/_chunks_to_lines_py.py 2017-05-21 18:10:28 +0000
+++ breezy/_chunks_to_lines_py.py 2017-06-11 01:47:17 +0000
@@ -41,10 +41,10 @@
         if not chunk:
             # Empty strings are never valid lines
             break
-        elif '\n' in chunk[:-1]:
+        elif b'\n' in chunk[:-1]:
             # This chunk has an extra '\n', so we will have to split it
             break
-        elif chunk[-1] != '\n':
+        elif chunk[-1:] != b'\n':
             # This chunk does not have a trailing newline
             last_no_newline = True
         else:
@@ -56,4 +56,4 @@
 
     # These aren't simple lines, just join and split again.
     from breezy import osutils
-    return osutils._split_lines(''.join(chunks))
+    return osutils._split_lines(b''.join(chunks))
 
=== modified file 'breezy/_dirstate_helpers_py.py'
--- breezy/_dirstate_helpers_py.py 2017-06-10 01:57:23 +0000
+++ breezy/_dirstate_helpers_py.py 2017-06-11 01:47:17 +0000
@@ -184,7 +184,7 @@
     if not isinstance(path2, bytes):
         raise TypeError("'path2' must be a plain string, not %s: %r"
                         % (type(path2), path2))
-    return path1.split('/') < path2.split('/')
+    return path1.split(b'/') < path2.split(b'/')
 
 
 def _lt_path_by_dirblock(path1, path2):
@@ -207,7 +207,7 @@
     dirname1, basename1 = os.path.split(path1)
     key1 = (dirname1.split(b'/'), basename1)
     dirname2, basename2 = os.path.split(path2)
-    key2 = (dirname2.split('/'), basename2)
+    key2 = (dirname2.split(b'/'), basename2)
     return key1 < key2
 
 
 
=== modified file 'breezy/_groupcompress_py.py'
--- breezy/_groupcompress_py.py 2017-06-04 18:09:30 +0000
+++ breezy/_groupcompress_py.py 2017-06-11 01:47:17 +0000
@@ -290,7 +290,7 @@
         if bytes_length is None:
             bytes_length = sum(map(len, new_lines))
         # reserved for content type, content length
-        out_lines = ['', '', encode_base128_int(bytes_length)]
+        out_lines = [b'', b'', encode_base128_int(bytes_length)]
         index_lines = [False, False, False]
         output_handler = _OutputHandler(out_lines, index_lines,
                                         self._MIN_MATCH_BYTES)
@@ -316,26 +316,26 @@
 
 def encode_base128_int(val):
     """Convert an integer into a 7-bit lsb encoding."""
-    bytes = []
+    data = bytearray()
     count = 0
     while val >= 0x80:
-        bytes.append(chr((val | 0x80) & 0xFF))
+        data.append((val | 0x80) & 0xFF)
         val >>= 7
-    bytes.append(chr(val))
-    return ''.join(bytes)
+    data.append(val)
+    return bytes(data)
 
 
-def decode_base128_int(bytes):
+def decode_base128_int(data):
     """Decode an integer from a 7-bit lsb encoding."""
     offset = 0
     val = 0
     shift = 0
-    bval = ord(bytes[offset])
+    bval = ord(data[offset])
     while bval >= 0x80:
         val |= (bval & 0x7F) << shift
         shift += 7
         offset += 1
-        bval = ord(bytes[offset])
+        bval = ord(data[offset])
     val |= bval << shift
     offset += 1
     return val, offset
 
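
For readers unfamiliar with the helpers rewritten above: this is the usual base-128 varint, seven bits per byte with the high bit as a continuation flag, least significant group first. Using a bytearray avoids the chr()/ord() dance, since appending an int works on both Python versions. A standalone sketch mirroring the new encode path (encode_varint is a hypothetical name):

    def encode_varint(val):
        out = bytearray()
        while val >= 0x80:
            out.append((val | 0x80) & 0xFF)  # low 7 bits, continuation bit set
            val >>= 7
        out.append(val)                      # final byte, high bit clear
        return bytes(out)

    assert encode_varint(300) == b'\xac\x02'  # 300 = 0b10_0101100

Note that decode_base128_int still calls ord() on an indexed byte, which assumes Python 2 indexing; presumably that gets revisited once callers pass bytes on Python 3.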
=== modified file 'breezy/bzrworkingtree.py'
--- breezy/bzrworkingtree.py 2017-06-10 18:44:23 +0000
+++ breezy/bzrworkingtree.py 2017-06-11 01:47:17 +0000
@@ -78,11 +78,11 @@
     )
 
 
-MERGE_MODIFIED_HEADER_1 = "BZR merge-modified list format 1"
+MERGE_MODIFIED_HEADER_1 = b"BZR merge-modified list format 1"
 # TODO: Modifying the conflict objects or their type is currently nearly
 # impossible as there is no clear relationship between the working tree format
 # and the conflict list file format.
-CONFLICT_HEADER_1 = "BZR conflict list format 1"
+CONFLICT_HEADER_1 = b"BZR conflict list format 1"
 
 
 class InventoryWorkingTree(WorkingTree,MutableInventoryTree):
@@ -382,7 +382,7 @@
             return _mod_conflicts.ConflictList()
         try:
             try:
-                if next(confile) != CONFLICT_HEADER_1 + '\n':
+                if next(confile) != CONFLICT_HEADER_1 + b'\n':
                     raise errors.ConflictFormatError()
             except StopIteration:
                 raise errors.ConflictFormatError()
@@ -650,7 +650,7 @@
 
     def _put_rio(self, filename, stanzas, header):
         self._must_be_locked()
-        my_file = _mod_rio.rio_file(stanzas, header.encode('ascii'))
+        my_file = _mod_rio.rio_file(stanzas, header)
         self._transport.put_file(filename, my_file,
                                  mode=self.controldir._get_file_mode())
 
@@ -680,7 +680,7 @@
         try:
             merge_hashes = {}
             try:
-                if next(hashfile) != MERGE_MODIFIED_HEADER_1 + '\n':
+                if next(hashfile) != MERGE_MODIFIED_HEADER_1 + b'\n':
                     raise errors.MergeModifiedFormatError()
             except StopIteration:
                 raise errors.MergeModifiedFormatError()
 
=== modified file 'breezy/chk_map.py'
--- breezy/chk_map.py 2017-06-05 20:48:31 +0000
+++ breezy/chk_map.py 2017-06-11 01:47:17 +0000
@@ -100,7 +100,7 @@
 
 def _search_key_plain(key):
     """Map the key tuple into a search string that just uses the key bytes."""
-    return '\x00'.join(key)
+    return b'\x00'.join(key)
 
 
 search_key_registry = registry.Registry()
@@ -197,8 +197,8 @@
         self._ensure_root()
         res = self._dump_tree_node(self._root_node, prefix='', indent='',
                                    include_keys=include_keys)
-        res.append('') # Give a trailing '\n'
-        return '\n'.join(res)
+        res.append(b'') # Give a trailing '\n'
+        return b'\n'.join(res)
 
     def _dump_tree_node(self, node, prefix, indent, include_keys=True):
         """For this node and all children, generate a string representation."""
@@ -208,11 +208,11 @@
         else:
             node_key = node.key()
             if node_key is not None:
-                key_str = ' %s' % (node_key[0],)
+                key_str = b' %s' % (node_key[0],)
             else:
-                key_str = ' None'
-        result.append('%s%r %s%s' % (indent, prefix, node.__class__.__name__,
+                key_str = b' None'
+        result.append(b'%s%r %s%s' % (indent, prefix, node.__class__.__name__,
                                      key_str))
         if isinstance(node, InternalNode):
             # Trigger all child nodes to get loaded
             list(node._iter_nodes(self._store))
@@ -223,7 +223,7 @@
             for key, value in sorted(viewitems(node._items)):
                 # Don't use prefix nor indent here to line up when used in
                 # tests in conjunction with assertEqualDiff
-                result.append('      %r %r' % (tuple(key), value))
+                result.append(b'      %r %r' % (tuple(key), value))
         return result
 
     @classmethod
@@ -680,7 +680,7 @@
         if not common_prefix:
            # if common_prefix is the empty string, then we know it won't
            # change further
-            return ''
+            return b''
         return common_prefix
 
 
@@ -786,7 +786,7 @@
         # TODO: Should probably be done without actually joining the key, but
         # then that can be done via the C extension
         return (len(self._serialise_key(key)) + 1
-                + len(str(value.count('\n'))) + 1
+                + len(str(value.count(b'\n'))) + 1
                 + len(value) + 1)
 
     def _search_key(self, key):
@@ -853,7 +853,7 @@
             # may get a '\00' node anywhere, but won't have keys of
             # different lengths.
             if len(prefix) < split_at:
-                prefix += '\x00'*(split_at - len(prefix))
+                prefix += b'\x00'*(split_at - len(prefix))
             if prefix not in result:
                 node = LeafNode(search_key_func=self._search_key_func)
                 node.set_maximum_size(self._maximum_size)
@@ -889,7 +889,7 @@
             raise AssertionError('%r must be known' % self._search_prefix)
         return self._search_prefix, [("", self)]
 
-    _serialise_key = '\x00'.join
+    _serialise_key = b'\x00'.join
 
     def serialise(self, store):
         """Serialise the LeafNode to store.
@@ -897,22 +897,22 @@
         :param store: A VersionedFiles honouring the CHK extensions.
         :return: An iterable of the keys inserted by this operation.
         """
-        lines = ["chkleaf:\n"]
-        lines.append("%d\n" % self._maximum_size)
-        lines.append("%d\n" % self._key_width)
-        lines.append("%d\n" % self._len)
+        lines = [b"chkleaf:\n"]
+        lines.append(b"%d\n" % self._maximum_size)
+        lines.append(b"%d\n" % self._key_width)
+        lines.append(b"%d\n" % self._len)
         if self._common_serialised_prefix is None:
-            lines.append('\n')
+            lines.append(b'\n')
             if len(self._items) != 0:
                 raise AssertionError('If _common_serialised_prefix is None'
                                      ' we should have no items')
         else:
-            lines.append('%s\n' % (self._common_serialised_prefix,))
+            lines.append(b'%s\n' % (self._common_serialised_prefix,))
             prefix_len = len(self._common_serialised_prefix)
         for key, value in sorted(viewitems(self._items)):
             # Always add a final newline
-            value_lines = osutils.chunks_to_lines([value + '\n'])
-            serialized = "%s\x00%s\n" % (self._serialise_key(key),
+            value_lines = osutils.chunks_to_lines([value + b'\n'])
+            serialized = b"%s\x00%d\n" % (self._serialise_key(key),
                                          len(value_lines))
             if not serialized.startswith(self._common_serialised_prefix):
                 raise AssertionError('We thought the common prefix was %r'
@@ -921,11 +921,11 @@
             lines.append(serialized[prefix_len:])
             lines.extend(value_lines)
         sha1, _, _ = store.add_lines((None,), (), lines)
-        self._key = StaticTuple("sha1:" + sha1,).intern()
-        bytes = ''.join(lines)
-        if len(bytes) != self._current_size():
+        self._key = StaticTuple(b"sha1:" + sha1,).intern()
+        data = b''.join(lines)
+        if len(data) != self._current_size():
             raise AssertionError('Invalid _current_size')
-        _get_cache()[self._key] = bytes
+        _get_cache()[self._key] = data
         return [self._key]
 
     def refs(self):
@@ -1304,34 +1304,34 @@
                 continue
             for key in node.serialise(store):
                 yield key
-        lines = ["chknode:\n"]
-        lines.append("%d\n" % self._maximum_size)
-        lines.append("%d\n" % self._key_width)
-        lines.append("%d\n" % self._len)
+        lines = [b"chknode:\n"]
+        lines.append(b"%d\n" % self._maximum_size)
+        lines.append(b"%d\n" % self._key_width)
+        lines.append(b"%d\n" % self._len)
         if self._search_prefix is None:
             raise AssertionError("_search_prefix should not be None")
-        lines.append('%s\n' % (self._search_prefix,))
+        lines.append(b'%s\n' % (self._search_prefix,))
         prefix_len = len(self._search_prefix)
         for prefix, node in sorted(viewitems(self._items)):
             if isinstance(node, StaticTuple):
                 key = node[0]
             else:
                 key = node._key[0]
-            serialised = "%s\x00%s\n" % (prefix, key)
+            serialised = b"%s\x00%s\n" % (prefix, key)
             if not serialised.startswith(self._search_prefix):
                 raise AssertionError("prefixes mismatch: %s must start with %s"
                                      % (serialised, self._search_prefix))
             lines.append(serialised[prefix_len:])
         sha1, _, _ = store.add_lines((None,), (), lines)
-        self._key = StaticTuple("sha1:" + sha1,).intern()
-        _get_cache()[self._key] = ''.join(lines)
+        self._key = StaticTuple(b"sha1:" + sha1,).intern()
+        _get_cache()[self._key] = b''.join(lines)
         yield self._key
 
     def _search_key(self, key):
         """Return the serialised key for key in this node."""
         # search keys are fixed width. All will be self._node_width wide, so we
         # pad as necessary.
-        return (self._search_key_func(key) + '\x00'*self._node_width)[:self._node_width]
+        return (self._search_key_func(key) + b'\x00'*self._node_width)[:self._node_width]
 
     def _search_prefix_filter(self, key):
         """Serialise key for use as a prefix filter in iteritems."""
@@ -1450,12 +1450,12 @@
             return new_leaf
 
 
-def _deserialise(bytes, key, search_key_func):
+def _deserialise(data, key, search_key_func):
     """Helper for repositorydetails - convert bytes to a node."""
-    if bytes.startswith("chkleaf:\n"):
-        node = LeafNode.deserialise(bytes, key, search_key_func=search_key_func)
-    elif bytes.startswith("chknode:\n"):
-        node = InternalNode.deserialise(bytes, key,
+    if data.startswith(b"chkleaf:\n"):
+        node = LeafNode.deserialise(data, key, search_key_func=search_key_func)
+    elif data.startswith(b"chknode:\n"):
+        node = InternalNode.deserialise(data, key,
                                         search_key_func=search_key_func)
     else:
         raise AssertionError("Unknown node type.")
 
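
One subtle fix in the serialise() hunks above: the leaf item header changes from "%s\x00%s\n" to b"%s\x00%d\n". Bytes %-formatting (PEP 461, Python 3.5+) accepts %d for integers, but b'%s' only accepts bytes-like operands, so interpolating len(value_lines) with %s would raise TypeError on Python 3. Illustrative only:

    header = b'key\x00%d\n' % (2,)   # fine: b'key\x002\n'
    # b'key\x00%s\n' % (2,)          # TypeError on Python 3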
=== modified file 'breezy/commit.py'
--- breezy/commit.py 2017-05-30 20:17:23 +0000
+++ breezy/commit.py 2017-06-11 01:47:17 +0000
@@ -280,7 +280,7 @@
             raise errors.RootNotRich()
         if message_callback is None:
             if message is not None:
-                if isinstance(message, str):
+                if isinstance(message, bytes):
                     message = message.decode(get_user_encoding())
                 message_callback = lambda x: message
             else:
@@ -657,7 +657,7 @@
         """
         exclude = self.exclude
         specific_files = self.specific_files
-        mutter("Selecting files for commit with filter %s", specific_files)
+        mutter("Selecting files for commit with filter %r", specific_files)
 
         self._check_strict()
         if self.use_record_iter_changes:
 
=== modified file 'breezy/config.py'
--- breezy/config.py 2017-06-10 12:56:18 +0000
+++ breezy/config.py 2017-06-11 01:47:17 +0000
@@ -477,10 +477,12 @@
         If no username can be found, errors.NoWhoami exception is raised.
         """
         v = os.environ.get('BRZ_EMAIL')
-        if v and not PY3:
-            return v.decode(osutils.get_user_encoding())
+        if v:
+            if not PY3:
+                v = v.decode(osutils.get_user_encoding())
+            return v
         v = self._get_user_id()
-        if v and not PY3:
+        if v:
             return v
         return default_email()
 
@@ -1497,11 +1499,15 @@
 
 def default_email():
     v = os.environ.get('BRZ_EMAIL')
-    if v and not PY3:
-        return v.decode(osutils.get_user_encoding())
+    if v:
+        if not PY3:
+            v = v.decode(osutils.get_user_encoding())
+        return v
     v = os.environ.get('EMAIL')
-    if v and not PY3:
-        return v.decode(osutils.get_user_encoding())
+    if v:
+        if not PY3:
+            v = v.decode(osutils.get_user_encoding())
+        return v
     name, email = _auto_user_id()
     if name and email:
         return u'%s <%s>' % (name, email)
 
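
The restructured guards also fix a real bug: the old form (if v and not PY3: return ...) returned BRZ_EMAIL and EMAIL only on Python 2 and fell through to the defaults on Python 3. The new shape decodes only where needed, since os.environ holds byte strings on Python 2 but text on Python 3. Roughly:

    v = os.environ.get('EMAIL')
    if v:
        if not PY3:                  # decode only on Python 2
            v = v.decode(osutils.get_user_encoding())
        return v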
=== modified file 'breezy/dirstate.py'
--- breezy/dirstate.py 2017-06-10 02:39:00 +0000
+++ breezy/dirstate.py 2017-06-11 01:47:17 +0000
@@ -3541,9 +3541,13 @@
             source_details = DirState.NULL_PARENT_DETAILS
         else:
             source_details = entry[1][self.source_index]
+        # GZ 2017-06-09: Eck, more sets.
+        _fdltr = {b'f', b'd', b'l', b't', b'r'}
+        _fdlt = {b'f', b'd', b'l', b't'}
+        _ra = (b'r', b'a')
         target_details = entry[1][self.target_index]
         target_minikind = target_details[0]
-        if path_info is not None and target_minikind in 'fdlt':
+        if path_info is not None and target_minikind in _fdlt:
             if not (self.target_index == 0):
                 raise AssertionError()
             link_or_sha1 = update_entry(self.state, entry,
@@ -3555,10 +3559,6 @@
             link_or_sha1 = None
         file_id = entry[0][2]
         source_minikind = source_details[0]
-        # GZ 2017-06-09: Eck, more sets.
-        _fdltr = {b'f', b'd', b'l', b't', b'r'}
-        _fdlt = {b'f', b'd', b'l', b't'}
-        _ra = (b'r', b'a')
         if source_minikind in _fdltr and target_minikind in _fdlt:
             # claimed content in both: diff
             # r | fdlt | | add source to search, add id path move and perform
 
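
The _fdlt sets are moved above their first use because the first hunk now needs them for target_minikind; the old membership test (target_minikind in 'fdlt') only works while minikinds are single-character str values. With bytes, b'f' in 'fdlt' raises TypeError on Python 3, and indexing gives an int anyway, so the code tests one-byte slices against sets of bytes. Illustrative only:

    _fdlt = {b'f', b'd', b'l', b't'}
    details = b'f'
    assert details[0:1] in _fdlt   # slice, not index: details[0] is an int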
=== modified file 'breezy/groupcompress.py'
--- breezy/groupcompress.py 2017-06-05 20:48:31 +0000
+++ breezy/groupcompress.py 2017-06-11 01:47:17 +0000
@@ -61,7 +61,7 @@
 BATCH_SIZE = 2**16
 
 # osutils.sha_string('')
-_null_sha1 = 'da39a3ee5e6b4b0d3255bfef95601890afd80709'
+_null_sha1 = b'da39a3ee5e6b4b0d3255bfef95601890afd80709'
 
 def sort_gc_optimal(parent_map):
     """Sort and group the keys in parent_map into groupcompress order.
@@ -75,8 +75,8 @@
     # properly grouped by file-id.
     per_prefix_map = {}
     for key, value in viewitems(parent_map):
-        if isinstance(key, str) or len(key) == 1:
-            prefix = ''
+        if isinstance(key, bytes) or len(key) == 1:
+            prefix = b''
         else:
             prefix = key[0]
         try:
@@ -102,9 +102,9 @@
     """
 
     # Group Compress Block v1 Zlib
-    GCB_HEADER = 'gcb1z\n'
+    GCB_HEADER = b'gcb1z\n'
     # Group Compress Block v1 Lzma
-    GCB_LZ_HEADER = 'gcb1l\n'
+    GCB_LZ_HEADER = b'gcb1l\n'
     GCB_KNOWN_HEADERS = (GCB_HEADER, GCB_LZ_HEADER)
 
     def __init__(self):
@@ -141,7 +141,7 @@
         # Expand the content if required
         if self._content is None:
             if self._content_chunks is not None:
-                self._content = ''.join(self._content_chunks)
+                self._content = b''.join(self._content_chunks)
                 self._content_chunks = None
         if self._content is None:
             # We join self._z_content_chunks here, because if we are
@@ -149,9 +149,9 @@
             # chunk
             if self._z_content_chunks is None:
                 raise AssertionError('No content to decompress')
-            z_content = ''.join(self._z_content_chunks)
-            if z_content == '':
-                self._content = ''
+            z_content = b''.join(self._z_content_chunks)
+            if z_content == b'':
+                self._content = b''
             elif self._compressor_name == 'lzma':
                 # We don't do partial lzma decomp yet
                 import pylzma
@@ -201,7 +201,7 @@
                 # The stream is finished
                 self._z_content_decompressor = None
 
-    def _parse_bytes(self, bytes, pos):
+    def _parse_bytes(self, data, pos):
         """Read the various lengths from the header.
 
         This also populates the various 'compressed' buffers.
@@ -211,17 +211,17 @@
         # At present, we have 2 integers for the compressed and uncompressed
         # content. In base10 (ascii) 14 bytes can represent > 1TB, so to avoid
         # checking too far, cap the search to 14 bytes.
-        pos2 = bytes.index('\n', pos, pos + 14)
-        self._z_content_length = int(bytes[pos:pos2])
+        pos2 = data.index(b'\n', pos, pos + 14)
+        self._z_content_length = int(data[pos:pos2])
         pos = pos2 + 1
-        pos2 = bytes.index('\n', pos, pos + 14)
-        self._content_length = int(bytes[pos:pos2])
+        pos2 = data.index(b'\n', pos, pos + 14)
+        self._content_length = int(data[pos:pos2])
         pos = pos2 + 1
-        if len(bytes) != (pos + self._z_content_length):
+        if len(data) != (pos + self._z_content_length):
             # XXX: Define some GCCorrupt error ?
             raise AssertionError('Invalid bytes: (%d) != %d + %d' %
-                (len(bytes), pos, self._z_content_length))
-        self._z_content_chunks = (bytes[pos:],)
+                (len(data), pos, self._z_content_length))
+        self._z_content_chunks = (data[pos:],)
 
     @property
     def _z_content(self):
@@ -230,7 +230,7 @@
         Meant only to be used by the test suite.
         """
         if self._z_content_chunks is not None:
-            return ''.join(self._z_content_chunks)
+            return b''.join(self._z_content_chunks)
         return None
 
     @classmethod
@@ -257,17 +257,17 @@
         :return: The bytes for the content
         """
         if start == end == 0:
-            return ''
+            return b''
         self._ensure_content(end)
         # The bytes are 'f' or 'd' for the type, then a variable-length
         # base128 integer for the content size, then the actual content
         # We know that the variable-length integer won't be longer than 5
         # bytes (it takes 5 bytes to encode 2^32)
         c = self._content[start]
-        if c == 'f':
+        if c == b'f':
             type = 'fulltext'
         else:
-            if c != 'd':
+            if c != b'd':
                 raise ValueError('Unknown content control code: %s'
                                  % (c,))
             type = 'delta'
@@ -277,11 +277,10 @@
         if end != content_start + content_len:
             raise ValueError('end != len according to field header'
                              ' %s != %s' % (end, content_start + content_len))
-        if c == 'f':
-            bytes = self._content[content_start:end]
-        elif c == 'd':
-            bytes = apply_delta_to_source(self._content, content_start, end)
-        return bytes
+        if c == b'f':
+            return self._content[content_start:end]
+        # Must be type delta as checked above
+        return apply_delta_to_source(self._content, content_start, end)
 
     def set_chunked_content(self, content_chunks, length):
         """Set the content of this block to the given chunks."""
@@ -324,7 +323,7 @@
         """Create the byte stream as a series of 'chunks'"""
         self._create_z_content()
         header = self.GCB_HEADER
-        chunks = ['%s%d\n%d\n'
+        chunks = [b'%s%d\n%d\n'
                   % (header, self._z_content_length, self._content_length),
                   ]
         chunks.extend(self._z_content_chunks)
@@ -334,7 +333,7 @@
     def to_bytes(self):
         """Encode the information into a byte stream."""
         total_len, chunks = self.to_chunks()
-        return ''.join(chunks)
+        return b''.join(chunks)
 
     def _dump(self, include_text=False):
         """Take this block, and spit out a human-readable structure.
@@ -352,7 +351,7 @@
         while pos < self._content_length:
             kind = self._content[pos]
             pos += 1
-            if kind not in ('f', 'd'):
+            if kind not in (b'f', b'd'):
                 raise ValueError('invalid kind character: %r' % (kind,))
             content_len, len_len = decode_base128_int(
                 self._content[pos:pos + 5])
@@ -360,18 +359,18 @@
             if content_len + pos > self._content_length:
                 raise ValueError('invalid content_len %d for record @ pos %d'
                                  % (content_len, pos - len_len - 1))
-            if kind == 'f': # Fulltext
+            if kind == b'f': # Fulltext
                 if include_text:
                     text = self._content[pos:pos+content_len]
-                    result.append(('f', content_len, text))
+                    result.append((b'f', content_len, text))
                 else:
-                    result.append(('f', content_len))
-            elif kind == 'd': # Delta
+                    result.append((b'f', content_len))
+            elif kind == b'd': # Delta
                 delta_content = self._content[pos:pos+content_len]
                 delta_info = []
                 # The first entry in a delta is the decompressed length
                 decomp_len, delta_pos = decode_base128_int(delta_content)
-                result.append(('d', content_len, decomp_len, delta_info))
+                result.append((b'd', content_len, decomp_len, delta_info))
                 measured_len = 0
                 while delta_pos < content_len:
                     c = ord(delta_content[delta_pos])
@@ -382,16 +381,16 @@
                                                               delta_pos)
                        if include_text:
                            text = self._content[offset:offset+length]
-                            delta_info.append(('c', offset, length, text))
+                            delta_info.append((b'c', offset, length, text))
                        else:
-                            delta_info.append(('c', offset, length))
+                            delta_info.append((b'c', offset, length))
                        measured_len += length
                     else: # Insert
                         if include_text:
                             txt = delta_content[delta_pos:delta_pos+c]
                         else:
                             txt = ''
-                        delta_info.append(('i', c, txt))
+                        delta_info.append((b'i', c, txt))
                         measured_len += c
                         delta_pos += c
                 if delta_pos != content_len:
@@ -447,7 +446,7 @@
                 # wire bytes, something...
                 return self._manager._wire_bytes()
             else:
-                return ''
+                return b''
         if storage_kind in ('fulltext', 'chunked'):
             if self._bytes is None:
                 # Grab and cache the raw bytes for this entry
@@ -842,7 +841,9 @@
         if sha1 == nostore_sha:
             raise errors.ExistingContent()
         if key[-1] is None:
-            key = key[:-1] + ('sha1:' + sha1,)
+            # GZ 2017-06-10: Seems perverse to have to encode here.
+            sha1 = sha1.encode('ascii')
+            key = key[:-1] + (b'sha1:' + sha1,)
 
         start, end, type = self._compress(key, bytes, len(bytes) / 2, soft)
         return sha1, start, end, type
@@ -875,7 +876,7 @@
         (start_byte, start_chunk, end_byte, end_chunk) = self.labels_deltas[key]
         delta_chunks = self.chunks[start_chunk:end_chunk]
         stored_bytes = ''.join(delta_chunks)
-        if stored_bytes[0] == 'f':
+        if stored_bytes[0] == b'f':
             fulltext_len, offset = decode_base128_int(stored_bytes[1:10])
             data_len = fulltext_len + 1 + offset
             if data_len != len(stored_bytes):
@@ -947,14 +948,14 @@
         if delta_length > max_delta_size:
             # The delta is longer than the fulltext, insert a fulltext
             type = 'fulltext'
-            out_lines = ['f', encode_base128_int(input_len)]
+            out_lines = [b'f', encode_base128_int(input_len)]
             out_lines.extend(new_lines)
             index_lines = [False, False]
             index_lines.extend([True] * len(new_lines))
         else:
             # this is a worthy delta, output it
             type = 'delta'
-            out_lines[0] = 'd'
+            out_lines[0] = b'd'
             # Update the delta_length to include those two encoded integers
             out_lines[1] = encode_base128_int(delta_length)
             # Before insertion
@@ -1014,12 +1015,12 @@
             enc_length = encode_base128_int(len(bytes))
             len_mini_header = 1 + len(enc_length)
             self._delta_index.add_source(bytes, len_mini_header)
-            new_chunks = ['f', enc_length, bytes]
+            new_chunks = [b'f', enc_length, bytes]
         else:
             type = 'delta'
             enc_length = encode_base128_int(len(delta))
             len_mini_header = 1 + len(enc_length)
-            new_chunks = ['d', enc_length, delta]
+            new_chunks = [b'd', enc_length, delta]
             self._delta_index.add_delta_source(delta, len_mini_header)
         # Before insertion
         start = self.endpoint
@@ -1715,13 +1716,13 @@
             # the fulltext content at this point. Note that sometimes we
            # will want it later (streaming CHK pages), but most of the
            # time we won't (everything else)
-            bytes = ''.join(chunks)
+            data = b''.join(chunks)
             del chunks
             index, start, length = self._access.add_raw_records(
-                [(None, len(bytes))], bytes)[0]
+                [(None, len(data))], data)[0]
             nodes = []
             for key, reads, refs in keys_to_add:
-                nodes.append((key, "%d %d %s" % (start, length, reads), refs))
+                nodes.append((key, b"%d %d %s" % (start, length, reads), refs))
             self._index.add_records(nodes, random_id=random_id)
             self._unadded_refs = {}
             del keys_to_add[:]
@@ -1777,7 +1778,7 @@
                     ' the current record, we cannot be positive'
                    ' that the appropriate content was inserted.'
                    )
-                value = "%d %d %d %d" % (block_start, block_length,
+                value = b"%d %d %d %d" % (block_start, block_length,
                                          record._start, record._end)
                 nodes = [(record.key, value, (record.parents,))]
                 # TODO: Consider buffering up many nodes to be added, not
@@ -1827,7 +1828,7 @@
                  type) = self._compressor.compress(record.key, bytes,
                                                    record.sha1)
             if record.key[-1] is None:
-                key = record.key[:-1] + ('sha1:' + found_sha1,)
+                key = record.key[:-1] + (b'sha1:' + found_sha1,)
             else:
                 key = record.key
             self._unadded_refs[key] = record.parents
@@ -1838,7 +1839,8 @@
             else:
                 parents = None
             refs = static_tuple.StaticTuple(parents)
-            keys_to_add.append((key, '%d %d' % (start_point, end_point), refs))
+            keys_to_add.append(
+                (key, b'%d %d' % (start_point, end_point), refs))
         if len(keys_to_add):
             flush()
         self._compressor = None
 
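
On the "Seems perverse" comment above: the sha1 values here are native str hexdigests (as produced by hashlib), so on Python 3 they must be encoded before joining with the b'sha1:' key prefix. Roughly:

    import hashlib
    hexdigest = hashlib.sha1(b'').hexdigest()          # str on Python 3
    key_suffix = b'sha1:' + hexdigest.encode('ascii')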
=== modified file 'breezy/index.py'
--- breezy/index.py 2017-06-10 01:57:00 +0000
+++ breezy/index.py 2017-06-11 01:47:17 +0000
@@ -56,8 +56,8 @@
 _SIGNATURE = b"Bazaar Graph Index 1\n"
 
 
-_whitespace_re = re.compile('[\t\n\x0b\x0c\r\x00 ]')
-_newline_null_re = re.compile('[\n\0]')
+_whitespace_re = re.compile(b'[\t\n\x0b\x0c\r\x00 ]')
+_newline_null_re = re.compile(b'[\n\0]')
 
 
 def _has_key_from_parent_map(self, key):
 
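
The regex change follows the same rule as the literals: a compiled pattern is either str or bytes on Python 3 and can only be matched against the same type, and index keys are byte strings. Illustrative only:

    import re
    _whitespace_re = re.compile(b'[\t\n\x0b\x0c\r\x00 ]')
    _whitespace_re.search(b'a key')    # works
    # _whitespace_re.search('a key')   # TypeError on Python 3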
=== modified file 'breezy/inventory.py'
--- breezy/inventory.py 2017-06-10 01:57:00 +0000
+++ breezy/inventory.py 2017-06-11 01:47:17 +0000
@@ -49,6 +49,8 @@
     trace,
     )
 from .sixish import (
+    bytesintern,
+    PY3,
     viewitems,
     viewvalues,
     )
@@ -217,7 +219,7 @@
         Traceback (most recent call last):
         InvalidEntryName: Invalid entry name: src/hello.c
         """
-        if '/' in name or '\\' in name:
+        if u'/' in name or u'\\' in name:
             raise errors.InvalidEntryName(name=name)
         self.file_id = file_id
         self.revision = None
@@ -388,8 +390,8 @@
         # to provide a per-fileid log. The hash of every directory content is
         # "da..." below (the sha1sum of '').
         checker.add_pending_item(rev_id,
-            ('texts', self.file_id, self.revision), 'text',
-            'da39a3ee5e6b4b0d3255bfef95601890afd80709')
+            (b'texts', self.file_id, self.revision), b'text',
+            b'da39a3ee5e6b4b0d3255bfef95601890afd80709')
 
     def copy(self):
         other = InventoryDirectory(self.file_id, self.name, self.parent_id)
@@ -428,7 +430,7 @@
         """See InventoryEntry._check"""
         # TODO: check size too.
         checker.add_pending_item(tree_revision_id,
-            ('texts', self.file_id, self.revision), 'text',
+            (b'texts', self.file_id, self.revision), b'text',
             self.text_sha1)
         if self.text_size is None:
             checker._report_items.append(
@@ -528,8 +530,8 @@
                 % (self.file_id, tree_revision_id))
         # Symlinks are stored as ''
         checker.add_pending_item(tree_revision_id,
-            ('texts', self.file_id, self.revision), 'text',
-            'da39a3ee5e6b4b0d3255bfef95601890afd80709')
+            (b'texts', self.file_id, self.revision), b'text',
+            b'da39a3ee5e6b4b0d3255bfef95601890afd80709')
 
     def copy(self):
         other = InventoryLink(self.file_id, self.name, self.parent_id)
@@ -1398,25 +1400,25 @@
         if entry.parent_id is not None:
             parent_str = entry.parent_id
         else:
-            parent_str = ''
+            parent_str = b''
         name_str = entry.name.encode("utf8")
         if entry.kind == 'file':
             if entry.executable:
-                exec_str = "Y"
+                exec_str = b"Y"
             else:
-                exec_str = "N"
-            return "file: %s\n%s\n%s\n%s\n%s\n%d\n%s" % (
+                exec_str = b"N"
+            return b"file: %s\n%s\n%s\n%s\n%s\n%d\n%s" % (
                 entry.file_id, parent_str, name_str, entry.revision,
                 entry.text_sha1, entry.text_size, exec_str)
         elif entry.kind == 'directory':
-            return "dir: %s\n%s\n%s\n%s" % (
+            return b"dir: %s\n%s\n%s\n%s" % (
                 entry.file_id, parent_str, name_str, entry.revision)
         elif entry.kind == 'symlink':
-            return "symlink: %s\n%s\n%s\n%s\n%s" % (
+            return b"symlink: %s\n%s\n%s\n%s\n%s" % (
                 entry.file_id, parent_str, name_str, entry.revision,
                 entry.symlink_target.encode("utf8"))
         elif entry.kind == 'tree-reference':
-            return "tree: %s\n%s\n%s\n%s\n%s" % (
+            return b"tree: %s\n%s\n%s\n%s\n%s" % (
                 entry.file_id, parent_str, name_str, entry.revision,
                 entry.reference_revision)
         else:
@@ -1534,43 +1536,43 @@
         return other
 
     @staticmethod
-    def _bytes_to_utf8name_key(bytes):
-        """Get the file_id, revision_id key out of bytes."""
+    def _bytes_to_utf8name_key(data):
+        """Get the file_id, revision_id key out of data."""
         # We don't normally care about name, except for times when we want
         # to filter out empty names because of non rich-root...
-        sections = bytes.split('\n')
-        kind, file_id = sections[0].split(': ')
-        return (sections[2], intern(file_id), intern(sections[3]))
+        sections = data.split(b'\n')
+        kind, file_id = sections[0].split(b': ')
+        return (sections[2], bytesintern(file_id), bytesintern(sections[3]))
 
     def _bytes_to_entry(self, bytes):
         """Deserialise a serialised entry."""
-        sections = bytes.split('\n')
-        if sections[0].startswith("file: "):
+        sections = bytes.split(b'\n')
+        if sections[0].startswith(b"file: "):
             result = InventoryFile(sections[0][6:],
                                    sections[2].decode('utf8'),
                                    sections[1])
             result.text_sha1 = sections[4]
             result.text_size = int(sections[5])
-            result.executable = sections[6] == "Y"
-        elif sections[0].startswith("dir: "):
+            result.executable = sections[6] == b"Y"
+        elif sections[0].startswith(b"dir: "):
             result = CHKInventoryDirectory(sections[0][5:],
                                            sections[2].decode('utf8'),
                                            sections[1], self)
-        elif sections[0].startswith("symlink: "):
+        elif sections[0].startswith(b"symlink: "):
             result = InventoryLink(sections[0][9:],
                                    sections[2].decode('utf8'),
                                    sections[1])
             result.symlink_target = sections[4].decode('utf8')
-        elif sections[0].startswith("tree: "):
+        elif sections[0].startswith(b"tree: "):
             result = TreeReference(sections[0][6:],
                                    sections[2].decode('utf8'),
                                    sections[1])
             result.reference_revision = sections[4]
         else:
             raise ValueError("Not a serialised entry %r" % bytes)
-        result.file_id = intern(result.file_id)
-        result.revision = intern(sections[3])
-        if result.parent_id == '':
+        result.file_id = bytesintern(result.file_id)
+        result.revision = bytesintern(sections[3])
+        if result.parent_id == b'':
             result.parent_id = None
         self._fileid_to_entry_cache[result.file_id] = result
         return result
@@ -1754,18 +1756,18 @@
             for.
         :return: A CHKInventory
         """
-        lines = bytes.split('\n')
-        if lines[-1] != '':
+        lines = bytes.split(b'\n')
+        if lines[-1] != b'':
             raise AssertionError('bytes to deserialize must end with an eol')
         lines.pop()
-        if lines[0] != 'chkinventory:':
+        if lines[0] != b'chkinventory:':
             raise ValueError("not a serialised CHKInventory: %r" % bytes)
         info = {}
-        allowed_keys = frozenset(['root_id', 'revision_id', 'search_key_name',
-                                  'parent_id_basename_to_file_id',
-                                  'id_to_entry'])
+        allowed_keys = frozenset((b'root_id', b'revision_id',
+                                  b'parent_id_basename_to_file_id',
+                                  b'search_key_name', b'id_to_entry'))
         for line in lines[1:]:
-            key, value = line.split(': ', 1)
+            key, value = line.split(b': ', 1)
             if key not in allowed_keys:
                 raise errors.BzrError('Unknown key in inventory: %r\n%r'
                                       % (key, bytes))
@@ -1773,16 +1775,16 @@
                 raise errors.BzrError('Duplicate key in inventory: %r\n%r'
                                       % (key, bytes))
             info[key] = value
-        revision_id = intern(info['revision_id'])
-        root_id = intern(info['root_id'])
-        search_key_name = intern(info.get('search_key_name', 'plain'))
-        parent_id_basename_to_file_id = intern(info.get(
-            'parent_id_basename_to_file_id', None))
-        if not parent_id_basename_to_file_id.startswith('sha1:'):
+        revision_id = bytesintern(info[b'revision_id'])
+        root_id = bytesintern(info[b'root_id'])
+        search_key_name = bytesintern(info.get(b'search_key_name', b'plain'))
+        parent_id_basename_to_file_id = bytesintern(info.get(
+            b'parent_id_basename_to_file_id', None))
+        if not parent_id_basename_to_file_id.startswith(b'sha1:'):
             raise ValueError('parent_id_basename_to_file_id should be a sha1'
                              ' key not %r' % (parent_id_basename_to_file_id,))
-        id_to_entry = info['id_to_entry']
-        if not id_to_entry.startswith('sha1:'):
+        id_to_entry = info[b'id_to_entry']
+        if not id_to_entry.startswith(b'sha1:'):
             raise ValueError('id_to_entry should be a sha1'
                              ' key not %r' % (id_to_entry,))
 
@@ -1790,7 +1792,7 @@
         result.revision_id = revision_id
         result.root_id = root_id
         search_key_func = chk_map.search_key_registry.get(
-            result._search_key_name)
+            result._search_key_name.decode("ascii"))
         if parent_id_basename_to_file_id is not None:
             result.parent_id_basename_to_file_id = chk_map.CHKMap(
                 chk_store, StaticTuple(parent_id_basename_to_file_id,),
@@ -1856,7 +1858,7 @@
         if entry.parent_id is not None:
             parent_id = entry.parent_id
         else:
-            parent_id = ''
+            parent_id = b''
         return StaticTuple(parent_id, entry.name.encode('utf8')).intern()
 
     def __getitem__(self, file_id):
@@ -1868,7 +1870,7 @@
             return result
         try:
             return self._bytes_to_entry(
-                self.id_to_entry.iteritems([StaticTuple(file_id,)]).next()[1])
+                next(self.id_to_entry.iteritems([StaticTuple(file_id,)]))[1])
         except StopIteration:
             # really we're passing an inventory, not a tree...
             raise errors.NoSuchId(self, file_id)
@@ -1951,7 +1953,7 @@
         last_parent_id = last_parent_ie = None
         pid_items = self.parent_id_basename_to_file_id.iteritems()
         for key, child_file_id in pid_items:
-            if key == ('', ''): # This is the root
+            if key == (b'', b''): # This is the root
                 if child_file_id != self.root_id:
                     raise ValueError('Data inconsistency detected.'
1957 ' We expected data with key ("","") to match'1959 ' We expected data with key ("","") to match'
@@ -2129,22 +2131,23 @@
21292131
2130 def to_lines(self):2132 def to_lines(self):
2131 """Serialise the inventory to lines."""2133 """Serialise the inventory to lines."""
2132 lines = ["chkinventory:\n"]2134 lines = [b"chkinventory:\n"]
2133 if self._search_key_name != 'plain':2135 if self._search_key_name != 'plain':
2134 # custom ordering grouping things that don't change together2136 # custom ordering grouping things that don't change together
2135 lines.append('search_key_name: %s\n' % (self._search_key_name,))2137 lines.append(b'search_key_name: %s\n' % (
2136 lines.append("root_id: %s\n" % self.root_id)2138 self._search_key_name.encode('ascii')))
2137 lines.append('parent_id_basename_to_file_id: %s\n' %2139 lines.append(b"root_id: %s\n" % self.root_id)
2140 lines.append(b'parent_id_basename_to_file_id: %s\n' %
2138 (self.parent_id_basename_to_file_id.key()[0],))2141 (self.parent_id_basename_to_file_id.key()[0],))
2139 lines.append("revision_id: %s\n" % self.revision_id)2142 lines.append(b"revision_id: %s\n" % self.revision_id)
2140 lines.append("id_to_entry: %s\n" % (self.id_to_entry.key()[0],))2143 lines.append(b"id_to_entry: %s\n" % (self.id_to_entry.key()[0],))
2141 else:2144 else:
2142 lines.append("revision_id: %s\n" % self.revision_id)2145 lines.append(b"revision_id: %s\n" % self.revision_id)
2143 lines.append("root_id: %s\n" % self.root_id)2146 lines.append(b"root_id: %s\n" % self.root_id)
2144 if self.parent_id_basename_to_file_id is not None:2147 if self.parent_id_basename_to_file_id is not None:
2145 lines.append('parent_id_basename_to_file_id: %s\n' %2148 lines.append(b'parent_id_basename_to_file_id: %s\n' %
2146 (self.parent_id_basename_to_file_id.key()[0],))2149 (self.parent_id_basename_to_file_id.key()[0],))
2147 lines.append("id_to_entry: %s\n" % (self.id_to_entry.key()[0],))2150 lines.append(b"id_to_entry: %s\n" % (self.id_to_entry.key()[0],))
2148 return lines2151 return lines
21492152
2150 @property2153 @property
21512154
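
Note: the deserialise hunk above now keeps the whole 'chkinventory:' header in bytes, dict keys included. A minimal sketch of that parsing shape (parse_chk_header and the header values are illustrative only, not brz API):

    def parse_chk_header(data):
        # Split the serialised form into lines; the final element must be
        # empty because the bytes end with a newline.
        lines = data.split(b'\n')
        if lines[-1] != b'':
            raise AssertionError('bytes to deserialize must end with an eol')
        lines.pop()
        if lines[0] != b'chkinventory:':
            raise ValueError('not a serialised CHKInventory: %r' % (data,))
        info = {}
        for line in lines[1:]:
            key, value = line.split(b': ', 1)  # keys and values stay bytes
            if key in info:
                raise ValueError('duplicate key: %r' % (key,))
            info[key] = value
        return info

    header = b'chkinventory:\nrevision_id: rev-1\nroot_id: tree-root\n'
    assert parse_chk_header(header) == {b'revision_id': b'rev-1',
                                        b'root_id': b'tree-root'}
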
=== modified file 'breezy/inventory_delta.py'
--- breezy/inventory_delta.py 2017-06-09 16:31:49 +0000
+++ breezy/inventory_delta.py 2017-06-11 01:47:17 +0000
@@ -31,7 +31,7 @@
31from . import inventory31from . import inventory
32from .revision import NULL_REVISION32from .revision import NULL_REVISION
3333
34FORMAT_1 = 'bzr inventory delta v1 (bzr 1.14)'34FORMAT_1 = b'bzr inventory delta v1 (bzr 1.14)'
3535
3636
37class InventoryDeltaError(errors.BzrError):37class InventoryDeltaError(errors.BzrError):
@@ -61,7 +61,7 @@
61 61
62 :param entry: An InventoryDirectory.62 :param entry: An InventoryDirectory.
63 """63 """
64 return "dir"64 return b"dir"
6565
6666
67def _file_content(entry):67def _file_content(entry):
@@ -70,14 +70,14 @@
70 :param entry: An InventoryFile.70 :param entry: An InventoryFile.
71 """71 """
72 if entry.executable:72 if entry.executable:
73 exec_bytes = 'Y'73 exec_bytes = b'Y'
74 else:74 else:
75 exec_bytes = ''75 exec_bytes = b''
76 size_exec_sha = (entry.text_size, exec_bytes, entry.text_sha1)76 size_exec_sha = entry.text_size, exec_bytes, entry.text_sha1
77 if None in size_exec_sha:77 if None in size_exec_sha:
78 raise InventoryDeltaError(78 raise InventoryDeltaError(
79 'Missing size or sha for %(fileid)r', fileid=entry.file_id)79 'Missing size or sha for %(fileid)r', fileid=entry.file_id)
80 return "file\x00%d\x00%s\x00%s" % size_exec_sha80 return b"file\x00%d\x00%s\x00%s" % size_exec_sha
8181
8282
83def _link_content(entry):83def _link_content(entry):
@@ -89,7 +89,7 @@
89 if target is None:89 if target is None:
90 raise InventoryDeltaError(90 raise InventoryDeltaError(
91 'Missing target for %(fileid)r', fileid=entry.file_id)91 'Missing target for %(fileid)r', fileid=entry.file_id)
92 return "link\x00%s" % target.encode('utf8')92 return b"link\x00%s" % target.encode('utf8')
9393
9494
95def _reference_content(entry):95def _reference_content(entry):
@@ -101,7 +101,7 @@
101 if tree_revision is None:101 if tree_revision is None:
102 raise InventoryDeltaError(102 raise InventoryDeltaError(
103 'Missing reference revision for %(fileid)r', fileid=entry.file_id)103 'Missing reference revision for %(fileid)r', fileid=entry.file_id)
104 return "tree\x00%s" % tree_revision104 return b"tree\x00%s" % tree_revision
105105
106106
107def _dir_to_entry(content, name, parent_id, file_id, last_modified,107def _dir_to_entry(content, name, parent_id, file_id, last_modified,
@@ -179,11 +179,11 @@
179 takes.179 takes.
180 :return: The serialized delta as lines.180 :return: The serialized delta as lines.
181 """181 """
182 if not isinstance(old_name, str):182 if not isinstance(old_name, bytes):
183 raise TypeError('old_name should be str, got %r' % (old_name,))183 raise TypeError('old_name should be bytes, got %r' % (old_name,))
184 if not isinstance(new_name, str):184 if not isinstance(new_name, bytes):
185 raise TypeError('new_name should be str, got %r' % (new_name,))185 raise TypeError('new_name should be bytes, got %r' % (new_name,))
186 lines = ['', '', '', '', '']186 lines = [b'', b'', b'', b'', b'']
187 to_line = self._delta_item_to_line187 to_line = self._delta_item_to_line
188 for delta_item in delta_to_new:188 for delta_item in delta_to_new:
189 line = to_line(delta_item, new_name)189 line = to_line(delta_item, new_name)
@@ -193,48 +193,48 @@
193 'to_line gave non-bytes output %(line)r', line=lines[-1])193 'to_line gave non-bytes output %(line)r', line=lines[-1])
194 lines.append(line)194 lines.append(line)
195 lines.sort()195 lines.sort()
196 lines[0] = "format: %s\n" % FORMAT_1196 lines[0] = b"format: %s\n" % FORMAT_1
197 lines[1] = "parent: %s\n" % old_name197 lines[1] = b"parent: %s\n" % old_name
198 lines[2] = "version: %s\n" % new_name198 lines[2] = b"version: %s\n" % new_name
199 lines[3] = "versioned_root: %s\n" % self._serialize_bool(199 lines[3] = b"versioned_root: %s\n" % self._serialize_bool(
200 self._versioned_root)200 self._versioned_root)
201 lines[4] = "tree_references: %s\n" % self._serialize_bool(201 lines[4] = b"tree_references: %s\n" % self._serialize_bool(
202 self._tree_references)202 self._tree_references)
203 return lines203 return lines
204204
205 def _serialize_bool(self, value):205 def _serialize_bool(self, value):
206 if value:206 if value:
207 return "true"207 return b"true"
208 else:208 else:
209 return "false"209 return b"false"
210210
211 def _delta_item_to_line(self, delta_item, new_version):211 def _delta_item_to_line(self, delta_item, new_version):
212 """Convert delta_item to a line."""212 """Convert delta_item to a line."""
213 oldpath, newpath, file_id, entry = delta_item213 oldpath, newpath, file_id, entry = delta_item
214 if newpath is None:214 if newpath is None:
215 # delete215 # delete
216 oldpath_utf8 = '/' + oldpath.encode('utf8')216 oldpath_utf8 = b'/' + oldpath.encode('utf8')
217 newpath_utf8 = 'None'217 newpath_utf8 = b'None'
218 parent_id = ''218 parent_id = b''
219 last_modified = NULL_REVISION219 last_modified = NULL_REVISION
220 content = 'deleted\x00\x00'220 content = b'deleted\x00\x00'
221 else:221 else:
222 if oldpath is None:222 if oldpath is None:
223 oldpath_utf8 = 'None'223 oldpath_utf8 = b'None'
224 else:224 else:
225 oldpath_utf8 = '/' + oldpath.encode('utf8')225 oldpath_utf8 = b'/' + oldpath.encode('utf8')
226 if newpath == '/':226 if newpath == '/':
227 raise AssertionError(227 raise AssertionError(
228 "Bad inventory delta: '/' is not a valid newpath "228 "Bad inventory delta: '/' is not a valid newpath "
229 "(should be '') in delta item %r" % (delta_item,))229 "(should be '') in delta item %r" % (delta_item,))
230 # TODO: Test real-world utf8 cache hit rate. It may be a win.230 # TODO: Test real-world utf8 cache hit rate. It may be a win.
231 newpath_utf8 = '/' + newpath.encode('utf8')231 newpath_utf8 = b'/' + newpath.encode('utf8')
232 # Serialize None as ''232 # Serialize None as ''
233 parent_id = entry.parent_id or ''233 parent_id = entry.parent_id or b''
234 # Serialize unknown revisions as NULL_REVISION234 # Serialize unknown revisions as NULL_REVISION
235 last_modified = entry.revision235 last_modified = entry.revision
236 # special cases for /236 # special cases for /
237 if newpath_utf8 == '/' and not self._versioned_root:237 if newpath_utf8 == b'/' and not self._versioned_root:
238 # This is an entry for the root, this inventory does not238 # This is an entry for the root, this inventory does not
239 # support versioned roots. So this must be an unversioned239 # support versioned roots. So this must be an unversioned
240 # root, i.e. last_modified == new revision. Otherwise, this240 # root, i.e. last_modified == new revision. Otherwise, this
@@ -251,7 +251,7 @@
251 raise InventoryDeltaError(251 raise InventoryDeltaError(
252 "no version for fileid %(fileid)r", fileid=file_id)252 "no version for fileid %(fileid)r", fileid=file_id)
253 content = self._entry_to_content[entry.kind](entry)253 content = self._entry_to_content[entry.kind](entry)
254 return ("%s\x00%s\x00%s\x00%s\x00%s\x00%s\n" %254 return (b"%s\x00%s\x00%s\x00%s\x00%s\x00%s\n" %
255 (oldpath_utf8, newpath_utf8, file_id, parent_id, last_modified,255 (oldpath_utf8, newpath_utf8, file_id, parent_id, last_modified,
256 content))256 content))
257257
@@ -270,9 +270,9 @@
270 self._allow_tree_references = allow_tree_references270 self._allow_tree_references = allow_tree_references
271271
272 def _deserialize_bool(self, value):272 def _deserialize_bool(self, value):
273 if value == "true":273 if value == b"true":
274 return True274 return True
275 elif value == "false":275 elif value == b"false":
276 return False276 return False
277 else:277 else:
278 raise InventoryDeltaError("value %(val)r is not a bool", val=value)278 raise InventoryDeltaError("value %(val)r is not a bool", val=value)
@@ -289,24 +289,24 @@
289 :return: (parent_id, new_id, versioned_root, tree_references,289 :return: (parent_id, new_id, versioned_root, tree_references,
290 inventory_delta)290 inventory_delta)
291 """291 """
292 if bytes[-1:] != '\n':292 if bytes[-1:] != b'\n':
293 last_line = bytes.rsplit('\n', 1)[-1]293 last_line = bytes.rsplit(b'\n', 1)[-1]
294 raise InventoryDeltaError(294 raise InventoryDeltaError(
295 'last line not empty: %(line)r', line=last_line)295 'last line not empty: %(line)r', line=last_line)
296 lines = bytes.split('\n')[:-1] # discard the last empty line296 lines = bytes.split(b'\n')[:-1] # discard the last empty line
297 if not lines or lines[0] != 'format: %s' % FORMAT_1:297 if not lines or lines[0] != b'format: %s' % FORMAT_1:
298 raise InventoryDeltaError(298 raise InventoryDeltaError(
299 'unknown format %(line)r', line=lines[0:1])299 'unknown format %(line)r', line=lines[0:1])
300 if len(lines) < 2 or not lines[1].startswith('parent: '):300 if len(lines) < 2 or not lines[1].startswith(b'parent: '):
301 raise InventoryDeltaError('missing parent: marker')301 raise InventoryDeltaError('missing parent: marker')
302 delta_parent_id = lines[1][8:]302 delta_parent_id = lines[1][8:]
303 if len(lines) < 3 or not lines[2].startswith('version: '):303 if len(lines) < 3 or not lines[2].startswith(b'version: '):
304 raise InventoryDeltaError('missing version: marker')304 raise InventoryDeltaError('missing version: marker')
305 delta_version_id = lines[2][9:]305 delta_version_id = lines[2][9:]
306 if len(lines) < 4 or not lines[3].startswith('versioned_root: '):306 if len(lines) < 4 or not lines[3].startswith(b'versioned_root: '):
307 raise InventoryDeltaError('missing versioned_root: marker')307 raise InventoryDeltaError('missing versioned_root: marker')
308 delta_versioned_root = self._deserialize_bool(lines[3][16:])308 delta_versioned_root = self._deserialize_bool(lines[3][16:])
309 if len(lines) < 5 or not lines[4].startswith('tree_references: '):309 if len(lines) < 5 or not lines[4].startswith(b'tree_references: '):
310 raise InventoryDeltaError('missing tree_references: marker')310 raise InventoryDeltaError('missing tree_references: marker')
311 delta_tree_references = self._deserialize_bool(lines[4][17:])311 delta_tree_references = self._deserialize_bool(lines[4][17:])
312 if (not self._allow_versioned_root and delta_versioned_root):312 if (not self._allow_versioned_root and delta_versioned_root):
@@ -318,24 +318,24 @@
318 next(line_iter)318 next(line_iter)
319 for line in line_iter:319 for line in line_iter:
320 (oldpath_utf8, newpath_utf8, file_id, parent_id, last_modified,320 (oldpath_utf8, newpath_utf8, file_id, parent_id, last_modified,
321 content) = line.split('\x00', 5)321 content) = line.split(b'\x00', 5)
322 parent_id = parent_id or None322 parent_id = parent_id or None
323 if file_id in seen_ids:323 if file_id in seen_ids:
324 raise InventoryDeltaError(324 raise InventoryDeltaError(
325 "duplicate file id %(fileid)r", fileid=file_id)325 "duplicate file id %(fileid)r", fileid=file_id)
326 seen_ids.add(file_id)326 seen_ids.add(file_id)
327 if (newpath_utf8 == '/' and not delta_versioned_root and327 if (newpath_utf8 == b'/' and not delta_versioned_root and
328 last_modified != delta_version_id):328 last_modified != delta_version_id):
329 # Delta claims not to have a versioned root, yet here's329 # Delta claims not to have a versioned root, yet here's
330 # a root entry with a non-default version.330 # a root entry with a non-default version.
331 raise InventoryDeltaError(331 raise InventoryDeltaError(
332 "Versioned root found: %(line)r", line=line)332 "Versioned root found: %(line)r", line=line)
333 elif newpath_utf8 != 'None' and last_modified[-1] == ':':333 elif newpath_utf8 != b'None' and last_modified[-1:] == b':':
334 # Deletes have a last_modified of null:, but otherwise special334 # Deletes have a last_modified of null:, but otherwise special
335 # revision ids should not occur.335 # revision ids should not occur.
336 raise InventoryDeltaError(336 raise InventoryDeltaError(
337 'special revisionid found: %(line)r', line=line)337 'special revisionid found: %(line)r', line=line)
338 if content.startswith('tree\x00'):338 if content.startswith(b'tree\x00'):
339 if delta_tree_references is False:339 if delta_tree_references is False:
340 raise InventoryDeltaError(340 raise InventoryDeltaError(
341 "Tree reference found (but header said "341 "Tree reference found (but header said "
@@ -343,18 +343,18 @@
343 elif not self._allow_tree_references:343 elif not self._allow_tree_references:
344 raise IncompatibleInventoryDelta(344 raise IncompatibleInventoryDelta(
345 "Tree reference not allowed")345 "Tree reference not allowed")
346 if oldpath_utf8 == 'None':346 if oldpath_utf8 == b'None':
347 oldpath = None347 oldpath = None
348 elif oldpath_utf8[:1] != '/':348 elif oldpath_utf8[:1] != b'/':
349 raise InventoryDeltaError(349 raise InventoryDeltaError(
350 "oldpath invalid (does not start with /): %(path)r",350 "oldpath invalid (does not start with /): %(path)r",
351 path=oldpath_utf8)351 path=oldpath_utf8)
352 else:352 else:
353 oldpath_utf8 = oldpath_utf8[1:]353 oldpath_utf8 = oldpath_utf8[1:]
354 oldpath = oldpath_utf8.decode('utf8')354 oldpath = oldpath_utf8.decode('utf8')
355 if newpath_utf8 == 'None':355 if newpath_utf8 == b'None':
356 newpath = None356 newpath = None
357 elif newpath_utf8[:1] != '/':357 elif newpath_utf8[:1] != b'/':
358 raise InventoryDeltaError(358 raise InventoryDeltaError(
359 "newpath invalid (does not start with /): %(path)r",359 "newpath invalid (does not start with /): %(path)r",
360 path=newpath_utf8)360 path=newpath_utf8)
@@ -362,8 +362,8 @@
362 # Trim leading slash362 # Trim leading slash
363 newpath_utf8 = newpath_utf8[1:]363 newpath_utf8 = newpath_utf8[1:]
364 newpath = newpath_utf8.decode('utf8')364 newpath = newpath_utf8.decode('utf8')
365 content_tuple = tuple(content.split('\x00'))365 content_tuple = tuple(content.split(b'\x00'))
366 if content_tuple[0] == 'deleted':366 if content_tuple[0] == b'deleted':
367 entry = None367 entry = None
368 else:368 else:
369 entry = _parse_entry(369 entry = _parse_entry(
@@ -376,10 +376,10 @@
376376
377def _parse_entry(path, file_id, parent_id, last_modified, content):377def _parse_entry(path, file_id, parent_id, last_modified, content):
378 entry_factory = {378 entry_factory = {
379 'dir': _dir_to_entry,379 b'dir': _dir_to_entry,
380 'file': _file_to_entry,380 b'file': _file_to_entry,
381 'link': _link_to_entry,381 b'link': _link_to_entry,
382 'tree': _tree_to_entry,382 b'tree': _tree_to_entry,
383 }383 }
384 kind = content[0]384 kind = content[0]
385 if path.startswith('/'):385 if path.startswith('/'):
386386
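
Note: _delta_item_to_line above emits six NUL-separated fields, where the final content field may itself contain NULs, hence the maxsplit of 5 on the read side. A standalone sketch with made-up field values (only the layout matches the serializer):

    oldpath_utf8 = b'None'                     # no old path
    newpath_utf8 = b'/' + u'dir/caf\xe9'.encode('utf8')
    file_id, parent_id = b'file-id', b'dir-id'
    last_modified = b'rev-2'
    content = b'file\x00%d\x00%s\x00%s' % (12, b'Y', b'<sha1 hex>')

    line = b'%s\x00%s\x00%s\x00%s\x00%s\x00%s\n' % (
        oldpath_utf8, newpath_utf8, file_id, parent_id, last_modified,
        content)
    # Parsing uses maxsplit=5 so the content field keeps its own NULs.
    fields = line.rstrip(b'\n').split(b'\x00', 5)
    assert fields[5].split(b'\x00') == [b'file', b'12', b'Y', b'<sha1 hex>']
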
=== modified file 'breezy/osutils.py'
--- breezy/osutils.py 2017-06-10 01:57:00 +0000
+++ breezy/osutils.py 2017-06-11 01:47:17 +0000
@@ -1209,11 +1209,11 @@
1209 # separators1209 # separators
1210 # 3) '\xa0' isn't unicode safe since it is >128.1210 # 3) '\xa0' isn't unicode safe since it is >128.
12111211
1212 # This should *not* be a unicode set of characters in case the source1212 if isinstance(s, str):
1213 # string is not a Unicode string. We can auto-up-cast the characters since1213 ws = ' \t\n\r\v\f'
1214 # they are ascii, but we don't want to auto-up-cast the string in case it1214 else:
1215 # is utf-81215 ws = (b' ', b'\t', b'\n', b'\r', b'\v', b'\f')
1216 for ch in ' \t\n\r\v\f':1216 for ch in ws:
1217 if ch in s:1217 if ch in s:
1218 return True1218 return True
1219 else:1219 else:
@@ -1370,7 +1370,7 @@
1370 If it is a str, it is returned.1370 If it is bytes, it is returned.
1371 If it is Unicode, it is encoded into a utf-8 string.1371 If it is Unicode, it is encoded into a utf-8 string.
1372 """1372 """
1373 if isinstance(unicode_or_utf8_string, str):1373 if isinstance(unicode_or_utf8_string, bytes):
1374 # TODO: jam 20070209 This is overkill, and probably has an impact on1374 # TODO: jam 20070209 This is overkill, and probably has an impact on
1375 # performance if we are dealing with lots of apis that want a1375 # performance if we are dealing with lots of apis that want a
1376 # utf-8 revision id1376 # utf-8 revision id
@@ -1443,13 +1443,13 @@
1443 can be accessed by that path.1443 can be accessed by that path.
1444 """1444 """
14451445
1446 return unicodedata.normalize('NFC', unicode(path)), True1446 return unicodedata.normalize('NFC', text_type(path)), True
14471447
14481448
1449def _inaccessible_normalized_filename(path):1449def _inaccessible_normalized_filename(path):
1450 __doc__ = _accessible_normalized_filename.__doc__1450 __doc__ = _accessible_normalized_filename.__doc__
14511451
1452 normalized = unicodedata.normalize('NFC', unicode(path))1452 normalized = unicodedata.normalize('NFC', text_type(path))
1453 return normalized, normalized == path1453 return normalized, normalized == path
14541454
14551455
@@ -1878,10 +1878,10 @@
1878 _kind_from_mode = file_kind_from_stat_mode1878 _kind_from_mode = file_kind_from_stat_mode
18791879
1880 if prefix:1880 if prefix:
1881 relprefix = prefix + '/'1881 relprefix = prefix + b'/'
1882 else:1882 else:
1883 relprefix = ''1883 relprefix = b''
1884 top_slash = top + u'/'1884 top_slash = top + '/'
18851885
1886 dirblock = []1886 dirblock = []
1887 append = dirblock.append1887 append = dirblock.append
18881888
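
Note: the contains_whitespace change is subtler than it looks. Iterating a bytes literal yields ints on Python 3 but one-character strings on Python 2, so the bytes branch spells the whitespace set as a tuple of single-byte strings to keep a plain substring test on both. A simplified sketch of just that branch:

    ws = (b' ', b'\t', b'\n', b'\r', b'\v', b'\f')

    def contains_whitespace_bytes(s):
        # Each ch is a length-1 bytes object, so `ch in s` is a
        # substring test on Python 2 and 3 alike.
        return any(ch in s for ch in ws)

    assert contains_whitespace_bytes(b'foo bar')
    assert not contains_whitespace_bytes(b'foobar')
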
=== modified file 'breezy/pack.py'
--- breezy/pack.py 2017-05-25 01:35:55 +0000
+++ breezy/pack.py 2017-06-11 01:47:17 +0000
@@ -73,28 +73,28 @@
7373
74 def begin(self):74 def begin(self):
75 """Return the bytes to begin a container."""75 """Return the bytes to begin a container."""
76 return FORMAT_ONE + "\n"76 return FORMAT_ONE.encode("ascii") + b"\n"
7777
78 def end(self):78 def end(self):
79 """Return the bytes to finish a container."""79 """Return the bytes to finish a container."""
80 return "E"80 return b"E"
8181
82 def bytes_header(self, length, names):82 def bytes_header(self, length, names):
83 """Return the header for a Bytes record."""83 """Return the header for a Bytes record."""
84 # Kind marker84 # Kind marker
85 byte_sections = ["B"]85 byte_sections = [b"B"]
86 # Length86 # Length
87 byte_sections.append(str(length) + "\n")87 byte_sections.append(b"%d\n" % (length,))
88 # Names88 # Names
89 for name_tuple in names:89 for name_tuple in names:
90 # Make sure we're writing valid names. Note that we will leave a90 # Make sure we're writing valid names. Note that we will leave a
91 # half-written record if a name is bad!91 # half-written record if a name is bad!
92 for name in name_tuple:92 for name in name_tuple:
93 _check_name(name)93 _check_name(name)
94 byte_sections.append('\x00'.join(name_tuple) + "\n")94 byte_sections.append(b'\x00'.join(name_tuple) + b"\n")
95 # End of headers95 # End of headers
96 byte_sections.append("\n")96 byte_sections.append(b"\n")
97 return ''.join(byte_sections)97 return b''.join(byte_sections)
9898
99 def bytes_record(self, bytes, names):99 def bytes_record(self, bytes, names):
100 """Return the bytes for a Bytes record with the given name and100 """Return the bytes for a Bytes record with the given name and
101101
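
Note: the rewritten header code leans on printf-style formatting for bytes (b"%d\n" % length), which exists only from Python 3.5 onwards (and on Python 2, where bytes is str). A self-contained sketch of the record header layout it produces (kind marker, length, NUL-joined names, blank line):

    def bytes_header(length, names):
        sections = [b"B", b"%d\n" % (length,)]
        for name_tuple in names:
            sections.append(b'\x00'.join(name_tuple) + b"\n")
        sections.append(b"\n")      # blank line ends the headers
        return b''.join(sections)

    assert bytes_header(5, [(b'rev-1',)]) == b'B5\nrev-1\n\n'
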
=== modified file 'breezy/repofmt/groupcompress_repo.py'
--- breezy/repofmt/groupcompress_repo.py 2017-06-10 00:52:37 +0000
+++ breezy/repofmt/groupcompress_repo.py 2017-06-11 01:47:17 +0000
@@ -147,15 +147,15 @@
147 # robertc says- this is a closure rather than a method on the object147 # robertc says- this is a closure rather than a method on the object
148 # so that the variables are locals, and faster than accessing object148 # so that the variables are locals, and faster than accessing object
149 # members.149 # members.
150 def _write_data(bytes, flush=False, _buffer=self._buffer,150 def _write_data(data, flush=False, _buffer=self._buffer,
151 _write=self.write_stream.write, _update=self._hash.update):151 _write=self.write_stream.write, _update=self._hash.update):
152 _buffer[0].append(bytes)152 _buffer[0].append(data)
153 _buffer[1] += len(bytes)153 _buffer[1] += len(data)
154 # buffer cap154 # buffer cap
155 if _buffer[1] > self._cache_limit or flush:155 if _buffer[1] > self._cache_limit or flush:
156 bytes = ''.join(_buffer[0])156 data = b''.join(_buffer[0])
157 _write(bytes)157 _write(data)
158 _update(bytes)158 _update(data)
159 _buffer[:] = [[], 0]159 _buffer[:] = [[], 0]
160 # expose this on self, for the occasion when clients want to add data.160 # expose this on self, for the occasion when clients want to add data.
161 self._write_data = _write_data161 self._write_data = _write_data
@@ -905,7 +905,7 @@
905 ' no new_path %r' % (file_id,))905 ' no new_path %r' % (file_id,))
906 if new_path == '':906 if new_path == '':
907 new_inv.root_id = file_id907 new_inv.root_id = file_id
908 parent_id_basename_key = StaticTuple('', '').intern()908 parent_id_basename_key = StaticTuple(b'', b'').intern()
909 else:909 else:
910 utf8_entry_name = entry.name.encode('utf-8')910 utf8_entry_name = entry.name.encode('utf-8')
911 parent_id_basename_key = StaticTuple(entry.parent_id,911 parent_id_basename_key = StaticTuple(entry.parent_id,
912912
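
Note: the _write_data closure keeps its buffering behaviour under the rename: chunks accumulate until the cap, then are joined, written, and hashed in one go. A standalone sketch of the pattern, with hashlib and BytesIO standing in for the pack stream and hash:

    import hashlib
    import io

    def make_write_data(stream, cache_limit=4096):
        buf = [[], 0]                      # [pending chunks, total bytes]
        digest = hashlib.md5()

        def _write_data(data, flush=False, _buffer=buf,
                        _write=stream.write, _update=digest.update):
            _buffer[0].append(data)
            _buffer[1] += len(data)
            if _buffer[1] > cache_limit or flush:
                joined = b''.join(_buffer[0])   # join once, write once
                _write(joined)
                _update(joined)
                _buffer[:] = [[], 0]

        return _write_data, digest

    stream = io.BytesIO()
    write_data, digest = make_write_data(stream, cache_limit=4)
    write_data(b'abc')                 # buffered, below the cap
    write_data(b'def')                 # crosses the cap: flushes b'abcdef'
    assert stream.getvalue() == b'abcdef'
    assert digest.hexdigest() == hashlib.md5(b'abcdef').hexdigest()
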
=== modified file 'breezy/repofmt/pack_repo.py'
--- breezy/repofmt/pack_repo.py 2017-06-10 12:56:18 +0000
+++ breezy/repofmt/pack_repo.py 2017-06-11 01:47:17 +0000
@@ -419,7 +419,7 @@
419 _buffer[1] += len(bytes)419 _buffer[1] += len(bytes)
420 # buffer cap420 # buffer cap
421 if _buffer[1] > self._cache_limit or flush:421 if _buffer[1] > self._cache_limit or flush:
422 bytes = ''.join(_buffer[0])422 bytes = b''.join(_buffer[0])
423 _write(bytes)423 _write(bytes)
424 _update(bytes)424 _update(bytes)
425 _buffer[:] = [[], 0]425 _buffer[:] = [[], 0]
@@ -524,7 +524,7 @@
524 def flush(self):524 def flush(self):
525 """Flush any current data."""525 """Flush any current data."""
526 if self._buffer[1]:526 if self._buffer[1]:
527 bytes = ''.join(self._buffer[0])527 bytes = b''.join(self._buffer[0])
528 self.write_stream.write(bytes)528 self.write_stream.write(bytes)
529 self._hash.update(bytes)529 self._hash.update(bytes)
530 self._buffer[:] = [[], 0]530 self._buffer[:] = [[], 0]
@@ -1987,7 +1987,7 @@
1987 length), where the index field is the write_index object supplied1987 length), where the index field is the write_index object supplied
1988 to the PackAccess object.1988 to the PackAccess object.
1989 """1989 """
1990 if not isinstance(raw_data, str):1990 if not isinstance(raw_data, bytes):
1991 raise AssertionError(1991 raise AssertionError(
1992 'data must be plain bytes was %s' % type(raw_data))1992 'data must be plain bytes was %s' % type(raw_data))
1993 result = []1993 result = []
19941994
=== modified file 'breezy/repository.py'
--- breezy/repository.py 2017-06-10 00:52:37 +0000
+++ breezy/repository.py 2017-06-11 01:47:17 +0000
@@ -46,6 +46,7 @@
46from .inter import InterObject46from .inter import InterObject
47from .lock import _RelockDebugMixin, LogicalLockResult47from .lock import _RelockDebugMixin, LogicalLockResult
48from .sixish import (48from .sixish import (
49 text_type,
49 viewitems,50 viewitems,
50 viewvalues,51 viewvalues,
51 )52 )
@@ -145,7 +146,7 @@
145 for key, value in viewitems(revprops):146 for key, value in viewitems(revprops):
146 # We know that the XML serializers do not round trip '\r'147 # We know that the XML serializers do not round trip '\r'
147 # correctly, so refuse to accept them148 # correctly, so refuse to accept them
148 if not isinstance(value, basestring):149 if not isinstance(value, (text_type, str)):
149 raise ValueError('revision property (%s) is not a valid'150 raise ValueError('revision property (%s) is not a valid'
150 ' (unicode) string: %r' % (key, value))151 ' (unicode) string: %r' % (key, value))
151 self._validate_unicode_text(value,152 self._validate_unicode_text(value,
152153
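
Note: with basestring gone, the revprops check becomes a (text_type, str) pair: (unicode, str) on Python 2, effectively just str on Python 3. A hedged stand-in for what the sixish import provides:

    import sys

    if sys.version_info[0] >= 3:
        text_type = str
    else:
        text_type = unicode            # noqa: F821 (Python 2 only)

    def check_revprop(key, value):
        if not isinstance(value, (text_type, str)):
            raise ValueError('revision property (%s) is not a valid'
                             ' (unicode) string: %r' % (key, value))

    check_revprop('branch-nick', u'trunk')     # accepted on both majors
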
=== modified file 'breezy/revision.py'
--- breezy/revision.py 2017-06-10 01:57:00 +0000
+++ breezy/revision.py 2017-06-11 01:47:17 +0000
@@ -26,8 +26,11 @@
26""")26""")
27from . import (27from . import (
28 errors,28 errors,
29 )29 osutils,
30from .osutils import contains_whitespace30 )
31from .sixish import (
32 text_type,
33 )
3134
32NULL_REVISION=b"null:"35NULL_REVISION=b"null:"
33CURRENT_REVISION=b"current:"36CURRENT_REVISION=b"current:"
@@ -86,9 +89,11 @@
86 def _check_properties(self):89 def _check_properties(self):
87 """Verify that all revision properties are OK."""90 """Verify that all revision properties are OK."""
88 for name, value in self.properties.items():91 for name, value in self.properties.items():
89 if not isinstance(name, basestring) or contains_whitespace(name):92 # GZ 2017-06-10: What sort of string are properties exactly?
93 not_text = not isinstance(name, (text_type, str))
94 if not_text or osutils.contains_whitespace(name):
90 raise ValueError("invalid property name %r" % name)95 raise ValueError("invalid property name %r" % name)
91 if not isinstance(value, basestring):96 if not isinstance(value, (text_type, bytes)):
92 raise ValueError("invalid property value %r for %r" %97 raise ValueError("invalid property value %r for %r" %
93 (value, name))98 (value, name))
9499
@@ -205,7 +210,7 @@
205210
206 :return: True if the revision is reserved, False otherwise211 :return: True if the revision is reserved, False otherwise
207 """212 """
208 return isinstance(revision_id, basestring) and revision_id.endswith(':')213 return isinstance(revision_id, bytes) and revision_id.endswith(b':')
209214
210215
211def check_not_reserved_id(revision_id):216def check_not_reserved_id(revision_id):
212217
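
Note: reserved revision ids are now matched as bytes, so a text 'null:' no longer counts as reserved. A quick check against the module constants (is_reserved_id here mirrors the predicate above):

    NULL_REVISION = b"null:"
    CURRENT_REVISION = b"current:"

    def is_reserved_id(revision_id):
        # Reserved ids are bytes ending in ':'
        return isinstance(revision_id, bytes) and revision_id.endswith(b':')

    assert is_reserved_id(NULL_REVISION)
    assert is_reserved_id(CURRENT_REVISION)
    assert not is_reserved_id(b'joe@example.com-20170611-abcdef')
    assert not is_reserved_id(u'null:')    # text ids are not matched
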
=== modified file 'breezy/sixish.py'
--- breezy/sixish.py 2017-06-05 01:55:02 +0000
+++ breezy/sixish.py 2017-06-11 01:47:17 +0000
@@ -46,3 +46,13 @@
46 from StringIO import StringIO46 from StringIO import StringIO
47 from future_builtins import zip, map47 from future_builtins import zip, map
48 range = xrange48 range = xrange
49
50
51# GZ 2017-06-10: Work out if interning bits of inventory is behaviour we want
52# to retain outside of StaticTuple, if so need to implement for Python 3.
53if PY3:
54 def bytesintern(b):
55 """Dummy intern() function."""
56 return b
57else:
58 bytesintern = intern
4959
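
Note: bytesintern has to be a no-op on Python 3 because interning there only applies to str (sys.intern rejects bytes); the Python 2 win is deduplicating the many repeated file-id and revision-id strings. An equivalent standalone shim, plus usage:

    try:
        bytesintern = intern           # Python 2: real interning
    except NameError:                  # Python 3: builtin intern() is gone
        def bytesintern(b):
            return b

    a = bytesintern(b'file-id-1234')
    c = bytesintern(b'file-id-' + b'1234')
    # On Python 2 the two interned strings are the same object; on
    # Python 3 they merely compare equal.
    assert a == c
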
=== modified file 'breezy/tests/test__chk_map.py'
--- breezy/tests/test__chk_map.py 2017-05-23 14:08:03 +0000
+++ breezy/tests/test__chk_map.py 2017-06-11 01:47:17 +0000
@@ -42,18 +42,18 @@
42 self.assertEqual(expected, actual, 'actual: %r' % (actual,))42 self.assertEqual(expected, actual, 'actual: %r' % (actual,))
4343
44 def test_simple_16(self):44 def test_simple_16(self):
45 self.assertSearchKey16('8C736521', stuple('foo',))45 self.assertSearchKey16(b'8C736521', stuple('foo',))
46 self.assertSearchKey16('8C736521\x008C736521', stuple('foo', 'foo'))46 self.assertSearchKey16(b'8C736521\x008C736521', stuple('foo', 'foo'))
47 self.assertSearchKey16('8C736521\x0076FF8CAA', stuple('foo', 'bar'))47 self.assertSearchKey16(b'8C736521\x0076FF8CAA', stuple('foo', 'bar'))
48 self.assertSearchKey16('ED82CD11', stuple('abcd',))48 self.assertSearchKey16(b'ED82CD11', stuple('abcd',))
4949
50 def test_simple_255(self):50 def test_simple_255(self):
51 self.assertSearchKey255('\x8cse!', stuple('foo',))51 self.assertSearchKey255(b'\x8cse!', stuple('foo',))
52 self.assertSearchKey255('\x8cse!\x00\x8cse!', stuple('foo', 'foo'))52 self.assertSearchKey255(b'\x8cse!\x00\x8cse!', stuple('foo', 'foo'))
53 self.assertSearchKey255('\x8cse!\x00v\xff\x8c\xaa', stuple('foo', 'bar'))53 self.assertSearchKey255(b'\x8cse!\x00v\xff\x8c\xaa', stuple('foo', 'bar'))
54 # The standard mapping for these would include '\n', so it should be54 # The standard mapping for these would include '\n', so it should be
55 # mapped to '_'55 # mapped to '_'
56 self.assertSearchKey255('\xfdm\x93_\x00P_\x1bL', stuple('<', 'V'))56 self.assertSearchKey255(b'\xfdm\x93_\x00P_\x1bL', stuple('<', 'V'))
5757
58 def test_255_does_not_include_newline(self):58 def test_255_does_not_include_newline(self):
59 # When mapping via _search_key_255, we should never have the '\n'59 # When mapping via _search_key_255, we should never have the '\n'
@@ -64,7 +64,7 @@
64 chars_used.update(search_key)64 chars_used.update(search_key)
65 all_chars = {chr(x) for x in range(256)}65 all_chars = {chr(x) for x in range(256)}
66 unused_chars = all_chars.symmetric_difference(chars_used)66 unused_chars = all_chars.symmetric_difference(chars_used)
67 self.assertEqual(set('\n'), unused_chars)67 self.assertEqual(set(b'\n'), unused_chars)
6868
6969
70class TestDeserialiseLeafNode(tests.TestCase):70class TestDeserialiseLeafNode(tests.TestCase):
@@ -73,94 +73,94 @@
7373
74 def assertDeserialiseErrors(self, text):74 def assertDeserialiseErrors(self, text):
75 self.assertRaises((ValueError, IndexError),75 self.assertRaises((ValueError, IndexError),
76 self.module._deserialise_leaf_node, text, 'not-a-real-sha')76 self.module._deserialise_leaf_node, text, b'not-a-real-sha')
7777
78 def test_raises_on_non_leaf(self):78 def test_raises_on_non_leaf(self):
79 self.assertDeserialiseErrors('')79 self.assertDeserialiseErrors(b'')
80 self.assertDeserialiseErrors('short\n')80 self.assertDeserialiseErrors(b'short\n')
81 self.assertDeserialiseErrors('chknotleaf:\n')81 self.assertDeserialiseErrors(b'chknotleaf:\n')
82 self.assertDeserialiseErrors('chkleaf:x\n')82 self.assertDeserialiseErrors(b'chkleaf:x\n')
83 self.assertDeserialiseErrors('chkleaf:\n')83 self.assertDeserialiseErrors(b'chkleaf:\n')
84 self.assertDeserialiseErrors('chkleaf:\nnotint\n')84 self.assertDeserialiseErrors(b'chkleaf:\nnotint\n')
85 self.assertDeserialiseErrors('chkleaf:\n10\n')85 self.assertDeserialiseErrors(b'chkleaf:\n10\n')
86 self.assertDeserialiseErrors('chkleaf:\n10\n256\n')86 self.assertDeserialiseErrors(b'chkleaf:\n10\n256\n')
87 self.assertDeserialiseErrors('chkleaf:\n10\n256\n10\n')87 self.assertDeserialiseErrors(b'chkleaf:\n10\n256\n10\n')
8888
89 def test_deserialise_empty(self):89 def test_deserialise_empty(self):
90 node = self.module._deserialise_leaf_node(90 node = self.module._deserialise_leaf_node(
91 "chkleaf:\n10\n1\n0\n\n", stuple("sha1:1234",))91 b"chkleaf:\n10\n1\n0\n\n", stuple(b"sha1:1234",))
92 self.assertEqual(0, len(node))92 self.assertEqual(0, len(node))
93 self.assertEqual(10, node.maximum_size)93 self.assertEqual(10, node.maximum_size)
94 self.assertEqual(("sha1:1234",), node.key())94 self.assertEqual((b"sha1:1234",), node.key())
95 self.assertIsInstance(node.key(), StaticTuple)95 self.assertIsInstance(node.key(), StaticTuple)
96 self.assertIs(None, node._search_prefix)96 self.assertIs(None, node._search_prefix)
97 self.assertIs(None, node._common_serialised_prefix)97 self.assertIs(None, node._common_serialised_prefix)
9898
99 def test_deserialise_items(self):99 def test_deserialise_items(self):
100 node = self.module._deserialise_leaf_node(100 node = self.module._deserialise_leaf_node(
101 "chkleaf:\n0\n1\n2\n\nfoo bar\x001\nbaz\nquux\x001\nblarh\n",101 b"chkleaf:\n0\n1\n2\n\nfoo bar\x001\nbaz\nquux\x001\nblarh\n",
102 ("sha1:1234",))102 (b"sha1:1234",))
103 self.assertEqual(2, len(node))103 self.assertEqual(2, len(node))
104 self.assertEqual([(("foo bar",), "baz"), (("quux",), "blarh")],104 self.assertEqual([((b"foo bar",), b"baz"), ((b"quux",), b"blarh")],
105 sorted(node.iteritems(None)))105 sorted(node.iteritems(None)))
106106
107 def test_deserialise_item_with_null_width_1(self):107 def test_deserialise_item_with_null_width_1(self):
108 node = self.module._deserialise_leaf_node(108 node = self.module._deserialise_leaf_node(
109 "chkleaf:\n0\n1\n2\n\nfoo\x001\nbar\x00baz\nquux\x001\nblarh\n",109 b"chkleaf:\n0\n1\n2\n\nfoo\x001\nbar\x00baz\nquux\x001\nblarh\n",
110 ("sha1:1234",))110 (b"sha1:1234",))
111 self.assertEqual(2, len(node))111 self.assertEqual(2, len(node))
112 self.assertEqual([(("foo",), "bar\x00baz"), (("quux",), "blarh")],112 self.assertEqual([((b"foo",), b"bar\x00baz"), ((b"quux",), b"blarh")],
113 sorted(node.iteritems(None)))113 sorted(node.iteritems(None)))
114114
115 def test_deserialise_item_with_null_width_2(self):115 def test_deserialise_item_with_null_width_2(self):
116 node = self.module._deserialise_leaf_node(116 node = self.module._deserialise_leaf_node(
117 "chkleaf:\n0\n2\n2\n\nfoo\x001\x001\nbar\x00baz\n"117 b"chkleaf:\n0\n2\n2\n\nfoo\x001\x001\nbar\x00baz\n"
118 "quux\x00\x001\nblarh\n",118 b"quux\x00\x001\nblarh\n",
119 ("sha1:1234",))119 (b"sha1:1234",))
120 self.assertEqual(2, len(node))120 self.assertEqual(2, len(node))
121 self.assertEqual([(("foo", "1"), "bar\x00baz"), (("quux", ""), "blarh")],121 self.assertEqual([((b"foo", "1"), b"bar\x00baz"), ((b"quux", ""), b"blarh")],
122 sorted(node.iteritems(None)))122 sorted(node.iteritems(None)))
123123
124 def test_iteritems_selected_one_of_two_items(self):124 def test_iteritems_selected_one_of_two_items(self):
125 node = self.module._deserialise_leaf_node(125 node = self.module._deserialise_leaf_node(
126 "chkleaf:\n0\n1\n2\n\nfoo bar\x001\nbaz\nquux\x001\nblarh\n",126 b"chkleaf:\n0\n1\n2\n\nfoo bar\x001\nbaz\nquux\x001\nblarh\n",
127 ("sha1:1234",))127 (b"sha1:1234",))
128 self.assertEqual(2, len(node))128 self.assertEqual(2, len(node))
129 self.assertEqual([(("quux",), "blarh")],129 self.assertEqual([((b"quux",), b"blarh")],
130 sorted(node.iteritems(None, [("quux",), ("qaz",)])))130 sorted(node.iteritems(None, [(b"quux",), (b"qaz",)])))
131131
132 def test_deserialise_item_with_common_prefix(self):132 def test_deserialise_item_with_common_prefix(self):
133 node = self.module._deserialise_leaf_node(133 node = self.module._deserialise_leaf_node(
134 "chkleaf:\n0\n2\n2\nfoo\x00\n1\x001\nbar\x00baz\n2\x001\nblarh\n",134 b"chkleaf:\n0\n2\n2\nfoo\x00\n1\x001\nbar\x00baz\n2\x001\nblarh\n",
135 ("sha1:1234",))135 (b"sha1:1234",))
136 self.assertEqual(2, len(node))136 self.assertEqual(2, len(node))
137 self.assertEqual([(("foo", "1"), "bar\x00baz"), (("foo", "2"), "blarh")],137 self.assertEqual([((b"foo", b"1"), b"bar\x00baz"), ((b"foo", b"2"), b"blarh")],
138 sorted(node.iteritems(None)))138 sorted(node.iteritems(None)))
139 self.assertIs(chk_map._unknown, node._search_prefix)139 self.assertIs(chk_map._unknown, node._search_prefix)
140 self.assertEqual('foo\x00', node._common_serialised_prefix)140 self.assertEqual(b'foo\x00', node._common_serialised_prefix)
141141
142 def test_deserialise_multi_line(self):142 def test_deserialise_multi_line(self):
143 node = self.module._deserialise_leaf_node(143 node = self.module._deserialise_leaf_node(
144 "chkleaf:\n0\n2\n2\nfoo\x00\n1\x002\nbar\nbaz\n2\x002\nblarh\n\n",144 b"chkleaf:\n0\n2\n2\nfoo\x00\n1\x002\nbar\nbaz\n2\x002\nblarh\n\n",
145 ("sha1:1234",))145 (b"sha1:1234",))
146 self.assertEqual(2, len(node))146 self.assertEqual(2, len(node))
147 self.assertEqual([(("foo", "1"), "bar\nbaz"),147 self.assertEqual([((b"foo", b"1"), b"bar\nbaz"),
148 (("foo", "2"), "blarh\n"),148 ((b"foo", b"2"), b"blarh\n"),
149 ], sorted(node.iteritems(None)))149 ], sorted(node.iteritems(None)))
150 self.assertIs(chk_map._unknown, node._search_prefix)150 self.assertIs(chk_map._unknown, node._search_prefix)
151 self.assertEqual('foo\x00', node._common_serialised_prefix)151 self.assertEqual(b'foo\x00', node._common_serialised_prefix)
152152
153 def test_key_after_map(self):153 def test_key_after_map(self):
154 node = self.module._deserialise_leaf_node(154 node = self.module._deserialise_leaf_node(
155 "chkleaf:\n10\n1\n0\n\n", ("sha1:1234",))155 b"chkleaf:\n10\n1\n0\n\n", (b"sha1:1234",))
156 node.map(None, ("foo bar",), "baz quux")156 node.map(None, (b"foo bar",), b"baz quux")
157 self.assertEqual(None, node.key())157 self.assertEqual(None, node.key())
158158
159 def test_key_after_unmap(self):159 def test_key_after_unmap(self):
160 node = self.module._deserialise_leaf_node(160 node = self.module._deserialise_leaf_node(
161 "chkleaf:\n0\n1\n2\n\nfoo bar\x001\nbaz\nquux\x001\nblarh\n",161 b"chkleaf:\n0\n1\n2\n\nfoo bar\x001\nbaz\nquux\x001\nblarh\n",
162 ("sha1:1234",))162 (b"sha1:1234",))
163 node.unmap(None, ("foo bar",))163 node.unmap(None, (b"foo bar",))
164 self.assertEqual(None, node.key())164 self.assertEqual(None, node.key())
165165
166166
@@ -171,71 +171,73 @@
171 def assertDeserialiseErrors(self, text):171 def assertDeserialiseErrors(self, text):
172 self.assertRaises((ValueError, IndexError),172 self.assertRaises((ValueError, IndexError),
173 self.module._deserialise_internal_node, text,173 self.module._deserialise_internal_node, text,
174 stuple('not-a-real-sha',))174 stuple(b'not-a-real-sha',))
175175
176 def test_raises_on_non_internal(self):176 def test_raises_on_non_internal(self):
177 self.assertDeserialiseErrors('')177 self.assertDeserialiseErrors(b'')
178 self.assertDeserialiseErrors('short\n')178 self.assertDeserialiseErrors(b'short\n')
179 self.assertDeserialiseErrors('chknotnode:\n')179 self.assertDeserialiseErrors(b'chknotnode:\n')
180 self.assertDeserialiseErrors('chknode:x\n')180 self.assertDeserialiseErrors(b'chknode:x\n')
181 self.assertDeserialiseErrors('chknode:\n')181 self.assertDeserialiseErrors(b'chknode:\n')
182 self.assertDeserialiseErrors('chknode:\nnotint\n')182 self.assertDeserialiseErrors(b'chknode:\nnotint\n')
183 self.assertDeserialiseErrors('chknode:\n10\n')183 self.assertDeserialiseErrors(b'chknode:\n10\n')
184 self.assertDeserialiseErrors('chknode:\n10\n256\n')184 self.assertDeserialiseErrors(b'chknode:\n10\n256\n')
185 self.assertDeserialiseErrors('chknode:\n10\n256\n10\n')185 self.assertDeserialiseErrors(b'chknode:\n10\n256\n10\n')
186 # no trailing newline186 # no trailing newline
187 self.assertDeserialiseErrors('chknode:\n10\n256\n0\n1\nfo')187 self.assertDeserialiseErrors(b'chknode:\n10\n256\n0\n1\nfo')
188188
189 def test_deserialise_one(self):189 def test_deserialise_one(self):
190 node = self.module._deserialise_internal_node(190 node = self.module._deserialise_internal_node(
191 "chknode:\n10\n1\n1\n\na\x00sha1:abcd\n", stuple('sha1:1234',))191 b"chknode:\n10\n1\n1\n\na\x00sha1:abcd\n", stuple(b'sha1:1234',))
192 self.assertIsInstance(node, chk_map.InternalNode)192 self.assertIsInstance(node, chk_map.InternalNode)
193 self.assertEqual(1, len(node))193 self.assertEqual(1, len(node))
194 self.assertEqual(10, node.maximum_size)194 self.assertEqual(10, node.maximum_size)
195 self.assertEqual(("sha1:1234",), node.key())195 self.assertEqual((b"sha1:1234",), node.key())
196 self.assertEqual('', node._search_prefix)196 self.assertEqual(b'', node._search_prefix)
197 self.assertEqual({'a': ('sha1:abcd',)}, node._items)197 self.assertEqual({b'a': (b'sha1:abcd',)}, node._items)
198198
199 def test_deserialise_with_prefix(self):199 def test_deserialise_with_prefix(self):
200 node = self.module._deserialise_internal_node(200 node = self.module._deserialise_internal_node(
201 "chknode:\n10\n1\n1\npref\na\x00sha1:abcd\n", stuple('sha1:1234',))201 b"chknode:\n10\n1\n1\npref\na\x00sha1:abcd\n",
202 stuple(b'sha1:1234',))
202 self.assertIsInstance(node, chk_map.InternalNode)203 self.assertIsInstance(node, chk_map.InternalNode)
203 self.assertEqual(1, len(node))204 self.assertEqual(1, len(node))
204 self.assertEqual(10, node.maximum_size)205 self.assertEqual(10, node.maximum_size)
205 self.assertEqual(("sha1:1234",), node.key())206 self.assertEqual((b"sha1:1234",), node.key())
206 self.assertEqual('pref', node._search_prefix)207 self.assertEqual(b'pref', node._search_prefix)
207 self.assertEqual({'prefa': ('sha1:abcd',)}, node._items)208 self.assertEqual({b'prefa': (b'sha1:abcd',)}, node._items)
208209
209 node = self.module._deserialise_internal_node(210 node = self.module._deserialise_internal_node(
210 "chknode:\n10\n1\n1\npref\n\x00sha1:abcd\n", stuple('sha1:1234',))211 b"chknode:\n10\n1\n1\npref\n\x00sha1:abcd\n",
212 stuple(b'sha1:1234',))
211 self.assertIsInstance(node, chk_map.InternalNode)213 self.assertIsInstance(node, chk_map.InternalNode)
212 self.assertEqual(1, len(node))214 self.assertEqual(1, len(node))
213 self.assertEqual(10, node.maximum_size)215 self.assertEqual(10, node.maximum_size)
214 self.assertEqual(("sha1:1234",), node.key())216 self.assertEqual((b"sha1:1234",), node.key())
215 self.assertEqual('pref', node._search_prefix)217 self.assertEqual(b'pref', node._search_prefix)
216 self.assertEqual({'pref': ('sha1:abcd',)}, node._items)218 self.assertEqual({b'pref': (b'sha1:abcd',)}, node._items)
217219
218 def test_deserialise_pref_with_null(self):220 def test_deserialise_pref_with_null(self):
219 node = self.module._deserialise_internal_node(221 node = self.module._deserialise_internal_node(
220 "chknode:\n10\n1\n1\npref\x00fo\n\x00sha1:abcd\n",222 b"chknode:\n10\n1\n1\npref\x00fo\n\x00sha1:abcd\n",
221 stuple('sha1:1234',))223 stuple(b'sha1:1234',))
222 self.assertIsInstance(node, chk_map.InternalNode)224 self.assertIsInstance(node, chk_map.InternalNode)
223 self.assertEqual(1, len(node))225 self.assertEqual(1, len(node))
224 self.assertEqual(10, node.maximum_size)226 self.assertEqual(10, node.maximum_size)
225 self.assertEqual(("sha1:1234",), node.key())227 self.assertEqual((b"sha1:1234",), node.key())
226 self.assertEqual('pref\x00fo', node._search_prefix)228 self.assertEqual(b'pref\x00fo', node._search_prefix)
227 self.assertEqual({'pref\x00fo': ('sha1:abcd',)}, node._items)229 self.assertEqual({b'pref\x00fo': (b'sha1:abcd',)}, node._items)
228230
229 def test_deserialise_with_null_pref(self):231 def test_deserialise_with_null_pref(self):
230 node = self.module._deserialise_internal_node(232 node = self.module._deserialise_internal_node(
231 "chknode:\n10\n1\n1\npref\x00fo\n\x00\x00sha1:abcd\n",233 b"chknode:\n10\n1\n1\npref\x00fo\n\x00\x00sha1:abcd\n",
232 stuple('sha1:1234',))234 stuple(b'sha1:1234',))
233 self.assertIsInstance(node, chk_map.InternalNode)235 self.assertIsInstance(node, chk_map.InternalNode)
234 self.assertEqual(1, len(node))236 self.assertEqual(1, len(node))
235 self.assertEqual(10, node.maximum_size)237 self.assertEqual(10, node.maximum_size)
236 self.assertEqual(("sha1:1234",), node.key())238 self.assertEqual((b"sha1:1234",), node.key())
237 self.assertEqual('pref\x00fo', node._search_prefix)239 self.assertEqual(b'pref\x00fo', node._search_prefix)
238 self.assertEqual({'pref\x00fo\x00': ('sha1:abcd',)}, node._items)240 self.assertEqual({b'pref\x00fo\x00': (b'sha1:abcd',)}, node._items)
239241
240242
241class Test_BytesToTextKey(tests.TestCase):243class Test_BytesToTextKey(tests.TestCase):
@@ -251,29 +253,29 @@
251 self.assertRaises(Exception, self.module._bytes_to_text_key, bytes)253 self.assertRaises(Exception, self.module._bytes_to_text_key, bytes)
252254
253 def test_file(self):255 def test_file(self):
254 self.assertBytesToTextKey(('file-id', 'revision-id'),256 self.assertBytesToTextKey((b'file-id', b'revision-id'),
255 'file: file-id\nparent-id\nname\nrevision-id\n'257 b'file: file-id\nparent-id\nname\nrevision-id\n'
256 'da39a3ee5e6b4b0d3255bfef95601890afd80709\n100\nN')258 b'da39a3ee5e6b4b0d3255bfef95601890afd80709\n100\nN')
257259
258 def test_invalid_no_kind(self):260 def test_invalid_no_kind(self):
259 self.assertBytesToTextKeyRaises(261 self.assertBytesToTextKeyRaises(
260 'file file-id\nparent-id\nname\nrevision-id\n'262 b'file file-id\nparent-id\nname\nrevision-id\n'
261 'da39a3ee5e6b4b0d3255bfef95601890afd80709\n100\nN')263 b'da39a3ee5e6b4b0d3255bfef95601890afd80709\n100\nN')
262264
263 def test_invalid_no_space(self):265 def test_invalid_no_space(self):
264 self.assertBytesToTextKeyRaises(266 self.assertBytesToTextKeyRaises(
265 'file:file-id\nparent-id\nname\nrevision-id\n'267 b'file:file-id\nparent-id\nname\nrevision-id\n'
266 'da39a3ee5e6b4b0d3255bfef95601890afd80709\n100\nN')268 b'da39a3ee5e6b4b0d3255bfef95601890afd80709\n100\nN')
267269
268 def test_invalid_too_short_file_id(self):270 def test_invalid_too_short_file_id(self):
269 self.assertBytesToTextKeyRaises('file:file-id')271 self.assertBytesToTextKeyRaises(b'file:file-id')
270272
271 def test_invalid_too_short_parent_id(self):273 def test_invalid_too_short_parent_id(self):
272 self.assertBytesToTextKeyRaises('file:file-id\nparent-id')274 self.assertBytesToTextKeyRaises(b'file:file-id\nparent-id')
273275
274 def test_invalid_too_short_name(self):276 def test_invalid_too_short_name(self):
275 self.assertBytesToTextKeyRaises('file:file-id\nparent-id\nname')277 self.assertBytesToTextKeyRaises(b'file:file-id\nparent-id\nname')
276278
277 def test_dir(self):279 def test_dir(self):
278 self.assertBytesToTextKey(('dir-id', 'revision-id'),280 self.assertBytesToTextKey((b'dir-id', b'revision-id'),
279 'dir: dir-id\nparent-id\nname\nrevision-id')281 b'dir: dir-id\nparent-id\nname\nrevision-id')
280282
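
Note: the expected constants in test_simple_16 are CRC32 of each key bit as zero-padded upper-case hex, NUL-joined. They can be reproduced with the stdlib (a sketch of the mapping the tests pin down, not the accelerated module itself):

    import zlib

    def search_key_16(key):
        # Mask to get the same unsigned value on Python 2 and 3.
        return b'\x00'.join([b'%08X' % (zlib.crc32(bit) & 0xFFFFFFFF)
                             for bit in key])

    assert search_key_16((b'foo',)) == b'8C736521'
    assert search_key_16((b'foo', b'bar')) == b'8C736521\x0076FF8CAA'
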
=== modified file 'breezy/tests/test__chunks_to_lines.py'
--- breezy/tests/test__chunks_to_lines.py 2017-05-23 14:08:03 +0000
+++ breezy/tests/test__chunks_to_lines.py 2017-06-11 01:47:17 +0000
@@ -47,58 +47,60 @@
47 self.assertIs(chunks, result)47 self.assertIs(chunks, result)
4848
49 def test_fulltext_chunk_to_lines(self):49 def test_fulltext_chunk_to_lines(self):
50 self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz\n'],50 self.assertChunksToLines(
51 ['foo\nbar\r\nba\rz\n'])51 [b'foo\n', b'bar\r\n', b'ba\rz\n'],
52 self.assertChunksToLines(['foobarbaz\n'], ['foobarbaz\n'],52 [b'foo\nbar\r\nba\rz\n'])
53 alreadly_lines=True)53 self.assertChunksToLines(
54 self.assertChunksToLines(['foo\n', 'bar\n', '\n', 'baz\n', '\n', '\n'],54 [b'foobarbaz\n'], [b'foobarbaz\n'], alreadly_lines=True)
55 ['foo\nbar\n\nbaz\n\n\n'])55 self.assertChunksToLines(
56 self.assertChunksToLines(['foobarbaz'], ['foobarbaz'],56 [b'foo\n', b'bar\n', b'\n', b'baz\n', b'\n', b'\n'],
57 alreadly_lines=True)57 [b'foo\nbar\n\nbaz\n\n\n'])
58 self.assertChunksToLines(['foobarbaz'], ['foo', 'bar', 'baz'])58 self.assertChunksToLines(
59 [b'foobarbaz'], [b'foobarbaz'], alreadly_lines=True)
60 self.assertChunksToLines([b'foobarbaz'], [b'foo', b'bar', b'baz'])
5961
60 def test_newlines(self):62 def test_newlines(self):
61 self.assertChunksToLines(['\n'], ['\n'], alreadly_lines=True)63 self.assertChunksToLines([b'\n'], [b'\n'], alreadly_lines=True)
62 self.assertChunksToLines(['\n'], ['', '\n', ''])64 self.assertChunksToLines([b'\n'], [b'', b'\n', b''])
63 self.assertChunksToLines(['\n'], ['\n', ''])65 self.assertChunksToLines([b'\n'], [b'\n', b''])
64 self.assertChunksToLines(['\n'], ['', '\n'])66 self.assertChunksToLines([b'\n'], [b'', b'\n'])
65 self.assertChunksToLines(['\n', '\n', '\n'], ['\n\n\n'])67 self.assertChunksToLines([b'\n', b'\n', b'\n'], [b'\n\n\n'])
66 self.assertChunksToLines(['\n', '\n', '\n'], ['\n', '\n', '\n'],68 self.assertChunksToLines([b'\n', b'\n', b'\n'], [b'\n', b'\n', b'\n'],
67 alreadly_lines=True)69 alreadly_lines=True)
6870
69 def test_lines_to_lines(self):71 def test_lines_to_lines(self):
70 self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz\n'],72 self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz\n'],
71 ['foo\n', 'bar\r\n', 'ba\rz\n'],73 [b'foo\n', b'bar\r\n', b'ba\rz\n'],
72 alreadly_lines=True)74 alreadly_lines=True)
7375
74 def test_no_final_newline(self):76 def test_no_final_newline(self):
75 self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz'],77 self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz'],
76 ['foo\nbar\r\nba\rz'])78 [b'foo\nbar\r\nba\rz'])
77 self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz'],79 self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz'],
78 ['foo\n', 'bar\r\n', 'ba\rz'],80 [b'foo\n', b'bar\r\n', b'ba\rz'],
79 alreadly_lines=True)81 alreadly_lines=True)
80 self.assertChunksToLines(('foo\n', 'bar\r\n', 'ba\rz'),82 self.assertChunksToLines((b'foo\n', b'bar\r\n', b'ba\rz'),
81 ('foo\n', 'bar\r\n', 'ba\rz'),83 (b'foo\n', b'bar\r\n', b'ba\rz'),
82 alreadly_lines=True)84 alreadly_lines=True)
83 self.assertChunksToLines([], [], alreadly_lines=True)85 self.assertChunksToLines([], [], alreadly_lines=True)
84 self.assertChunksToLines(['foobarbaz'], ['foobarbaz'],86 self.assertChunksToLines([b'foobarbaz'], [b'foobarbaz'],
85 alreadly_lines=True)87 alreadly_lines=True)
86 self.assertChunksToLines([], [''])88 self.assertChunksToLines([], [b''])
8789
88 def test_mixed(self):90 def test_mixed(self):
89 self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz'],91 self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz'],
90 ['foo\n', 'bar\r\nba\r', 'z'])92 [b'foo\n', b'bar\r\nba\r', b'z'])
91 self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz'],93 self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz'],
92 ['foo\nb', 'a', 'r\r\nba\r', 'z'])94 [b'foo\nb', b'a', b'r\r\nba\r', b'z'])
93 self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz'],95 self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz'],
94 ['foo\nbar\r\nba', '\r', 'z'])96 [b'foo\nbar\r\nba', b'\r', b'z'])
9597
96 self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz'],98 self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz'],
97 ['foo\n', '', 'bar\r\nba', '\r', 'z'])99 [b'foo\n', b'', b'bar\r\nba', b'\r', b'z'])
98 self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz\n'],100 self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz\n'],
99 ['foo\n', 'bar\r\n', 'ba\rz\n', ''])101 [b'foo\n', b'bar\r\n', b'ba\rz\n', b''])
100 self.assertChunksToLines(['foo\n', 'bar\r\n', 'ba\rz\n'],102 self.assertChunksToLines([b'foo\n', b'bar\r\n', b'ba\rz\n'],
101 ['foo\n', 'bar', '\r\n', 'ba\rz\n'])103 [b'foo\n', b'bar', b'\r\n', b'ba\rz\n'])
102104
103 def test_not_lines(self):105 def test_not_lines(self):
104 # We should raise a TypeError, not crash106 # We should raise a TypeError, not crash
@@ -107,4 +109,4 @@
107 self.assertRaises(TypeError, self.module.chunks_to_lines,109 self.assertRaises(TypeError, self.module.chunks_to_lines,
108 [object()])110 [object()])
109 self.assertRaises(TypeError, self.module.chunks_to_lines,111 self.assertRaises(TypeError, self.module.chunks_to_lines,
110 ['foo', object()])112 [b'foo', object()])
111113
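
Note: as the cases above pin down, chunks_to_lines splits only on LF (a bare CR stays inside its line) and drops empty input. A naive pure-Python reference matching those expectations (not the real accelerated implementation):

    def chunks_to_lines_ref(chunks):
        text = b''.join(chunks)
        lines = text.split(b'\n')
        result = [line + b'\n' for line in lines[:-1]]
        if lines[-1]:                  # trailing text with no final LF
            result.append(lines[-1])
        return result

    assert chunks_to_lines_ref([b'foo\nbar\r\nba\rz\n']) == [
        b'foo\n', b'bar\r\n', b'ba\rz\n']
    assert chunks_to_lines_ref([b'foo', b'bar', b'baz']) == [b'foobarbaz']
    assert chunks_to_lines_ref([b'', b'\n', b'']) == [b'\n']
    assert chunks_to_lines_ref([b'']) == []
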
=== modified file 'breezy/tests/test_inv.py'
--- breezy/tests/test_inv.py 2017-06-10 00:52:37 +0000
+++ breezy/tests/test_inv.py 2017-06-11 01:47:17 +0000
@@ -284,35 +284,35 @@
284284
285 def test_creation_from_root_id(self):285 def test_creation_from_root_id(self):
286 # iff a root id is passed to the constructor, a root directory is made286 # iff a root id is passed to the constructor, a root directory is made
287 inv = inventory.Inventory(root_id='tree-root')287 inv = inventory.Inventory(root_id=b'tree-root')
288 self.assertNotEqual(None, inv.root)288 self.assertNotEqual(None, inv.root)
289 self.assertEqual('tree-root', inv.root.file_id)289 self.assertEqual(b'tree-root', inv.root.file_id)
290290
291 def test_add_path_of_root(self):291 def test_add_path_of_root(self):
292 # if no root id is given at creation time, there is no root directory292 # if no root id is given at creation time, there is no root directory
293 inv = inventory.Inventory(root_id=None)293 inv = inventory.Inventory(root_id=None)
294 self.assertIs(None, inv.root)294 self.assertIs(None, inv.root)
295 # add a root entry by adding its path295 # add a root entry by adding its path
296 ie = inv.add_path("", "directory", "my-root")296 ie = inv.add_path(u"", "directory", b"my-root")
297 ie.revision = 'test-rev'297 ie.revision = b'test-rev'
298 self.assertEqual("my-root", ie.file_id)298 self.assertEqual(b"my-root", ie.file_id)
299 self.assertIs(ie, inv.root)299 self.assertIs(ie, inv.root)
300300
301 def test_add_path(self):301 def test_add_path(self):
302 inv = inventory.Inventory(root_id='tree_root')302 inv = inventory.Inventory(root_id=b'tree_root')
303 ie = inv.add_path('hello', 'file', 'hello-id')303 ie = inv.add_path(u'hello', 'file', b'hello-id')
304 self.assertEqual('hello-id', ie.file_id)304 self.assertEqual(b'hello-id', ie.file_id)
305 self.assertEqual('file', ie.kind)305 self.assertEqual('file', ie.kind)
306306
307 def test_copy(self):307 def test_copy(self):
308 """Make sure copy() works and creates a deep copy."""308 """Make sure copy() works and creates a deep copy."""
309 inv = inventory.Inventory(root_id='some-tree-root')309 inv = inventory.Inventory(root_id=b'some-tree-root')
310 ie = inv.add_path('hello', 'file', 'hello-id')310 ie = inv.add_path(u'hello', 'file', b'hello-id')
311 inv2 = inv.copy()311 inv2 = inv.copy()
312 inv.root.file_id = 'some-new-root'312 inv.root.file_id = b'some-new-root'
313 ie.name = 'file2'313 ie.name = u'file2'
314 self.assertEqual('some-tree-root', inv2.root.file_id)314 self.assertEqual(b'some-tree-root', inv2.root.file_id)
315 self.assertEqual('hello', inv2['hello-id'].name)315 self.assertEqual(u'hello', inv2[b'hello-id'].name)
316316
317 def test_copy_empty(self):317 def test_copy_empty(self):
318 """Make sure an empty inventory can be copied."""318 """Make sure an empty inventory can be copied."""
@@ -322,16 +322,17 @@
322322
323 def test_copy_copies_root_revision(self):323 def test_copy_copies_root_revision(self):
324 """Make sure the revision of the root gets copied."""324 """Make sure the revision of the root gets copied."""
325 inv = inventory.Inventory(root_id='someroot')325 inv = inventory.Inventory(root_id=b'someroot')
326 inv.root.revision = 'therev'326 inv.root.revision = b'therev'
327 inv2 = inv.copy()327 inv2 = inv.copy()
328 self.assertEqual('someroot', inv2.root.file_id)328 self.assertEqual(b'someroot', inv2.root.file_id)
329 self.assertEqual('therev', inv2.root.revision)329 self.assertEqual(b'therev', inv2.root.revision)
330330
331 def test_create_tree_reference(self):331 def test_create_tree_reference(self):
332 inv = inventory.Inventory('tree-root-123')332 inv = inventory.Inventory(b'tree-root-123')
333 inv.add(TreeReference('nested-id', 'nested', parent_id='tree-root-123',333 inv.add(TreeReference(
334 revision='rev', reference_revision='rev2'))334 b'nested-id', 'nested', parent_id=b'tree-root-123',
335 revision=b'rev', reference_revision=b'rev2'))
335336
336 def test_error_encoding(self):337 def test_error_encoding(self):
337 inv = inventory.Inventory('tree-root')338 inv = inventory.Inventory('tree-root')
@@ -997,30 +998,30 @@
997998
998 def test___getitem__(self):999 def test___getitem__(self):
999 inv = Inventory()1000 inv = Inventory()
1000 inv.revision_id = "revid"1001 inv.revision_id = b"revid"
1001 inv.root.revision = "rootrev"1002 inv.root.revision = b"rootrev"
1002 inv.add(InventoryFile("fileid", "file", inv.root.file_id))1003 inv.add(InventoryFile(b"fileid", u"file", inv.root.file_id))
1003 inv["fileid"].revision = "filerev"1004 inv[b"fileid"].revision = b"filerev"
1004 inv["fileid"].executable = True1005 inv[b"fileid"].executable = True
1005 inv["fileid"].text_sha1 = "ffff"1006 inv[b"fileid"].text_sha1 = b"ffff"
1006 inv["fileid"].text_size = 11007 inv[b"fileid"].text_size = 1
1007 chk_bytes = self.get_chk_bytes()1008 chk_bytes = self.get_chk_bytes()
1008 chk_inv = CHKInventory.from_inventory(chk_bytes, inv)1009 chk_inv = CHKInventory.from_inventory(chk_bytes, inv)
1009 bytes = ''.join(chk_inv.to_lines())1010 data = b''.join(chk_inv.to_lines())
1010 new_inv = CHKInventory.deserialise(chk_bytes, bytes, ("revid",))1011 new_inv = CHKInventory.deserialise(chk_bytes, data, (b"revid",))
1011 root_entry = new_inv[inv.root.file_id]1012 root_entry = new_inv[inv.root.file_id]
1012 file_entry = new_inv["fileid"]1013 file_entry = new_inv[b"fileid"]
1013 self.assertEqual("directory", root_entry.kind)1014 self.assertEqual("directory", root_entry.kind)
1014 self.assertEqual(inv.root.file_id, root_entry.file_id)1015 self.assertEqual(inv.root.file_id, root_entry.file_id)
1015 self.assertEqual(inv.root.parent_id, root_entry.parent_id)1016 self.assertEqual(inv.root.parent_id, root_entry.parent_id)
1016 self.assertEqual(inv.root.name, root_entry.name)1017 self.assertEqual(inv.root.name, root_entry.name)
1017 self.assertEqual("rootrev", root_entry.revision)1018 self.assertEqual(b"rootrev", root_entry.revision)
1018 self.assertEqual("file", file_entry.kind)1019 self.assertEqual("file", file_entry.kind)
1019 self.assertEqual("fileid", file_entry.file_id)1020 self.assertEqual(b"fileid", file_entry.file_id)
1020 self.assertEqual(inv.root.file_id, file_entry.parent_id)1021 self.assertEqual(inv.root.file_id, file_entry.parent_id)
1021 self.assertEqual("file", file_entry.name)1022 self.assertEqual(u"file", file_entry.name)
1022 self.assertEqual("filerev", file_entry.revision)1023 self.assertEqual(b"filerev", file_entry.revision)
1023 self.assertEqual("ffff", file_entry.text_sha1)1024 self.assertEqual(b"ffff", file_entry.text_sha1)
1024 self.assertEqual(1, file_entry.text_size)1025 self.assertEqual(1, file_entry.text_size)
1025 self.assertEqual(True, file_entry.executable)1026 self.assertEqual(True, file_entry.executable)
1026 self.assertRaises(errors.NoSuchId, new_inv.__getitem__, 'missing')1027 self.assertRaises(errors.NoSuchId, new_inv.__getitem__, 'missing')
10271028
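These test changes spell out the typing convention the port relies on: file ids, revision ids and text sha1s are bytes, while entry names and paths stay unicode. Restating the API usage from the hunk above:

    inv = inventory.Inventory(root_id=b'tree_root')
    ie = inv.add_path(u'hello', 'file', b'hello-id')
    ie.revision = b'test-rev'
    assert isinstance(ie.file_id, bytes)
    assert isinstance(ie.name, type(u''))
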
=== modified file 'breezy/tests/test_inventory_delta.py'
--- breezy/tests/test_inventory_delta.py 2017-06-09 16:31:49 +0000
+++ breezy/tests/test_inventory_delta.py 2017-06-11 01:47:17 +0000
@@ -32,14 +32,14 @@
32from . import TestCase32from . import TestCase
3333
34### DO NOT REFLOW THESE TEXTS. NEW LINES ARE SIGNIFICANT. ###34### DO NOT REFLOW THESE TEXTS. NEW LINES ARE SIGNIFICANT. ###
35empty_lines = """format: bzr inventory delta v1 (bzr 1.14)35empty_lines = b"""format: bzr inventory delta v1 (bzr 1.14)
36parent: null:36parent: null:
37version: null:37version: null:
38versioned_root: true38versioned_root: true
39tree_references: true39tree_references: true
40"""40"""
4141
42root_only_lines = """format: bzr inventory delta v1 (bzr 1.14)42root_only_lines = b"""format: bzr inventory delta v1 (bzr 1.14)
43parent: null:43parent: null:
44version: entry-version44version: entry-version
45versioned_root: true45versioned_root: true
@@ -48,7 +48,7 @@
48"""48"""
4949
5050
51root_change_lines = """format: bzr inventory delta v1 (bzr 1.14)51root_change_lines = b"""format: bzr inventory delta v1 (bzr 1.14)
52parent: entry-version52parent: entry-version
53version: changed-root53version: changed-root
54versioned_root: true54versioned_root: true
@@ -56,7 +56,7 @@
56/\x00an-id\x00\x00different-version\x00dir56/\x00an-id\x00\x00different-version\x00dir
57"""57"""
5858
59corrupt_parent_lines = """format: bzr inventory delta v1 (bzr 1.14)59corrupt_parent_lines = b"""format: bzr inventory delta v1 (bzr 1.14)
60parent: entry-version60parent: entry-version
61version: changed-root61version: changed-root
62versioned_root: false62versioned_root: false
@@ -64,7 +64,7 @@
64/\x00an-id\x00\x00different-version\x00dir64/\x00an-id\x00\x00different-version\x00dir
65"""65"""
6666
67root_only_unversioned = """format: bzr inventory delta v1 (bzr 1.14)67root_only_unversioned = b"""format: bzr inventory delta v1 (bzr 1.14)
68parent: null:68parent: null:
69version: entry-version69version: entry-version
70versioned_root: false70versioned_root: false
@@ -72,7 +72,7 @@
72None\x00/\x00TREE_ROOT\x00\x00entry-version\x00dir72None\x00/\x00TREE_ROOT\x00\x00entry-version\x00dir
73"""73"""
7474
75reference_lines = """format: bzr inventory delta v1 (bzr 1.14)75reference_lines = b"""format: bzr inventory delta v1 (bzr 1.14)
76parent: null:76parent: null:
77version: entry-version77version: entry-version
78versioned_root: true78versioned_root: true
@@ -81,7 +81,7 @@
81None\x00/foo\x00id\x00TREE_ROOT\x00changed\x00tree\x00subtree-version81None\x00/foo\x00id\x00TREE_ROOT\x00changed\x00tree\x00subtree-version
82"""82"""
8383
84change_tree_lines = """format: bzr inventory delta v1 (bzr 1.14)84change_tree_lines = b"""format: bzr inventory delta v1 (bzr 1.14)
85parent: entry-version85parent: entry-version
86version: change-tree86version: change-tree
87versioned_root: false87versioned_root: false
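All of the literal delta texts above now carry a b prefix. Each one starts with the same five-line header of byte key/value pairs; a hedged sketch of how such a header splits, using the values from root_only_lines:

    header_lines = [
        b'format: bzr inventory delta v1 (bzr 1.14)',
        b'parent: null:',
        b'version: entry-version',
        b'versioned_root: true',
        b'tree_references: true',
    ]
    fields = dict(line.split(b': ', 1) for line in header_lines)
    assert fields[b'version'] == b'entry-version'
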
@@ -96,34 +96,34 @@
96 def test_parse_no_bytes(self):96 def test_parse_no_bytes(self):
97 deserializer = inventory_delta.InventoryDeltaDeserializer()97 deserializer = inventory_delta.InventoryDeltaDeserializer()
98 err = self.assertRaises(98 err = self.assertRaises(
99 InventoryDeltaError, deserializer.parse_text_bytes, '')99 InventoryDeltaError, deserializer.parse_text_bytes, b'')
100 self.assertContainsRe(str(err), 'last line not empty')100 self.assertContainsRe(str(err), 'last line not empty')
101101
102 def test_parse_bad_format(self):102 def test_parse_bad_format(self):
103 deserializer = inventory_delta.InventoryDeltaDeserializer()103 deserializer = inventory_delta.InventoryDeltaDeserializer()
104 err = self.assertRaises(InventoryDeltaError,104 err = self.assertRaises(InventoryDeltaError,
105 deserializer.parse_text_bytes, 'format: foo\n')105 deserializer.parse_text_bytes, b'format: foo\n')
106 self.assertContainsRe(str(err), 'unknown format')106 self.assertContainsRe(str(err), 'unknown format')
107107
108 def test_parse_no_parent(self):108 def test_parse_no_parent(self):
109 deserializer = inventory_delta.InventoryDeltaDeserializer()109 deserializer = inventory_delta.InventoryDeltaDeserializer()
110 err = self.assertRaises(InventoryDeltaError,110 err = self.assertRaises(InventoryDeltaError,
111 deserializer.parse_text_bytes,111 deserializer.parse_text_bytes,
112 'format: bzr inventory delta v1 (bzr 1.14)\n')112 b'format: bzr inventory delta v1 (bzr 1.14)\n')
113 self.assertContainsRe(str(err), 'missing parent: marker')113 self.assertContainsRe(str(err), 'missing parent: marker')
114114
115 def test_parse_no_version(self):115 def test_parse_no_version(self):
116 deserializer = inventory_delta.InventoryDeltaDeserializer()116 deserializer = inventory_delta.InventoryDeltaDeserializer()
117 err = self.assertRaises(InventoryDeltaError,117 err = self.assertRaises(InventoryDeltaError,
118 deserializer.parse_text_bytes,118 deserializer.parse_text_bytes,
119 'format: bzr inventory delta v1 (bzr 1.14)\n'119 b'format: bzr inventory delta v1 (bzr 1.14)\n'
120 'parent: null:\n')120 b'parent: null:\n')
121 self.assertContainsRe(str(err), 'missing version: marker')121 self.assertContainsRe(str(err), 'missing version: marker')
122 122
123 def test_parse_duplicate_key_errors(self):123 def test_parse_duplicate_key_errors(self):
124 deserializer = inventory_delta.InventoryDeltaDeserializer()124 deserializer = inventory_delta.InventoryDeltaDeserializer()
125 double_root_lines = \125 double_root_lines = \
126"""format: bzr inventory delta v1 (bzr 1.14)126b"""format: bzr inventory delta v1 (bzr 1.14)
127parent: null:127parent: null:
128version: null:128version: null:
129versioned_root: true129versioned_root: true
@@ -139,16 +139,16 @@
139 deserializer = inventory_delta.InventoryDeltaDeserializer()139 deserializer = inventory_delta.InventoryDeltaDeserializer()
140 parse_result = deserializer.parse_text_bytes(root_only_lines)140 parse_result = deserializer.parse_text_bytes(root_only_lines)
141 expected_entry = inventory.make_entry(141 expected_entry = inventory.make_entry(
142 'directory', u'', None, 'an-id')142 'directory', u'', None, b'an-id')
143 expected_entry.revision = 'a@e\xc3\xa5ample.com--2004'143 expected_entry.revision = b'a@e\xc3\xa5ample.com--2004'
144 self.assertEqual(144 self.assertEqual(
145 ('null:', 'entry-version', True, True,145 (b'null:', b'entry-version', True, True,
146 [(None, '', 'an-id', expected_entry)]),146 [(None, u'', b'an-id', expected_entry)]),
147 parse_result)147 parse_result)
148148
149 def test_parse_special_revid_not_valid_last_mod(self):149 def test_parse_special_revid_not_valid_last_mod(self):
150 deserializer = inventory_delta.InventoryDeltaDeserializer()150 deserializer = inventory_delta.InventoryDeltaDeserializer()
151 root_only_lines = """format: bzr inventory delta v1 (bzr 1.14)151 root_only_lines = b"""format: bzr inventory delta v1 (bzr 1.14)
152parent: null:152parent: null:
153version: null:153version: null:
154versioned_root: false154versioned_root: false
@@ -161,7 +161,7 @@
161161
162 def test_parse_versioned_root_versioned_disabled(self):162 def test_parse_versioned_root_versioned_disabled(self):
163 deserializer = inventory_delta.InventoryDeltaDeserializer()163 deserializer = inventory_delta.InventoryDeltaDeserializer()
164 root_only_lines = """format: bzr inventory delta v1 (bzr 1.14)164 root_only_lines = b"""format: bzr inventory delta v1 (bzr 1.14)
165parent: null:165parent: null:
166version: null:166version: null:
167versioned_root: false167versioned_root: false
@@ -174,7 +174,7 @@
174174
175 def test_parse_unique_root_id_root_versioned_disabled(self):175 def test_parse_unique_root_id_root_versioned_disabled(self):
176 deserializer = inventory_delta.InventoryDeltaDeserializer()176 deserializer = inventory_delta.InventoryDeltaDeserializer()
177 root_only_lines = """format: bzr inventory delta v1 (bzr 1.14)177 root_only_lines = b"""format: bzr inventory delta v1 (bzr 1.14)
178parent: parent-id178parent: parent-id
179version: a@e\xc3\xa5ample.com--2004179version: a@e\xc3\xa5ample.com--2004
180versioned_root: false180versioned_root: false
@@ -189,11 +189,11 @@
189 deserializer = inventory_delta.InventoryDeltaDeserializer()189 deserializer = inventory_delta.InventoryDeltaDeserializer()
190 parse_result = deserializer.parse_text_bytes(root_only_unversioned)190 parse_result = deserializer.parse_text_bytes(root_only_unversioned)
191 expected_entry = inventory.make_entry(191 expected_entry = inventory.make_entry(
192 'directory', u'', None, 'TREE_ROOT')192 'directory', u'', None, b'TREE_ROOT')
193 expected_entry.revision = 'entry-version'193 expected_entry.revision = b'entry-version'
194 self.assertEqual(194 self.assertEqual(
195 ('null:', 'entry-version', False, False,195 (b'null:', b'entry-version', False, False,
196 [(None, u'', 'TREE_ROOT', expected_entry)]),196 [(None, u'', b'TREE_ROOT', expected_entry)]),
197 parse_result)197 parse_result)
198198
199 def test_parse_versioned_root_when_disabled(self):199 def test_parse_versioned_root_when_disabled(self):
@@ -215,7 +215,7 @@
215 deserializer = inventory_delta.InventoryDeltaDeserializer()215 deserializer = inventory_delta.InventoryDeltaDeserializer()
216 # A serialised inventory delta with a header saying no tree refs, but216 # A serialised inventory delta with a header saying no tree refs, but
217 # that has a tree ref in its content.217 # that has a tree ref in its content.
218 lines = """format: bzr inventory delta v1 (bzr 1.14)218 lines = b"""format: bzr inventory delta v1 (bzr 1.14)
219parent: null:219parent: null:
220version: entry-version220version: entry-version
221versioned_root: false221versioned_root: false
@@ -231,7 +231,7 @@
231 deserializer = inventory_delta.InventoryDeltaDeserializer()231 deserializer = inventory_delta.InventoryDeltaDeserializer()
232 # A serialised inventory delta with a header saying no tree refs, but232 # A serialised inventory delta with a header saying no tree refs, but
233 # that has a tree ref in its content.233 # that has a tree ref in its content.
234 lines = """format: bzr inventory delta v1 (bzr 1.14)234 lines = b"""format: bzr inventory delta v1 (bzr 1.14)
235parent: null:235parent: null:
236version: entry-version236version: entry-version
237versioned_root: false237versioned_root: false
@@ -254,7 +254,7 @@
254 def test_parse_invalid_newpath(self):254 def test_parse_invalid_newpath(self):
255 """newpath must start with / if it is not None."""255 """newpath must start with / if it is not None."""
256 lines = empty_lines256 lines = empty_lines
257 lines += "None\x00bad\x00TREE_ROOT\x00\x00version\x00dir\n"257 lines += b"None\x00bad\x00TREE_ROOT\x00\x00version\x00dir\n"
258 deserializer = inventory_delta.InventoryDeltaDeserializer()258 deserializer = inventory_delta.InventoryDeltaDeserializer()
259 err = self.assertRaises(InventoryDeltaError,259 err = self.assertRaises(InventoryDeltaError,
260 deserializer.parse_text_bytes, lines)260 deserializer.parse_text_bytes, lines)
@@ -263,39 +263,39 @@
263 def test_parse_invalid_oldpath(self):263 def test_parse_invalid_oldpath(self):
264 """oldpath must start with / if it is not None."""264 """oldpath must start with / if it is not None."""
265 lines = root_only_lines265 lines = root_only_lines
266 lines += "bad\x00/new\x00file-id\x00\x00version\x00dir\n"266 lines += b"bad\x00/new\x00file-id\x00\x00version\x00dir\n"
267 deserializer = inventory_delta.InventoryDeltaDeserializer()267 deserializer = inventory_delta.InventoryDeltaDeserializer()
268 err = self.assertRaises(InventoryDeltaError,268 err = self.assertRaises(InventoryDeltaError,
269 deserializer.parse_text_bytes, lines)269 deserializer.parse_text_bytes, lines)
270 self.assertContainsRe(str(err), 'oldpath invalid')270 self.assertContainsRe(str(err), 'oldpath invalid')
271 271
272 def test_parse_new_file(self):272 def test_parse_new_file(self):
273 """a new file is parsed correctly"""273 """a new file is parsed correctly"""
274 lines = root_only_lines274 lines = root_only_lines
275 fake_sha = "deadbeef" * 5275 fake_sha = b"deadbeef" * 5
276 lines += (276 lines += (
277 "None\x00/new\x00file-id\x00an-id\x00version\x00file\x00123\x00" +277 b"None\x00/new\x00file-id\x00an-id\x00version\x00file\x00123\x00" +
278 "\x00" + fake_sha + "\n")278 b"\x00" + fake_sha + b"\n")
279 deserializer = inventory_delta.InventoryDeltaDeserializer()279 deserializer = inventory_delta.InventoryDeltaDeserializer()
280 parse_result = deserializer.parse_text_bytes(lines)280 parse_result = deserializer.parse_text_bytes(lines)
281 expected_entry = inventory.make_entry(281 expected_entry = inventory.make_entry(
282 'file', u'new', 'an-id', 'file-id')282 'file', u'new', b'an-id', b'file-id')
283 expected_entry.revision = 'version'283 expected_entry.revision = b'version'
284 expected_entry.text_size = 123284 expected_entry.text_size = 123
285 expected_entry.text_sha1 = fake_sha285 expected_entry.text_sha1 = fake_sha
286 delta = parse_result[4]286 delta = parse_result[4]
287 self.assertEqual(287 self.assertEqual(
288 (None, u'new', 'file-id', expected_entry), delta[-1])288 (None, u'new', b'file-id', expected_entry), delta[-1])
289289
290 def test_parse_delete(self):290 def test_parse_delete(self):
291 lines = root_only_lines291 lines = root_only_lines
292 lines += (292 lines += (
293 "/old-file\x00None\x00deleted-id\x00\x00null:\x00deleted\x00\x00\n")293 b"/old-file\x00None\x00deleted-id\x00\x00null:\x00deleted\x00\x00\n")
294 deserializer = inventory_delta.InventoryDeltaDeserializer()294 deserializer = inventory_delta.InventoryDeltaDeserializer()
295 parse_result = deserializer.parse_text_bytes(lines)295 parse_result = deserializer.parse_text_bytes(lines)
296 delta = parse_result[4]296 delta = parse_result[4]
297 self.assertEqual(297 self.assertEqual(
298 (u'old-file', None, 'deleted-id', None), delta[-1])298 (u'old-file', None, b'deleted-id', None), delta[-1])
299299
300300
301class TestSerialization(TestCase):301class TestSerialization(TestCase):
@@ -313,86 +313,86 @@
313 def test_root_only_to_lines(self):313 def test_root_only_to_lines(self):
314 old_inv = Inventory(None)314 old_inv = Inventory(None)
315 new_inv = Inventory(None)315 new_inv = Inventory(None)
316 root = new_inv.make_entry('directory', '', None, 'an-id')316 root = new_inv.make_entry('directory', u'', None, b'an-id')
317 root.revision = 'a@e\xc3\xa5ample.com--2004'317 root.revision = b'a@e\xc3\xa5ample.com--2004'
318 new_inv.add(root)318 new_inv.add(root)
319 delta = new_inv._make_delta(old_inv)319 delta = new_inv._make_delta(old_inv)
320 serializer = inventory_delta.InventoryDeltaSerializer(320 serializer = inventory_delta.InventoryDeltaSerializer(
321 versioned_root=True, tree_references=True)321 versioned_root=True, tree_references=True)
322 self.assertEqual(BytesIO(root_only_lines).readlines(),322 self.assertEqual(BytesIO(root_only_lines).readlines(),
323 serializer.delta_to_lines(NULL_REVISION, 'entry-version', delta))323 serializer.delta_to_lines(NULL_REVISION, b'entry-version', delta))
324324
325 def test_unversioned_root(self):325 def test_unversioned_root(self):
326 old_inv = Inventory(None)326 old_inv = Inventory(None)
327 new_inv = Inventory(None)327 new_inv = Inventory(None)
328 root = new_inv.make_entry('directory', '', None, 'TREE_ROOT')328 root = new_inv.make_entry('directory', u'', None, b'TREE_ROOT')
329 # Implicit roots are considered modified in every revision.329 # Implicit roots are considered modified in every revision.
330 root.revision = 'entry-version'330 root.revision = b'entry-version'
331 new_inv.add(root)331 new_inv.add(root)
332 delta = new_inv._make_delta(old_inv)332 delta = new_inv._make_delta(old_inv)
333 serializer = inventory_delta.InventoryDeltaSerializer(333 serializer = inventory_delta.InventoryDeltaSerializer(
334 versioned_root=False, tree_references=False)334 versioned_root=False, tree_references=False)
335 serialized_lines = serializer.delta_to_lines(335 serialized_lines = serializer.delta_to_lines(
336 NULL_REVISION, 'entry-version', delta)336 NULL_REVISION, b'entry-version', delta)
337 self.assertEqual(BytesIO(root_only_unversioned).readlines(),337 self.assertEqual(BytesIO(root_only_unversioned).readlines(),
338 serialized_lines)338 serialized_lines)
339 deserializer = inventory_delta.InventoryDeltaDeserializer()339 deserializer = inventory_delta.InventoryDeltaDeserializer()
340 self.assertEqual(340 self.assertEqual(
341 (NULL_REVISION, 'entry-version', False, False, delta),341 (NULL_REVISION, b'entry-version', False, False, delta),
342 deserializer.parse_text_bytes(''.join(serialized_lines)))342 deserializer.parse_text_bytes(b''.join(serialized_lines)))
343343
344 def test_unversioned_non_root_errors(self):344 def test_unversioned_non_root_errors(self):
345 old_inv = Inventory(None)345 old_inv = Inventory(None)
346 new_inv = Inventory(None)346 new_inv = Inventory(None)
347 root = new_inv.make_entry('directory', '', None, 'TREE_ROOT')347 root = new_inv.make_entry('directory', u'', None, b'TREE_ROOT')
348 root.revision = 'a@e\xc3\xa5ample.com--2004'348 root.revision = b'a@e\xc3\xa5ample.com--2004'
349 new_inv.add(root)349 new_inv.add(root)
350 non_root = new_inv.make_entry('directory', 'foo', root.file_id, 'id')350 non_root = new_inv.make_entry('directory', u'foo', root.file_id, b'id')
351 new_inv.add(non_root)351 new_inv.add(non_root)
352 delta = new_inv._make_delta(old_inv)352 delta = new_inv._make_delta(old_inv)
353 serializer = inventory_delta.InventoryDeltaSerializer(353 serializer = inventory_delta.InventoryDeltaSerializer(
354 versioned_root=True, tree_references=True)354 versioned_root=True, tree_references=True)
355 err = self.assertRaises(InventoryDeltaError,355 err = self.assertRaises(InventoryDeltaError,
356 serializer.delta_to_lines, NULL_REVISION, 'entry-version', delta)356 serializer.delta_to_lines, NULL_REVISION, b'entry-version', delta)
357 self.assertContainsRe(str(err), "^no version for fileid b?'id'$")357 self.assertContainsRe(str(err), "^no version for fileid b?'id'$")
358358
359 def test_richroot_unversioned_root_errors(self):359 def test_richroot_unversioned_root_errors(self):
360 old_inv = Inventory(None)360 old_inv = Inventory(None)
361 new_inv = Inventory(None)361 new_inv = Inventory(None)
362 root = new_inv.make_entry('directory', '', None, 'TREE_ROOT')362 root = new_inv.make_entry('directory', '', None, b'TREE_ROOT')
363 new_inv.add(root)363 new_inv.add(root)
364 delta = new_inv._make_delta(old_inv)364 delta = new_inv._make_delta(old_inv)
365 serializer = inventory_delta.InventoryDeltaSerializer(365 serializer = inventory_delta.InventoryDeltaSerializer(
366 versioned_root=True, tree_references=True)366 versioned_root=True, tree_references=True)
367 err = self.assertRaises(InventoryDeltaError,367 err = self.assertRaises(InventoryDeltaError,
368 serializer.delta_to_lines, NULL_REVISION, 'entry-version', delta)368 serializer.delta_to_lines, NULL_REVISION, b'entry-version', delta)
369 self.assertContainsRe(369 self.assertContainsRe(
370 str(err), "no version for fileid b?'TREE_ROOT'$")370 str(err), "no version for fileid b?'TREE_ROOT'$")
371371
372 def test_nonrichroot_versioned_root_errors(self):372 def test_nonrichroot_versioned_root_errors(self):
373 old_inv = Inventory(None)373 old_inv = Inventory(None)
374 new_inv = Inventory(None)374 new_inv = Inventory(None)
375 root = new_inv.make_entry('directory', '', None, 'TREE_ROOT')375 root = new_inv.make_entry('directory', u'', None, b'TREE_ROOT')
376 root.revision = 'a@e\xc3\xa5ample.com--2004'376 root.revision = b'a@e\xc3\xa5ample.com--2004'
377 new_inv.add(root)377 new_inv.add(root)
378 delta = new_inv._make_delta(old_inv)378 delta = new_inv._make_delta(old_inv)
379 serializer = inventory_delta.InventoryDeltaSerializer(379 serializer = inventory_delta.InventoryDeltaSerializer(
380 versioned_root=False, tree_references=True)380 versioned_root=False, tree_references=True)
381 err = self.assertRaises(InventoryDeltaError,381 err = self.assertRaises(InventoryDeltaError,
382 serializer.delta_to_lines, NULL_REVISION, 'entry-version', delta)382 serializer.delta_to_lines, NULL_REVISION, b'entry-version', delta)
383 self.assertContainsRe(383 self.assertContainsRe(
384 str(err), "^Version present for / in b?'TREE_ROOT'")384 str(err), "^Version present for / in b?'TREE_ROOT'")
385385
386 def test_unknown_kind_errors(self):386 def test_unknown_kind_errors(self):
387 old_inv = Inventory(None)387 old_inv = Inventory(None)
388 new_inv = Inventory(None)388 new_inv = Inventory(None)
389 root = new_inv.make_entry('directory', '', None, 'my-rich-root-id')389 root = new_inv.make_entry('directory', u'', None, b'my-rich-root-id')
390 root.revision = 'changed'390 root.revision = b'changed'
391 new_inv.add(root)391 new_inv.add(root)
392 class StrangeInventoryEntry(inventory.InventoryEntry):392 class StrangeInventoryEntry(inventory.InventoryEntry):
393 kind = 'strange'393 kind = 'strange'
394 non_root = StrangeInventoryEntry('id', 'foo', root.file_id)394 non_root = StrangeInventoryEntry('id', u'foo', root.file_id)
395 non_root.revision = 'changed'395 non_root.revision = b'changed'
396 new_inv.add(non_root)396 new_inv.add(non_root)
397 delta = new_inv._make_delta(old_inv)397 delta = new_inv._make_delta(old_inv)
398 serializer = inventory_delta.InventoryDeltaSerializer(398 serializer = inventory_delta.InventoryDeltaSerializer(
@@ -400,19 +400,19 @@
400 # we expect keyerror because there is little value wrapping this.400 # we expect keyerror because there is little value wrapping this.
401 # This test aims to prove that it errors more than how it errors.401 # This test aims to prove that it errors more than how it errors.
402 err = self.assertRaises(KeyError,402 err = self.assertRaises(KeyError,
403 serializer.delta_to_lines, NULL_REVISION, 'entry-version', delta)403 serializer.delta_to_lines, NULL_REVISION, b'entry-version', delta)
404 self.assertEqual(('strange',), err.args)404 self.assertEqual(('strange',), err.args)
405405
406 def test_tree_reference_disabled(self):406 def test_tree_reference_disabled(self):
407 old_inv = Inventory(None)407 old_inv = Inventory(None)
408 new_inv = Inventory(None)408 new_inv = Inventory(None)
409 root = new_inv.make_entry('directory', '', None, 'TREE_ROOT')409 root = new_inv.make_entry('directory', u'', None, b'TREE_ROOT')
410 root.revision = 'a@e\xc3\xa5ample.com--2004'410 root.revision = b'a@e\xc3\xa5ample.com--2004'
411 new_inv.add(root)411 new_inv.add(root)
412 non_root = new_inv.make_entry(412 non_root = new_inv.make_entry(
413 'tree-reference', 'foo', root.file_id, 'id')413 'tree-reference', u'foo', root.file_id, b'id')
414 non_root.revision = 'changed'414 non_root.revision = b'changed'
415 non_root.reference_revision = 'subtree-version'415 non_root.reference_revision = b'subtree-version'
416 new_inv.add(non_root)416 new_inv.add(non_root)
417 delta = new_inv._make_delta(old_inv)417 delta = new_inv._make_delta(old_inv)
418 serializer = inventory_delta.InventoryDeltaSerializer(418 serializer = inventory_delta.InventoryDeltaSerializer(
@@ -420,59 +420,60 @@
420 # we expect keyerror because there is little value wrapping this.420 # we expect keyerror because there is little value wrapping this.
421 # This test aims to prove that it errors more than how it errors.421 # This test aims to prove that it errors more than how it errors.
422 err = self.assertRaises(KeyError,422 err = self.assertRaises(KeyError,
423 serializer.delta_to_lines, NULL_REVISION, 'entry-version', delta)423 serializer.delta_to_lines, NULL_REVISION, b'entry-version', delta)
424 self.assertEqual(('tree-reference',), err.args)424 self.assertEqual(('tree-reference',), err.args)
425425
426 def test_tree_reference_enabled(self):426 def test_tree_reference_enabled(self):
427 old_inv = Inventory(None)427 old_inv = Inventory(None)
428 new_inv = Inventory(None)428 new_inv = Inventory(None)
429 root = new_inv.make_entry('directory', '', None, 'TREE_ROOT')429 root = new_inv.make_entry('directory', u'', None, b'TREE_ROOT')
430 root.revision = 'a@e\xc3\xa5ample.com--2004'430 root.revision = b'a@e\xc3\xa5ample.com--2004'
431 new_inv.add(root)431 new_inv.add(root)
432 non_root = new_inv.make_entry(432 non_root = new_inv.make_entry(
433 'tree-reference', 'foo', root.file_id, 'id')433 'tree-reference', u'foo', root.file_id, b'id')
434 non_root.revision = 'changed'434 non_root.revision = b'changed'
435 non_root.reference_revision = 'subtree-version'435 non_root.reference_revision = b'subtree-version'
436 new_inv.add(non_root)436 new_inv.add(non_root)
437 delta = new_inv._make_delta(old_inv)437 delta = new_inv._make_delta(old_inv)
438 serializer = inventory_delta.InventoryDeltaSerializer(438 serializer = inventory_delta.InventoryDeltaSerializer(
439 versioned_root=True, tree_references=True)439 versioned_root=True, tree_references=True)
440 self.assertEqual(BytesIO(reference_lines).readlines(),440 self.assertEqual(BytesIO(reference_lines).readlines(),
441 serializer.delta_to_lines(NULL_REVISION, 'entry-version', delta))441 serializer.delta_to_lines(NULL_REVISION, b'entry-version', delta))
442442
443 def test_to_inventory_root_id_versioned_not_permitted(self):443 def test_to_inventory_root_id_versioned_not_permitted(self):
444 root_entry = inventory.make_entry('directory', '', None, 'TREE_ROOT')444 root_entry = inventory.make_entry('directory', u'', None, b'TREE_ROOT')
445 root_entry.revision = 'some-version'445 root_entry.revision = b'some-version'
446 delta = [(None, '', 'TREE_ROOT', root_entry)]446 delta = [(None, u'', b'TREE_ROOT', root_entry)]
447 serializer = inventory_delta.InventoryDeltaSerializer(447 serializer = inventory_delta.InventoryDeltaSerializer(
448 versioned_root=False, tree_references=True)448 versioned_root=False, tree_references=True)
449 self.assertRaises(449 self.assertRaises(
450 InventoryDeltaError, serializer.delta_to_lines, 'old-version',450 InventoryDeltaError, serializer.delta_to_lines, b'old-version',
451 'new-version', delta)451 b'new-version', delta)
452452
453 def test_to_inventory_root_id_not_versioned(self):453 def test_to_inventory_root_id_not_versioned(self):
454 delta = [(None, '', 'an-id', inventory.make_entry(454 delta = [(None, u'', b'an-id', inventory.make_entry(
455 'directory', '', None, 'an-id'))]455 'directory', u'', None, b'an-id'))]
456 serializer = inventory_delta.InventoryDeltaSerializer(456 serializer = inventory_delta.InventoryDeltaSerializer(
457 versioned_root=True, tree_references=True)457 versioned_root=True, tree_references=True)
458 self.assertRaises(458 self.assertRaises(
459 InventoryDeltaError, serializer.delta_to_lines, 'old-version',459 InventoryDeltaError, serializer.delta_to_lines, b'old-version',
460 'new-version', delta)460 b'new-version', delta)
461461
462 def test_to_inventory_has_tree_not_meant_to(self):462 def test_to_inventory_has_tree_not_meant_to(self):
463 make_entry = inventory.make_entry463 make_entry = inventory.make_entry
464 tree_ref = make_entry('tree-reference', 'foo', 'changed-in', 'ref-id')464 tree_ref = make_entry(
465 tree_ref.reference_revision = 'ref-revision'465 'tree-reference', u'foo', b'changed-in', b'ref-id')
466 tree_ref.reference_revision = b'ref-revision'
466 delta = [467 delta = [
467 (None, '', 'an-id',468 (None, u'', b'an-id',
468 make_entry('directory', '', 'changed-in', 'an-id')),469 make_entry('directory', u'', b'changed-in', b'an-id')),
469 (None, 'foo', 'ref-id', tree_ref)470 (None, u'foo', b'ref-id', tree_ref)
470 # a file that followed the root move471 # a file that followed the root move
471 ]472 ]
472 serializer = inventory_delta.InventoryDeltaSerializer(473 serializer = inventory_delta.InventoryDeltaSerializer(
473 versioned_root=True, tree_references=True)474 versioned_root=True, tree_references=True)
474 self.assertRaises(InventoryDeltaError, serializer.delta_to_lines,475 self.assertRaises(InventoryDeltaError, serializer.delta_to_lines,
475 'old-version', 'new-version', delta)476 b'old-version', b'new-version', delta)
476477
477 def test_to_inventory_torture(self):478 def test_to_inventory_torture(self):
478 def make_entry(kind, name, parent_id, file_id, **attrs):479 def make_entry(kind, name, parent_id, file_id, **attrs):
@@ -488,43 +489,43 @@
488 # - files with and without exec bit489 # - files with and without exec bit
489 delta = [490 delta = [
490 # new root:491 # new root:
491 (None, '', 'new-root-id',492 (None, u'', b'new-root-id',
492 make_entry('directory', '', None, 'new-root-id',493 make_entry('directory', u'', None, b'new-root-id',
493 revision='changed-in')),494 revision=b'changed-in')),
494 # an old root:495 # an old root:
495 ('', 'old-root', 'TREE_ROOT',496 (u'', u'old-root', b'TREE_ROOT',
496 make_entry('directory', 'subdir-now', 'new-root-id',497 make_entry('directory', u'subdir-now', b'new-root-id',
497 'TREE_ROOT', revision='moved-root')),498 b'TREE_ROOT', revision=b'moved-root')),
498 # a file that followed the root move499 # a file that followed the root move
499 ('under-old-root', 'old-root/under-old-root', 'moved-id',500 (u'under-old-root', u'old-root/under-old-root', b'moved-id',
500 make_entry('file', 'under-old-root', 'TREE_ROOT', 'moved-id',501 make_entry('file', u'under-old-root', b'TREE_ROOT',
501 revision='old-rev', executable=False, text_size=30,502 b'moved-id', revision=b'old-rev', executable=False,
502 text_sha1='some-sha')),503 text_size=30, text_sha1=b'some-sha')),
503 # a deleted path504 # a deleted path
504 ('old-file', None, 'deleted-id', None),505 (u'old-file', None, b'deleted-id', None),
505 # a tree reference moved to the new root506 # a tree reference moved to the new root
506 ('ref', 'ref', 'ref-id',507 (u'ref', u'ref', b'ref-id',
507 make_entry('tree-reference', 'ref', 'new-root-id', 'ref-id',508 make_entry('tree-reference', u'ref', b'new-root-id', b'ref-id',
508 reference_revision='tree-reference-id',509 reference_revision=b'tree-reference-id',
509 revision='new-rev')),510 revision=b'new-rev')),
510 # a symlink now in a deep dir511 # a symlink now in a deep dir
511 ('dir/link', 'old-root/dir/link', 'link-id',512 (u'dir/link', u'old-root/dir/link', b'link-id',
512 make_entry('symlink', 'link', 'deep-id', 'link-id',513 make_entry('symlink', u'link', b'deep-id', b'link-id',
513 symlink_target='target', revision='new-rev')),514 symlink_target=u'target', revision=b'new-rev')),
514 # a deep dir515 # a deep dir
515 ('dir', 'old-root/dir', 'deep-id',516 (u'dir', u'old-root/dir', b'deep-id',
516 make_entry('directory', 'dir', 'TREE_ROOT', 'deep-id',517 make_entry('directory', u'dir', b'TREE_ROOT', b'deep-id',
517 revision='new-rev')),518 revision=b'new-rev')),
518 # a file with an exec bit set519 # a file with an exec bit set
519 (None, 'configure', 'exec-id',520 (None, u'configure', b'exec-id',
520 make_entry('file', 'configure', 'new-root-id', 'exec-id',521 make_entry('file', u'configure', b'new-root-id', b'exec-id',
521 executable=True, text_size=30, text_sha1='some-sha',522 executable=True, text_size=30, text_sha1=b'some-sha',
522 revision='old-rev')),523 revision=b'old-rev')),
523 ]524 ]
524 serializer = inventory_delta.InventoryDeltaSerializer(525 serializer = inventory_delta.InventoryDeltaSerializer(
525 versioned_root=True, tree_references=True)526 versioned_root=True, tree_references=True)
526 lines = serializer.delta_to_lines(NULL_REVISION, 'something', delta)527 lines = serializer.delta_to_lines(NULL_REVISION, b'something', delta)
527 expected = """format: bzr inventory delta v1 (bzr 1.14)528 expected = b"""format: bzr inventory delta v1 (bzr 1.14)
528parent: null:529parent: null:
529version: something530version: something
530versioned_root: true531versioned_root: true
@@ -538,8 +539,8 @@
538None\x00/\x00new-root-id\x00\x00changed-in\x00dir539None\x00/\x00new-root-id\x00\x00changed-in\x00dir
539None\x00/configure\x00exec-id\x00new-root-id\x00old-rev\x00file\x0030\x00Y\x00some-sha540None\x00/configure\x00exec-id\x00new-root-id\x00old-rev\x00file\x0030\x00Y\x00some-sha
540"""541"""
541 serialized = ''.join(lines)542 serialized = b''.join(lines)
542 self.assertIsInstance(serialized, str)543 self.assertIsInstance(serialized, bytes)
543 self.assertEqual(expected, serialized)544 self.assertEqual(expected, serialized)
544545
545546
@@ -547,79 +548,79 @@
547 """Test serialization of the content part of a line."""548 """Test serialization of the content part of a line."""
548549
549 def test_dir(self):550 def test_dir(self):
550 entry = inventory.make_entry('directory', 'a dir', None)551 entry = inventory.make_entry('directory', u'a dir', None)
551 self.assertEqual('dir', inventory_delta._directory_content(entry))552 self.assertEqual(b'dir', inventory_delta._directory_content(entry))
552553
553 def test_file_0_short_sha(self):554 def test_file_0_short_sha(self):
554 file_entry = inventory.make_entry('file', 'a file', None, 'file-id')555 file_entry = inventory.make_entry('file', u'a file', None, b'file-id')
555 file_entry.text_sha1 = ''556 file_entry.text_sha1 = b''
556 file_entry.text_size = 0557 file_entry.text_size = 0
557 self.assertEqual('file\x000\x00\x00',558 self.assertEqual(b'file\x000\x00\x00',
558 inventory_delta._file_content(file_entry))559 inventory_delta._file_content(file_entry))
559560
560 def test_file_10_foo(self):561 def test_file_10_foo(self):
561 file_entry = inventory.make_entry('file', 'a file', None, 'file-id')562 file_entry = inventory.make_entry('file', u'a file', None, b'file-id')
562 file_entry.text_sha1 = 'foo'563 file_entry.text_sha1 = b'foo'
563 file_entry.text_size = 10564 file_entry.text_size = 10
564 self.assertEqual('file\x0010\x00\x00foo',565 self.assertEqual(b'file\x0010\x00\x00foo',
565 inventory_delta._file_content(file_entry))566 inventory_delta._file_content(file_entry))
566567
567 def test_file_executable(self):568 def test_file_executable(self):
568 file_entry = inventory.make_entry('file', 'a file', None, 'file-id')569 file_entry = inventory.make_entry('file', u'a file', None, b'file-id')
569 file_entry.executable = True570 file_entry.executable = True
570 file_entry.text_sha1 = 'foo'571 file_entry.text_sha1 = b'foo'
571 file_entry.text_size = 10572 file_entry.text_size = 10
572 self.assertEqual('file\x0010\x00Y\x00foo',573 self.assertEqual(b'file\x0010\x00Y\x00foo',
573 inventory_delta._file_content(file_entry))574 inventory_delta._file_content(file_entry))
574575
575 def test_file_without_size(self):576 def test_file_without_size(self):
576 file_entry = inventory.make_entry('file', 'a file', None, 'file-id')577 file_entry = inventory.make_entry('file', u'a file', None, b'file-id')
577 file_entry.text_sha1 = 'foo'578 file_entry.text_sha1 = b'foo'
578 self.assertRaises(InventoryDeltaError,579 self.assertRaises(InventoryDeltaError,
579 inventory_delta._file_content, file_entry)580 inventory_delta._file_content, file_entry)
580581
581 def test_file_without_sha1(self):582 def test_file_without_sha1(self):
582 file_entry = inventory.make_entry('file', 'a file', None, 'file-id')583 file_entry = inventory.make_entry('file', u'a file', None, b'file-id')
583 file_entry.text_size = 10584 file_entry.text_size = 10
584 self.assertRaises(InventoryDeltaError,585 self.assertRaises(InventoryDeltaError,
585 inventory_delta._file_content, file_entry)586 inventory_delta._file_content, file_entry)
586587
587 def test_link_empty_target(self):588 def test_link_empty_target(self):
588 entry = inventory.make_entry('symlink', 'a link', None)589 entry = inventory.make_entry('symlink', u'a link', None)
589 entry.symlink_target = ''590 entry.symlink_target = u''
590 self.assertEqual('link\x00',591 self.assertEqual(b'link\x00',
591 inventory_delta._link_content(entry))592 inventory_delta._link_content(entry))
592593
593 def test_link_unicode_target(self):594 def test_link_unicode_target(self):
594 entry = inventory.make_entry('symlink', 'a link', None)595 entry = inventory.make_entry('symlink', u'a link', None)
595 entry.symlink_target = ' \xc3\xa5'.decode('utf8')596 entry.symlink_target = b' \xc3\xa5'.decode('utf8')
596 self.assertEqual('link\x00 \xc3\xa5',597 self.assertEqual(b'link\x00 \xc3\xa5',
597 inventory_delta._link_content(entry))598 inventory_delta._link_content(entry))
598599
599 def test_link_space_target(self):600 def test_link_space_target(self):
600 entry = inventory.make_entry('symlink', 'a link', None)601 entry = inventory.make_entry('symlink', u'a link', None)
601 entry.symlink_target = ' '602 entry.symlink_target = u' '
602 self.assertEqual('link\x00 ',603 self.assertEqual(b'link\x00 ',
603 inventory_delta._link_content(entry))604 inventory_delta._link_content(entry))
604605
605 def test_link_no_target(self):606 def test_link_no_target(self):
606 entry = inventory.make_entry('symlink', 'a link', None)607 entry = inventory.make_entry('symlink', u'a link', None)
607 self.assertRaises(InventoryDeltaError,608 self.assertRaises(InventoryDeltaError,
608 inventory_delta._link_content, entry)609 inventory_delta._link_content, entry)
609610
610 def test_reference_null(self):611 def test_reference_null(self):
611 entry = inventory.make_entry('tree-reference', 'a tree', None)612 entry = inventory.make_entry('tree-reference', u'a tree', None)
612 entry.reference_revision = NULL_REVISION613 entry.reference_revision = NULL_REVISION
613 self.assertEqual('tree\x00null:',614 self.assertEqual(b'tree\x00null:',
614 inventory_delta._reference_content(entry))615 inventory_delta._reference_content(entry))
615616
616 def test_reference_revision(self):617 def test_reference_revision(self):
617 entry = inventory.make_entry('tree-reference', 'a tree', None)618 entry = inventory.make_entry('tree-reference', u'a tree', None)
618 entry.reference_revision = 'foo@\xc3\xa5b-lah'619 entry.reference_revision = b'foo@\xc3\xa5b-lah'
619 self.assertEqual('tree\x00foo@\xc3\xa5b-lah',620 self.assertEqual(b'tree\x00foo@\xc3\xa5b-lah',
620 inventory_delta._reference_content(entry))621 inventory_delta._reference_content(entry))
621622
622 def test_reference_no_reference(self):623 def test_reference_no_reference(self):
623 entry = inventory.make_entry('tree-reference', 'a tree', None)624 entry = inventory.make_entry('tree-reference', u'a tree', None)
624 self.assertRaises(InventoryDeltaError,625 self.assertRaises(InventoryDeltaError,
625 inventory_delta._reference_content, entry)626 inventory_delta._reference_content, entry)
626627
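The TestContent changes above fix the per-kind content encoders for bytes output: b'dir', b'file\x00<size>\x00<exec>\x00<sha>', b'link\x00<target>' and b'tree\x00<revision>'. A round-trip check for the file case, following test_file_executable:

    file_entry = inventory.make_entry('file', u'a file', None, b'file-id')
    file_entry.executable = True
    file_entry.text_sha1 = b'foo'
    file_entry.text_size = 10
    content = inventory_delta._file_content(file_entry)
    kind, size, exec_flag, sha = content.split(b'\x00')
    assert (kind, size, exec_flag, sha) == (b'file', b'10', b'Y', b'foo')
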
=== modified file 'breezy/transport/memory.py'
--- breezy/transport/memory.py 2017-05-24 19:44:00 +0000
+++ breezy/transport/memory.py 2017-06-11 01:47:17 +0000
@@ -164,7 +164,7 @@
164164
165 def open_write_stream(self, relpath, mode=None):165 def open_write_stream(self, relpath, mode=None):
166 """See Transport.open_write_stream."""166 """See Transport.open_write_stream."""
167 self.put_bytes(relpath, "", mode)167 self.put_bytes(relpath, b"", mode)
168 result = AppendBasedFileStream(self, relpath)168 result = AppendBasedFileStream(self, relpath)
169 _file_streams[self.abspath(relpath)] = result169 _file_streams[self.abspath(relpath)] = result
170 return result170 return result
171171
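MemoryTransport stores raw bytes, so the initial empty payload for open_write_stream must be b"" on Python 3. A hedged usage sketch:

    from breezy.transport.memory import MemoryTransport

    t = MemoryTransport()
    stream = t.open_write_stream('file')
    stream.write(b'data')
    stream.close()
    assert t.get_bytes('file') == b'data'
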
=== modified file 'breezy/versionedfile.py'
--- breezy/versionedfile.py 2017-06-05 20:48:31 +0000
+++ breezy/versionedfile.py 2017-06-11 01:47:17 +0000
@@ -120,7 +120,7 @@
120 if storage_kind == 'chunked':120 if storage_kind == 'chunked':
121 return self._chunks121 return self._chunks
122 elif storage_kind == 'fulltext':122 elif storage_kind == 'fulltext':
123 return ''.join(self._chunks)123 return b''.join(self._chunks)
124 raise errors.UnavailableRepresentation(self.key, storage_kind,124 raise errors.UnavailableRepresentation(self.key, storage_kind,
125 self.storage_kind)125 self.storage_kind)
126126
@@ -1079,13 +1079,13 @@
1079 def _check_lines_not_unicode(self, lines):1079 def _check_lines_not_unicode(self, lines):
1080 """Check that lines being added to a versioned file are not unicode."""1080 """Check that lines being added to a versioned file are not unicode."""
1081 for line in lines:1081 for line in lines:
1082 if line.__class__ is not str:1082 if line.__class__ is not bytes:
1083 raise errors.BzrBadParameterUnicode("lines")1083 raise errors.BzrBadParameterUnicode("lines")
10841084
1085 def _check_lines_are_lines(self, lines):1085 def _check_lines_are_lines(self, lines):
1086 """Check that the lines really are full lines without inline EOL."""1086 """Check that the lines really are full lines without inline EOL."""
1087 for line in lines:1087 for line in lines:
1088 if '\n' in line[:-1]:1088 if b'\n' in line[:-1]:
1089 raise errors.BzrBadParameterContainsNewline("lines")1089 raise errors.BzrBadParameterContainsNewline("lines")
10901090
1091 def get_known_graph_ancestry(self, keys):1091 def get_known_graph_ancestry(self, keys):
@@ -1792,7 +1792,7 @@
1792 "nostore_sha behaviour.")1792 "nostore_sha behaviour.")
1793 if key[-1] is None:1793 if key[-1] is None:
1794 sha1 = osutils.sha_strings(lines)1794 sha1 = osutils.sha_strings(lines)
1795 key = ("sha1:" + sha1,)1795 key = (b"sha1:" + sha1,)
1796 else:1796 else:
1797 sha1 = None1797 sha1 = None
1798 if key in self._store.get_parent_map([key]):1798 if key in self._store.get_parent_map([key]):
@@ -1816,7 +1816,7 @@
1816 :param network_bytes: The bytes of a record.1816 :param network_bytes: The bytes of a record.
1817 :return: A tuple (storage_kind, offset_of_remaining_bytes)1817 :return: A tuple (storage_kind, offset_of_remaining_bytes)
1818 """1818 """
1819 line_end = network_bytes.find('\n')1819 line_end = network_bytes.find(b'\n')
1820 storage_kind = network_bytes[:line_end]1820 storage_kind = network_bytes[:line_end]
1821 return storage_kind, line_end + 11821 return storage_kind, line_end + 1
18221822
@@ -1859,7 +1859,7 @@
1859 meta_len, = struct.unpack('!L', bytes[line_end:line_end+4])1859 meta_len, = struct.unpack('!L', bytes[line_end:line_end+4])
1860 record_meta = bytes[line_end+4:line_end+4+meta_len]1860 record_meta = bytes[line_end+4:line_end+4+meta_len]
1861 key, parents = bencode.bdecode_as_tuple(record_meta)1861 key, parents = bencode.bdecode_as_tuple(record_meta)
1862 if parents == 'nil':1862 if parents == b'nil':
1863 parents = None1863 parents = None
1864 fulltext = bytes[line_end+4+meta_len:]1864 fulltext = bytes[line_end+4+meta_len:]
1865 return [FulltextContentFactory(key, parents, None, fulltext)]1865 return [FulltextContentFactory(key, parents, None, fulltext)]
@@ -1871,12 +1871,12 @@
18711871
1872def record_to_fulltext_bytes(record):1872def record_to_fulltext_bytes(record):
1873 if record.parents is None:1873 if record.parents is None:
1874 parents = 'nil'1874 parents = b'nil'
1875 else:1875 else:
1876 parents = record.parents1876 parents = record.parents
1877 record_meta = bencode.bencode((record.key, parents))1877 record_meta = bencode.bencode((record.key, parents))
1878 record_content = record.get_bytes_as('fulltext')1878 record_content = record.get_bytes_as('fulltext')
1879 return "fulltext\n%s%s%s" % (1879 return b"fulltext\n%s%s%s" % (
1880 _length_prefix(record_meta), record_meta, record_content)1880 _length_prefix(record_meta), record_meta, record_content)
18811881
18821882
@@ -1893,8 +1893,8 @@
1893 per_prefix_map = {}1893 per_prefix_map = {}
1894 for item in viewitems(parent_map):1894 for item in viewitems(parent_map):
1895 key = item[0]1895 key = item[0]
1896 if isinstance(key, str) or len(key) == 1:1896 if isinstance(key, bytes) or len(key) == 1:
1897 prefix = ''1897 prefix = b''
1898 else:1898 else:
1899 prefix = key[0]1899 prefix = key[0]
1900 try:1900 try:
19011901
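record_to_fulltext_bytes now builds its record with bytes interpolation (b"fulltext\n%s%s%s"), which requires Python >= 3.5 on the py3 side. A sketch of decoding the resulting layout (marker line, 4-byte big-endian metadata length, bencoded metadata, then the fulltext); the offsets here are illustrative rather than copied from the module:

    import struct

    def parse_fulltext_record_sketch(data):
        line_end = data.find(b'\n')
        assert data[:line_end] == b'fulltext'
        meta_len, = struct.unpack('!L', data[line_end + 1:line_end + 5])
        meta = data[line_end + 5:line_end + 5 + meta_len]
        return meta, data[line_end + 5 + meta_len:]
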
=== modified file 'breezy/xml_serializer.py'
--- breezy/xml_serializer.py 2017-05-25 01:35:55 +0000
+++ breezy/xml_serializer.py 2017-06-11 01:47:17 +0000
@@ -37,9 +37,9 @@
37 except ImportError:37 except ImportError:
38 from xml.parsers.expat import ExpatError as ParseError38 from xml.parsers.expat import ExpatError as ParseError
3939
40(ElementTree, SubElement, Element, XMLTreeBuilder, fromstring, tostring) = (40(ElementTree, SubElement, Element, fromstring, tostring) = (
41 elementtree.ElementTree, elementtree.SubElement, elementtree.Element,41 elementtree.ElementTree, elementtree.SubElement, elementtree.Element,
42 elementtree.XMLTreeBuilder, elementtree.fromstring, elementtree.tostring)42 elementtree.fromstring, elementtree.tostring)
4343
4444
45from . import (45from . import (
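XMLTreeBuilder is dropped from the unpacked names above: it is a long-deprecated ElementTree alias for XMLParser, and not available in every ElementTree implementation on Python 3. Code that needs an explicit parser can use XMLParser instead, e.g.:

    from xml.etree.ElementTree import XMLParser, fromstring

    parser = XMLParser()
    root = fromstring(b'<inventory><file file_id="a"/></inventory>',
                      parser=parser)
    assert root.tag == 'inventory'
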
