Merge lp:~jelmer/brz/python3-j into lp:brz

Proposed by Jelmer Vernooij
Status: Merged
Approved by: Jelmer Vernooij
Approved revision: no longer in the source branch.
Merge reported by: The Breezy Bot
Merged at revision: not available
Proposed branch: lp:~jelmer/brz/python3-j
Merge into: lp:brz
Prerequisite: lp:~jelmer/brz/python3-i
Diff against target: 10872 lines (+3726/-1698)
47 files modified
breezy/builtins.py (+2/-9)
breezy/bundle/apply_bundle.py (+1/-4)
breezy/bundle/serializer/v4.py (+1/-4)
breezy/bzr/branch.py (+2/-8)
breezy/bzr/bzrdir.py (+1/-4)
breezy/bzr/dirstate.py (+21/-12)
breezy/bzr/index.py (+38/-18)
breezy/bzr/inventory.py (+4/-0)
breezy/bzr/knit.py (+8/-11)
breezy/bzr/knitrepo.py (+4/-4)
breezy/bzr/smart/repository.py (+2/-5)
breezy/bzr/vf_repository.py (+1/-1)
breezy/diff.py (+2/-1)
breezy/export.py (+1/-1)
breezy/export_pot.py (+2/-1)
breezy/fetch.py (+1/-1)
breezy/merge_directive.py (+9/-9)
breezy/plugins/git/__init__.py (+3/-2)
breezy/plugins/git/cache.py (+25/-24)
breezy/plugins/git/tests/test_cache.py (+10/-10)
breezy/plugins/git/tests/test_object_store.py (+6/-6)
breezy/shelf.py (+1/-1)
breezy/tests/__init__.py (+2/-1)
breezy/tests/http_server.py (+4/-2)
breezy/tests/per_intertree/test_compare.py (+2/-2)
breezy/tests/per_repository/test_signatures.py (+11/-11)
breezy/tests/per_repository_vf/test_repository.py (+23/-23)
breezy/tests/per_tree/test_tree.py (+7/-7)
breezy/tests/per_workingtree/test_parents.py (+136/-136)
breezy/tests/test_conflicts.py (+63/-63)
breezy/tests/test_export.py (+14/-14)
breezy/tests/test_export_pot.py (+57/-57)
breezy/tests/test_fetch.py (+3/-3)
breezy/tests/test_http.py (+7/-7)
breezy/tests/test_http_response.py (+59/-57)
breezy/tests/test_index.py (+476/-475)
breezy/tests/test_inv.py (+346/-350)
breezy/tests/test_inventory_delta.py (+1/-1)
breezy/tests/test_knit.py (+13/-13)
breezy/tests/test_merge.py (+309/-311)
breezy/tests/test_merge3.py (+13/-13)
breezy/tests/test_merge_directive.py (+10/-10)
breezy/tests/test_mergetools.py (+2/-2)
breezy/tests/test_osutils.py (+1/-1)
breezy/transform.py (+2/-2)
breezy/transport/__init__.py (+1/-1)
python3.passing (+2019/-0)
To merge this branch: bzr merge lp:~jelmer/brz/python3-j
Reviewer Review Type Date Requested Status
Martin Packman Approve
Review via email: mp+348346@code.launchpad.net

Commit message

Fix another 1.5k tests on python3.

Description of the change

This brings the number of failing python3 tests to below 10k.

To post a comment you must log in.
Revision history for this message
Martin Packman (gz) wrote :

Thanks! See some inline comments.

review: Needs Fixing
Revision history for this message
Martin Packman (gz) wrote :

Thanks! Let's try it.

review: Approve
Revision history for this message
The Breezy Bot (the-breezy-bot) wrote :

Running landing tests failed
https://ci.breezy-vcs.org/job/land-brz/211/

Revision history for this message
The Breezy Bot (the-breezy-bot) wrote :

Running landing tests failed
https://ci.breezy-vcs.org/job/land-brz/212/

Revision history for this message
The Breezy Bot (the-breezy-bot) wrote :

Running landing tests failed
https://ci.breezy-vcs.org/job/land-brz/213/

Revision history for this message
The Breezy Bot (the-breezy-bot) wrote :

Running landing tests failed
https://ci.breezy-vcs.org/job/land-brz/214/

Revision history for this message
The Breezy Bot (the-breezy-bot) wrote :

Running landing tests failed
https://ci.breezy-vcs.org/job/land-brz/215/

Revision history for this message
The Breezy Bot (the-breezy-bot) wrote :

Running landing tests failed
https://ci.breezy-vcs.org/job/land-brz/218/

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1=== modified file 'breezy/builtins.py'
2--- breezy/builtins.py 2018-06-04 11:35:48 +0000
3+++ breezy/builtins.py 2018-06-30 17:35:22 +0000
4@@ -6764,15 +6764,8 @@
5 from .transform import link_tree
6 target_tree = WorkingTree.open_containing(".")[0]
7 source_tree = WorkingTree.open(location)
8- target_tree.lock_write()
9- try:
10- source_tree.lock_read()
11- try:
12- link_tree(target_tree, source_tree)
13- finally:
14- source_tree.unlock()
15- finally:
16- target_tree.unlock()
17+ with target_tree.lock_write(), source_tree.lock_read():
18+ link_tree(target_tree, source_tree)
19
20
21 class cmd_fetch_ghosts(Command):
22
23=== modified file 'breezy/bundle/apply_bundle.py'
24--- breezy/bundle/apply_bundle.py 2018-03-01 01:41:45 +0000
25+++ breezy/bundle/apply_bundle.py 2018-06-30 17:35:22 +0000
26@@ -46,8 +46,7 @@
27 def merge_bundle(reader, tree, check_clean, merge_type,
28 reprocess, show_base, change_reporter=None):
29 """Merge a revision bundle into the current tree."""
30- pb = ui.ui_factory.nested_progress_bar()
31- try:
32+ with ui.ui_factory.nested_progress_bar() as pb:
33 pp = ProgressPhase("Merge phase", 6, pb)
34 pp.next_phase()
35 install_bundle(tree.branch.repository, reader)
36@@ -70,6 +69,4 @@
37 merger.reprocess = reprocess
38 conflicts = merger.do_merge()
39 merger.set_pending()
40- finally:
41- pb.clear()
42 return conflicts
43
44=== modified file 'breezy/bundle/serializer/v4.py'
45--- breezy/bundle/serializer/v4.py 2018-06-22 01:22:25 +0000
46+++ breezy/bundle/serializer/v4.py 2018-06-30 17:35:22 +0000
47@@ -468,13 +468,10 @@
48 all into memory at once. Reading it into memory all at once is
49 (currently) faster.
50 """
51- repository.lock_write()
52- try:
53+ with repository.lock_write():
54 ri = RevisionInstaller(self.get_bundle_reader(stream_input),
55 self._serializer, repository)
56 return ri.install()
57- finally:
58- repository.unlock()
59
60 def get_merge_request(self, target_repo):
61 """Provide data for performing a merge
62
63=== modified file 'breezy/bzr/branch.py'
64--- breezy/bzr/branch.py 2018-06-22 01:50:22 +0000
65+++ breezy/bzr/branch.py 2018-06-30 17:35:22 +0000
66@@ -1052,13 +1052,10 @@
67
68 # Copy source data into target
69 new_branch._write_last_revision_info(*branch.last_revision_info())
70- new_branch.lock_write()
71- try:
72+ with new_branch.lock_write():
73 new_branch.set_parent(branch.get_parent())
74 new_branch.set_bound_location(branch.get_bound_location())
75 new_branch.set_push_location(branch.get_push_location())
76- finally:
77- new_branch.unlock()
78
79 # New branch has no tags by default
80 new_branch.tags._set_tag_dict({})
81@@ -1070,15 +1067,12 @@
82
83 # Clean up old files
84 new_branch._transport.delete('revision-history')
85- branch.lock_write()
86- try:
87+ with branch.lock_write():
88 try:
89 branch.set_parent(None)
90 except errors.NoSuchFile:
91 pass
92 branch.set_bound_location(None)
93- finally:
94- branch.unlock()
95
96
97 class Converter6to7(object):
98
99=== modified file 'breezy/bzr/bzrdir.py'
100--- breezy/bzr/bzrdir.py 2018-06-16 11:29:34 +0000
101+++ breezy/bzr/bzrdir.py 2018-06-30 17:35:22 +0000
102@@ -450,15 +450,12 @@
103 (result_repo is None or result_repo.make_working_trees())):
104 wt = result.create_workingtree(accelerator_tree=accelerator_tree,
105 hardlink=hardlink, from_branch=result_branch)
106- wt.lock_write()
107- try:
108+ with wt.lock_write():
109 if not wt.is_versioned(''):
110 try:
111 wt.set_root_id(self.open_workingtree.get_root_id())
112 except errors.NoWorkingTree:
113 pass
114- finally:
115- wt.unlock()
116 else:
117 wt = None
118 if recurse == 'down':
119
120=== modified file 'breezy/bzr/dirstate.py'
121--- breezy/bzr/dirstate.py 2018-05-27 18:00:21 +0000
122+++ breezy/bzr/dirstate.py 2018-06-30 17:35:22 +0000
123@@ -1319,12 +1319,21 @@
124 return result
125
126 def _check_delta_is_valid(self, delta):
127- return list(inventory._check_delta_unique_ids(
128- inventory._check_delta_unique_old_paths(
129- inventory._check_delta_unique_new_paths(
130- inventory._check_delta_ids_match_entry(
131- inventory._check_delta_ids_are_valid(
132- inventory._check_delta_new_path_entry_both_or_None(delta)))))))
133+ delta = list(inventory._check_delta_unique_ids(
134+ inventory._check_delta_unique_old_paths(
135+ inventory._check_delta_unique_new_paths(
136+ inventory._check_delta_ids_match_entry(
137+ inventory._check_delta_ids_are_valid(
138+ inventory._check_delta_new_path_entry_both_or_None(delta)))))))
139+ def delta_key(d):
140+ (old_path, new_path, file_id, new_entry) = d
141+ if old_path is None:
142+ old_path = ''
143+ if new_path is None:
144+ new_path = ''
145+ return (old_path, new_path, file_id, new_entry)
146+ delta.sort(key=delta_key, reverse=True)
147+ return delta
148
149 def update_by_delta(self, delta):
150 """Apply an inventory delta to the dirstate for tree 0
151@@ -1349,9 +1358,9 @@
152 new_ids = set()
153 # This loop transforms the delta to single atomic operations that can
154 # be executed and validated.
155- delta = sorted(self._check_delta_is_valid(delta), reverse=True)
156+ delta = self._check_delta_is_valid(delta)
157 for old_path, new_path, file_id, inv_entry in delta:
158- if file_id.__class__ is not bytes:
159+ if not isinstance(file_id, bytes):
160 raise AssertionError(
161 "must be a utf8 file_id not %s" % (type(file_id), ))
162 if (file_id in insertions) or (file_id in removals):
163@@ -1486,7 +1495,7 @@
164
165 self._parents[0] = new_revid
166
167- delta = sorted(self._check_delta_is_valid(delta), reverse=True)
168+ delta = self._check_delta_is_valid(delta)
169 adds = []
170 changes = []
171 deletes = []
172@@ -1634,7 +1643,7 @@
173 if entry[0][2] != file_id:
174 # Different file_id, so not what we want.
175 continue
176- self._raise_invalid(("%s/%s" % key[0:2]).decode('utf8'), file_id,
177+ self._raise_invalid((b"%s/%s" % key[0:2]).decode('utf8'), file_id,
178 "This file_id is new in the delta but already present in "
179 "the target")
180
181@@ -1851,12 +1860,12 @@
182 # removed.
183 entry[1][1] = null
184 block_i, entry_i, d_present, f_present = \
185- self._get_block_entry_index(old_path, '', 1)
186+ self._get_block_entry_index(old_path, b'', 1)
187 if d_present:
188 dir_block = self._dirblocks[block_i][1]
189 for child_entry in dir_block:
190 child_basis_kind = child_entry[1][1][0]
191- if child_basis_kind not in 'ar':
192+ if child_basis_kind not in b'ar':
193 self._raise_invalid(old_path, file_id,
194 "The file id was deleted but its children were "
195 "not deleted.")
196
197=== modified file 'breezy/bzr/index.py'
198--- breezy/bzr/index.py 2018-05-13 02:18:13 +0000
199+++ breezy/bzr/index.py 2018-06-30 17:35:22 +0000
200@@ -43,8 +43,10 @@
201 )
202 from ..sixish import (
203 BytesIO,
204+ bytesintern,
205 viewvalues,
206 viewitems,
207+ zip,
208 )
209 from ..static_tuple import StaticTuple
210
211@@ -286,15 +288,15 @@
212 """
213 (node_refs,
214 absent_references) = self._check_key_ref_value(key, references, value)
215- if key in self._nodes and self._nodes[key][0] != 'a':
216+ if key in self._nodes and self._nodes[key][0] != b'a':
217 raise BadIndexDuplicateKey(key, self)
218 for reference in absent_references:
219 # There may be duplicates, but I don't think it is worth worrying
220 # about
221- self._nodes[reference] = ('a', (), '')
222+ self._nodes[reference] = (b'a', (), b'')
223 self._absent_keys.update(absent_references)
224 self._absent_keys.discard(key)
225- self._nodes[key] = ('', node_refs, value)
226+ self._nodes[key] = (b'', node_refs, value)
227 if self._nodes_by_key is not None and self._key_length > 1:
228 self._update_nodes_by_key(key, value, node_refs)
229
230@@ -499,6 +501,16 @@
231 def __ne__(self, other):
232 return not self.__eq__(other)
233
234+ def __lt__(self, other):
235+ # We don't really care about the order, just that there is an order.
236+ if (not isinstance(other, GraphIndex) and
237+ not isinstance(other, InMemoryGraphIndex)):
238+ raise TypeError(other)
239+ return hash(self) < hash(other)
240+
241+ def __hash__(self):
242+ return hash((type(self), self._transport, self._name, self._size))
243+
244 def __repr__(self):
245 return "%s(%r)" % (self.__class__.__name__,
246 self._transport.abspath(self._name))
247@@ -520,19 +532,20 @@
248 # This is wasteful, but it is better than dealing with
249 # adjusting all the offsets, etc.
250 stream = BytesIO(stream.read()[self._base_offset:])
251- self._read_prefix(stream)
252- self._expected_elements = 3 + self._key_length
253- line_count = 0
254- # raw data keyed by offset
255- self._keys_by_offset = {}
256- # ready-to-return key:value or key:value, node_ref_lists
257- self._nodes = {}
258- self._nodes_by_key = None
259- trailers = 0
260- pos = stream.tell()
261- lines = stream.read().split(b'\n')
262- # GZ 2009-09-20: Should really use a try/finally block to ensure close
263- stream.close()
264+ try:
265+ self._read_prefix(stream)
266+ self._expected_elements = 3 + self._key_length
267+ line_count = 0
268+ # raw data keyed by offset
269+ self._keys_by_offset = {}
270+ # ready-to-return key:value or key:value, node_ref_lists
271+ self._nodes = {}
272+ self._nodes_by_key = None
273+ trailers = 0
274+ pos = stream.tell()
275+ lines = stream.read().split(b'\n')
276+ finally:
277+ stream.close()
278 del lines[-1]
279 _, _, _, trailers = self._parse_lines(lines, pos)
280 for key, absent, references, value in viewvalues(self._keys_by_offset):
281@@ -1145,7 +1158,7 @@
282 raise BadIndexData(self)
283 # keys are tuples. Each element is a string that may occur many
284 # times, so we intern them to save space. AB, RC, 200807
285- key = tuple([intern(element) for element in elements[:self._key_length]])
286+ key = tuple([bytesintern(element) for element in elements[:self._key_length]])
287 if first_key is None:
288 first_key = key
289 absent, references, value = elements[-3:]
290@@ -1599,7 +1612,7 @@
291 """
292 if self._reload_func is None:
293 return False
294- trace.mutter('Trying to reload after getting exception: %s', error)
295+ trace.mutter('Trying to reload after getting exception: %s', str(error))
296 if not self._reload_func():
297 # We tried to reload, but nothing changed, so we fail anyway
298 trace.mutter('_reload_func indicated nothing has changed.'
299@@ -1731,6 +1744,13 @@
300 def validate(self):
301 """In memory index's have no known corruption at the moment."""
302
303+ def __lt__(self, other):
304+ # We don't really care about the order, just that there is an order.
305+ if (not isinstance(other, GraphIndex) and
306+ not isinstance(other, InMemoryGraphIndex)):
307+ raise TypeError(other)
308+ return hash(self) < hash(other)
309+
310
311 class GraphIndexPrefixAdapter(object):
312 """An adapter between GraphIndex with different key lengths.
313
314=== modified file 'breezy/bzr/inventory.py'
315--- breezy/bzr/inventory.py 2018-06-18 00:19:26 +0000
316+++ breezy/bzr/inventory.py 2018-06-30 17:35:22 +0000
317@@ -227,6 +227,8 @@
318 """
319 if u'/' in name:
320 raise errors.InvalidEntryName(name=name)
321+ if not isinstance(file_id, bytes):
322+ raise TypeError(file_id)
323 self.file_id = file_id
324 self.revision = None
325 self.name = name
326@@ -1152,6 +1154,8 @@
327 >>> inv.get_entry('123123').name
328 'hello.c'
329 """
330+ if not isinstance(file_id, bytes):
331+ raise TypeError(file_id)
332 try:
333 return self._byid[file_id]
334 except KeyError:
335
336=== modified file 'breezy/bzr/knit.py'
337--- breezy/bzr/knit.py 2018-06-14 18:03:42 +0000
338+++ breezy/bzr/knit.py 2018-06-30 17:35:22 +0000
339@@ -2750,13 +2750,10 @@
340 self._filename = prefix
341 try:
342 path = self._mapper.map(prefix) + '.kndx'
343- fp = self._transport.get(path)
344- try:
345+ with self._transport.get(path) as fp:
346 # _load_data may raise NoSuchFile if the target knit is
347 # completely empty.
348 _load_data(self, fp)
349- finally:
350- fp.close()
351 self._kndx_cache[prefix] = (self._cache, self._history)
352 del self._cache
353 del self._filename
354@@ -2917,17 +2914,17 @@
355 if key_dependencies is not None:
356 key_dependencies.add_references(key, parents)
357 index, pos, size = access_memo
358- if 'no-eol' in options:
359- value = 'N'
360+ if b'no-eol' in options:
361+ value = b'N'
362 else:
363- value = ' '
364- value += "%d %d" % (pos, size)
365+ value = b' '
366+ value += b"%d %d" % (pos, size)
367 if not self._deltas:
368- if 'line-delta' in options:
369+ if b'line-delta' in options:
370 raise KnitCorrupt(self, "attempt to add line-delta in non-delta knit")
371 if self._parents:
372 if self._deltas:
373- if 'line-delta' in options:
374+ if b'line-delta' in options:
375 node_refs = (parents, (parents[0],))
376 if missing_compression_parents:
377 compression_parents.add(parents[0])
378@@ -2949,7 +2946,7 @@
379 # Sometimes these are passed as a list rather than a tuple
380 passed = static_tuple.as_tuples(keys[key])
381 passed_parents = passed[1][:1]
382- if (value[0] != keys[key][0][0] or
383+ if (value[0:1] != keys[key][0][0:1] or
384 parents != passed_parents):
385 node_refs = static_tuple.as_tuples(node_refs)
386 raise KnitCorrupt(self, "inconsistent details in add_records"
387
388=== modified file 'breezy/bzr/knitrepo.py'
389--- breezy/bzr/knitrepo.py 2018-06-23 18:15:06 +0000
390+++ breezy/bzr/knitrepo.py 2018-06-30 17:35:22 +0000
391@@ -181,7 +181,7 @@
392 # Reconciling when the output has no revisions would result in no
393 # writes - but we want to ensure there is an inventory for
394 # compatibility with older clients that don't lazy-load.
395- result.get_parent_map([('A',)])
396+ result.get_parent_map([(b'A',)])
397 return result
398
399 def get_revision(self, revision_id):
400@@ -309,9 +309,9 @@
401 result.lock_write()
402 # the revision id here is irrelevant: it will not be stored, and cannot
403 # already exist, we do this to create files on disk for older clients.
404- result.inventories.get_parent_map([('A',)])
405- result.revisions.get_parent_map([('A',)])
406- result.signatures.get_parent_map([('A',)])
407+ result.inventories.get_parent_map([(b'A',)])
408+ result.revisions.get_parent_map([(b'A',)])
409+ result.signatures.get_parent_map([(b'A',)])
410 result.unlock()
411 self._run_post_repo_init_hooks(result, a_controldir, shared)
412 return result
413
414=== modified file 'breezy/bzr/smart/repository.py'
415--- breezy/bzr/smart/repository.py 2018-06-30 14:28:33 +0000
416+++ breezy/bzr/smart/repository.py 2018-06-30 17:35:22 +0000
417@@ -845,9 +845,8 @@
418 def _tarball_of_dir(self, dirname, compression, ofile):
419 import tarfile
420 filename = os.path.basename(ofile.name)
421- tarball = tarfile.open(fileobj=ofile, name=filename,
422- mode='w|' + compression)
423- try:
424+ with tarfile.open(fileobj=ofile, name=filename,
425+ mode='w|' + compression) as tarball:
426 # The tarball module only accepts ascii names, and (i guess)
427 # packs them with their 8bit names. We know all the files
428 # within the repository have ASCII names so the should be safe
429@@ -858,8 +857,6 @@
430 if not dirname.endswith('.bzr'):
431 raise ValueError(dirname)
432 tarball.add(dirname, '.bzr') # recursive by default
433- finally:
434- tarball.close()
435
436
437 class SmartServerRepositoryInsertStreamLocked(SmartServerRepositoryRequest):
438
439=== modified file 'breezy/bzr/vf_repository.py'
440--- breezy/bzr/vf_repository.py 2018-06-30 14:38:26 +0000
441+++ breezy/bzr/vf_repository.py 2018-06-30 17:35:22 +0000
442@@ -1276,7 +1276,7 @@
443 # always a tricky proposition.
444 inventory_cache = lru_cache.LRUCache(10)
445 batch_size = 10 # should be ~150MB on a 55K path tree
446- batch_count = len(revision_order) / batch_size + 1
447+ batch_count = len(revision_order) // batch_size + 1
448 processed_texts = 0
449 pb.update(gettext("Calculating text parents"), processed_texts, text_count)
450 for offset in range(batch_count):
451
452=== modified file 'breezy/diff.py'
453--- breezy/diff.py 2018-06-18 20:53:01 +0000
454+++ breezy/diff.py 2018-06-30 17:35:22 +0000
455@@ -990,7 +990,8 @@
456 properties_changed.extend(get_executable_change(executable[0], executable[1]))
457
458 if properties_changed:
459- prop_str = b" (properties changed: %s)" % (", ".join(properties_changed),)
460+ prop_str = b" (properties changed: %s)" % (
461+ b", ".join(properties_changed),)
462 else:
463 prop_str = b""
464
465
466=== modified file 'breezy/export.py'
467--- breezy/export.py 2018-05-21 21:32:43 +0000
468+++ breezy/export.py 2018-06-30 17:35:22 +0000
469@@ -91,7 +91,7 @@
470 else:
471 with open(dest, 'wb') as f:
472 for chunk in chunks:
473- f.writelines(chunk)
474+ f.write(chunk)
475
476
477 def guess_format(filename, default='dir'):
478
479=== modified file 'breezy/export_pot.py'
480--- breezy/export_pot.py 2017-08-26 19:31:51 +0000
481+++ breezy/export_pot.py 2018-06-30 17:35:22 +0000
482@@ -150,13 +150,14 @@
483 else:
484 comment = "# %s\n" % comment
485 mutter("Exporting msg %r at line %d in %r", s[:20], lineno, path)
486- self.outf.write(
487+ line = (
488 "#: {path}:{lineno}\n"
489 "{comment}"
490 "msgid {msg}\n"
491 "msgstr \"\"\n"
492 "\n".format(
493 path=path, lineno=lineno, comment=comment, msg=_normalize(s)))
494+ self.outf.write(line.encode('utf-8'))
495
496 def poentry_in_context(self, context, string, comment=None):
497 context = context.from_string(string)
498
499=== modified file 'breezy/fetch.py'
500--- breezy/fetch.py 2018-05-13 02:18:13 +0000
501+++ breezy/fetch.py 2018-06-30 17:35:22 +0000
502@@ -268,7 +268,7 @@
503 parent_keys = _parent_keys_for_root_version(
504 root_id, rev_id, rev_id_to_root_id_map, parent_map, repo, graph)
505 yield versionedfile.FulltextContentFactory(
506- root_key, parent_keys, None, '')
507+ root_key, parent_keys, None, b'')
508
509
510 def _parent_keys_for_root_version(
511
512=== modified file 'breezy/merge_directive.py'
513--- breezy/merge_directive.py 2018-06-29 23:04:43 +0000
514+++ breezy/merge_directive.py 2018-06-30 17:35:22 +0000
515@@ -253,7 +253,7 @@
516 :return: a string
517 """
518 my_gpg = gpg.GPGStrategy(branch.get_config_stack())
519- return my_gpg.sign(''.join(self.to_lines()), gpg.MODE_CLEAR)
520+ return my_gpg.sign(b''.join(self.to_lines()), gpg.MODE_CLEAR)
521
522 def to_email(self, mail_to, branch, sign=False):
523 """Serialize as an email message.
524@@ -273,7 +273,7 @@
525 if sign:
526 body = self.to_signed(branch)
527 else:
528- body = ''.join(self.to_lines())
529+ body = b''.join(self.to_lines())
530 message = email_message.EmailMessage(mail_from, mail_to, subject,
531 body)
532 return message
533@@ -349,7 +349,7 @@
534 ' client %s does not support message bodies.',
535 mail_client.__class__.__name__)
536 mail_client.compose_merge_request(to, subject,
537- ''.join(self.to_lines()),
538+ b''.join(self.to_lines()),
539 basename, body)
540
541
542@@ -437,7 +437,7 @@
543 patch = None
544 patch_type = None
545 else:
546- patch = ''.join(patch_lines)
547+ patch = b''.join(patch_lines)
548 try:
549 bundle_serializer.read_bundle(BytesIO(patch))
550 except (errors.NotABundle, errors.BundleNotSupported,
551@@ -522,19 +522,19 @@
552 except StopIteration:
553 pass
554 else:
555- if start.startswith('# Begin patch'):
556+ if start.startswith(b'# Begin patch'):
557 patch_lines = []
558 for line in line_iter:
559- if line.startswith('# Begin bundle'):
560+ if line.startswith(b'# Begin bundle'):
561 start = line
562 break
563 patch_lines.append(line)
564 else:
565 start = None
566- patch = ''.join(patch_lines)
567+ patch = b''.join(patch_lines)
568 if start is not None:
569- if start.startswith('# Begin bundle'):
570- bundle = ''.join(line_iter)
571+ if start.startswith(b'# Begin bundle'):
572+ bundle = b''.join(line_iter)
573 else:
574 raise errors.IllegalMergeDirectivePayload(start)
575 time, timezone = timestamp.parse_patch_date(stanza.get('timestamp'))
576
577=== modified file 'breezy/plugins/git/__init__.py'
578--- breezy/plugins/git/__init__.py 2018-06-17 11:15:04 +0000
579+++ breezy/plugins/git/__init__.py 2018-06-30 17:35:22 +0000
580@@ -151,10 +151,11 @@
581 headers = {"Content-Type": "application/x-git-upload-pack-request"}
582 req = Request('GET', url, accepted_errors=[200, 403, 404, 405],
583 headers=headers)
584- if req.get_host() == "github.com":
585+ (scheme, user, password, host, port, path) = urlutils.parse_url(req.get_full_url())
586+ if host == "github.com":
587 # GitHub requires we lie. https://github.com/dulwich/dulwich/issues/562
588 req.add_header("User-Agent", user_agent_for_github())
589- elif req.get_host() == "bazaar.launchpad.net":
590+ elif host == "bazaar.launchpad.net":
591 # Don't attempt Git probes against bazaar.launchpad.net; pad.lv/1744830
592 raise bzr_errors.NotBranchError(transport.base)
593 req.follow_redirections = True
594
595=== modified file 'breezy/plugins/git/cache.py'
596--- breezy/plugins/git/cache.py 2018-06-30 12:28:52 +0000
597+++ breezy/plugins/git/cache.py 2018-06-30 17:35:22 +0000
598@@ -529,10 +529,10 @@
599 (type_name, hexsha) = obj
600 sha = hex_to_sha(hexsha)
601 else:
602- type_name = obj.type_name
603+ type_name = obj.type_name.decode('ascii')
604 sha = obj.sha().digest()
605 if type_name == "commit":
606- self.db["commit\0" + self.revid] = "\0".join((sha, obj.tree))
607+ self.db[b"commit\0" + self.revid] = b"\0".join((sha, obj.tree))
608 if type(bzr_key_data) is not dict:
609 raise TypeError(bzr_key_data)
610 type_data = (self.revid, obj.tree)
611@@ -544,7 +544,7 @@
612 elif type_name == "blob":
613 if bzr_key_data is None:
614 return
615- self.db["\0".join(("blob", bzr_key_data[0], bzr_key_data[1]))] = sha
616+ self.db[b"\0".join((b"blob", bzr_key_data[0], bzr_key_data[1]))] = sha
617 type_data = bzr_key_data
618 elif type_name == "tree":
619 if bzr_key_data is None:
620@@ -552,17 +552,17 @@
621 type_data = bzr_key_data
622 else:
623 raise AssertionError
624- entry = "\0".join((type_name, ) + type_data) + "\n"
625- key = "git\0" + sha
626+ entry = b"\0".join((type_name.encode('ascii'), ) + type_data) + b"\n"
627+ key = b"git\0" + sha
628 try:
629 oldval = self.db[key]
630 except KeyError:
631 self.db[key] = entry
632 else:
633- if oldval[-1] != "\n":
634- self.db[key] = "".join([oldval, "\n", entry])
635+ if not oldval.endswith(b'\n'):
636+ self.db[key] = b"".join([oldval, b"\n", entry])
637 else:
638- self.db[key] = "".join([oldval, entry])
639+ self.db[key] = b"".join([oldval, entry])
640
641 def finish(self):
642 if self._commit is None:
643@@ -619,13 +619,13 @@
644 os.O_RDWR|os.O_CREAT)
645 self.db = mapdbs()[path]
646 try:
647- if int(self.db["version"]) not in (2, 3):
648+ if int(self.db[b"version"]) not in (2, 3):
649 trace.warning("SHA Map is incompatible (%s -> %d), rebuilding database.",
650- self.db["version"], self.TDB_MAP_VERSION)
651+ self.db[b"version"], self.TDB_MAP_VERSION)
652 self.db.clear()
653 except KeyError:
654 pass
655- self.db["version"] = str(self.TDB_MAP_VERSION)
656+ self.db[b"version"] = b'%d' % self.TDB_MAP_VERSION
657
658 def start_write_group(self):
659 """Start writing changes."""
660@@ -644,12 +644,12 @@
661
662 def lookup_commit(self, revid):
663 try:
664- return sha_to_hex(self.db["commit\0" + revid][:20])
665+ return sha_to_hex(self.db[b"commit\0" + revid][:20])
666 except KeyError:
667 raise KeyError("No cache entry for %r" % revid)
668
669 def lookup_blob_id(self, fileid, revision):
670- return sha_to_hex(self.db["\0".join(("blob", fileid, revision))])
671+ return sha_to_hex(self.db[b"\0".join(("blob", fileid, revision))])
672
673 def lookup_git_sha(self, sha):
674 """Lookup a Git sha in the database.
675@@ -662,36 +662,37 @@
676 """
677 if len(sha) == 40:
678 sha = hex_to_sha(sha)
679- value = self.db["git\0" + sha]
680+ value = self.db[b"git\0" + sha]
681 for data in value.splitlines():
682- data = data.split("\0")
683- if data[0] == "commit":
684+ data = data.split(b"\0")
685+ type_name = data[0].decode('ascii')
686+ if type_name == "commit":
687 if len(data) == 3:
688- yield (data[0], (data[1], data[2], {}))
689+ yield (type_name, (data[1], data[2], {}))
690 else:
691- yield (data[0], (data[1], data[2], {"testament3-sha1": data[3]}))
692- elif data[0] in ("tree", "blob"):
693- yield (data[0], tuple(data[1:]))
694+ yield (type_name, (data[1], data[2], {"testament3-sha1": data[3]}))
695+ elif type_name in ("tree", "blob"):
696+ yield (type_name, tuple(data[1:]))
697 else:
698- raise AssertionError("unknown type %r" % data[0])
699+ raise AssertionError("unknown type %r" % type_name)
700
701 def missing_revisions(self, revids):
702 ret = set()
703 for revid in revids:
704- if self.db.get("commit\0" + revid) is None:
705+ if self.db.get(b"commit\0" + revid) is None:
706 ret.add(revid)
707 return ret
708
709 def revids(self):
710 """List the revision ids known."""
711 for key in self.db.iterkeys():
712- if key.startswith("commit\0"):
713+ if key.startswith(b"commit\0"):
714 yield key[7:]
715
716 def sha1s(self):
717 """List the SHA1s."""
718 for key in self.db.iterkeys():
719- if key.startswith("git\0"):
720+ if key.startswith(b"git\0"):
721 yield sha_to_hex(key[4:])
722
723
724
725=== modified file 'breezy/plugins/git/tests/test_cache.py'
726--- breezy/plugins/git/tests/test_cache.py 2018-06-30 12:28:52 +0000
727+++ breezy/plugins/git/tests/test_cache.py 2018-06-30 17:35:22 +0000
728@@ -60,32 +60,32 @@
729
730 def _get_test_commit(self):
731 c = Commit()
732- c.committer = "Jelmer <jelmer@samba.org>"
733+ c.committer = b"Jelmer <jelmer@samba.org>"
734 c.commit_time = 0
735 c.commit_timezone = 0
736- c.author = "Jelmer <jelmer@samba.org>"
737+ c.author = b"Jelmer <jelmer@samba.org>"
738 c.author_time = 0
739 c.author_timezone = 0
740- c.message = "Teh foo bar"
741- c.tree = "cc9462f7f8263ef5adfbeff2fb936bb36b504cba"
742+ c.message = b"Teh foo bar"
743+ c.tree = b"cc9462f7f8263ef5adfbeff2fb936bb36b504cba"
744 return c
745
746 def test_commit(self):
747 self.map.start_write_group()
748- updater = self.cache.get_updater(Revision("myrevid"))
749+ updater = self.cache.get_updater(Revision(b"myrevid"))
750 c = self._get_test_commit()
751 updater.add_object(c, {
752- "testament3-sha1": "cc9462f7f8263ef5adf8eff2fb936bb36b504cba"},
753+ "testament3-sha1": b"cc9462f7f8263ef5adf8eff2fb936bb36b504cba"},
754 None)
755 updater.finish()
756 self.map.commit_write_group()
757 self.assertEqual(
758- [("commit", ("myrevid",
759- "cc9462f7f8263ef5adfbeff2fb936bb36b504cba",
760- {"testament3-sha1": "cc9462f7f8263ef5adf8eff2fb936bb36b504cba"},
761+ [("commit", (b"myrevid",
762+ b"cc9462f7f8263ef5adfbeff2fb936bb36b504cba",
763+ {"testament3-sha1": b"cc9462f7f8263ef5adf8eff2fb936bb36b504cba"},
764 ))],
765 list(self.map.lookup_git_sha(c.id)))
766- self.assertEqual(c.id, self.map.lookup_commit("myrevid"))
767+ self.assertEqual(c.id, self.map.lookup_commit(b"myrevid"))
768
769 def test_lookup_notfound(self):
770 self.assertRaises(KeyError, list,
771
772=== modified file 'breezy/plugins/git/tests/test_object_store.py'
773--- breezy/plugins/git/tests/test_object_store.py 2018-05-27 18:00:21 +0000
774+++ breezy/plugins/git/tests/test_object_store.py 2018-06-30 17:35:22 +0000
775@@ -234,25 +234,25 @@
776 self.assertEqual(None, t)
777
778 def test_empty_dir(self):
779- child_ie = InventoryDirectory('bar', 'bar', 'bar')
780+ child_ie = InventoryDirectory(b'bar', 'bar', b'bar')
781 t = directory_to_tree('', [child_ie], lambda p, x: None, {}, None,
782 allow_empty=False)
783 self.assertEqual(None, t)
784
785 def test_empty_dir_dummy_files(self):
786- child_ie = InventoryDirectory('bar', 'bar', 'bar')
787+ child_ie = InventoryDirectory(b'bar', 'bar', b'bar')
788 t = directory_to_tree('', [child_ie], lambda p, x: None, {}, ".mydummy",
789 allow_empty=False)
790 self.assertTrue(".mydummy" in t)
791
792 def test_empty_root(self):
793- child_ie = InventoryDirectory('bar', 'bar', 'bar')
794+ child_ie = InventoryDirectory(b'bar', 'bar', b'bar')
795 t = directory_to_tree('', [child_ie], lambda p, x: None, {}, None,
796 allow_empty=True)
797 self.assertEqual(Tree(), t)
798
799 def test_with_file(self):
800- child_ie = InventoryFile('bar', 'bar', 'bar')
801+ child_ie = InventoryFile(b'bar', 'bar', b'bar')
802 b = Blob.from_string("bla")
803 t1 = directory_to_tree('', [child_ie], lambda p, x: b.id, {}, None,
804 allow_empty=False)
805@@ -261,8 +261,8 @@
806 self.assertEqual(t1, t2)
807
808 def test_with_gitdir(self):
809- child_ie = InventoryFile('bar', 'bar', 'bar')
810- git_file_ie = InventoryFile('gitid', '.git', 'bar')
811+ child_ie = InventoryFile(b'bar', 'bar', b'bar')
812+ git_file_ie = InventoryFile(b'gitid', '.git', b'bar')
813 b = Blob.from_string("bla")
814 t1 = directory_to_tree('', [child_ie, git_file_ie],
815 lambda p, x: b.id, {}, None,
816
817=== modified file 'breezy/shelf.py'
818--- breezy/shelf.py 2018-06-16 11:29:34 +0000
819+++ breezy/shelf.py 2018-06-30 17:35:22 +0000
820@@ -266,7 +266,7 @@
821 to_transform.adjust_path(name, s_parent_id, s_trans_id)
822 if existing_path is None:
823 if kind is None:
824- to_transform.create_file('', s_trans_id)
825+ to_transform.create_file([b''], s_trans_id)
826 else:
827 transform.create_from_tree(
828 to_transform, s_trans_id, tree,
829
830=== modified file 'breezy/tests/__init__.py'
831--- breezy/tests/__init__.py 2018-06-22 01:50:22 +0000
832+++ breezy/tests/__init__.py 2018-06-30 17:35:22 +0000
833@@ -1524,7 +1524,8 @@
834 def assertFileEqual(self, content, path):
835 """Fail if path does not contain 'content'."""
836 self.assertPathExists(path)
837- with open(path, 'rb') as f:
838+
839+ with open(path, 'r' + ('b' if isinstance(content, bytes) else '')) as f:
840 s = f.read()
841 self.assertEqualDiff(content, s)
842
843
844=== modified file 'breezy/tests/http_server.py'
845--- breezy/tests/http_server.py 2018-04-01 02:02:12 +0000
846+++ breezy/tests/http_server.py 2018-06-30 17:35:22 +0000
847@@ -26,6 +26,7 @@
848 import random
849 import re
850 import socket
851+import sys
852 try:
853 from urlparse import urlparse
854 except ImportError:
855@@ -125,7 +126,7 @@
856 self.send_header('Connection', 'close')
857 self.end_headers()
858 if self.command != 'HEAD' and code >= 200 and code not in (204, 304):
859- self.wfile.write(content)
860+ self.wfile.write(content.encode('utf-8'))
861
862 def _handle_one_request(self):
863 http_server.SimpleHTTPRequestHandler.handle_one_request(self)
864@@ -339,7 +340,8 @@
865 # abandon query parameters
866 path = urlparse(path)[2]
867 path = posixpath.normpath(urlutils.unquote(path))
868- path = path.decode('utf-8')
869+ if sys.version_info[0] == 2:
870+ path = path.decode('utf-8')
871 words = path.split('/')
872 path = self._cwd
873 for num, word in enumerate(w for w in words if w):
874
875=== modified file 'breezy/tests/per_intertree/test_compare.py'
876--- breezy/tests/per_intertree/test_compare.py 2018-06-16 13:09:13 +0000
877+++ breezy/tests/per_intertree/test_compare.py 2018-06-30 17:35:22 +0000
878@@ -755,8 +755,8 @@
879 tree2 = self.get_tree_no_parents_abc_content(tree2)
880 # the pathname is chosen to fall between 'a' and 'b'.
881 self.build_tree(['1/a-empty/', '2/a-empty/'])
882- tree1.add(['a-empty'], ['a-empty'])
883- tree2.add(['a-empty'], ['a-empty'])
884+ tree1.add(['a-empty'], [b'a-empty'])
885+ tree2.add(['a-empty'], [b'a-empty'])
886 tree1, tree2 = self.mutable_trees_to_test_trees(self, tree1, tree2)
887 expected = []
888 self.assertEqual(expected, self.do_iter_changes(tree1, tree2))
889
890=== modified file 'breezy/tests/per_repository/test_signatures.py'
891--- breezy/tests/per_repository/test_signatures.py 2018-03-25 00:39:16 +0000
892+++ breezy/tests/per_repository/test_signatures.py 2018-06-30 17:35:22 +0000
893@@ -48,10 +48,10 @@
894 repo.start_write_group()
895 repo.sign_revision(a, strategy)
896 repo.commit_write_group()
897- self.assertEqual('-----BEGIN PSEUDO-SIGNED CONTENT-----\n' +
898+ self.assertEqual(b'-----BEGIN PSEUDO-SIGNED CONTENT-----\n' +
899 Testament.from_revision(repo,
900 a).as_short_text() +
901- '-----END PSEUDO-SIGNED CONTENT-----\n',
902+ b'-----END PSEUDO-SIGNED CONTENT-----\n',
903 repo.get_signature_text(a))
904
905 def test_store_signature(self):
906@@ -62,7 +62,7 @@
907 branch.repository.start_write_group()
908 try:
909 branch.repository.store_revision_signature(
910- gpg.LoopbackGPGStrategy(None), 'FOO', 'A')
911+ gpg.LoopbackGPGStrategy(None), b'FOO', b'A')
912 except errors.NoSuchRevision:
913 branch.repository.abort_write_group()
914 raise tests.TestNotApplicable(
915@@ -78,11 +78,11 @@
916 # A signature without a revision should not be accessible.
917 self.assertRaises(errors.NoSuchRevision,
918 branch.repository.has_signature_for_revision_id,
919- 'A')
920+ b'A')
921 if wt.branch.repository._format.supports_setting_revision_ids:
922 wt.commit("base", rev_id=b'A', allow_pointless=True)
923- self.assertEqual('-----BEGIN PSEUDO-SIGNED CONTENT-----\n'
924- 'FOO-----END PSEUDO-SIGNED CONTENT-----\n',
925+ self.assertEqual(b'-----BEGIN PSEUDO-SIGNED CONTENT-----\n'
926+ b'FOO-----END PSEUDO-SIGNED CONTENT-----\n',
927 branch.repository.get_signature_text('A'))
928
929 def test_clone_preserves_signatures(self):
930@@ -118,9 +118,9 @@
931 repo.start_write_group()
932 repo.sign_revision(a, strategy)
933 repo.commit_write_group()
934- self.assertEqual('-----BEGIN PSEUDO-SIGNED CONTENT-----\n' +
935+ self.assertEqual(b'-----BEGIN PSEUDO-SIGNED CONTENT-----\n' +
936 Testament.from_revision(repo, a).as_short_text() +
937- '-----END PSEUDO-SIGNED CONTENT-----\n',
938+ b'-----END PSEUDO-SIGNED CONTENT-----\n',
939 repo.get_signature_text(a))
940 self.assertEqual(
941 (gpg.SIGNATURE_VALID, None),
942@@ -136,9 +136,9 @@
943 repo.start_write_group()
944 repo.sign_revision(a, strategy)
945 repo.commit_write_group()
946- self.assertEqual('-----BEGIN PSEUDO-SIGNED CONTENT-----\n' +
947+ self.assertEqual(b'-----BEGIN PSEUDO-SIGNED CONTENT-----\n' +
948 Testament.from_revision(repo, a).as_short_text() +
949- '-----END PSEUDO-SIGNED CONTENT-----\n',
950+ b'-----END PSEUDO-SIGNED CONTENT-----\n',
951 repo.get_signature_text(a))
952 self.assertEqual(
953 [(a, gpg.SIGNATURE_VALID, None),
954@@ -153,7 +153,7 @@
955 raise tests.TestNotApplicable(
956 "repository supports signing revisions")
957 wt = self.make_branch_and_tree('source')
958- a = wt.commit('A', allow_pointless=True)
959+ a = wt.commit(b'A', allow_pointless=True)
960 repo = wt.branch.repository
961 repo.lock_write()
962 repo.start_write_group()
963
964=== modified file 'breezy/tests/per_repository_vf/test_repository.py'
965--- breezy/tests/per_repository_vf/test_repository.py 2018-06-19 01:27:34 +0000
966+++ breezy/tests/per_repository_vf/test_repository.py 2018-06-30 17:35:22 +0000
967@@ -330,7 +330,7 @@
968 tree_a.branch.repository.start_write_group()
969 try:
970 inv_file = tree_a.branch.repository.inventories
971- inv_file.add_lines(('orphan',), [], [])
972+ inv_file.add_lines((b'orphan',), [], [])
973 except:
974 tree_a.branch.repository.commit_write_group()
975 raise
976@@ -370,7 +370,7 @@
977 repository.lock_read()
978 self.addCleanup(repository.unlock)
979 revisions = [repository.get_revision(r) for r in
980- ['rev1', 'rev2', 'rev3', 'rev4']]
981+ [b'rev1', b'rev2', b'rev3', b'rev4']]
982 deltas1 = list(repository.get_deltas_for_revisions(revisions))
983 deltas2 = [repository.get_revision_delta(r.revision_id) for r in
984 revisions]
985@@ -378,7 +378,7 @@
986
987 def test_all_revision_ids(self):
988 # all_revision_ids -> all revisions
989- self.assertEqual({'rev1', 'rev2', 'rev3', 'rev4'},
990+ self.assertEqual({b'rev1', b'rev2', b'rev3', b'rev4'},
991 set(self.controldir.open_repository().all_revision_ids()))
992
993 def test_reserved_id(self):
994@@ -387,11 +387,11 @@
995 repo.start_write_group()
996 try:
997 self.assertRaises(errors.ReservedId, repo.add_inventory,
998- 'reserved:', None, None)
999+ b'reserved:', None, None)
1000 self.assertRaises(errors.ReservedId, repo.add_inventory_by_delta,
1001- "foo", [], 'reserved:', None)
1002+ "foo", [], b'reserved:', None)
1003 self.assertRaises(errors.ReservedId, repo.add_revision,
1004- 'reserved:', None)
1005+ b'reserved:', None)
1006 finally:
1007 repo.abort_write_group()
1008 repo.unlock()
1009@@ -408,37 +408,37 @@
1010 repo = self.make_repository('inventory_with_unnecessary_ghost')
1011 repo.lock_write()
1012 repo.start_write_group()
1013- inv = inventory.Inventory(revision_id = 'ghost')
1014- inv.root.revision = 'ghost'
1015+ inv = inventory.Inventory(revision_id = b'ghost')
1016+ inv.root.revision = b'ghost'
1017 if repo.supports_rich_root():
1018 root_id = inv.root.file_id
1019- repo.texts.add_lines((root_id, 'ghost'), [], [])
1020- sha1 = repo.add_inventory('ghost', inv, [])
1021+ repo.texts.add_lines((root_id, b'ghost'), [], [])
1022+ sha1 = repo.add_inventory(b'ghost', inv, [])
1023 rev = _mod_revision.Revision(
1024 timestamp=0, timezone=None, committer="Foo Bar <foo@example.com>",
1025 message="Message", inventory_sha1=sha1, revision_id=b'ghost')
1026- rev.parent_ids = ['the_ghost']
1027+ rev.parent_ids = [b'the_ghost']
1028 try:
1029- repo.add_revision('ghost', rev)
1030+ repo.add_revision(b'ghost', rev)
1031 except (errors.NoSuchRevision, errors.RevisionNotPresent):
1032 raise tests.TestNotApplicable(
1033 "Cannot test with ghosts for this format.")
1034
1035- inv = inventory.Inventory(revision_id = 'the_ghost')
1036- inv.root.revision = 'the_ghost'
1037+ inv = inventory.Inventory(revision_id = b'the_ghost')
1038+ inv.root.revision = b'the_ghost'
1039 if repo.supports_rich_root():
1040 root_id = inv.root.file_id
1041- repo.texts.add_lines((root_id, 'the_ghost'), [], [])
1042- sha1 = repo.add_inventory('the_ghost', inv, [])
1043+ repo.texts.add_lines((root_id, b'the_ghost'), [], [])
1044+ sha1 = repo.add_inventory(b'the_ghost', inv, [])
1045 rev = _mod_revision.Revision(
1046 timestamp=0, timezone=None, committer="Foo Bar <foo@example.com>",
1047 message="Message", inventory_sha1=sha1, revision_id=b'the_ghost')
1048 rev.parent_ids = []
1049- repo.add_revision('the_ghost', rev)
1050+ repo.add_revision(b'the_ghost', rev)
1051 # check its setup usefully
1052 inv_weave = repo.inventories
1053- possible_parents = (None, (('ghost',),))
1054- self.assertSubset(inv_weave.get_parent_map([('ghost',)])[('ghost',)],
1055+ possible_parents = (None, ((b'ghost',),))
1056+ self.assertSubset(inv_weave.get_parent_map([(b'ghost',)])[(b'ghost',)],
1057 possible_parents)
1058 repo.commit_write_group()
1059 repo.unlock()
1060@@ -446,19 +446,19 @@
1061 def test_corrupt_revision_access_asserts_if_reported_wrong(self):
1062 repo_url = self.get_url('inventory_with_unnecessary_ghost')
1063 repo = _mod_repository.Repository.open(repo_url)
1064- m = MatchesAncestry(repo, 'ghost')
1065+ m = MatchesAncestry(repo, b'ghost')
1066 reported_wrong = False
1067 try:
1068- if m.match(['the_ghost', 'ghost']) is not None:
1069+ if m.match([b'the_ghost', b'ghost']) is not None:
1070 reported_wrong = True
1071 except errors.CorruptRepository:
1072 # caught the bad data:
1073 return
1074 if not reported_wrong:
1075 return
1076- self.assertRaises(errors.CorruptRepository, repo.get_revision, 'ghost')
1077+ self.assertRaises(errors.CorruptRepository, repo.get_revision, b'ghost')
1078
1079 def test_corrupt_revision_get_revision_reconcile(self):
1080 repo_url = self.get_url('inventory_with_unnecessary_ghost')
1081 repo = _mod_repository.Repository.open(repo_url)
1082- repo.get_revision_reconcile('ghost')
1083+ repo.get_revision_reconcile(b'ghost')
1084
1085=== modified file 'breezy/tests/per_tree/test_tree.py'
1086--- breezy/tests/per_tree/test_tree.py 2018-06-17 11:15:04 +0000
1087+++ breezy/tests/per_tree/test_tree.py 2018-06-30 17:35:22 +0000
1088@@ -65,12 +65,12 @@
1089 tree_b.lock_read()
1090 self.addCleanup(tree_b.unlock)
1091 self.assertEqual([
1092- ('killed-a', 'a\n'),
1093- ('killed-b', 'b\n'),
1094- ('unchanged', 'c\n'),
1095- ('unchanged', 'd\n'),
1096- ('new-a', 'e\n'),
1097- ('new-b', 'f\n'),
1098+ ('killed-a', b'a\n'),
1099+ ('killed-b', b'b\n'),
1100+ ('unchanged', b'c\n'),
1101+ ('unchanged', b'd\n'),
1102+ ('new-a', b'e\n'),
1103+ ('new-b', b'f\n'),
1104 ], list(tree_a.plan_file_merge(file_id, tree_b)))
1105
1106
1107@@ -115,7 +115,7 @@
1108 tree = self.make_branch_and_tree('tree')
1109 root_id = tree.get_root_id()
1110 if root_id is not None:
1111- self.assertIsInstance(root_id, str)
1112+ self.assertIsInstance(root_id, bytes)
1113
1114 def test_is_versioned(self):
1115 tree = self.make_branch_and_tree('tree')
1116
1117=== modified file 'breezy/tests/per_workingtree/test_parents.py'
1118--- breezy/tests/per_workingtree/test_parents.py 2018-06-19 18:31:46 +0000
1119+++ breezy/tests/per_workingtree/test_parents.py 2018-06-30 17:35:22 +0000
1120@@ -79,25 +79,25 @@
1121
1122 def test_set_null_parent(self):
1123 t = self.make_branch_and_tree('.')
1124- self.assertRaises(errors.ReservedId, t.set_parent_ids, ['null:'],
1125+ self.assertRaises(errors.ReservedId, t.set_parent_ids, [b'null:'],
1126 allow_leftmost_as_ghost=True)
1127 self.assertRaises(errors.ReservedId, t.set_parent_trees,
1128- [('null:', None)], allow_leftmost_as_ghost=True)
1129+ [(b'null:', None)], allow_leftmost_as_ghost=True)
1130
1131 def test_set_one_ghost_parent_rejects(self):
1132 t = self.make_branch_and_tree('.')
1133 self.assertRaises(errors.GhostRevisionUnusableHere,
1134- t.set_parent_trees, [('missing-revision-id', None)])
1135+ t.set_parent_trees, [(b'missing-revision-id', None)])
1136
1137 def test_set_one_ghost_parent_force(self):
1138 t = self.make_branch_and_tree('.')
1139 if t._format.supports_leftmost_parent_id_as_ghost:
1140- t.set_parent_trees([('missing-revision-id', None)],
1141+ t.set_parent_trees([(b'missing-revision-id', None)],
1142 allow_leftmost_as_ghost=True)
1143- self.assertConsistentParents(['missing-revision-id'], t)
1144+ self.assertConsistentParents([b'missing-revision-id'], t)
1145 else:
1146 self.assertRaises(errors.GhostRevisionUnusableHere,
1147- t.set_parent_trees, [('missing-revision-id', None)])
1148+ t.set_parent_trees, [(b'missing-revision-id', None)])
1149 self.assertConsistentParents([], t)
1150
1151 def test_set_two_parents_one_ghost(self):
1152@@ -108,12 +108,12 @@
1153 rev_tree = t.branch.repository.revision_tree(revision_in_repo)
1154 if t._format.supports_righthand_parent_id_as_ghost:
1155 t.set_parent_trees([(revision_in_repo, rev_tree),
1156- ('another-missing', None)])
1157- self.assertConsistentParents([revision_in_repo, 'another-missing'], t)
1158+ (b'another-missing', None)])
1159+ self.assertConsistentParents([revision_in_repo, b'another-missing'], t)
1160 else:
1161 self.assertRaises(errors.GhostRevisionUnusableHere,
1162 t.set_parent_trees, [(revision_in_repo, rev_tree),
1163- ('another-missing', None)])
1164+ (b'another-missing', None)])
1165
1166 def test_set_three_parents(self):
1167 t = self.make_branch_and_tree('.')
1168@@ -144,7 +144,7 @@
1169 def test_set_one_ghost_parent_ids_rejects(self):
1170 t = self.make_branch_and_tree('.')
1171 self.assertRaises(errors.GhostRevisionUnusableHere,
1172- t.set_parent_ids, ['missing-revision-id'])
1173+ t.set_parent_ids, [b'missing-revision-id'])
1174
1175 def test_set_one_ghost_parent_ids_force(self):
1176 t = self.make_branch_and_tree('.')
1177@@ -488,9 +488,9 @@
1178
1179 def add_new_root(self, new_shape, old_revid, new_revid):
1180 if self.bzrdir_format.repository_format.rich_root_data:
1181- self.add_dir(new_shape, old_revid, 'root-id', None, '')
1182+ self.add_dir(new_shape, old_revid, b'root-id', None, '')
1183 else:
1184- self.add_dir(new_shape, new_revid, 'root-id', None, '')
1185+ self.add_dir(new_shape, new_revid, b'root-id', None, '')
1186
1187 def assertTransitionFromBasisToShape(self, basis_shape, basis_revid,
1188 new_shape, new_revid, extra_parent=None, set_current_inventory=True):
1189@@ -533,66 +533,66 @@
1190 def test_no_parents_full_tree(self):
1191 """Test doing a regular initial commit with files and dirs."""
1192 basis_shape = Inventory(root_id=None) # empty tree
1193- revid = 'new-parent'
1194+ revid = b'new-parent'
1195 new_shape = Inventory(root_id=None)
1196- self.add_dir(new_shape, revid, 'root-id', None, '')
1197- self.add_link(new_shape, revid, 'link-id', 'root-id', 'link', 'target')
1198- self.add_file(new_shape, revid, 'file-id', 'root-id', 'file', '1' * 32,
1199+ self.add_dir(new_shape, revid, b'root-id', None, '')
1200+ self.add_link(new_shape, revid, b'link-id', b'root-id', 'link', 'target')
1201+ self.add_file(new_shape, revid, b'file-id', b'root-id', 'file', '1' * 32,
1202 12)
1203- self.add_dir(new_shape, revid, 'dir-id', 'root-id', 'dir')
1204- self.add_file(new_shape, revid, 'subfile-id', 'dir-id', 'subfile',
1205+ self.add_dir(new_shape, revid, b'dir-id', b'root-id', 'dir')
1206+ self.add_file(new_shape, revid, b'subfile-id', b'dir-id', 'subfile',
1207 '2' * 32, 24)
1208 self.assertTransitionFromBasisToShape(basis_shape, None, new_shape,
1209 revid)
1210
1211 def test_file_content_change(self):
1212- old_revid = 'old-parent'
1213+ old_revid = b'old-parent'
1214 basis_shape = Inventory(root_id=None)
1215- self.add_dir(basis_shape, old_revid, 'root-id', None, '')
1216- self.add_file(basis_shape, old_revid, 'file-id', 'root-id', 'file',
1217+ self.add_dir(basis_shape, old_revid, b'root-id', None, '')
1218+ self.add_file(basis_shape, old_revid, b'file-id', b'root-id', 'file',
1219 '1' * 32, 12)
1220- new_revid = 'new-parent'
1221+ new_revid = b'new-parent'
1222 new_shape = Inventory(root_id=None)
1223 self.add_new_root(new_shape, old_revid, new_revid)
1224- self.add_file(new_shape, new_revid, 'file-id', 'root-id', 'file',
1225+ self.add_file(new_shape, new_revid, b'file-id', b'root-id', 'file',
1226 '2' * 32, 24)
1227 self.assertTransitionFromBasisToShape(basis_shape, old_revid,
1228 new_shape, new_revid)
1229
1230 def test_link_content_change(self):
1231- old_revid = 'old-parent'
1232+ old_revid = b'old-parent'
1233 basis_shape = Inventory(root_id=None)
1234- self.add_dir(basis_shape, old_revid, 'root-id', None, '')
1235- self.add_link(basis_shape, old_revid, 'link-id', 'root-id', 'link',
1236+ self.add_dir(basis_shape, old_revid, b'root-id', None, '')
1237+ self.add_link(basis_shape, old_revid, b'link-id', b'root-id', 'link',
1238 'old-target')
1239- new_revid = 'new-parent'
1240+ new_revid = b'new-parent'
1241 new_shape = Inventory(root_id=None)
1242 self.add_new_root(new_shape, old_revid, new_revid)
1243- self.add_link(new_shape, new_revid, 'link-id', 'root-id', 'link',
1244+ self.add_link(new_shape, new_revid, b'link-id', b'root-id', 'link',
1245 'new-target')
1246 self.assertTransitionFromBasisToShape(basis_shape, old_revid,
1247 new_shape, new_revid)
1248
1249 def test_kind_changes(self):
1250 def do_file(inv, revid):
1251- self.add_file(inv, revid, 'path-id', 'root-id', 'path', '1' * 32,
1252+ self.add_file(inv, revid, b'path-id', b'root-id', 'path', '1' * 32,
1253 12)
1254
1255 def do_link(inv, revid):
1256- self.add_link(inv, revid, 'path-id', 'root-id', 'path', 'target')
1257+ self.add_link(inv, revid, b'path-id', b'root-id', 'path', 'target')
1258
1259 def do_dir(inv, revid):
1260- self.add_dir(inv, revid, 'path-id', 'root-id', 'path')
1261+ self.add_dir(inv, revid, b'path-id', b'root-id', 'path')
1262
1263 for old_factory in (do_file, do_link, do_dir):
1264 for new_factory in (do_file, do_link, do_dir):
1265 if old_factory == new_factory:
1266 continue
1267- old_revid = 'old-parent'
1268+ old_revid = b'old-parent'
1269 basis_shape = Inventory(root_id=None)
1270- self.add_dir(basis_shape, old_revid, 'root-id', None, '')
1271+ self.add_dir(basis_shape, old_revid, b'root-id', None, '')
1272 old_factory(basis_shape, old_revid)
1273- new_revid = 'new-parent'
1274+ new_revid = b'new-parent'
1275 new_shape = Inventory(root_id=None)
1276 self.add_new_root(new_shape, old_revid, new_revid)
1277 new_factory(new_shape, new_revid)
1278@@ -602,23 +602,23 @@
1279 def test_content_from_second_parent_is_dropped(self):
1280 left_revid = 'left-parent'
1281 basis_shape = Inventory(root_id=None)
1282- self.add_dir(basis_shape, left_revid, 'root-id', None, '')
1283- self.add_link(basis_shape, left_revid, 'link-id', 'root-id', 'link',
1284+ self.add_dir(basis_shape, left_revid, b'root-id', None, '')
1285+ self.add_link(basis_shape, left_revid, b'link-id', b'root-id', 'link',
1286 'left-target')
1287 # the right shape has content - file, link, subdir with a child,
1288 # that should all be discarded by the call.
1289 right_revid = 'right-parent'
1290 right_shape = Inventory(root_id=None)
1291- self.add_dir(right_shape, left_revid, 'root-id', None, '')
1292- self.add_link(right_shape, right_revid, 'link-id', 'root-id', 'link',
1293+ self.add_dir(right_shape, left_revid, b'root-id', None, '')
1294+ self.add_link(right_shape, right_revid, b'link-id', b'root-id', 'link',
1295 'some-target')
1296- self.add_dir(right_shape, right_revid, 'subdir-id', 'root-id', 'dir')
1297- self.add_file(right_shape, right_revid, 'file-id', 'subdir-id', 'file',
1298+ self.add_dir(right_shape, right_revid, b'subdir-id', b'root-id', 'dir')
1299+ self.add_file(right_shape, right_revid, b'file-id', b'subdir-id', 'file',
1300 '2' * 32, 24)
1301- new_revid = 'new-parent'
1302+ new_revid = b'new-parent'
1303 new_shape = Inventory(root_id=None)
1304 self.add_new_root(new_shape, left_revid, new_revid)
1305- self.add_link(new_shape, new_revid, 'link-id', 'root-id', 'link',
1306+ self.add_link(new_shape, new_revid, b'link-id', b'root-id', 'link',
1307 'new-target')
1308 self.assertTransitionFromBasisToShape(basis_shape, left_revid,
1309 new_shape, new_revid, right_revid)
1310@@ -626,123 +626,123 @@
1311 def test_parent_id_changed(self):
1312 # test that when the only change to an entry is its parent id changing
1313 # that it is handled correctly (that is it keeps the same path)
1314- old_revid = 'old-parent'
1315+ old_revid = b'old-parent'
1316 basis_shape = Inventory(root_id=None)
1317- self.add_dir(basis_shape, old_revid, 'root-id', None, '')
1318- self.add_dir(basis_shape, old_revid, 'orig-parent-id', 'root-id', 'dir')
1319- self.add_dir(basis_shape, old_revid, 'dir-id', 'orig-parent-id', 'dir')
1320- new_revid = 'new-parent'
1321+ self.add_dir(basis_shape, old_revid, b'root-id', None, '')
1322+ self.add_dir(basis_shape, old_revid, b'orig-parent-id', b'root-id', 'dir')
1323+ self.add_dir(basis_shape, old_revid, b'dir-id', b'orig-parent-id', 'dir')
1324+ new_revid = b'new-parent'
1325 new_shape = Inventory(root_id=None)
1326 self.add_new_root(new_shape, old_revid, new_revid)
1327- self.add_dir(new_shape, new_revid, 'new-parent-id', 'root-id', 'dir')
1328- self.add_dir(new_shape, new_revid, 'dir-id', 'new-parent-id', 'dir')
1329+ self.add_dir(new_shape, new_revid, b'new-parent-id', b'root-id', 'dir')
1330+ self.add_dir(new_shape, new_revid, b'dir-id', b'new-parent-id', 'dir')
1331 self.assertTransitionFromBasisToShape(basis_shape, old_revid,
1332 new_shape, new_revid)
1333
1334 def test_name_changed(self):
1335 # test that when the only change to an entry is its name changing that
1336 # it is handled correctly (that is it keeps the same parent id)
1337- old_revid = 'old-parent'
1338+ old_revid = b'old-parent'
1339 basis_shape = Inventory(root_id=None)
1340- self.add_dir(basis_shape, old_revid, 'root-id', None, '')
1341- self.add_dir(basis_shape, old_revid, 'parent-id', 'root-id', 'origdir')
1342- self.add_dir(basis_shape, old_revid, 'dir-id', 'parent-id', 'olddir')
1343- new_revid = 'new-parent'
1344+ self.add_dir(basis_shape, old_revid, b'root-id', None, '')
1345+ self.add_dir(basis_shape, old_revid, b'parent-id', b'root-id', 'origdir')
1346+ self.add_dir(basis_shape, old_revid, b'dir-id', b'parent-id', 'olddir')
1347+ new_revid = b'new-parent'
1348 new_shape = Inventory(root_id=None)
1349 self.add_new_root(new_shape, old_revid, new_revid)
1350- self.add_dir(new_shape, new_revid, 'parent-id', 'root-id', 'newdir')
1351- self.add_dir(new_shape, new_revid, 'dir-id', 'parent-id', 'newdir')
1352+ self.add_dir(new_shape, new_revid, b'parent-id', b'root-id', 'newdir')
1353+ self.add_dir(new_shape, new_revid, b'dir-id', b'parent-id', 'newdir')
1354 self.assertTransitionFromBasisToShape(basis_shape, old_revid,
1355 new_shape, new_revid)
1356
1357 def test_parent_child_swap(self):
1358 # test a A->A/B and A/B->A path swap.
1359- old_revid = 'old-parent'
1360+ old_revid = b'old-parent'
1361 basis_shape = Inventory(root_id=None)
1362- self.add_dir(basis_shape, old_revid, 'root-id', None, '')
1363- self.add_dir(basis_shape, old_revid, 'dir-id-A', 'root-id', 'A')
1364- self.add_dir(basis_shape, old_revid, 'dir-id-B', 'dir-id-A', 'B')
1365- self.add_link(basis_shape, old_revid, 'link-id-C', 'dir-id-B', 'C', 'C')
1366- new_revid = 'new-parent'
1367+ self.add_dir(basis_shape, old_revid, b'root-id', None, '')
1368+ self.add_dir(basis_shape, old_revid, b'dir-id-A', b'root-id', 'A')
1369+ self.add_dir(basis_shape, old_revid, b'dir-id-B', b'dir-id-A', 'B')
1370+ self.add_link(basis_shape, old_revid, b'link-id-C', b'dir-id-B', 'C', 'C')
1371+ new_revid = b'new-parent'
1372 new_shape = Inventory(root_id=None)
1373 self.add_new_root(new_shape, old_revid, new_revid)
1374- self.add_dir(new_shape, new_revid, 'dir-id-B', 'root-id', 'A')
1375- self.add_dir(new_shape, new_revid, 'dir-id-A', 'dir-id-B', 'B')
1376- self.add_link(new_shape, new_revid, 'link-id-C', 'dir-id-A', 'C', 'C')
1377+ self.add_dir(new_shape, new_revid, b'dir-id-B', b'root-id', 'A')
1378+ self.add_dir(new_shape, new_revid, b'dir-id-A', b'dir-id-B', 'B')
1379+ self.add_link(new_shape, new_revid, b'link-id-C', b'dir-id-A', 'C', 'C')
1380 self.assertTransitionFromBasisToShape(basis_shape, old_revid,
1381 new_shape, new_revid)
1382
1383 def test_parent_deleted_child_renamed(self):
1384 # test a A->None and A/B->A.
1385- old_revid = 'old-parent'
1386+ old_revid = b'old-parent'
1387 basis_shape = Inventory(root_id=None)
1388- self.add_dir(basis_shape, old_revid, 'root-id', None, '')
1389- self.add_dir(basis_shape, old_revid, 'dir-id-A', 'root-id', 'A')
1390- self.add_dir(basis_shape, old_revid, 'dir-id-B', 'dir-id-A', 'B')
1391- self.add_link(basis_shape, old_revid, 'link-id-C', 'dir-id-B', 'C', 'C')
1392- new_revid = 'new-parent'
1393+ self.add_dir(basis_shape, old_revid, b'root-id', None, '')
1394+ self.add_dir(basis_shape, old_revid, b'dir-id-A', b'root-id', 'A')
1395+ self.add_dir(basis_shape, old_revid, b'dir-id-B', b'dir-id-A', 'B')
1396+ self.add_link(basis_shape, old_revid, b'link-id-C', b'dir-id-B', 'C', 'C')
1397+ new_revid = b'new-parent'
1398 new_shape = Inventory(root_id=None)
1399 self.add_new_root(new_shape, old_revid, new_revid)
1400- self.add_dir(new_shape, new_revid, 'dir-id-B', 'root-id', 'A')
1401- self.add_link(new_shape, old_revid, 'link-id-C', 'dir-id-B', 'C', 'C')
1402+ self.add_dir(new_shape, new_revid, b'dir-id-B', b'root-id', 'A')
1403+ self.add_link(new_shape, old_revid, b'link-id-C', b'dir-id-B', 'C', 'C')
1404 self.assertTransitionFromBasisToShape(basis_shape, old_revid,
1405 new_shape, new_revid)
1406
1407 def test_dir_to_root(self):
1408 # test a A->''.
1409- old_revid = 'old-parent'
1410+ old_revid = b'old-parent'
1411 basis_shape = Inventory(root_id=None)
1412- self.add_dir(basis_shape, old_revid, 'root-id', None, '')
1413- self.add_dir(basis_shape, old_revid, 'dir-id-A', 'root-id', 'A')
1414- self.add_link(basis_shape, old_revid, 'link-id-B', 'dir-id-A', 'B', 'B')
1415- new_revid = 'new-parent'
1416+ self.add_dir(basis_shape, old_revid, b'root-id', None, '')
1417+ self.add_dir(basis_shape, old_revid, b'dir-id-A', b'root-id', 'A')
1418+ self.add_link(basis_shape, old_revid, b'link-id-B', b'dir-id-A', 'B', 'B')
1419+ new_revid = b'new-parent'
1420 new_shape = Inventory(root_id=None)
1421- self.add_dir(new_shape, new_revid, 'dir-id-A', None, '')
1422- self.add_link(new_shape, old_revid, 'link-id-B', 'dir-id-A', 'B', 'B')
1423+ self.add_dir(new_shape, new_revid, b'dir-id-A', None, '')
1424+ self.add_link(new_shape, old_revid, b'link-id-B', b'dir-id-A', 'B', 'B')
1425 self.assertTransitionFromBasisToShape(basis_shape, old_revid,
1426 new_shape, new_revid)
1427
1428 def test_path_swap(self):
1429 # test a A->B and B->A path swap.
1430- old_revid = 'old-parent'
1431+ old_revid = b'old-parent'
1432 basis_shape = Inventory(root_id=None)
1433- self.add_dir(basis_shape, old_revid, 'root-id', None, '')
1434- self.add_dir(basis_shape, old_revid, 'dir-id-A', 'root-id', 'A')
1435- self.add_dir(basis_shape, old_revid, 'dir-id-B', 'root-id', 'B')
1436- self.add_link(basis_shape, old_revid, 'link-id-C', 'root-id', 'C', 'C')
1437- self.add_link(basis_shape, old_revid, 'link-id-D', 'root-id', 'D', 'D')
1438- self.add_file(basis_shape, old_revid, 'file-id-E', 'root-id', 'E',
1439+ self.add_dir(basis_shape, old_revid, b'root-id', None, '')
1440+ self.add_dir(basis_shape, old_revid, b'dir-id-A', b'root-id', 'A')
1441+ self.add_dir(basis_shape, old_revid, b'dir-id-B', b'root-id', 'B')
1442+ self.add_link(basis_shape, old_revid, b'link-id-C', b'root-id', 'C', 'C')
1443+ self.add_link(basis_shape, old_revid, b'link-id-D', b'root-id', 'D', 'D')
1444+ self.add_file(basis_shape, old_revid, b'file-id-E', b'root-id', 'E',
1445 '1' * 32, 12)
1446- self.add_file(basis_shape, old_revid, 'file-id-F', 'root-id', 'F',
1447+ self.add_file(basis_shape, old_revid, b'file-id-F', b'root-id', 'F',
1448 '2' * 32, 24)
1449- new_revid = 'new-parent'
1450+ new_revid = b'new-parent'
1451 new_shape = Inventory(root_id=None)
1452 self.add_new_root(new_shape, old_revid, new_revid)
1453- self.add_dir(new_shape, new_revid, 'dir-id-A', 'root-id', 'B')
1454- self.add_dir(new_shape, new_revid, 'dir-id-B', 'root-id', 'A')
1455- self.add_link(new_shape, new_revid, 'link-id-C', 'root-id', 'D', 'C')
1456- self.add_link(new_shape, new_revid, 'link-id-D', 'root-id', 'C', 'D')
1457- self.add_file(new_shape, new_revid, 'file-id-E', 'root-id', 'F',
1458+ self.add_dir(new_shape, new_revid, b'dir-id-A', b'root-id', 'B')
1459+ self.add_dir(new_shape, new_revid, b'dir-id-B', b'root-id', 'A')
1460+ self.add_link(new_shape, new_revid, b'link-id-C', b'root-id', 'D', 'C')
1461+ self.add_link(new_shape, new_revid, b'link-id-D', b'root-id', 'C', 'D')
1462+ self.add_file(new_shape, new_revid, b'file-id-E', b'root-id', 'F',
1463 '1' * 32, 12)
1464- self.add_file(new_shape, new_revid, 'file-id-F', 'root-id', 'E',
1465+ self.add_file(new_shape, new_revid, b'file-id-F', b'root-id', 'E',
1466 '2' * 32, 24)
1467 self.assertTransitionFromBasisToShape(basis_shape, old_revid,
1468 new_shape, new_revid)
1469
1470 def test_adds(self):
1471 # test adding paths and dirs, including adding to a newly added dir.
1472- old_revid = 'old-parent'
1473+ old_revid = b'old-parent'
1474 basis_shape = Inventory(root_id=None)
1475 # with a root, so its a commit after the first.
1476- self.add_dir(basis_shape, old_revid, 'root-id', None, '')
1477- new_revid = 'new-parent'
1478+ self.add_dir(basis_shape, old_revid, b'root-id', None, '')
1479+ new_revid = b'new-parent'
1480 new_shape = Inventory(root_id=None)
1481 self.add_new_root(new_shape, old_revid, new_revid)
1482- self.add_dir(new_shape, new_revid, 'dir-id-A', 'root-id', 'A')
1483- self.add_link(new_shape, new_revid, 'link-id-B', 'root-id', 'B', 'C')
1484- self.add_file(new_shape, new_revid, 'file-id-C', 'root-id', 'C',
1485+ self.add_dir(new_shape, new_revid, b'dir-id-A', b'root-id', 'A')
1486+ self.add_link(new_shape, new_revid, b'link-id-B', b'root-id', 'B', 'C')
1487+ self.add_file(new_shape, new_revid, b'file-id-C', b'root-id', 'C',
1488 '1' * 32, 12)
1489- self.add_file(new_shape, new_revid, 'file-id-D', 'dir-id-A', 'D',
1490+ self.add_file(new_shape, new_revid, b'file-id-D', b'dir-id-A', 'D',
1491 '2' * 32, 24)
1492 self.assertTransitionFromBasisToShape(basis_shape, old_revid,
1493 new_shape, new_revid)
1494@@ -750,75 +750,75 @@
1495 def test_removes(self):
1496 # test removing paths, including paths that are within other also
1497 # removed paths.
1498- old_revid = 'old-parent'
1499+ old_revid = b'old-parent'
1500 basis_shape = Inventory(root_id=None)
1501- self.add_dir(basis_shape, old_revid, 'root-id', None, '')
1502- self.add_dir(basis_shape, old_revid, 'dir-id-A', 'root-id', 'A')
1503- self.add_link(basis_shape, old_revid, 'link-id-B', 'root-id', 'B', 'C')
1504- self.add_file(basis_shape, old_revid, 'file-id-C', 'root-id', 'C',
1505+ self.add_dir(basis_shape, old_revid, b'root-id', None, '')
1506+ self.add_dir(basis_shape, old_revid, b'dir-id-A', b'root-id', 'A')
1507+ self.add_link(basis_shape, old_revid, b'link-id-B', b'root-id', 'B', 'C')
1508+ self.add_file(basis_shape, old_revid, b'file-id-C', b'root-id', 'C',
1509 '1' * 32, 12)
1510- self.add_file(basis_shape, old_revid, 'file-id-D', 'dir-id-A', 'D',
1511+ self.add_file(basis_shape, old_revid, b'file-id-D', b'dir-id-A', 'D',
1512 '2' * 32, 24)
1513- new_revid = 'new-parent'
1514+ new_revid = b'new-parent'
1515 new_shape = Inventory(root_id=None)
1516 self.add_new_root(new_shape, old_revid, new_revid)
1517 self.assertTransitionFromBasisToShape(basis_shape, old_revid,
1518 new_shape, new_revid)
1519
1520 def test_move_to_added_dir(self):
1521- old_revid = 'old-parent'
1522+ old_revid = b'old-parent'
1523 basis_shape = Inventory(root_id=None)
1524- self.add_dir(basis_shape, old_revid, 'root-id', None, '')
1525- self.add_link(basis_shape, old_revid, 'link-id-B', 'root-id', 'B', 'C')
1526- new_revid = 'new-parent'
1527+ self.add_dir(basis_shape, old_revid, b'root-id', None, '')
1528+ self.add_link(basis_shape, old_revid, b'link-id-B', b'root-id', 'B', 'C')
1529+ new_revid = b'new-parent'
1530 new_shape = Inventory(root_id=None)
1531 self.add_new_root(new_shape, old_revid, new_revid)
1532- self.add_dir(new_shape, new_revid, 'dir-id-A', 'root-id', 'A')
1533- self.add_link(new_shape, new_revid, 'link-id-B', 'dir-id-A', 'B', 'C')
1534+ self.add_dir(new_shape, new_revid, b'dir-id-A', b'root-id', 'A')
1535+ self.add_link(new_shape, new_revid, b'link-id-B', b'dir-id-A', 'B', 'C')
1536 self.assertTransitionFromBasisToShape(basis_shape, old_revid,
1537 new_shape, new_revid)
1538
1539 def test_move_from_removed_dir(self):
1540- old_revid = 'old-parent'
1541+ old_revid = b'old-parent'
1542 basis_shape = Inventory(root_id=None)
1543- self.add_dir(basis_shape, old_revid, 'root-id', None, '')
1544- self.add_dir(basis_shape, old_revid, 'dir-id-A', 'root-id', 'A')
1545- self.add_link(basis_shape, old_revid, 'link-id-B', 'dir-id-A', 'B', 'C')
1546- new_revid = 'new-parent'
1547+ self.add_dir(basis_shape, old_revid, b'root-id', None, '')
1548+ self.add_dir(basis_shape, old_revid, b'dir-id-A', b'root-id', 'A')
1549+ self.add_link(basis_shape, old_revid, b'link-id-B', b'dir-id-A', 'B', 'C')
1550+ new_revid = b'new-parent'
1551 new_shape = Inventory(root_id=None)
1552 self.add_new_root(new_shape, old_revid, new_revid)
1553- self.add_link(new_shape, new_revid, 'link-id-B', 'root-id', 'B', 'C')
1554+ self.add_link(new_shape, new_revid, b'link-id-B', b'root-id', 'B', 'C')
1555 self.assertTransitionFromBasisToShape(basis_shape, old_revid,
1556 new_shape, new_revid)
1557
1558 def test_move_moves_children_recursively(self):
1559- old_revid = 'old-parent'
1560+ old_revid = b'old-parent'
1561 basis_shape = Inventory(root_id=None)
1562- self.add_dir(basis_shape, old_revid, 'root-id', None, '')
1563- self.add_dir(basis_shape, old_revid, 'dir-id-A', 'root-id', 'A')
1564- self.add_dir(basis_shape, old_revid, 'dir-id-B', 'dir-id-A', 'B')
1565- self.add_link(basis_shape, old_revid, 'link-id-C', 'dir-id-B', 'C', 'D')
1566- new_revid = 'new-parent'
1567+ self.add_dir(basis_shape, old_revid, b'root-id', None, '')
1568+ self.add_dir(basis_shape, old_revid, b'dir-id-A', b'root-id', 'A')
1569+ self.add_dir(basis_shape, old_revid, b'dir-id-B', b'dir-id-A', 'B')
1570+ self.add_link(basis_shape, old_revid, b'link-id-C', b'dir-id-B', 'C', 'D')
1571+ new_revid = b'new-parent'
1572 new_shape = Inventory(root_id=None)
1573 self.add_new_root(new_shape, old_revid, new_revid)
1574 # the moved path:
1575- self.add_dir(new_shape, new_revid, 'dir-id-A', 'root-id', 'B')
1576+ self.add_dir(new_shape, new_revid, b'dir-id-A', b'root-id', 'B')
1577 # unmoved children.
1578- self.add_dir(new_shape, old_revid, 'dir-id-B', 'dir-id-A', 'B')
1579- self.add_link(new_shape, old_revid, 'link-id-C', 'dir-id-B', 'C', 'D')
1580+ self.add_dir(new_shape, old_revid, b'dir-id-B', b'dir-id-A', 'B')
1581+ self.add_link(new_shape, old_revid, b'link-id-C', b'dir-id-B', 'C', 'D')
1582 self.assertTransitionFromBasisToShape(basis_shape, old_revid,
1583 new_shape, new_revid)
1584
1585 def test_add_files_to_empty_directory(self):
1586- old_revid = 'old-parent'
1587+ old_revid = b'old-parent'
1588 basis_shape = Inventory(root_id=None)
1589- self.add_dir(basis_shape, old_revid, 'root-id', None, '')
1590- self.add_dir(basis_shape, old_revid, 'dir-id-A', 'root-id', 'A')
1591- new_revid = 'new-parent'
1592+ self.add_dir(basis_shape, old_revid, b'root-id', None, '')
1593+ self.add_dir(basis_shape, old_revid, b'dir-id-A', b'root-id', 'A')
1594+ new_revid = b'new-parent'
1595 new_shape = Inventory(root_id=None)
1596 self.add_new_root(new_shape, old_revid, new_revid)
1597- self.add_dir(new_shape, old_revid, 'dir-id-A', 'root-id', 'A')
1598- self.add_file(new_shape, new_revid, 'file-id-B', 'dir-id-A', 'B',
1599+ self.add_dir(new_shape, old_revid, b'dir-id-A', b'root-id', 'A')
1600+ self.add_file(new_shape, new_revid, b'file-id-B', b'dir-id-A', 'B',
1601 '1' * 32, 24)
1602 self.assertTransitionFromBasisToShape(basis_shape, old_revid,
1603 new_shape, new_revid, set_current_inventory=False)
1604
1605=== modified file 'breezy/tests/test_conflicts.py'
1606--- breezy/tests/test_conflicts.py 2018-06-21 23:03:42 +0000
1607+++ breezy/tests/test_conflicts.py 2018-06-30 17:35:22 +0000
1608@@ -76,7 +76,7 @@
1609 ('hello.BASE', b'hello world1'),
1610 ])
1611 os.mkdir('hello.OTHER')
1612- tree.add('hello', 'q')
1613+ tree.add('hello', b'q')
1614 l = conflicts.ConflictList([conflicts.TextConflict('hello')])
1615 l.remove_files(tree)
1616
1617@@ -144,7 +144,7 @@
1618 self.assertIsInstance(o.path, text_type)
1619
1620 if o.file_id is not None:
1621- self.assertIsInstance(o.file_id, str)
1622+ self.assertIsInstance(o.file_id, bytes)
1623
1624 conflict_path = getattr(o, 'conflict_path', None)
1625 if conflict_path is not None:
1626@@ -152,7 +152,7 @@
1627
1628 conflict_file_id = getattr(o, 'conflict_file_id', None)
1629 if conflict_file_id is not None:
1630- self.assertIsInstance(conflict_file_id, str)
1631+ self.assertIsInstance(conflict_file_id, bytes)
1632
1633 def test_stanzification(self):
1634 stanza = self.conflict.as_stanza()
1635@@ -329,7 +329,7 @@
1636 def _merge_other_into_this(self):
1637 b = self.builder.get_branch()
1638 wt = b.controldir.sprout('branch').open_workingtree()
1639- wt.merge_from_branch(b, 'other')
1640+ wt.merge_from_branch(b, b'other')
1641 return wt
1642
1643 def assertConflict(self, wt):
1644@@ -377,14 +377,14 @@
1645 [
1646 # File modified on both sides
1647 (dict(_base_actions='create_file',
1648- _path='file', _file_id='file-id'),
1649+ _path='file', _file_id=b'file-id'),
1650 ('filed_modified_A',
1651 dict(actions='modify_file_A', check='file_has_content_A')),
1652 ('file_modified_B',
1653 dict(actions='modify_file_B', check='file_has_content_B')),),
1654 # File modified on both sides in dir
1655 (dict(_base_actions='create_file_in_dir',
1656- _path='dir/file', _file_id='file-id'),
1657+ _path='dir/file', _file_id=b'file-id'),
1658 ('filed_modified_A_in_dir',
1659 dict(actions='modify_file_A_in_dir',
1660 check='file_in_dir_has_content_A')),
1661@@ -394,30 +394,30 @@
1662 ])
1663
1664 def do_create_file(self, path='file'):
1665- return [('add', (path, 'file-id', 'file', 'trunk content\n'))]
1666+ return [('add', (path, b'file-id', 'file', b'trunk content\n'))]
1667
1668 def do_modify_file_A(self):
1669- return [('modify', ('file', 'trunk content\nfeature A\n'))]
1670+ return [('modify', ('file', b'trunk content\nfeature A\n'))]
1671
1672 def do_modify_file_B(self):
1673- return [('modify', ('file', 'trunk content\nfeature B\n'))]
1674+ return [('modify', ('file', b'trunk content\nfeature B\n'))]
1675
1676 def do_modify_file_A_in_dir(self):
1677- return [('modify', ('dir/file', 'trunk content\nfeature A\n'))]
1678+ return [('modify', ('dir/file', b'trunk content\nfeature A\n'))]
1679
1680 def do_modify_file_B_in_dir(self):
1681- return [('modify', ('dir/file', 'trunk content\nfeature B\n'))]
1682+ return [('modify', ('dir/file', b'trunk content\nfeature B\n'))]
1683
1684 def check_file_has_content_A(self, path='file'):
1685- self.assertFileEqual('trunk content\nfeature A\n',
1686+ self.assertFileEqual(b'trunk content\nfeature A\n',
1687 osutils.pathjoin('branch', path))
1688
1689 def check_file_has_content_B(self, path='file'):
1690- self.assertFileEqual('trunk content\nfeature B\n',
1691+ self.assertFileEqual(b'trunk content\nfeature B\n',
1692 osutils.pathjoin('branch', path))
1693
1694 def do_create_file_in_dir(self):
1695- return [('add', ('dir', 'dir-id', 'directory', '')),
1696+ return [('add', ('dir', b'dir-id', 'directory', '')),
1697 ] + self.do_create_file('dir/file')
1698
1699 def check_file_in_dir_has_content_A(self):
1700@@ -448,14 +448,14 @@
1701 [
1702 # File modified/deleted
1703 (dict(_base_actions='create_file',
1704- _path='file', _file_id='file-id'),
1705+ _path='file', _file_id=b'file-id'),
1706 ('file_modified',
1707 dict(actions='modify_file', check='file_has_more_content')),
1708 ('file_deleted',
1709 dict(actions='delete_file', check='file_doesnt_exist')),),
1710 # File renamed-modified/deleted
1711 (dict(_base_actions='create_file',
1712- _path='new-file', _file_id='file-id'),
1713+ _path='new-file', _file_id=b'file-id'),
1714 ('file_renamed_and_modified',
1715 dict(actions='modify_and_rename_file',
1716 check='file_renamed_and_more_content')),
1717@@ -463,7 +463,7 @@
1718 dict(actions='delete_file', check='file_doesnt_exist')),),
1719 # File modified/deleted in dir
1720 (dict(_base_actions='create_file_in_dir',
1721- _path='dir/file', _file_id='file-id'),
1722+ _path='dir/file', _file_id=b'file-id'),
1723 ('file_modified_in_dir',
1724 dict(actions='modify_file_in_dir',
1725 check='file_in_dir_has_more_content')),
1726@@ -473,20 +473,20 @@
1727 ])
1728
1729 def do_create_file(self):
1730- return [('add', ('file', 'file-id', 'file', 'trunk content\n'))]
1731+ return [('add', ('file', b'file-id', 'file', b'trunk content\n'))]
1732
1733 def do_modify_file(self):
1734- return [('modify', ('file', 'trunk content\nmore content\n'))]
1735+ return [('modify', ('file', b'trunk content\nmore content\n'))]
1736
1737 def do_modify_and_rename_file(self):
1738- return [('modify', ('new-file', 'trunk content\nmore content\n')),
1739+ return [('modify', ('new-file', b'trunk content\nmore content\n')),
1740 ('rename', ('file', 'new-file'))]
1741
1742 def check_file_has_more_content(self):
1743- self.assertFileEqual('trunk content\nmore content\n', 'branch/file')
1744+ self.assertFileEqual(b'trunk content\nmore content\n', 'branch/file')
1745
1746 def check_file_renamed_and_more_content(self):
1747- self.assertFileEqual('trunk content\nmore content\n', 'branch/new-file')
1748+ self.assertFileEqual(b'trunk content\nmore content\n', 'branch/new-file')
1749
1750 def do_delete_file(self):
1751 return [('unversion', 'file')]
1752@@ -498,14 +498,14 @@
1753 self.assertPathDoesNotExist('branch/file')
1754
1755 def do_create_file_in_dir(self):
1756- return [('add', ('dir', 'dir-id', 'directory', '')),
1757- ('add', ('dir/file', 'file-id', 'file', 'trunk content\n'))]
1758+ return [('add', ('dir', b'dir-id', 'directory', '')),
1759+ ('add', ('dir/file', b'file-id', 'file', b'trunk content\n'))]
1760
1761 def do_modify_file_in_dir(self):
1762- return [('modify', ('dir/file', 'trunk content\nmore content\n'))]
1763+ return [('modify', ('dir/file', b'trunk content\nmore content\n'))]
1764
1765 def check_file_in_dir_has_more_content(self):
1766- self.assertFileEqual('trunk content\nmore content\n', 'branch/dir/file')
1767+ self.assertFileEqual(b'trunk content\nmore content\n', 'branch/dir/file')
1768
1769 def check_file_in_dir_doesnt_exist(self):
1770 self.assertPathDoesNotExist('branch/dir/file')
1771@@ -535,55 +535,55 @@
1772 (dict(_base_actions='create_file'),
1773 ('file_renamed',
1774 dict(actions='rename_file', check='file_renamed',
1775- path='new-file', file_id='file-id')),
1776+ path='new-file', file_id=b'file-id')),
1777 ('file_deleted',
1778 dict(actions='delete_file', check='file_doesnt_exist',
1779 # PathConflicts deletion handling requires a special
1780 # hard-coded value
1781- path='<deleted>', file_id='file-id')),),
1782+ path='<deleted>', file_id=b'file-id')),),
1783 # File renamed/deleted in dir
1784 (dict(_base_actions='create_file_in_dir'),
1785 ('file_renamed_in_dir',
1786 dict(actions='rename_file_in_dir', check='file_in_dir_renamed',
1787- path='dir/new-file', file_id='file-id')),
1788+ path='dir/new-file', file_id=b'file-id')),
1789 ('file_deleted',
1790 dict(actions='delete_file_in_dir', check='file_in_dir_doesnt_exist',
1791 # PathConflicts deletion handling requires a special
1792 # hard-coded value
1793- path='<deleted>', file_id='file-id')),),
1794+ path='<deleted>', file_id=b'file-id')),),
1795 # File renamed/renamed differently
1796 (dict(_base_actions='create_file'),
1797 ('file_renamed',
1798 dict(actions='rename_file', check='file_renamed',
1799- path='new-file', file_id='file-id')),
1800+ path='new-file', file_id=b'file-id')),
1801 ('file_renamed2',
1802 dict(actions='rename_file2', check='file_renamed2',
1803- path='new-file2', file_id='file-id')),),
1804+ path='new-file2', file_id=b'file-id')),),
1805 # Dir renamed/deleted
1806 (dict(_base_actions='create_dir'),
1807 ('dir_renamed',
1808 dict(actions='rename_dir', check='dir_renamed',
1809- path='new-dir', file_id='dir-id')),
1810+ path='new-dir', file_id=b'dir-id')),
1811 ('dir_deleted',
1812 dict(actions='delete_dir', check='dir_doesnt_exist',
1813 # PathConflicts deletion handling requires a special
1814 # hard-coded value
1815- path='<deleted>', file_id='dir-id')),),
1816+ path='<deleted>', file_id=b'dir-id')),),
1817 # Dir renamed/renamed differently
1818 (dict(_base_actions='create_dir'),
1819 ('dir_renamed',
1820 dict(actions='rename_dir', check='dir_renamed',
1821- path='new-dir', file_id='dir-id')),
1822+ path='new-dir', file_id=b'dir-id')),
1823 ('dir_renamed2',
1824 dict(actions='rename_dir2', check='dir_renamed2',
1825- path='new-dir2', file_id='dir-id')),),
1826+ path='new-dir2', file_id=b'dir-id')),),
1827 ])
1828
1829 def do_create_file(self):
1830- return [('add', ('file', 'file-id', 'file', 'trunk content\n'))]
1831+ return [('add', ('file', b'file-id', 'file', b'trunk content\n'))]
1832
1833 def do_create_dir(self):
1834- return [('add', ('dir', 'dir-id', 'directory', ''))]
1835+ return [('add', ('dir', b'dir-id', 'directory', ''))]
1836
1837 def do_rename_file(self):
1838 return [('rename', ('file', 'new-file'))]
1839@@ -629,8 +629,8 @@
1840 self.assertPathDoesNotExist('branch/dir')
1841
1842 def do_create_file_in_dir(self):
1843- return [('add', ('dir', 'dir-id', 'directory', '')),
1844- ('add', ('dir/file', 'file-id', 'file', 'trunk content\n'))]
1845+ return [('add', ('dir', b'dir-id', 'directory', '')),
1846+ ('add', ('dir/file', b'file-id', 'file', b'trunk content\n'))]
1847
1848 def do_rename_file_in_dir(self):
1849 return [('rename', ('dir/file', 'dir/new-file'))]
1850@@ -686,44 +686,44 @@
1851 (dict(_base_actions='nothing'),
1852 ('filea_created',
1853 dict(actions='create_file_a', check='file_content_a',
1854- path='file', file_id='file-a-id')),
1855+ path='file', file_id=b'file-a-id')),
1856 ('fileb_created',
1857 dict(actions='create_file_b', check='file_content_b',
1858- path='file', file_id='file-b-id')),),
1859+ path='file', file_id=b'file-b-id')),),
1860 # File created with different file-ids but deleted on one side
1861 (dict(_base_actions='create_file_a'),
1862 ('filea_replaced',
1863 dict(actions='replace_file_a_by_b', check='file_content_b',
1864- path='file', file_id='file-b-id')),
1865+ path='file', file_id=b'file-b-id')),
1866 ('filea_modified',
1867 dict(actions='modify_file_a', check='file_new_content',
1868- path='file', file_id='file-a-id')),),
1869+ path='file', file_id=b'file-a-id')),),
1870 ])
1871
1872 def do_nothing(self):
1873 return []
1874
1875 def do_create_file_a(self):
1876- return [('add', ('file', 'file-a-id', 'file', 'file a content\n'))]
1877+ return [('add', ('file', b'file-a-id', 'file', b'file a content\n'))]
1878
1879 def check_file_content_a(self):
1880- self.assertFileEqual('file a content\n', 'branch/file')
1881+ self.assertFileEqual(b'file a content\n', 'branch/file')
1882
1883 def do_create_file_b(self):
1884- return [('add', ('file', 'file-b-id', 'file', 'file b content\n'))]
1885+ return [('add', ('file', b'file-b-id', 'file', b'file b content\n'))]
1886
1887 def check_file_content_b(self):
1888- self.assertFileEqual('file b content\n', 'branch/file')
1889+ self.assertFileEqual(b'file b content\n', 'branch/file')
1890
1891 def do_replace_file_a_by_b(self):
1892 return [('unversion', 'file'),
1893- ('add', ('file', 'file-b-id', 'file', 'file b content\n'))]
1894+ ('add', ('file', b'file-b-id', 'file', b'file b content\n'))]
1895
1896 def do_modify_file_a(self):
1897- return [('modify', ('file', 'new content\n'))]
1898+ return [('modify', ('file', b'new content\n'))]
1899
1900 def check_file_new_content(self):
1901- self.assertFileEqual('new content\n', 'branch/file')
1902+ self.assertFileEqual(b'new content\n', 'branch/file')
1903
1904 def _get_resolve_path_arg(self, wt, action):
1905 return self._this['path']
1906@@ -933,23 +933,23 @@
1907 (dict(_base_actions='create_dir1_dir2'),
1908 ('dir1_into_dir2',
1909 dict(actions='move_dir1_into_dir2', check='dir1_moved',
1910- dir_id='dir1-id', target_id='dir2-id', xfail=False)),
1911+ dir_id=b'dir1-id', target_id=b'dir2-id', xfail=False)),
1912 ('dir2_into_dir1',
1913 dict(actions='move_dir2_into_dir1', check='dir2_moved',
1914- dir_id='dir2-id', target_id='dir1-id', xfail=False))),
1915+ dir_id=b'dir2-id', target_id=b'dir1-id', xfail=False))),
1916 # Subdirs moved into each other
1917 (dict(_base_actions='create_dir1_4'),
1918 ('dir1_into_dir4',
1919 dict(actions='move_dir1_into_dir4', check='dir1_2_moved',
1920- dir_id='dir1-id', target_id='dir4-id', xfail=True)),
1921+ dir_id=b'dir1-id', target_id=b'dir4-id', xfail=True)),
1922 ('dir3_into_dir2',
1923 dict(actions='move_dir3_into_dir2', check='dir3_4_moved',
1924- dir_id='dir3-id', target_id='dir2-id', xfail=True))),
1925+ dir_id=b'dir3-id', target_id=b'dir2-id', xfail=True))),
1926 ])
1927
1928 def do_create_dir1_dir2(self):
1929- return [('add', ('dir1', 'dir1-id', 'directory', '')),
1930- ('add', ('dir2', 'dir2-id', 'directory', '')),]
1931+ return [('add', ('dir1', b'dir1-id', 'directory', '')),
1932+ ('add', ('dir2', b'dir2-id', 'directory', '')),]
1933
1934 def do_move_dir1_into_dir2(self):
1935 return [('rename', ('dir1', 'dir2/dir1'))]
1936@@ -966,10 +966,10 @@
1937 self.assertPathExists('branch/dir1/dir2')
1938
1939 def do_create_dir1_4(self):
1940- return [('add', ('dir1', 'dir1-id', 'directory', '')),
1941- ('add', ('dir1/dir2', 'dir2-id', 'directory', '')),
1942- ('add', ('dir3', 'dir3-id', 'directory', '')),
1943- ('add', ('dir3/dir4', 'dir4-id', 'directory', '')),]
1944+ return [('add', ('dir1', b'dir1-id', 'directory', '')),
1945+ ('add', ('dir1/dir2', b'dir2-id', 'directory', '')),
1946+ ('add', ('dir3', b'dir3-id', 'directory', '')),
1947+ ('add', ('dir3/dir4', b'dir4-id', 'directory', '')),]
1948
1949 def do_move_dir1_into_dir4(self):
1950 return [('rename', ('dir1', 'dir3/dir4/dir1'))]
1951@@ -994,8 +994,8 @@
1952 return wt.id2path(self._other['dir_id'])
1953
1954 def assertParentLoop(self, wt, c):
1955- self.assertEqual(self._other['dir_id'], c.file_id)
1956- self.assertEqual(self._other['target_id'], c.conflict_file_id)
1957+ self.assertEqual(self._other['dir_id'], c.file_id)
1958+ self.assertEqual(self._other['target_id'], c.conflict_file_id)
1959 # The conflict paths are irrelevant (they are deterministic but not
1960 # worth checking since they don't provide the needed information
1961 # anyway)
1962
1963=== modified file 'breezy/tests/test_export.py'
1964--- breezy/tests/test_export.py 2018-05-20 12:21:54 +0000
1965+++ breezy/tests/test_export.py 2018-06-30 17:35:22 +0000
1966@@ -115,10 +115,10 @@
1967 builder = self.make_branch_builder('source')
1968 builder.start_series()
1969 builder.build_snapshot(None, [
1970- ('add', ('', 'root-id', 'directory', '')),
1971- ('add', ('a', 'a-id', 'file', 'content\n'))])
1972+ ('add', ('', b'root-id', 'directory', '')),
1973+ ('add', ('a', b'a-id', 'file', b'content\n'))])
1974 builder.build_snapshot(None, [
1975- ('add', ('b', 'b-id', 'file', 'content\n'))])
1976+ ('add', ('b', b'b-id', 'file', b'content\n'))])
1977 builder.finish_series()
1978 b = builder.get_branch()
1979 b.lock_read()
1980@@ -146,11 +146,11 @@
1981 a_time = time.mktime((1999, 12, 12, 0, 0, 0, 0, 0, 0))
1982 b_time = time.mktime((1980, 0o1, 0o1, 0, 0, 0, 0, 0, 0))
1983 builder.build_snapshot(None, [
1984- ('add', ('', 'root-id', 'directory', '')),
1985- ('add', ('a', 'a-id', 'file', 'content\n'))],
1986+ ('add', ('', b'root-id', 'directory', '')),
1987+ ('add', ('a', b'a-id', 'file', b'content\n'))],
1988 timestamp=a_time)
1989 builder.build_snapshot(None, [
1990- ('add', ('b', 'b-id', 'file', 'content\n'))],
1991+ ('add', ('b', b'b-id', 'file', b'content\n'))],
1992 timestamp=b_time)
1993 builder.finish_series()
1994 b = builder.get_branch()
1995@@ -167,9 +167,9 @@
1996 builder.start_series()
1997 foo_time = time.mktime((1999, 12, 12, 0, 0, 0, 0, 0, 0))
1998 builder.build_snapshot(None, [
1999- ('add', ('', 'root-id', 'directory', '')),
2000- ('add', ('subdir', 'subdir-id', 'directory', '')),
2001- ('add', ('subdir/foo.txt', 'foo-id', 'file', 'content\n'))],
2002+ ('add', ('', b'root-id', 'directory', '')),
2003+ ('add', ('subdir', b'subdir-id', 'directory', '')),
2004+ ('add', ('subdir/foo.txt', b'foo-id', 'file', b'content\n'))],
2005 timestamp=foo_time)
2006 builder.finish_series()
2007 b = builder.get_branch()
2008@@ -228,18 +228,18 @@
2009 per_file_timestamps=True)
2010 export.export(wt, 'testdir2/target.tar.gz', format="tgz",
2011 per_file_timestamps=True)
2012- file1 = open('testdir1/target.tar.gz', 'r')
2013+ file1 = open('testdir1/target.tar.gz', 'rb')
2014 self.addCleanup(file1.close)
2015- file2 = open('testdir1/target.tar.gz', 'r')
2016+ file2 = open('testdir2/target.tar.gz', 'rb')
2017 self.addCleanup(file2.close)
2018 content1 = file1.read()
2019 content2 = file2.read()
2020 self.assertEqualDiff(content1, content2)
2021 # the gzip module doesn't have a way to read back to the original
2022 # filename, but it's stored as-is in the tarfile.
2023- self.assertFalse("testdir1" in content1)
2024- self.assertFalse("target.tar.gz" in content1)
2025- self.assertTrue("target.tar" in content1)
2026+ self.assertFalse(b"testdir1" in content1)
2027+ self.assertFalse(b"target.tar.gz" in content1)
2028+ self.assertTrue(b"target.tar" in content1)
2029
2030 def test_tbz2(self):
2031 wt = self.make_branch_and_tree('.')
2032
2033=== modified file 'breezy/tests/test_export_pot.py'
2034--- breezy/tests/test_export_pot.py 2018-02-17 02:02:38 +0000
2035+++ breezy/tests/test_export_pot.py 2018-06-30 17:35:22 +0000
2036@@ -237,13 +237,13 @@
2037
2038 def test_option_without_help(self):
2039 opt = option.Option("helpless")
2040- self.assertEqual("", self.pot_from_option(opt))
2041+ self.assertEqual(b"", self.pot_from_option(opt))
2042
2043 def test_option_with_help(self):
2044 opt = option.Option("helpful", help="Info.")
2045- self.assertContainsString(self.pot_from_option(opt), "\n"
2046- "# help of 'helpful' test\n"
2047- "msgid \"Info.\"\n")
2048+ self.assertContainsString(self.pot_from_option(opt), b"\n"
2049+ b"# help of 'helpful' test\n"
2050+ b"msgid \"Info.\"\n")
2051
2052 def test_option_hidden(self):
2053 opt = option.Option("hidden", help="Unseen.", hidden=True)
2054@@ -253,57 +253,57 @@
2055 context = export_pot._ModuleContext("remote.py", 3)
2056 opt = option.Option("metaphor", help="Not a literal in the source.")
2057 self.assertContainsString(self.pot_from_option(opt, context),
2058- "#: remote.py:3\n"
2059- "# help of 'metaphor' test\n")
2060+ b"#: remote.py:3\n"
2061+ b"# help of 'metaphor' test\n")
2062
2063 def test_option_context_string(self):
2064 s = "Literally."
2065 context = export_pot._ModuleContext("local.py", 3, ({}, {s: 17}))
2066 opt = option.Option("example", help=s)
2067 self.assertContainsString(self.pot_from_option(opt, context),
2068- "#: local.py:17\n"
2069- "# help of 'example' test\n")
2070+ b"#: local.py:17\n"
2071+ b"# help of 'example' test\n")
2072
2073 def test_registry_option_title(self):
2074 opt = option.RegistryOption.from_kwargs("group", help="Pick one.",
2075 title="Choose!")
2076 pot = self.pot_from_option(opt)
2077- self.assertContainsString(pot, "\n"
2078- "# title of 'group' test\n"
2079- "msgid \"Choose!\"\n")
2080- self.assertContainsString(pot, "\n"
2081- "# help of 'group' test\n"
2082- "msgid \"Pick one.\"\n")
2083+ self.assertContainsString(pot, b"\n"
2084+ b"# title of 'group' test\n"
2085+ b"msgid \"Choose!\"\n")
2086+ self.assertContainsString(pot, b"\n"
2087+ b"# help of 'group' test\n"
2088+ b"msgid \"Pick one.\"\n")
2089
2090 def test_registry_option_title_context_missing(self):
2091 context = export_pot._ModuleContext("theory.py", 3)
2092 opt = option.RegistryOption.from_kwargs("abstract", title="Unfounded!")
2093 self.assertContainsString(self.pot_from_option(opt, context),
2094- "#: theory.py:3\n"
2095- "# title of 'abstract' test\n")
2096+ b"#: theory.py:3\n"
2097+ b"# title of 'abstract' test\n")
2098
2099 def test_registry_option_title_context_string(self):
2100 s = "Grounded!"
2101 context = export_pot._ModuleContext("practice.py", 3, ({}, {s: 144}))
2102 opt = option.RegistryOption.from_kwargs("concrete", title=s)
2103 self.assertContainsString(self.pot_from_option(opt, context),
2104- "#: practice.py:144\n"
2105- "# title of 'concrete' test\n")
2106+ b"#: practice.py:144\n"
2107+ b"# title of 'concrete' test\n")
2108
2109 def test_registry_option_value_switches(self):
2110 opt = option.RegistryOption.from_kwargs("switch", help="Flip one.",
2111 value_switches=True, enum_switch=False,
2112 red="Big.", green="Small.")
2113 pot = self.pot_from_option(opt)
2114- self.assertContainsString(pot, "\n"
2115- "# help of 'switch' test\n"
2116- "msgid \"Flip one.\"\n")
2117- self.assertContainsString(pot, "\n"
2118- "# help of 'switch=red' test\n"
2119- "msgid \"Big.\"\n")
2120- self.assertContainsString(pot, "\n"
2121- "# help of 'switch=green' test\n"
2122- "msgid \"Small.\"\n")
2123+ self.assertContainsString(pot, b"\n"
2124+ b"# help of 'switch' test\n"
2125+ b"msgid \"Flip one.\"\n")
2126+ self.assertContainsString(pot, b"\n"
2127+ b"# help of 'switch=red' test\n"
2128+ b"msgid \"Big.\"\n")
2129+ self.assertContainsString(pot, b"\n"
2130+ b"# help of 'switch=green' test\n"
2131+ b"msgid \"Small.\"\n")
2132
2133 def test_registry_option_value_switches_hidden(self):
2134 reg = registry.Registry()
2135@@ -314,13 +314,13 @@
2136 opt = option.RegistryOption("protocol", "Talking.", reg,
2137 value_switches=True, enum_switch=False)
2138 pot = self.pot_from_option(opt)
2139- self.assertContainsString(pot, "\n"
2140- "# help of 'protocol' test\n"
2141- "msgid \"Talking.\"\n")
2142- self.assertContainsString(pot, "\n"
2143- "# help of 'protocol=new' test\n"
2144- "msgid \"Current.\"\n")
2145- self.assertNotContainsString(pot, "'protocol=old'")
2146+ self.assertContainsString(pot, b"\n"
2147+ b"# help of 'protocol' test\n"
2148+ b"msgid \"Talking.\"\n")
2149+ self.assertContainsString(pot, b"\n"
2150+ b"# help of 'protocol=new' test\n"
2151+ b"msgid \"Current.\"\n")
2152+ self.assertNotContainsString(pot, b"'protocol=old'")
2153
2154
2155 class TestPotExporter(tests.TestCase):
2156@@ -333,15 +333,15 @@
2157 exporter.poentry_in_context(context, "Common line.")
2158 context.lineno = 3
2159 exporter.poentry_in_context(context, "Common line.")
2160- self.assertEqual(1, exporter.outf.getvalue().count("Common line."))
2161-
2162+ self.assertEqual(1, exporter.outf.getvalue().count(b"Common line."))
2163+
2164 def test_duplicates_included(self):
2165 exporter = export_pot._PotExporter(BytesIO(), True)
2166 context = export_pot._ModuleContext("mod.py", 1)
2167 exporter.poentry_in_context(context, "Common line.")
2168 context.lineno = 3
2169 exporter.poentry_in_context(context, "Common line.")
2170- self.assertEqual(2, exporter.outf.getvalue().count("Common line."))
2171+ self.assertEqual(2, exporter.outf.getvalue().count(b"Common line."))
2172
2173
2174 class PoEntryTestCase(tests.TestCase):
2175@@ -353,7 +353,7 @@
2176 def check_output(self, expected):
2177 self.assertEqual(
2178 self.exporter.outf.getvalue(),
2179- textwrap.dedent(expected)
2180+ textwrap.dedent(expected.decode('utf-8')).encode('utf-8')
2181 )
2182
2183
2184@@ -362,7 +362,7 @@
2185 def test_simple(self):
2186 self.exporter.poentry('dummy', 1, "spam")
2187 self.exporter.poentry('dummy', 2, "ham", 'EGG')
2188- self.check_output('''\
2189+ self.check_output(b'''\
2190 #: dummy:1
2191 msgid "spam"
2192 msgstr ""
2193@@ -379,7 +379,7 @@
2194 # This should be ignored.
2195 self.exporter.poentry('dummy', 2, "spam", 'EGG')
2196
2197- self.check_output('''\
2198+ self.check_output(b'''\
2199 #: dummy:1
2200 msgid "spam"
2201 msgstr ""\n
2202@@ -394,7 +394,7 @@
2203 10,
2204 '''foo\nbar\nbaz\n'''
2205 )
2206- self.check_output('''\
2207+ self.check_output(b'''\
2208 #: dummy:10
2209 msgid ""
2210 "foo\\n"
2211@@ -409,7 +409,7 @@
2212 10,
2213 '''spam\nham\negg\n\nSPAM\nHAM\nEGG\n'''
2214 )
2215- self.check_output('''\
2216+ self.check_output(b'''\
2217 #: dummy:10
2218 msgid ""
2219 "spam\\n"
2220@@ -447,22 +447,22 @@
2221 export_pot._write_command_help(self.exporter, cmd_Demo())
2222 result = self.exporter.outf.getvalue()
2223 # We don't care about filename and lineno here.
2224- result = re.sub(r'(?m)^#: [^\n]+\n', '', result)
2225+ result = re.sub(br'(?m)^#: [^\n]+\n', b'', result)
2226
2227 self.assertEqualDiff(
2228- 'msgid "A sample command."\n'
2229- 'msgstr ""\n'
2230- '\n' # :Usage: should not be translated.
2231- 'msgid ""\n'
2232- '":Examples:\\n"\n'
2233- '" Example 1::"\n'
2234- 'msgstr ""\n'
2235- '\n'
2236- 'msgid " cmd arg1"\n'
2237- 'msgstr ""\n'
2238- '\n'
2239- 'msgid "Blah Blah Blah"\n'
2240- 'msgstr ""\n'
2241- '\n',
2242+ b'msgid "A sample command."\n'
2243+ b'msgstr ""\n'
2244+ b'\n' # :Usage: should not be translated.
2245+ b'msgid ""\n'
2246+ b'":Examples:\\n"\n'
2247+ b'" Example 1::"\n'
2248+ b'msgstr ""\n'
2249+ b'\n'
2250+ b'msgid " cmd arg1"\n'
2251+ b'msgstr ""\n'
2252+ b'\n'
2253+ b'msgid "Blah Blah Blah"\n'
2254+ b'msgstr ""\n'
2255+ b'\n',
2256 result
2257 )
2258
2259=== modified file 'breezy/tests/test_fetch.py'
2260--- breezy/tests/test_fetch.py 2018-06-19 01:27:34 +0000
2261+++ breezy/tests/test_fetch.py 2018-06-30 17:35:22 +0000
2262@@ -517,9 +517,9 @@
2263
2264 def test_two_fetch_changed_root(self):
2265 self.make_two_commits(change_root=True, fetch_twice=True)
2266- self.assertEqual((), self.get_parents('unique-id', 'second-id'))
2267+ self.assertEqual((), self.get_parents(b'unique-id', b'second-id'))
2268
2269 def test_two_fetches(self):
2270 self.make_two_commits(change_root=False, fetch_twice=True)
2271- self.assertEqual((('TREE_ROOT', 'first-id'),),
2272- self.get_parents('TREE_ROOT', 'second-id'))
2273+ self.assertEqual(((b'TREE_ROOT', b'first-id'),),
2274+ self.get_parents(b'TREE_ROOT', b'second-id'))
2275
2276=== modified file 'breezy/tests/test_http.py'
2277--- breezy/tests/test_http.py 2018-05-11 00:02:31 +0000
2278+++ breezy/tests/test_http.py 2018-06-30 17:35:22 +0000
2279@@ -1203,11 +1203,11 @@
2280
2281 def assertProxied(self):
2282 t = self.get_readonly_transport()
2283- self.assertEqual('proxied contents of foo\n', t.get('foo').read())
2284+ self.assertEqual(b'proxied contents of foo\n', t.get('foo').read())
2285
2286 def assertNotProxied(self):
2287 t = self.get_readonly_transport()
2288- self.assertEqual('contents of foo\n', t.get('foo').read())
2289+ self.assertEqual(b'contents of foo\n', t.get('foo').read())
2290
2291 def test_http_proxy(self):
2292 self.overrideEnv('http_proxy', self.proxy_url)
2293@@ -1810,12 +1810,12 @@
2294 httpd = self.http_server.server
2295
2296 socket = SampleSocket(
2297- 'POST /.bzr/smart %s \r\n' % self._protocol_version
2298+ b'POST /.bzr/smart %s \r\n' % self._protocol_version.encode('ascii')
2299 # HTTP/1.1 posts must have a Content-Length (but it doesn't hurt
2300 # for 1.0)
2301- + 'Content-Length: 6\r\n'
2302- '\r\n'
2303- 'hello\n')
2304+ + b'Content-Length: 6\r\n'
2305+ b'\r\n'
2306+ b'hello\n')
2307 # Beware: the ('localhost', 80) below is the
2308 # client_address parameter, but we don't have one because
2309 # we have defined a socket which is not bound to an
2310@@ -1827,7 +1827,7 @@
2311 response = socket.writefile.getvalue()
2312 self.assertStartsWith(response, '%s 200 ' % self._protocol_version)
2313 # This includes the end of the HTTP headers, and all the body.
2314- expected_end_of_response = '\r\n\r\nok\x012\n'
2315+ expected_end_of_response = b'\r\n\r\nok\x012\n'
2316 self.assertEndsWith(response, expected_end_of_response)
2317
2318
2319
2320=== modified file 'breezy/tests/test_http_response.py'
2321--- breezy/tests/test_http_response.py 2017-09-26 00:55:04 +0000
2322+++ breezy/tests/test_http_response.py 2018-06-30 17:35:22 +0000
2323@@ -88,13 +88,13 @@
2324
2325 def test_iter_many(self):
2326 f = response.ResponseFile('many', BytesIO(b'0\n1\nboo!\n'))
2327- self.assertEqual(['0\n', '1\n', 'boo!\n'], list(f))
2328+ self.assertEqual([b'0\n', b'1\n', b'boo!\n'], list(f))
2329
2330
2331 class TestHTTPConnection(tests.TestCase):
2332
2333 def test_cleanup_pipe(self):
2334- sock = ReadSocket("""HTTP/1.1 200 OK\r
2335+ sock = ReadSocket(b"""HTTP/1.1 200 OK\r
2336 Content-Type: text/plain; charset=UTF-8\r
2337 Content-Length: 18
2338 \r
2339@@ -108,7 +108,7 @@
2340 # Now, get the response
2341 resp = conn.getresponse()
2342 # Read part of the response
2343- self.assertEqual('0123456789\n', resp.read(11))
2344+ self.assertEqual(b'0123456789\n', resp.read(11))
2345 # Override the thresold to force the warning emission
2346 conn._range_warning_thresold = 6 # There are 7 bytes pending
2347 conn.cleanup_pipe()
2348@@ -122,7 +122,7 @@
2349 # which offsets are easy to calculate for test writers. It's used as a
2350 # building block with slight variations but basically 'a' is the first char
2351 # of the range and 'z' is the last.
2352- alpha = 'abcdefghijklmnopqrstuvwxyz'
2353+ alpha = b'abcdefghijklmnopqrstuvwxyz'
2354
2355 def test_can_read_at_first_access(self):
2356 """Test that the just created file can be read."""
2357@@ -137,14 +137,14 @@
2358 cur = start # For an overall offset assertion
2359 f.seek(start + 3)
2360 cur += 3
2361- self.assertEqual('def', f.read(3))
2362+ self.assertEqual(b'def', f.read(3))
2363 cur += len('def')
2364 f.seek(4, 1)
2365 cur += 4
2366- self.assertEqual('klmn', f.read(4))
2367+ self.assertEqual(b'klmn', f.read(4))
2368 cur += len('klmn')
2369 # read(0) in the middle of a range
2370- self.assertEqual('', f.read(0))
2371+ self.assertEqual(b'', f.read(0))
2372 # seek in place
2373 here = f.tell()
2374 f.seek(0, 1)
2375@@ -153,9 +153,9 @@
2376
2377 def test_read_zero(self):
2378 f = self._file
2379- self.assertEqual('', f.read(0))
2380+ self.assertEqual(b'', f.read(0))
2381 f.seek(10, 1)
2382- self.assertEqual('', f.read(0))
2383+ self.assertEqual(b'', f.read(0))
2384
2385 def test_seek_at_range_end(self):
2386 f = self._file
2387@@ -165,14 +165,14 @@
2388 """Test read behaviour at range end."""
2389 f = self._file
2390 self.assertEqual(self.alpha, f.read())
2391- self.assertEqual('', f.read(0))
2392+ self.assertEqual(b'', f.read(0))
2393 self.assertRaises(errors.InvalidRange, f.read, 1)
2394
2395 def test_unbounded_read_after_seek(self):
2396 f = self._file
2397 f.seek(24, 1)
2398 # Should not cross ranges
2399- self.assertEqual('yz', f.read())
2400+ self.assertEqual(b'yz', f.read())
2401
2402 def test_seek_backwards(self):
2403 f = self._file
2404@@ -212,7 +212,7 @@
2405 """
2406 f = self._file
2407 f.seek(-2, 2)
2408- self.assertEqual('yz', f.read())
2409+ self.assertEqual(b'yz', f.read())
2410
2411
2412 class TestRangeFileSizeUnknown(tests.TestCase, TestRangeFileMixin):
2413@@ -236,8 +236,8 @@
2414 """Test read behaviour at range end."""
2415 f = self._file
2416 self.assertEqual(self.alpha, f.read())
2417- self.assertEqual('', f.read(0))
2418- self.assertEqual('', f.read(1))
2419+ self.assertEqual(b'', f.read(0))
2420+ self.assertEqual(b'', f.read(1))
2421
2422
2423 class TestRangeFileSizeKnown(tests.TestCase, TestRangeFileMixin):
2424@@ -286,14 +286,14 @@
2425 # in HTTP response headers and the boundary lines that separate
2426 # multipart content.
2427
2428- boundary = "separation"
2429+ boundary = b"separation"
2430
2431 def setUp(self):
2432 super(TestRangeFileMultipleRanges, self).setUp()
2433
2434 boundary = self.boundary
2435
2436- content = ''
2437+ content = b''
2438 self.first_range_start = 25
2439 file_size = 200 # big enough to encompass all ranges
2440 for (start, part) in [(self.first_range_start, self.alpha),
2441@@ -311,7 +311,7 @@
2442
2443 def _boundary_line(self):
2444 """Helper to build the formatted boundary line."""
2445- return '--' + self.boundary + '\r\n'
2446+ return b'--' + self.boundary + b'\r\n'
2447
2448 def set_file_boundary(self):
2449 # Ranges are set by decoding the range headers, the RangeFile user is
2450@@ -320,7 +320,7 @@
2451 # which is part of the Content-Type header).
2452 self._file.set_boundary(self.boundary)
2453
2454- def _multipart_byterange(self, data, offset, boundary, file_size='*'):
2455+ def _multipart_byterange(self, data, offset, boundary, file_size=b'*'):
2456 """Encode a part of a file as a multipart/byterange MIME type.
2457
2458 When a range request is issued, the HTTP response body can be
2459@@ -342,10 +342,12 @@
2460 # A range is described by a set of headers, but only 'Content-Range' is
2461 # required for our implementation (TestHandleResponse below will
2462 # exercise ranges with multiple or missing headers')
2463- range += 'Content-Range: bytes %d-%d/%d\r\n' % (offset,
2464- offset+len(data)-1,
2465- file_size)
2466- range += '\r\n'
2467+ if isinstance(file_size, int):
2468+ file_size = b'%d' % file_size
2469+ range += b'Content-Range: bytes %d-%d/%s\r\n' % (offset,
2470+ offset+len(data)-1,
2471+ file_size)
2472+ range += b'\r\n'
2473 # Finally the raw bytes
2474 range += data
2475 return range
2476@@ -357,9 +359,9 @@
2477 self.assertEqual(self.alpha, f.read()) # Read second range
2478 self.assertEqual(126, f.tell())
2479 f.seek(126) # Start of third range which is also the current pos !
2480- self.assertEqual('A', f.read(1))
2481+ self.assertEqual(b'A', f.read(1))
2482 f.seek(10, 1)
2483- self.assertEqual('LMN', f.read(3))
2484+ self.assertEqual(b'LMN', f.read(3))
2485
2486 def test_seek_from_end(self):
2487 """See TestRangeFileMixin.test_seek_from_end."""
2488@@ -369,7 +371,7 @@
2489 # behaviour.
2490 f = self._file
2491 f.seek(-2, 2)
2492- self.assertEqual('yz', f.read())
2493+ self.assertEqual(b'yz', f.read())
2494 self.assertRaises(errors.InvalidRange, f.seek, -2, 2)
2495
2496 def test_seek_into_void(self):
2497@@ -387,14 +389,14 @@
2498 def test_seek_across_ranges(self):
2499 f = self._file
2500 f.seek(126) # skip the two first ranges
2501- self.assertEqual('AB', f.read(2))
2502+ self.assertEqual(b'AB', f.read(2))
2503
2504 def test_checked_read_dont_overflow_buffers(self):
2505 f = self._file
2506 # We force a very low value to exercise all code paths in _checked_read
2507 f._discarded_buf_size = 8
2508 f.seek(126) # skip the two first ranges
2509- self.assertEqual('AB', f.read(2))
2510+ self.assertEqual(b'AB', f.read(2))
2511
2512 def test_seek_twice_between_ranges(self):
2513 f = self._file
2514@@ -435,8 +437,8 @@
2515 """
2516 # The boundary as it appears in boundary lines
2517 # IIS 6 and 7 use this value
2518- _boundary_trimmed = "q1w2e3r4t5y6u7i8o9p0zaxscdvfbgnhmjklkl"
2519- boundary = '<' + _boundary_trimmed + '>'
2520+ _boundary_trimmed = b"q1w2e3r4t5y6u7i8o9p0zaxscdvfbgnhmjklkl"
2521+ boundary = b'<' + _boundary_trimmed + b'>'
2522
2523 def set_file_boundary(self):
2524 # Emulate broken rfc822.unquote() here by removing angles
2525@@ -485,7 +487,7 @@
2526
2527
2528 # Taken from real request responses
2529-_full_text_response = (200, """HTTP/1.1 200 OK\r
2530+_full_text_response = (200, b"""HTTP/1.1 200 OK\r
2531 Date: Tue, 11 Jul 2006 04:32:56 GMT\r
2532 Server: Apache/2.0.54 (Fedora)\r
2533 Last-Modified: Sun, 23 Apr 2006 19:35:20 GMT\r
2534@@ -495,11 +497,11 @@
2535 Connection: close\r
2536 Content-Type: text/plain; charset=UTF-8\r
2537 \r
2538-""", """Bazaar-NG meta directory, format 1
2539+""", b"""Bazaar-NG meta directory, format 1
2540 """)
2541
2542
2543-_single_range_response = (206, """HTTP/1.1 206 Partial Content\r
2544+_single_range_response = (206, b"""HTTP/1.1 206 Partial Content\r
2545 Date: Tue, 11 Jul 2006 04:45:22 GMT\r
2546 Server: Apache/2.0.54 (Fedora)\r
2547 Last-Modified: Thu, 06 Jul 2006 20:22:05 GMT\r
2548@@ -510,11 +512,11 @@
2549 Connection: close\r
2550 Content-Type: text/plain; charset=UTF-8\r
2551 \r
2552-""", """mbp@sourcefrog.net-20050309040815-13242001617e4a06
2553+""", b"""mbp@sourcefrog.net-20050309040815-13242001617e4a06
2554 mbp@sourcefrog.net-20050309040929-eee0eb3e6d1e762""")
2555
2556
2557-_single_range_no_content_type = (206, """HTTP/1.1 206 Partial Content\r
2558+_single_range_no_content_type = (206, b"""HTTP/1.1 206 Partial Content\r
2559 Date: Tue, 11 Jul 2006 04:45:22 GMT\r
2560 Server: Apache/2.0.54 (Fedora)\r
2561 Last-Modified: Thu, 06 Jul 2006 20:22:05 GMT\r
2562@@ -524,11 +526,11 @@
2563 Content-Range: bytes 100-199/93890\r
2564 Connection: close\r
2565 \r
2566-""", """mbp@sourcefrog.net-20050309040815-13242001617e4a06
2567+""", b"""mbp@sourcefrog.net-20050309040815-13242001617e4a06
2568 mbp@sourcefrog.net-20050309040929-eee0eb3e6d1e762""")
2569
2570
2571-_multipart_range_response = (206, """HTTP/1.1 206 Partial Content\r
2572+_multipart_range_response = (206, b"""HTTP/1.1 206 Partial Content\r
2573 Date: Tue, 11 Jul 2006 04:49:48 GMT\r
2574 Server: Apache/2.0.54 (Fedora)\r
2575 Last-Modified: Thu, 06 Jul 2006 20:22:05 GMT\r
2576@@ -538,7 +540,7 @@
2577 Connection: close\r
2578 Content-Type: multipart/byteranges; boundary=418470f848b63279b\r
2579 \r
2580-\r""", """--418470f848b63279b\r
2581+\r""", b"""--418470f848b63279b\r
2582 Content-type: text/plain; charset=UTF-8\r
2583 Content-range: bytes 0-254/93890\r
2584 \r
2585@@ -578,7 +580,7 @@
2586 """)
2587
2588
2589-_multipart_squid_range_response = (206, """HTTP/1.0 206 Partial Content\r
2590+_multipart_squid_range_response = (206, b"""HTTP/1.0 206 Partial Content\r
2591 Date: Thu, 31 Aug 2006 21:16:22 GMT\r
2592 Server: Apache/2.2.2 (Unix) DAV/2\r
2593 Last-Modified: Thu, 31 Aug 2006 17:57:06 GMT\r
2594@@ -590,7 +592,7 @@
2595 Proxy-Connection: keep-alive\r
2596 \r
2597 """,
2598-"""\r
2599+b"""\r
2600 --squid/2.5.STABLE12:C99323425AD4FE26F726261FA6C24196\r
2601 Content-Type: text/plain\r
2602 Content-Range: bytes 0-99/18672\r
2603@@ -611,7 +613,7 @@
2604
2605
2606 # This is made up
2607-_full_text_response_no_content_type = (200, """HTTP/1.1 200 OK\r
2608+_full_text_response_no_content_type = (200, b"""HTTP/1.1 200 OK\r
2609 Date: Tue, 11 Jul 2006 04:32:56 GMT\r
2610 Server: Apache/2.0.54 (Fedora)\r
2611 Last-Modified: Sun, 23 Apr 2006 19:35:20 GMT\r
2612@@ -620,11 +622,11 @@
2613 Content-Length: 35\r
2614 Connection: close\r
2615 \r
2616-""", """Bazaar-NG meta directory, format 1
2617+""", b"""Bazaar-NG meta directory, format 1
2618 """)
2619
2620
2621-_full_text_response_no_content_length = (200, """HTTP/1.1 200 OK\r
2622+_full_text_response_no_content_length = (200, b"""HTTP/1.1 200 OK\r
2623 Date: Tue, 11 Jul 2006 04:32:56 GMT\r
2624 Server: Apache/2.0.54 (Fedora)\r
2625 Last-Modified: Sun, 23 Apr 2006 19:35:20 GMT\r
2626@@ -633,11 +635,11 @@
2627 Connection: close\r
2628 Content-Type: text/plain; charset=UTF-8\r
2629 \r
2630-""", """Bazaar-NG meta directory, format 1
2631+""", b"""Bazaar-NG meta directory, format 1
2632 """)
2633
2634
2635-_single_range_no_content_range = (206, """HTTP/1.1 206 Partial Content\r
2636+_single_range_no_content_range = (206, b"""HTTP/1.1 206 Partial Content\r
2637 Date: Tue, 11 Jul 2006 04:45:22 GMT\r
2638 Server: Apache/2.0.54 (Fedora)\r
2639 Last-Modified: Thu, 06 Jul 2006 20:22:05 GMT\r
2640@@ -646,11 +648,11 @@
2641 Content-Length: 100\r
2642 Connection: close\r
2643 \r
2644-""", """mbp@sourcefrog.net-20050309040815-13242001617e4a06
2645+""", b"""mbp@sourcefrog.net-20050309040815-13242001617e4a06
2646 mbp@sourcefrog.net-20050309040929-eee0eb3e6d1e762""")
2647
2648
2649-_single_range_response_truncated = (206, """HTTP/1.1 206 Partial Content\r
2650+_single_range_response_truncated = (206, b"""HTTP/1.1 206 Partial Content\r
2651 Date: Tue, 11 Jul 2006 04:45:22 GMT\r
2652 Server: Apache/2.0.54 (Fedora)\r
2653 Last-Modified: Thu, 06 Jul 2006 20:22:05 GMT\r
2654@@ -661,15 +663,15 @@
2655 Connection: close\r
2656 Content-Type: text/plain; charset=UTF-8\r
2657 \r
2658-""", """mbp@sourcefrog.net-20050309040815-13242001617e4a06""")
2659-
2660-
2661-_invalid_response = (444, """HTTP/1.1 444 Bad Response\r
2662+""", b"""mbp@sourcefrog.net-20050309040815-13242001617e4a06""")
2663+
2664+
2665+_invalid_response = (444, b"""HTTP/1.1 444 Bad Response\r
2666 Date: Tue, 11 Jul 2006 04:32:56 GMT\r
2667 Connection: close\r
2668 Content-Type: text/html; charset=iso-8859-1\r
2669 \r
2670-""", """<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML 2.0//EN">
2671+""", b"""<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML 2.0//EN">
2672 <html><head>
2673 <title>404 Not Found</title>
2674 </head><body>
2675@@ -680,12 +682,12 @@
2676 """)
2677
2678
2679-_multipart_no_content_range = (206, """HTTP/1.0 206 Partial Content\r
2680+_multipart_no_content_range = (206, b"""HTTP/1.0 206 Partial Content\r
2681 Content-Type: multipart/byteranges; boundary=THIS_SEPARATES\r
2682 Content-Length: 598\r
2683 \r
2684 """,
2685-"""\r
2686+b"""\r
2687 --THIS_SEPARATES\r
2688 Content-Type: text/plain\r
2689 \r
2690@@ -694,12 +696,12 @@
2691 """)
2692
2693
2694-_multipart_no_boundary = (206, """HTTP/1.0 206 Partial Content\r
2695+_multipart_no_boundary = (206, b"""HTTP/1.0 206 Partial Content\r
2696 Content-Type: multipart/byteranges; boundary=THIS_SEPARATES\r
2697 Content-Length: 598\r
2698 \r
2699 """,
2700-"""\r
2701+b"""\r
2702 --THIS_SEPARATES\r
2703 Content-Type: text/plain\r
2704 Content-Range: bytes 0-18/18672\r
2705@@ -816,8 +818,8 @@
2706 super(TestRangeFileSizeReadLimited, self).setUp()
2707 # create a test datablock larger than _max_read_size.
2708 chunk_size = response.RangeFile._max_read_size
2709- test_pattern = '0123456789ABCDEF'
2710- self.test_data = test_pattern * (3 * chunk_size / len(test_pattern))
2711+ test_pattern = b'0123456789ABCDEF'
2712+ self.test_data = test_pattern * (3 * chunk_size // len(test_pattern))
2713 self.test_data_len = len(self.test_data)
2714
2715 def test_max_read_size(self):
2716
2717=== modified file 'breezy/tests/test_index.py'
2718--- breezy/tests/test_index.py 2017-11-12 13:53:51 +0000
2719+++ breezy/tests/test_index.py 2018-06-30 17:35:22 +0000
2720@@ -21,6 +21,7 @@
2721 tests,
2722 transport,
2723 )
2724+from ..sixish import int2byte
2725 from ..bzr import (
2726 index as _mod_index,
2727 )
2728@@ -66,7 +67,7 @@
2729 stream = builder.finish()
2730 contents = stream.read()
2731 self.assertEqual(
2732- "Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=1\nlen=0\n\n",
2733+ b"Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=1\nlen=0\n\n",
2734 contents)
2735
2736 def test_build_index_empty_two_element_keys(self):
2737@@ -74,7 +75,7 @@
2738 stream = builder.finish()
2739 contents = stream.read()
2740 self.assertEqual(
2741- "Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=2\nlen=0\n\n",
2742+ b"Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=2\nlen=0\n\n",
2743 contents)
2744
2745 def test_build_index_one_reference_list_empty(self):
2746@@ -82,7 +83,7 @@
2747 stream = builder.finish()
2748 contents = stream.read()
2749 self.assertEqual(
2750- "Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=0\n\n",
2751+ b"Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=0\n\n",
2752 contents)
2753
2754 def test_build_index_two_reference_list_empty(self):
2755@@ -90,47 +91,47 @@
2756 stream = builder.finish()
2757 contents = stream.read()
2758 self.assertEqual(
2759- "Bazaar Graph Index 1\nnode_ref_lists=2\nkey_elements=1\nlen=0\n\n",
2760+ b"Bazaar Graph Index 1\nnode_ref_lists=2\nkey_elements=1\nlen=0\n\n",
2761 contents)
2762
2763 def test_build_index_one_node_no_refs(self):
2764 builder = _mod_index.GraphIndexBuilder()
2765- builder.add_node(('akey', ), 'data')
2766+ builder.add_node((b'akey', ), b'data')
2767 stream = builder.finish()
2768 contents = stream.read()
2769 self.assertEqual(
2770- "Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=1\nlen=1\n"
2771- "akey\x00\x00\x00data\n\n", contents)
2772+ b"Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=1\nlen=1\n"
2773+ b"akey\x00\x00\x00data\n\n", contents)
2774
2775 def test_build_index_one_node_no_refs_accepts_empty_reflist(self):
2776 builder = _mod_index.GraphIndexBuilder()
2777- builder.add_node(('akey', ), 'data', ())
2778+ builder.add_node((b'akey', ), b'data', ())
2779 stream = builder.finish()
2780 contents = stream.read()
2781 self.assertEqual(
2782- "Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=1\nlen=1\n"
2783- "akey\x00\x00\x00data\n\n", contents)
2784+ b"Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=1\nlen=1\n"
2785+ b"akey\x00\x00\x00data\n\n", contents)
2786
2787 def test_build_index_one_node_2_element_keys(self):
2788 # multipart keys are separated by \x00 - because they are fixed length,
2789 # not variable this does not cause any issues, and seems clearer to the
2790 # author.
2791 builder = _mod_index.GraphIndexBuilder(key_elements=2)
2792- builder.add_node(('akey', 'secondpart'), 'data')
2793+ builder.add_node((b'akey', b'secondpart'), b'data')
2794 stream = builder.finish()
2795 contents = stream.read()
2796 self.assertEqual(
2797- "Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=2\nlen=1\n"
2798- "akey\x00secondpart\x00\x00\x00data\n\n", contents)
2799+ b"Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=2\nlen=1\n"
2800+ b"akey\x00secondpart\x00\x00\x00data\n\n", contents)
2801
2802 def test_add_node_empty_value(self):
2803 builder = _mod_index.GraphIndexBuilder()
2804- builder.add_node(('akey', ), '')
2805+ builder.add_node((b'akey', ), b'')
2806 stream = builder.finish()
2807 contents = stream.read()
2808 self.assertEqual(
2809- "Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=1\nlen=1\n"
2810- "akey\x00\x00\x00\n\n", contents)
2811+ b"Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=1\nlen=1\n"
2812+ b"akey\x00\x00\x00\n\n", contents)
2813
2814 def test_build_index_nodes_sorted(self):
2815 # the highest sorted node comes first.
2816@@ -138,17 +139,17 @@
2817 # use three to have a good chance of glitching dictionary hash
2818 # lookups etc. Insert in randomish order that is not correct
2819 # and not the reverse of the correct order.
2820- builder.add_node(('2002', ), 'data')
2821- builder.add_node(('2000', ), 'data')
2822- builder.add_node(('2001', ), 'data')
2823+ builder.add_node((b'2002', ), b'data')
2824+ builder.add_node((b'2000', ), b'data')
2825+ builder.add_node((b'2001', ), b'data')
2826 stream = builder.finish()
2827 contents = stream.read()
2828 self.assertEqual(
2829- "Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=1\nlen=3\n"
2830- "2000\x00\x00\x00data\n"
2831- "2001\x00\x00\x00data\n"
2832- "2002\x00\x00\x00data\n"
2833- "\n", contents)
2834+ b"Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=1\nlen=3\n"
2835+ b"2000\x00\x00\x00data\n"
2836+ b"2001\x00\x00\x00data\n"
2837+ b"2002\x00\x00\x00data\n"
2838+ b"\n", contents)
2839
2840 def test_build_index_2_element_key_nodes_sorted(self):
2841 # multiple element keys are sorted first-key, second-key.
2842@@ -156,59 +157,59 @@
2843 # use three values of each key element, to have a good chance of
2844 # glitching dictionary hash lookups etc. Insert in randomish order that
2845 # is not correct and not the reverse of the correct order.
2846- builder.add_node(('2002', '2002'), 'data')
2847- builder.add_node(('2002', '2000'), 'data')
2848- builder.add_node(('2002', '2001'), 'data')
2849- builder.add_node(('2000', '2002'), 'data')
2850- builder.add_node(('2000', '2000'), 'data')
2851- builder.add_node(('2000', '2001'), 'data')
2852- builder.add_node(('2001', '2002'), 'data')
2853- builder.add_node(('2001', '2000'), 'data')
2854- builder.add_node(('2001', '2001'), 'data')
2855+ builder.add_node((b'2002', b'2002'), b'data')
2856+ builder.add_node((b'2002', b'2000'), b'data')
2857+ builder.add_node((b'2002', b'2001'), b'data')
2858+ builder.add_node((b'2000', b'2002'), b'data')
2859+ builder.add_node((b'2000', b'2000'), b'data')
2860+ builder.add_node((b'2000', b'2001'), b'data')
2861+ builder.add_node((b'2001', b'2002'), b'data')
2862+ builder.add_node((b'2001', b'2000'), b'data')
2863+ builder.add_node((b'2001', b'2001'), b'data')
2864 stream = builder.finish()
2865 contents = stream.read()
2866 self.assertEqual(
2867- "Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=2\nlen=9\n"
2868- "2000\x002000\x00\x00\x00data\n"
2869- "2000\x002001\x00\x00\x00data\n"
2870- "2000\x002002\x00\x00\x00data\n"
2871- "2001\x002000\x00\x00\x00data\n"
2872- "2001\x002001\x00\x00\x00data\n"
2873- "2001\x002002\x00\x00\x00data\n"
2874- "2002\x002000\x00\x00\x00data\n"
2875- "2002\x002001\x00\x00\x00data\n"
2876- "2002\x002002\x00\x00\x00data\n"
2877- "\n", contents)
2878+ b"Bazaar Graph Index 1\nnode_ref_lists=0\nkey_elements=2\nlen=9\n"
2879+ b"2000\x002000\x00\x00\x00data\n"
2880+ b"2000\x002001\x00\x00\x00data\n"
2881+ b"2000\x002002\x00\x00\x00data\n"
2882+ b"2001\x002000\x00\x00\x00data\n"
2883+ b"2001\x002001\x00\x00\x00data\n"
2884+ b"2001\x002002\x00\x00\x00data\n"
2885+ b"2002\x002000\x00\x00\x00data\n"
2886+ b"2002\x002001\x00\x00\x00data\n"
2887+ b"2002\x002002\x00\x00\x00data\n"
2888+ b"\n", contents)
2889
2890 def test_build_index_reference_lists_are_included_one(self):
2891 builder = _mod_index.GraphIndexBuilder(reference_lists=1)
2892- builder.add_node(('key', ), 'data', ([], ))
2893+ builder.add_node((b'key', ), b'data', ([], ))
2894 stream = builder.finish()
2895 contents = stream.read()
2896 self.assertEqual(
2897- "Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=1\n"
2898- "key\x00\x00\x00data\n"
2899- "\n", contents)
2900+ b"Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=1\n"
2901+ b"key\x00\x00\x00data\n"
2902+ b"\n", contents)
2903
2904 def test_build_index_reference_lists_with_2_element_keys(self):
2905 builder = _mod_index.GraphIndexBuilder(reference_lists=1, key_elements=2)
2906- builder.add_node(('key', 'key2'), 'data', ([], ))
2907+ builder.add_node((b'key', b'key2'), b'data', ([], ))
2908 stream = builder.finish()
2909 contents = stream.read()
2910 self.assertEqual(
2911- "Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=2\nlen=1\n"
2912- "key\x00key2\x00\x00\x00data\n"
2913- "\n", contents)
2914+ b"Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=2\nlen=1\n"
2915+ b"key\x00key2\x00\x00\x00data\n"
2916+ b"\n", contents)
2917
2918 def test_build_index_reference_lists_are_included_two(self):
2919 builder = _mod_index.GraphIndexBuilder(reference_lists=2)
2920- builder.add_node(('key', ), 'data', ([], []))
2921+ builder.add_node((b'key', ), b'data', ([], []))
2922 stream = builder.finish()
2923 contents = stream.read()
2924 self.assertEqual(
2925- "Bazaar Graph Index 1\nnode_ref_lists=2\nkey_elements=1\nlen=1\n"
2926- "key\x00\x00\t\x00data\n"
2927- "\n", contents)
2928+ b"Bazaar Graph Index 1\nnode_ref_lists=2\nkey_elements=1\nlen=1\n"
2929+ b"key\x00\x00\t\x00data\n"
2930+ b"\n", contents)
2931
2932 def test_clear_cache(self):
2933 builder = _mod_index.GraphIndexBuilder(reference_lists=2)
2934@@ -217,183 +218,183 @@
2935
2936 def test_node_references_are_byte_offsets(self):
2937 builder = _mod_index.GraphIndexBuilder(reference_lists=1)
2938- builder.add_node(('reference', ), 'data', ([], ))
2939- builder.add_node(('key', ), 'data', ([('reference', )], ))
2940+ builder.add_node((b'reference', ), b'data', ([], ))
2941+ builder.add_node((b'key', ), b'data', ([(b'reference', )], ))
2942 stream = builder.finish()
2943 contents = stream.read()
2944 self.assertEqual(
2945- "Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=2\n"
2946- "key\x00\x0072\x00data\n"
2947- "reference\x00\x00\x00data\n"
2948- "\n", contents)
2949+ b"Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=2\n"
2950+ b"key\x00\x0072\x00data\n"
2951+ b"reference\x00\x00\x00data\n"
2952+ b"\n", contents)
2953
2954 def test_node_references_are_cr_delimited(self):
2955 builder = _mod_index.GraphIndexBuilder(reference_lists=1)
2956- builder.add_node(('reference', ), 'data', ([], ))
2957- builder.add_node(('reference2', ), 'data', ([], ))
2958- builder.add_node(('key', ), 'data',
2959- ([('reference', ), ('reference2', )], ))
2960+ builder.add_node((b'reference', ), b'data', ([], ))
2961+ builder.add_node((b'reference2', ), b'data', ([], ))
2962+ builder.add_node((b'key', ), b'data',
2963+ ([(b'reference', ), (b'reference2', )], ))
2964 stream = builder.finish()
2965 contents = stream.read()
2966 self.assertEqual(
2967- "Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=3\n"
2968- "key\x00\x00077\r094\x00data\n"
2969- "reference\x00\x00\x00data\n"
2970- "reference2\x00\x00\x00data\n"
2971- "\n", contents)
2972+ b"Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=3\n"
2973+ b"key\x00\x00077\r094\x00data\n"
2974+ b"reference\x00\x00\x00data\n"
2975+ b"reference2\x00\x00\x00data\n"
2976+ b"\n", contents)
2977
2978 def test_multiple_reference_lists_are_tab_delimited(self):
2979 builder = _mod_index.GraphIndexBuilder(reference_lists=2)
2980- builder.add_node(('keference', ), 'data', ([], []))
2981- builder.add_node(('rey', ), 'data',
2982- ([('keference', )], [('keference', )]))
2983+ builder.add_node((b'keference', ), b'data', ([], []))
2984+ builder.add_node((b'rey', ), b'data',
2985+ ([(b'keference', )], [(b'keference', )]))
2986 stream = builder.finish()
2987 contents = stream.read()
2988 self.assertEqual(
2989- "Bazaar Graph Index 1\nnode_ref_lists=2\nkey_elements=1\nlen=2\n"
2990- "keference\x00\x00\t\x00data\n"
2991- "rey\x00\x0059\t59\x00data\n"
2992- "\n", contents)
2993+ b"Bazaar Graph Index 1\nnode_ref_lists=2\nkey_elements=1\nlen=2\n"
2994+ b"keference\x00\x00\t\x00data\n"
2995+ b"rey\x00\x0059\t59\x00data\n"
2996+ b"\n", contents)
2997
2998 def test_add_node_referencing_missing_key_makes_absent(self):
2999 builder = _mod_index.GraphIndexBuilder(reference_lists=1)
3000- builder.add_node(('rey', ), 'data',
3001- ([('beference', ), ('aeference2', )], ))
3002+ builder.add_node((b'rey', ), b'data',
3003+ ([(b'beference', ), (b'aeference2', )], ))
3004 stream = builder.finish()
3005 contents = stream.read()
3006 self.assertEqual(
3007- "Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=1\n"
3008- "aeference2\x00a\x00\x00\n"
3009- "beference\x00a\x00\x00\n"
3010- "rey\x00\x00074\r059\x00data\n"
3011- "\n", contents)
3012+ b"Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=1\n"
3013+ b"aeference2\x00a\x00\x00\n"
3014+ b"beference\x00a\x00\x00\n"
3015+ b"rey\x00\x00074\r059\x00data\n"
3016+ b"\n", contents)
3017
3018 def test_node_references_three_digits(self):
3019 # test the node digit expands as needed.
3020 builder = _mod_index.GraphIndexBuilder(reference_lists=1)
3021- references = [(str(val), ) for val in range(8, -1, -1)]
3022- builder.add_node(('2-key', ), '', (references, ))
3023+ references = [((b"%d" % val), ) for val in range(8, -1, -1)]
3024+ builder.add_node((b'2-key', ), b'', (references, ))
3025 stream = builder.finish()
3026 contents = stream.read()
3027 self.assertEqualDiff(
3028- "Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=1\n"
3029- "0\x00a\x00\x00\n"
3030- "1\x00a\x00\x00\n"
3031- "2\x00a\x00\x00\n"
3032- "2-key\x00\x00151\r145\r139\r133\r127\r121\r071\r065\r059\x00\n"
3033- "3\x00a\x00\x00\n"
3034- "4\x00a\x00\x00\n"
3035- "5\x00a\x00\x00\n"
3036- "6\x00a\x00\x00\n"
3037- "7\x00a\x00\x00\n"
3038- "8\x00a\x00\x00\n"
3039- "\n", contents)
3040+ b"Bazaar Graph Index 1\nnode_ref_lists=1\nkey_elements=1\nlen=1\n"
3041+ b"0\x00a\x00\x00\n"
3042+ b"1\x00a\x00\x00\n"
3043+ b"2\x00a\x00\x00\n"
3044+ b"2-key\x00\x00151\r145\r139\r133\r127\r121\r071\r065\r059\x00\n"
3045+ b"3\x00a\x00\x00\n"
3046+ b"4\x00a\x00\x00\n"
3047+ b"5\x00a\x00\x00\n"
3048+ b"6\x00a\x00\x00\n"
3049+ b"7\x00a\x00\x00\n"
3050+ b"8\x00a\x00\x00\n"
3051+ b"\n", contents)
3052
3053 def test_absent_has_no_reference_overhead(self):
3054 # the offsets after an absent record should be correct when there are
3055 # >1 reference lists.
3056 builder = _mod_index.GraphIndexBuilder(reference_lists=2)
3057- builder.add_node(('parent', ), '', ([('aail', ), ('zther', )], []))
3058+ builder.add_node((b'parent', ), b'', ([(b'aail', ), (b'zther', )], []))
3059 stream = builder.finish()
3060 contents = stream.read()
3061 self.assertEqual(
3062- "Bazaar Graph Index 1\nnode_ref_lists=2\nkey_elements=1\nlen=1\n"
3063- "aail\x00a\x00\x00\n"
3064- "parent\x00\x0059\r84\t\x00\n"
3065- "zther\x00a\x00\x00\n"
3066- "\n", contents)
3067+ b"Bazaar Graph Index 1\nnode_ref_lists=2\nkey_elements=1\nlen=1\n"
3068+ b"aail\x00a\x00\x00\n"
3069+ b"parent\x00\x0059\r84\t\x00\n"
3070+ b"zther\x00a\x00\x00\n"
3071+ b"\n", contents)
3072
3073 def test_add_node_bad_key(self):
3074 builder = _mod_index.GraphIndexBuilder()
3075- for bad_char in '\t\n\x0b\x0c\r\x00 ':
3076+ for bad_char in bytearray(b'\t\n\x0b\x0c\r\x00 '):
3077 self.assertRaises(_mod_index.BadIndexKey, builder.add_node,
3078- ('a%skey' % bad_char, ), 'data')
3079- self.assertRaises(_mod_index.BadIndexKey, builder.add_node,
3080- ('', ), 'data')
3081- self.assertRaises(_mod_index.BadIndexKey, builder.add_node,
3082- 'not-a-tuple', 'data')
3083+ (b'a%skey' % int2byte(bad_char), ), b'data')
3084+ self.assertRaises(_mod_index.BadIndexKey, builder.add_node,
3085+ (b'', ), b'data')
3086+ self.assertRaises(_mod_index.BadIndexKey, builder.add_node,
3087+ b'not-a-tuple', b'data')
3088 # not enough length
3089 self.assertRaises(_mod_index.BadIndexKey, builder.add_node,
3090- (), 'data')
3091+ (), b'data')
3092 # too long
3093 self.assertRaises(_mod_index.BadIndexKey, builder.add_node,
3094- ('primary', 'secondary'), 'data')
3095+ (b'primary', b'secondary'), b'data')
3096 # secondary key elements get checked too:
3097 builder = _mod_index.GraphIndexBuilder(key_elements=2)
3098- for bad_char in '\t\n\x0b\x0c\r\x00 ':
3099+ for bad_char in bytearray(b'\t\n\x0b\x0c\r\x00 '):
3100 self.assertRaises(_mod_index.BadIndexKey, builder.add_node,
3101- ('prefix', 'a%skey' % bad_char), 'data')
3102+ (b'prefix', b'a%skey' % int2byte(bad_char)), b'data')
3103
3104 def test_add_node_bad_data(self):
3105 builder = _mod_index.GraphIndexBuilder()
3106- self.assertRaises(_mod_index.BadIndexValue, builder.add_node, ('akey', ),
3107- 'data\naa')
3108- self.assertRaises(_mod_index.BadIndexValue, builder.add_node, ('akey', ),
3109- 'data\x00aa')
3110+ self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
3111+ b'data\naa')
3112+ self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
3113+ b'data\x00aa')
3114
3115 def test_add_node_bad_mismatched_ref_lists_length(self):
3116 builder = _mod_index.GraphIndexBuilder()
3117- self.assertRaises(_mod_index.BadIndexValue, builder.add_node, ('akey', ),
3118- 'data aa', ([], ))
3119+ self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
3120+ b'data aa', ([], ))
3121 builder = _mod_index.GraphIndexBuilder(reference_lists=1)
3122- self.assertRaises(_mod_index.BadIndexValue, builder.add_node, ('akey', ),
3123- 'data aa')
3124- self.assertRaises(_mod_index.BadIndexValue, builder.add_node, ('akey', ),
3125- 'data aa', (), )
3126- self.assertRaises(_mod_index.BadIndexValue, builder.add_node, ('akey', ),
3127- 'data aa', ([], []))
3128+ self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
3129+ b'data aa')
3130+ self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
3131+ b'data aa', (), )
3132+ self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
3133+ b'data aa', ([], []))
3134 builder = _mod_index.GraphIndexBuilder(reference_lists=2)
3135- self.assertRaises(_mod_index.BadIndexValue, builder.add_node, ('akey', ),
3136- 'data aa')
3137- self.assertRaises(_mod_index.BadIndexValue, builder.add_node, ('akey', ),
3138- 'data aa', ([], ))
3139- self.assertRaises(_mod_index.BadIndexValue, builder.add_node, ('akey', ),
3140- 'data aa', ([], [], []))
3141+ self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
3142+ b'data aa')
3143+ self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
3144+ b'data aa', ([], ))
3145+ self.assertRaises(_mod_index.BadIndexValue, builder.add_node, (b'akey', ),
3146+ b'data aa', ([], [], []))
3147
3148 def test_add_node_bad_key_in_reference_lists(self):
3149 # first list, first key - trivial
3150 builder = _mod_index.GraphIndexBuilder(reference_lists=1)
3151- self.assertRaises(_mod_index.BadIndexKey, builder.add_node, ('akey', ),
3152- 'data aa', ([('a key', )], ))
3153+ self.assertRaises(_mod_index.BadIndexKey, builder.add_node, (b'akey', ),
3154+ b'data aa', ([(b'a key', )], ))
3155 # references keys must be tuples too
3156- self.assertRaises(_mod_index.BadIndexKey, builder.add_node, ('akey', ),
3157- 'data aa', (['not-a-tuple'], ))
3158+ self.assertRaises(_mod_index.BadIndexKey, builder.add_node, (b'akey', ),
3159+ b'data aa', (['not-a-tuple'], ))
3160 # not enough length
3161- self.assertRaises(_mod_index.BadIndexKey, builder.add_node, ('akey', ),
3162- 'data aa', ([()], ))
3163+ self.assertRaises(_mod_index.BadIndexKey, builder.add_node, (b'akey', ),
3164+ b'data aa', ([()], ))
3165 # too long
3166- self.assertRaises(_mod_index.BadIndexKey, builder.add_node, ('akey', ),
3167- 'data aa', ([('primary', 'secondary')], ))
3168+ self.assertRaises(_mod_index.BadIndexKey, builder.add_node, (b'akey', ),
3169+ b'data aa', ([(b'primary', b'secondary')], ))
3170 # need to check more than the first key in the list
3171- self.assertRaises(_mod_index.BadIndexKey, builder.add_node, ('akey', ),
3172- 'data aa', ([('agoodkey', ), ('that is a bad key', )], ))
3173+ self.assertRaises(_mod_index.BadIndexKey, builder.add_node, (b'akey', ),
3174+ b'data aa', ([(b'agoodkey', ), (b'that is a bad key', )], ))
3175 # and if there is more than one list it should be getting checked
3176 # too
3177 builder = _mod_index.GraphIndexBuilder(reference_lists=2)
3178- self.assertRaises(_mod_index.BadIndexKey, builder.add_node, ('akey', ),
3179- 'data aa', ([], ['a bad key']))
3180+ self.assertRaises(_mod_index.BadIndexKey, builder.add_node, (b'akey', ),
3181+ b'data aa', ([], ['a bad key']))
3182
3183 def test_add_duplicate_key(self):
3184 builder = _mod_index.GraphIndexBuilder()
3185- builder.add_node(('key', ), 'data')
3186+ builder.add_node((b'key', ), b'data')
3187 self.assertRaises(_mod_index.BadIndexDuplicateKey,
3188- builder.add_node, ('key', ), 'data')
3189+ builder.add_node, (b'key', ), b'data')
3190
3191 def test_add_duplicate_key_2_elements(self):
3192 builder = _mod_index.GraphIndexBuilder(key_elements=2)
3193- builder.add_node(('key', 'key'), 'data')
3194+ builder.add_node((b'key', b'key'), b'data')
3195 self.assertRaises(_mod_index.BadIndexDuplicateKey, builder.add_node,
3196- ('key', 'key'), 'data')
3197+ (b'key', b'key'), b'data')
3198
3199 def test_add_key_after_referencing_key(self):
3200 builder = _mod_index.GraphIndexBuilder(reference_lists=1)
3201- builder.add_node(('key', ), 'data', ([('reference', )], ))
3202- builder.add_node(('reference', ), 'data', ([],))
3203+ builder.add_node((b'key', ), b'data', ([(b'reference', )], ))
3204+ builder.add_node((b'reference', ), b'data', ([],))
3205
3206 def test_add_key_after_referencing_key_2_elements(self):
3207 builder = _mod_index.GraphIndexBuilder(reference_lists=1, key_elements=2)
3208- builder.add_node(('k', 'ey'), 'data', ([('reference', 'tokey')], ))
3209- builder.add_node(('reference', 'tokey'), 'data', ([],))
3210+ builder.add_node((b'k', b'ey'), b'data', ([(b'reference', b'tokey')], ))
3211+ builder.add_node((b'reference', b'tokey'), b'data', ([],))
3212
3213 def test_set_optimize(self):
3214 builder = _mod_index.GraphIndexBuilder(reference_lists=1, key_elements=2)
3215@@ -406,10 +407,10 @@
3216 class TestGraphIndex(tests.TestCaseWithMemoryTransport):
3217
3218 def make_key(self, number):
3219- return (str(number) + 'X'*100,)
3220+ return ((b'%d' % number) + b'X'*100,)
3221
3222 def make_value(self, number):
3223- return str(number) + 'Y'*100
3224+ return (b'%d' % number) + b'Y'*100
3225
3226 def make_nodes(self, count=64):
3227 # generate a big enough index that we only read some of it on a typical
3228@@ -436,7 +437,7 @@
3229 content = builder.finish().read()
3230 size = len(content)
3231 trans = self.get_transport()
3232- trans.put_bytes('index', (' '*offset) + content)
3233+ trans.put_bytes('index', (b' '*offset) + content)
3234 return _mod_index.GraphIndex(trans, 'index', size, offset=offset)
3235
3236 def test_clear_cache(self):
3237@@ -447,7 +448,7 @@
3238
3239 def test_open_bad_index_no_error(self):
3240 trans = self.get_transport()
3241- trans.put_bytes('name', "not an index\n")
3242+ trans.put_bytes('name', b"not an index\n")
3243 idx = _mod_index.GraphIndex(trans, 'name', 13)
3244
3245 def test_with_offset(self):
3246@@ -496,7 +497,7 @@
3247 # do a _lookup_keys_via_location call for the middle of the file, which
3248 # is what bisection uses.
3249 result = index._lookup_keys_via_location(
3250- [(index._size // 2, ('missing', ))])
3251+ [(index._size // 2, (b'missing', ))])
3252 # this should have asked for a readv request, with adjust_for_latency,
3253 # and two regions: the header, and half-way into the file.
3254 self.assertEqual([
3255@@ -505,7 +506,7 @@
3256 index._transport._activity)
3257 # and the result should be that the key cannot be present, because this
3258 # is a trivial index.
3259- self.assertEqual([((index._size // 2, ('missing', )), False)],
3260+ self.assertEqual([((index._size // 2, (b'missing', )), False)],
3261 result)
3262 # And this should have caused the file to be fully buffered
3263 self.assertIsNot(None, index._nodes)
3264@@ -526,7 +527,7 @@
3265 # is what bisection uses.
3266 start_lookup = index._size // 2
3267 result = index._lookup_keys_via_location(
3268- [(start_lookup, ('40missing', ))])
3269+ [(start_lookup, (b'40missing', ))])
3270 # this should have asked for a readv request, with adjust_for_latency,
3271 # and two regions: the header, and half-way into the file.
3272 self.assertEqual([
3273@@ -536,7 +537,7 @@
3274 index._transport._activity)
3275 # and the result should be that the key cannot be present, because this
3276 # is a trivial index.
3277- self.assertEqual([((start_lookup, ('40missing', )), False)],
3278+ self.assertEqual([((start_lookup, (b'40missing', )), False)],
3279 result)
3280 # And this should not have caused the file to be fully buffered
3281 self.assertIs(None, index._nodes)
3282@@ -550,11 +551,11 @@
3283 def test_parsing_non_adjacent_data_trims(self):
3284 index = self.make_index(nodes=self.make_nodes(64))
3285 result = index._lookup_keys_via_location(
3286- [(index._size // 2, ('40', ))])
3287+ [(index._size // 2, (b'40', ))])
3288 # and the result should be that the key cannot be present, because key is
3289 # in the middle of the observed data from a 4K read - the smallest transport
3290 # will do today with this api.
3291- self.assertEqual([((index._size // 2, ('40', )), False)],
3292+ self.assertEqual([((index._size // 2, (b'40', )), False)],
3293 result)
3294 # and we should have a parse map that includes the header and the
3295 # region that was parsed after trimming.
3296@@ -579,7 +580,7 @@
3297 # locations of both keys.
3298 index = self.make_index(nodes=self.make_nodes(128))
3299 result = index._lookup_keys_via_location(
3300- [(index._size // 2, ('40', ))])
3301+ [(index._size // 2, (b'40', ))])
3302 # and we should have a parse map that includes the header and the
3303 # region that was parsed after trimming.
3304 self.assertEqual([(0, 4045), (11759, 15707)], index._parsed_byte_map)
3305@@ -604,7 +605,7 @@
3306 index = self.make_index(nodes=self.make_nodes(64))
3307 # lookup the keys in the middle of the file
3308 result =index._lookup_keys_via_location(
3309- [(index._size // 2, ('40', ))])
3310+ [(index._size // 2, (b'40', ))])
3311 # check the parse map, this determines the test validity
3312 self.assertEqual([(0, 4008), (5046, 8996)], index._parsed_byte_map)
3313 self.assertEqual([(None, self.make_key(26)),
3314@@ -617,8 +618,8 @@
3315 # be in the index) - even when the byte location we ask for is outside
3316 # the parsed region
3317 result = index._lookup_keys_via_location(
3318- [(4000, ('40', ))])
3319- self.assertEqual([((4000, ('40', )), False)],
3320+ [(4000, (b'40', ))])
3321+ self.assertEqual([((4000, (b'40', )), False)],
3322 result)
3323 self.assertEqual([], index._transport._activity)
3324
3325@@ -628,7 +629,7 @@
3326 index = self.make_index(nodes=self.make_nodes(64))
3327 # lookup the keys in the middle of the file
3328 result =index._lookup_keys_via_location(
3329- [(index._size // 2, ('40', ))])
3330+ [(index._size // 2, (b'40', ))])
3331 # check the parse map, this determines the test validity
3332 self.assertEqual([(0, 4008), (5046, 8996)], index._parsed_byte_map)
3333 self.assertEqual([(None, self.make_key(26)),
3334@@ -655,7 +656,7 @@
3335 # ask for the key in the middle, but a key that is located in the
3336 # unparsed region before the middle.
3337 result =index._lookup_keys_via_location(
3338- [(index._size // 2, ('30', ))])
3339+ [(index._size // 2, (b'30', ))])
3340 # check the parse map, this determines the test validity
3341 self.assertEqual([(0, 4008), (5046, 8996)], index._parsed_byte_map)
3342 self.assertEqual([(None, self.make_key(26)),
3343@@ -671,13 +672,13 @@
3344 # ask for the key in the middle, but a key that is located in the
3345 # unparsed region after the middle.
3346 result =index._lookup_keys_via_location(
3347- [(index._size // 2, ('50', ))])
3348+ [(index._size // 2, (b'50', ))])
3349 # check the parse map, this determines the test validity
3350 self.assertEqual([(0, 4008), (5046, 8996)], index._parsed_byte_map)
3351 self.assertEqual([(None, self.make_key(26)),
3352 (self.make_key(31), self.make_key(48))],
3353 index._parsed_key_map)
3354- self.assertEqual([((index._size // 2, ('50', )), +1)],
3355+ self.assertEqual([((index._size // 2, (b'50', )), +1)],
3356 result)
3357
3358 def test_lookup_key_resolves_references(self):
3359@@ -693,7 +694,7 @@
3360 index_size = index._size
3361 index_center = index_size // 2
3362 result = index._lookup_keys_via_location(
3363- [(index_center, ('40', ))])
3364+ [(index_center, (b'40', ))])
3365 # check the parse map - only the start and middle should have been
3366 # parsed.
3367 self.assertEqual([(0, 4027), (10198, 14028)], index._parsed_byte_map)
3368@@ -728,7 +729,7 @@
3369 # check that the referred-to-keys are not accessed automatically.
3370 index_size = index._size
3371 index_center = index_size // 2
3372- result = index._lookup_keys_via_location([(index_center, ('40', ))])
3373+ result = index._lookup_keys_via_location([(index_center, (b'40', ))])
3374 # check the parse map - only the start and middle should have been
3375 # parsed.
3376 self.assertEqual([(0, 3890), (6444, 10274)], index._parsed_byte_map)
3377@@ -759,22 +760,22 @@
3378 self.assertEqual([], list(index.iter_all_entries()))
3379
3380 def test_iter_all_entries_simple(self):
3381- index = self.make_index(nodes=[(('name', ), 'data', ())])
3382- self.assertEqual([(index, ('name', ), 'data')],
3383+ index = self.make_index(nodes=[((b'name', ), b'data', ())])
3384+ self.assertEqual([(index, (b'name', ), b'data')],
3385 list(index.iter_all_entries()))
3386
3387 def test_iter_all_entries_simple_2_elements(self):
3388 index = self.make_index(key_elements=2,
3389- nodes=[(('name', 'surname'), 'data', ())])
3390- self.assertEqual([(index, ('name', 'surname'), 'data')],
3391+ nodes=[((b'name', b'surname'), b'data', ())])
3392+ self.assertEqual([(index, (b'name', b'surname'), b'data')],
3393 list(index.iter_all_entries()))
3394
3395 def test_iter_all_entries_references_resolved(self):
3396 index = self.make_index(1, nodes=[
3397- (('name', ), 'data', ([('ref', )], )),
3398- (('ref', ), 'refdata', ([], ))])
3399- self.assertEqual({(index, ('name', ), 'data', ((('ref',),),)),
3400- (index, ('ref', ), 'refdata', ((), ))},
3401+ ((b'name', ), b'data', ([(b'ref', )], )),
3402+ ((b'ref', ), b'refdata', ([], ))])
3403+ self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),)),
3404+ (index, (b'ref', ), b'refdata', ((), ))},
3405 set(index.iter_all_entries()))
3406
3407 def test_iter_entries_buffers_once(self):
3408@@ -821,45 +822,45 @@
3409
3410 def test_iter_entries_references_resolved(self):
3411 index = self.make_index(1, nodes=[
3412- (('name', ), 'data', ([('ref', ), ('ref', )], )),
3413- (('ref', ), 'refdata', ([], ))])
3414- self.assertEqual({(index, ('name', ), 'data', ((('ref',), ('ref',)),)),
3415- (index, ('ref', ), 'refdata', ((), ))},
3416- set(index.iter_entries([('name',), ('ref',)])))
3417+ ((b'name', ), b'data', ([(b'ref', ), (b'ref', )], )),
3418+ ((b'ref', ), b'refdata', ([], ))])
3419+ self.assertEqual({(index, (b'name', ), b'data', (((b'ref',), (b'ref',)),)),
3420+ (index, (b'ref', ), b'refdata', ((), ))},
3421+ set(index.iter_entries([(b'name',), (b'ref',)])))
3422
3423 def test_iter_entries_references_2_refs_resolved(self):
3424 index = self.make_index(2, nodes=[
3425- (('name', ), 'data', ([('ref', )], [('ref', )])),
3426- (('ref', ), 'refdata', ([], []))])
3427- self.assertEqual({(index, ('name', ), 'data', ((('ref',),), (('ref',),))),
3428- (index, ('ref', ), 'refdata', ((), ()))},
3429- set(index.iter_entries([('name',), ('ref',)])))
3430+ ((b'name', ), b'data', ([(b'ref', )], [(b'ref', )])),
3431+ ((b'ref', ), b'refdata', ([], []))])
3432+ self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),), ((b'ref',),))),
3433+ (index, (b'ref', ), b'refdata', ((), ()))},
3434+ set(index.iter_entries([(b'name',), (b'ref',)])))
3435
3436 def test_iteration_absent_skipped(self):
3437 index = self.make_index(1, nodes=[
3438- (('name', ), 'data', ([('ref', )], ))])
3439- self.assertEqual({(index, ('name', ), 'data', ((('ref',),),))},
3440+ ((b'name', ), b'data', ([(b'ref', )], ))])
3441+ self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),))},
3442 set(index.iter_all_entries()))
3443- self.assertEqual({(index, ('name', ), 'data', ((('ref',),),))},
3444- set(index.iter_entries([('name', )])))
3445- self.assertEqual([], list(index.iter_entries([('ref', )])))
3446+ self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),))},
3447+ set(index.iter_entries([(b'name', )])))
3448+ self.assertEqual([], list(index.iter_entries([(b'ref', )])))
3449
3450 def test_iteration_absent_skipped_2_element_keys(self):
3451 index = self.make_index(1, key_elements=2, nodes=[
3452- (('name', 'fin'), 'data', ([('ref', 'erence')], ))])
3453- self.assertEqual({(index, ('name', 'fin'), 'data', ((('ref', 'erence'),),))},
3454- set(index.iter_all_entries()))
3455- self.assertEqual({(index, ('name', 'fin'), 'data', ((('ref', 'erence'),),))},
3456- set(index.iter_entries([('name', 'fin')])))
3457- self.assertEqual([], list(index.iter_entries([('ref', 'erence')])))
3458+ ((b'name', b'fin'), b'data', ([(b'ref', b'erence')], ))])
3459+ self.assertEqual([(index, (b'name', b'fin'), b'data', (((b'ref', b'erence'),),))],
3460+ list(index.iter_all_entries()))
3461+ self.assertEqual([(index, (b'name', b'fin'), b'data', (((b'ref', b'erence'),),))],
3462+ list(index.iter_entries([(b'name', b'fin')])))
3463+ self.assertEqual([], list(index.iter_entries([(b'ref', b'erence')])))
3464
3465 def test_iter_all_keys(self):
3466 index = self.make_index(1, nodes=[
3467- (('name', ), 'data', ([('ref', )], )),
3468- (('ref', ), 'refdata', ([], ))])
3469- self.assertEqual({(index, ('name', ), 'data', ((('ref',),),)),
3470- (index, ('ref', ), 'refdata', ((), ))},
3471- set(index.iter_entries([('name', ), ('ref', )])))
3472+ ((b'name', ), b'data', ([(b'ref', )], )),
3473+ ((b'ref', ), b'refdata', ([], ))])
3474+ self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),)),
3475+ (index, (b'ref', ), b'refdata', ((), ))},
3476+ set(index.iter_entries([(b'name', ), (b'ref', )])))
3477
3478 def test_iter_nothing_empty(self):
3479 index = self.make_index()
3480@@ -867,12 +868,12 @@
3481
3482 def test_iter_missing_entry_empty(self):
3483 index = self.make_index()
3484- self.assertEqual([], list(index.iter_entries([('a', )])))
3485+ self.assertEqual([], list(index.iter_entries([(b'a', )])))
3486
3487 def test_iter_missing_entry_empty_no_size(self):
3488 idx = self.make_index()
3489- idx = _mod_index.GraphIndex(idx._transport, 'index', None)
3490- self.assertEqual([], list(idx.iter_entries([('a', )])))
3491+        idx = _mod_index.GraphIndex(idx._transport, 'index', None)
3492+ self.assertEqual([], list(idx.iter_entries([(b'a', )])))
3493
3494 def test_iter_key_prefix_1_element_key_None(self):
3495 index = self.make_index()
3496@@ -882,64 +883,64 @@
3497 def test_iter_key_prefix_wrong_length(self):
3498 index = self.make_index()
3499 self.assertRaises(_mod_index.BadIndexKey, list,
3500- index.iter_entries_prefix([('foo', None)]))
3501+ index.iter_entries_prefix([(b'foo', None)]))
3502 index = self.make_index(key_elements=2)
3503 self.assertRaises(_mod_index.BadIndexKey, list,
3504- index.iter_entries_prefix([('foo', )]))
3505+ index.iter_entries_prefix([(b'foo', )]))
3506 self.assertRaises(_mod_index.BadIndexKey, list,
3507- index.iter_entries_prefix([('foo', None, None)]))
3508+ index.iter_entries_prefix([(b'foo', None, None)]))
3509
3510 def test_iter_key_prefix_1_key_element_no_refs(self):
3511 index = self.make_index( nodes=[
3512- (('name', ), 'data', ()),
3513- (('ref', ), 'refdata', ())])
3514- self.assertEqual({(index, ('name', ), 'data'),
3515- (index, ('ref', ), 'refdata')},
3516- set(index.iter_entries_prefix([('name', ), ('ref', )])))
3517+ ((b'name', ), b'data', ()),
3518+ ((b'ref', ), b'refdata', ())])
3519+ self.assertEqual({(index, (b'name', ), b'data'),
3520+ (index, (b'ref', ), b'refdata')},
3521+ set(index.iter_entries_prefix([(b'name', ), (b'ref', )])))
3522
3523 def test_iter_key_prefix_1_key_element_refs(self):
3524 index = self.make_index(1, nodes=[
3525- (('name', ), 'data', ([('ref', )], )),
3526- (('ref', ), 'refdata', ([], ))])
3527- self.assertEqual({(index, ('name', ), 'data', ((('ref',),),)),
3528- (index, ('ref', ), 'refdata', ((), ))},
3529- set(index.iter_entries_prefix([('name', ), ('ref', )])))
3530+ ((b'name', ), b'data', ([(b'ref', )], )),
3531+ ((b'ref', ), b'refdata', ([], ))])
3532+ self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),)),
3533+ (index, (b'ref', ), b'refdata', ((), ))},
3534+ set(index.iter_entries_prefix([(b'name', ), (b'ref', )])))
3535
3536 def test_iter_key_prefix_2_key_element_no_refs(self):
3537 index = self.make_index(key_elements=2, nodes=[
3538- (('name', 'fin1'), 'data', ()),
3539- (('name', 'fin2'), 'beta', ()),
3540- (('ref', 'erence'), 'refdata', ())])
3541- self.assertEqual({(index, ('name', 'fin1'), 'data'),
3542- (index, ('ref', 'erence'), 'refdata')},
3543- set(index.iter_entries_prefix([('name', 'fin1'), ('ref', 'erence')])))
3544- self.assertEqual({(index, ('name', 'fin1'), 'data'),
3545- (index, ('name', 'fin2'), 'beta')},
3546- set(index.iter_entries_prefix([('name', None)])))
3547+ ((b'name', b'fin1'), b'data', ()),
3548+ ((b'name', b'fin2'), b'beta', ()),
3549+ ((b'ref', b'erence'), b'refdata', ())])
3550+ self.assertEqual({(index, (b'name', b'fin1'), b'data'),
3551+ (index, (b'ref', b'erence'), b'refdata')},
3552+ set(index.iter_entries_prefix([(b'name', b'fin1'), (b'ref', b'erence')])))
3553+ self.assertEqual({(index, (b'name', b'fin1'), b'data'),
3554+ (index, (b'name', b'fin2'), b'beta')},
3555+ set(index.iter_entries_prefix([(b'name', None)])))
3556
3557 def test_iter_key_prefix_2_key_element_refs(self):
3558 index = self.make_index(1, key_elements=2, nodes=[
3559- (('name', 'fin1'), 'data', ([('ref', 'erence')], )),
3560- (('name', 'fin2'), 'beta', ([], )),
3561- (('ref', 'erence'), 'refdata', ([], ))])
3562- self.assertEqual({(index, ('name', 'fin1'), 'data', ((('ref', 'erence'),),)),
3563- (index, ('ref', 'erence'), 'refdata', ((), ))},
3564- set(index.iter_entries_prefix([('name', 'fin1'), ('ref', 'erence')])))
3565- self.assertEqual({(index, ('name', 'fin1'), 'data', ((('ref', 'erence'),),)),
3566- (index, ('name', 'fin2'), 'beta', ((), ))},
3567- set(index.iter_entries_prefix([('name', None)])))
3568+ ((b'name', b'fin1'), b'data', ([(b'ref', b'erence')], )),
3569+ ((b'name', b'fin2'), b'beta', ([], )),
3570+ ((b'ref', b'erence'), b'refdata', ([], ))])
3571+ self.assertEqual({(index, (b'name', b'fin1'), b'data', (((b'ref', b'erence'),),)),
3572+ (index, (b'ref', b'erence'), b'refdata', ((), ))},
3573+ set(index.iter_entries_prefix([(b'name', b'fin1'), (b'ref', b'erence')])))
3574+ self.assertEqual({(index, (b'name', b'fin1'), b'data', (((b'ref', b'erence'),),)),
3575+ (index, (b'name', b'fin2'), b'beta', ((), ))},
3576+ set(index.iter_entries_prefix([(b'name', None)])))
3577
3578 def test_key_count_empty(self):
3579 index = self.make_index()
3580 self.assertEqual(0, index.key_count())
3581
3582 def test_key_count_one(self):
3583- index = self.make_index(nodes=[(('name', ), '', ())])
3584+ index = self.make_index(nodes=[((b'name', ), b'', ())])
3585 self.assertEqual(1, index.key_count())
3586
3587 def test_key_count_two(self):
3588 index = self.make_index(nodes=[
3589- (('name', ), '', ()), (('foo', ), '', ())])
3590+ ((b'name', ), b'', ()), ((b'foo', ), b'', ())])
3591 self.assertEqual(2, index.key_count())
3592
3593 def test_read_and_parse_tracks_real_read_value(self):
3594@@ -957,9 +958,9 @@
3595
3596 def test_read_and_parse_triggers_buffer_all(self):
3597 index = self.make_index(key_elements=2, nodes=[
3598- (('name', 'fin1'), 'data', ()),
3599- (('name', 'fin2'), 'beta', ()),
3600- (('ref', 'erence'), 'refdata', ())])
3601+ ((b'name', b'fin1'), b'data', ()),
3602+ ((b'name', b'fin2'), b'beta', ()),
3603+ ((b'ref', b'erence'), b'refdata', ())])
3604 self.assertTrue(index._size > 0)
3605 self.assertIs(None, index._nodes)
3606 index._read_and_parse([(0, index._size)])
3607@@ -967,7 +968,7 @@
3608
3609 def test_validate_bad_index_errors(self):
3610 trans = self.get_transport()
3611- trans.put_bytes('name', "not an index\n")
3612+ trans.put_bytes('name', b"not an index\n")
3613 idx = _mod_index.GraphIndex(trans, 'name', 13)
3614 self.assertRaises(_mod_index.BadIndexFormatSignature, idx.validate)
3615
3616@@ -976,7 +977,7 @@
3617 trans = self.get_transport()
3618 content = trans.get_bytes('index')
3619 # change the options line to end with a rather than a parseable number
3620- new_content = content[:-2] + 'a\n\n'
3621+ new_content = content[:-2] + b'a\n\n'
3622 trans.put_bytes('index', new_content)
3623 self.assertRaises(_mod_index.BadIndexOptions, idx.validate)
3624
3625@@ -989,7 +990,7 @@
3626 self.assertRaises(_mod_index.BadIndexData, index.validate)
3627
3628 def test_validate_missing_end_line_nonempty(self):
3629- index = self.make_index(2, nodes=[(('key', ), '', ([], []))])
3630+ index = self.make_index(2, nodes=[((b'key', ), b'', ([], []))])
3631 trans = self.get_transport()
3632 content = trans.get_bytes('index')
3633 # truncate the last byte
3634@@ -1001,7 +1002,7 @@
3635 index.validate()
3636
3637 def test_validate_no_refs_content(self):
3638- index = self.make_index(nodes=[(('key', ), 'value', ())])
3639+ index = self.make_index(nodes=[((b'key', ), b'value', ())])
3640 index.validate()
3641
3642 # XXX: external_references tests are duplicated in test_btree_index. We
3643@@ -1012,35 +1013,35 @@
3644
3645 def test_external_references_no_results(self):
3646 index = self.make_index(ref_lists=1, nodes=[
3647- (('key',), 'value', ([],))])
3648+ ((b'key',), b'value', ([],))])
3649 self.assertEqual(set(), index.external_references(0))
3650
3651 def test_external_references_missing_ref(self):
3652- missing_key = ('missing',)
3653+ missing_key = (b'missing',)
3654 index = self.make_index(ref_lists=1, nodes=[
3655- (('key',), 'value', ([missing_key],))])
3656+ ((b'key',), b'value', ([missing_key],))])
3657 self.assertEqual({missing_key}, index.external_references(0))
3658
3659 def test_external_references_multiple_ref_lists(self):
3660- missing_key = ('missing',)
3661+ missing_key = (b'missing',)
3662 index = self.make_index(ref_lists=2, nodes=[
3663- (('key',), 'value', ([], [missing_key]))])
3664+ ((b'key',), b'value', ([], [missing_key]))])
3665 self.assertEqual(set([]), index.external_references(0))
3666 self.assertEqual({missing_key}, index.external_references(1))
3667
3668 def test_external_references_two_records(self):
3669 index = self.make_index(ref_lists=1, nodes=[
3670- (('key-1',), 'value', ([('key-2',)],)),
3671- (('key-2',), 'value', ([],)),
3672+ ((b'key-1',), b'value', ([(b'key-2',)],)),
3673+ ((b'key-2',), b'value', ([],)),
3674 ])
3675 self.assertEqual(set([]), index.external_references(0))
3676
3677 def test__find_ancestors(self):
3678- key1 = ('key-1',)
3679- key2 = ('key-2',)
3680+ key1 = (b'key-1',)
3681+ key2 = (b'key-2',)
3682 index = self.make_index(ref_lists=1, key_elements=1, nodes=[
3683- (key1, 'value', ([key2],)),
3684- (key2, 'value', ([],)),
3685+ (key1, b'value', ([key2],)),
3686+ (key2, b'value', ([],)),
3687 ])
3688 parent_map = {}
3689 missing_keys = set()
3690@@ -1055,12 +1056,12 @@
3691 self.assertEqual(set(), search_keys)
3692
3693 def test__find_ancestors_w_missing(self):
3694- key1 = ('key-1',)
3695- key2 = ('key-2',)
3696- key3 = ('key-3',)
3697+ key1 = (b'key-1',)
3698+ key2 = (b'key-2',)
3699+ key3 = (b'key-3',)
3700 index = self.make_index(ref_lists=1, key_elements=1, nodes=[
3701- (key1, 'value', ([key2],)),
3702- (key2, 'value', ([],)),
3703+ (key1, b'value', ([key2],)),
3704+ (key2, b'value', ([],)),
3705 ])
3706 parent_map = {}
3707 missing_keys = set()
3708@@ -1071,13 +1072,13 @@
3709 self.assertEqual(set(), search_keys)
3710
3711 def test__find_ancestors_dont_search_known(self):
3712- key1 = ('key-1',)
3713- key2 = ('key-2',)
3714- key3 = ('key-3',)
3715+ key1 = (b'key-1',)
3716+ key2 = (b'key-2',)
3717+ key3 = (b'key-3',)
3718 index = self.make_index(ref_lists=1, key_elements=1, nodes=[
3719- (key1, 'value', ([key2],)),
3720- (key2, 'value', ([key3],)),
3721- (key3, 'value', ([],)),
3722+ (key1, b'value', ([key2],)),
3723+ (key2, b'value', ([key3],)),
3724+ (key3, b'value', ([],)),
3725 ])
3726 # We already know about key2, so we won't try to search for key3
3727 parent_map = {key2: (key3,)}
3728@@ -1109,7 +1110,7 @@
3729 size = trans.put_file(name, stream)
3730 return _mod_index.GraphIndex(trans, name, size)
3731
3732- def make_combined_index_with_missing(self, missing=['1', '2']):
3733+    def make_combined_index_with_missing(self, missing=['1', '2']):
3734 """Create a CombinedGraphIndex which will have missing indexes.
3735
3736 This creates a CGI which thinks it has 2 indexes, however they have
3737@@ -1119,11 +1120,11 @@
3738 :param missing: The underlying indexes to delete
3739 :return: (CombinedGraphIndex, reload_counter)
3740 """
3741- idx1 = self.make_index('1', nodes=[(('1',), '', ())])
3742- idx2 = self.make_index('2', nodes=[(('2',), '', ())])
3743+ idx1 = self.make_index('1', nodes=[((b'1',), b'', ())])
3744+ idx2 = self.make_index('2', nodes=[((b'2',), b'', ())])
3745 idx3 = self.make_index('3', nodes=[
3746- (('1',), '', ()),
3747- (('2',), '', ())])
3748+ ((b'1',), b'', ()),
3749+ ((b'2',), b'', ())])
3750
3751 # total_reloads, num_changed, num_unchanged
3752 reload_counter = [0, 0, 0]
3753@@ -1149,9 +1150,9 @@
3754
3755 def test_add_index(self):
3756 idx = _mod_index.CombinedGraphIndex([])
3757- idx1 = self.make_index('name', 0, nodes=[(('key', ), '', ())])
3758+ idx1 = self.make_index('name', 0, nodes=[((b'key', ), b'', ())])
3759 idx.insert_index(0, idx1)
3760- self.assertEqual([(idx1, ('key', ), '')],
3761+ self.assertEqual([(idx1, (b'key', ), b'')],
3762 list(idx.iter_all_entries()))
3763
3764 def test_clear_cache(self):
3765@@ -1170,9 +1171,9 @@
3766 return self._index.clear_cache()
3767
3768 idx = _mod_index.CombinedGraphIndex([])
3769- idx1 = self.make_index('name', 0, nodes=[(('key', ), '', ())])
3770+ idx1 = self.make_index('name', 0, nodes=[((b'key', ), b'', ())])
3771 idx.insert_index(0, ClearCacheProxy(idx1))
3772- idx2 = self.make_index('name', 0, nodes=[(('key', ), '', ())])
3773+ idx2 = self.make_index('name', 0, nodes=[((b'key', ), b'', ())])
3774 idx.insert_index(1, ClearCacheProxy(idx2))
3775 # CombinedGraphIndex should call 'clear_cache()' on all children
3776 idx.clear_cache()
3777@@ -1188,49 +1189,49 @@
3778 self.assertEqual([], list(idx.iter_all_entries()))
3779
3780 def test_iter_all_entries_simple(self):
3781- idx1 = self.make_index('name', nodes=[(('name', ), 'data', ())])
3782+ idx1 = self.make_index('name', nodes=[((b'name', ), b'data', ())])
3783 idx = _mod_index.CombinedGraphIndex([idx1])
3784- self.assertEqual([(idx1, ('name', ), 'data')],
3785+ self.assertEqual([(idx1, (b'name', ), b'data')],
3786 list(idx.iter_all_entries()))
3787
3788 def test_iter_all_entries_two_indices(self):
3789- idx1 = self.make_index('name1', nodes=[(('name', ), 'data', ())])
3790- idx2 = self.make_index('name2', nodes=[(('2', ), '', ())])
3791+ idx1 = self.make_index('name1', nodes=[((b'name', ), b'data', ())])
3792+ idx2 = self.make_index('name2', nodes=[((b'2', ), b'', ())])
3793 idx = _mod_index.CombinedGraphIndex([idx1, idx2])
3794- self.assertEqual([(idx1, ('name', ), 'data'),
3795- (idx2, ('2', ), '')],
3796+ self.assertEqual([(idx1, (b'name', ), b'data'),
3797+ (idx2, (b'2', ), b'')],
3798 list(idx.iter_all_entries()))
3799
3800 def test_iter_entries_two_indices_dup_key(self):
3801- idx1 = self.make_index('name1', nodes=[(('name', ), 'data', ())])
3802- idx2 = self.make_index('name2', nodes=[(('name', ), 'data', ())])
3803+ idx1 = self.make_index('name1', nodes=[((b'name', ), b'data', ())])
3804+ idx2 = self.make_index('name2', nodes=[((b'name', ), b'data', ())])
3805 idx = _mod_index.CombinedGraphIndex([idx1, idx2])
3806- self.assertEqual([(idx1, ('name', ), 'data')],
3807- list(idx.iter_entries([('name', )])))
3808+ self.assertEqual([(idx1, (b'name', ), b'data')],
3809+ list(idx.iter_entries([(b'name', )])))
3810
3811 def test_iter_all_entries_two_indices_dup_key(self):
3812- idx1 = self.make_index('name1', nodes=[(('name', ), 'data', ())])
3813- idx2 = self.make_index('name2', nodes=[(('name', ), 'data', ())])
3814+ idx1 = self.make_index('name1', nodes=[((b'name', ), b'data', ())])
3815+ idx2 = self.make_index('name2', nodes=[((b'name', ), b'data', ())])
3816 idx = _mod_index.CombinedGraphIndex([idx1, idx2])
3817- self.assertEqual([(idx1, ('name', ), 'data')],
3818+ self.assertEqual([(idx1, (b'name', ), b'data')],
3819 list(idx.iter_all_entries()))
3820
3821 def test_iter_key_prefix_2_key_element_refs(self):
3822 idx1 = self.make_index('1', 1, key_elements=2, nodes=[
3823- (('name', 'fin1'), 'data', ([('ref', 'erence')], ))])
3824- idx2 = self.make_index('2', 1, key_elements=2, nodes=[
3825- (('name', 'fin2'), 'beta', ([], )),
3826- (('ref', 'erence'), 'refdata', ([], ))])
3827+ ((b'name', b'fin1'), b'data', ([(b'ref', b'erence')], ))])
3828+        idx2 = self.make_index('2', 1, key_elements=2, nodes=[
3829+ ((b'name', b'fin2'), b'beta', ([], )),
3830+ ((b'ref', b'erence'), b'refdata', ([], ))])
3831 idx = _mod_index.CombinedGraphIndex([idx1, idx2])
3832- self.assertEqual({(idx1, ('name', 'fin1'), 'data',
3833- ((('ref', 'erence'),),)),
3834- (idx2, ('ref', 'erence'), 'refdata', ((), ))},
3835- set(idx.iter_entries_prefix([('name', 'fin1'),
3836- ('ref', 'erence')])))
3837- self.assertEqual({(idx1, ('name', 'fin1'), 'data',
3838- ((('ref', 'erence'),),)),
3839- (idx2, ('name', 'fin2'), 'beta', ((), ))},
3840- set(idx.iter_entries_prefix([('name', None)])))
3841+ self.assertEqual({(idx1, (b'name', b'fin1'), b'data',
3842+ (((b'ref', b'erence'),),)),
3843+ (idx2, (b'ref', b'erence'), b'refdata', ((), ))},
3844+ set(idx.iter_entries_prefix([(b'name', b'fin1'),
3845+ (b'ref', b'erence')])))
3846+ self.assertEqual({(idx1, (b'name', b'fin1'), b'data',
3847+ (((b'ref', b'erence'),),)),
3848+ (idx2, (b'name', b'fin2'), b'beta', ((), ))},
3849+ set(idx.iter_entries_prefix([(b'name', None)])))
3850
3851 def test_iter_nothing_empty(self):
3852 idx = _mod_index.CombinedGraphIndex([])
3853@@ -1242,23 +1243,23 @@
3854 self.assertEqual([], list(idx.iter_entries([])))
3855
3856 def test_iter_all_keys(self):
3857- idx1 = self.make_index('1', 1, nodes=[(('name', ), 'data',
3858- ([('ref', )], ))])
3859- idx2 = self.make_index('2', 1, nodes=[(('ref', ), 'refdata', ((), ))])
3860+ idx1 = self.make_index('1', 1, nodes=[((b'name', ), b'data',
3861+ ([(b'ref', )], ))])
3862+ idx2 = self.make_index('2', 1, nodes=[((b'ref', ), b'refdata', ((), ))])
3863 idx = _mod_index.CombinedGraphIndex([idx1, idx2])
3864- self.assertEqual({(idx1, ('name', ), 'data', ((('ref', ), ), )),
3865- (idx2, ('ref', ), 'refdata', ((), ))},
3866- set(idx.iter_entries([('name', ), ('ref', )])))
3867+ self.assertEqual({(idx1, (b'name', ), b'data', (((b'ref', ), ), )),
3868+ (idx2, (b'ref', ), b'refdata', ((), ))},
3869+ set(idx.iter_entries([(b'name', ), (b'ref', )])))
3870
3871 def test_iter_all_keys_dup_entry(self):
3872- idx1 = self.make_index('1', 1, nodes=[(('name', ), 'data',
3873- ([('ref', )], )),
3874- (('ref', ), 'refdata', ([], ))])
3875- idx2 = self.make_index('2', 1, nodes=[(('ref', ), 'refdata', ([], ))])
3876+ idx1 = self.make_index('1', 1, nodes=[((b'name', ), b'data',
3877+ ([(b'ref', )], )),
3878+ ((b'ref', ), b'refdata', ([], ))])
3879+ idx2 = self.make_index('2', 1, nodes=[((b'ref', ), b'refdata', ([], ))])
3880 idx = _mod_index.CombinedGraphIndex([idx1, idx2])
3881- self.assertEqual({(idx1, ('name', ), 'data', ((('ref',),),)),
3882- (idx1, ('ref', ), 'refdata', ((), ))},
3883- set(idx.iter_entries([('name', ), ('ref', )])))
3884+ self.assertEqual({(idx1, (b'name', ), b'data', (((b'ref',),),)),
3885+ (idx1, (b'ref', ), b'refdata', ((), ))},
3886+ set(idx.iter_entries([(b'name', ), (b'ref', )])))
3887
3888 def test_iter_missing_entry_empty(self):
3889 idx = _mod_index.CombinedGraphIndex([])
3890@@ -1267,7 +1268,7 @@
3891 def test_iter_missing_entry_one_index(self):
3892 idx1 = self.make_index('1')
3893 idx = _mod_index.CombinedGraphIndex([idx1])
3894- self.assertEqual([], list(idx.iter_entries([('a', )])))
3895+ self.assertEqual([], list(idx.iter_entries([(b'a', )])))
3896
3897 def test_iter_missing_entry_two_index(self):
3898 idx1 = self.make_index('1')
3899@@ -1276,15 +1277,15 @@
3900 self.assertEqual([], list(idx.iter_entries([('a', )])))
3901
3902 def test_iter_entry_present_one_index_only(self):
3903- idx1 = self.make_index('1', nodes=[(('key', ), '', ())])
3904+ idx1 = self.make_index('1', nodes=[((b'key', ), b'', ())])
3905 idx2 = self.make_index('2', nodes=[])
3906 idx = _mod_index.CombinedGraphIndex([idx1, idx2])
3907- self.assertEqual([(idx1, ('key', ), '')],
3908- list(idx.iter_entries([('key', )])))
3909+ self.assertEqual([(idx1, (b'key', ), b'')],
3910+ list(idx.iter_entries([(b'key', )])))
3911 # and in the other direction
3912 idx = _mod_index.CombinedGraphIndex([idx2, idx1])
3913- self.assertEqual([(idx1, ('key', ), '')],
3914- list(idx.iter_entries([('key', )])))
3915+ self.assertEqual([(idx1, (b'key', ), b'')],
3916+ list(idx.iter_entries([(b'key', )])))
3917
3918 def test_key_count_empty(self):
3919 idx1 = self.make_index('1', nodes=[])
3920@@ -1294,15 +1295,15 @@
3921
3922 def test_key_count_sums_index_keys(self):
3923 idx1 = self.make_index('1', nodes=[
3924- (('1',), '', ()),
3925- (('2',), '', ())])
3926- idx2 = self.make_index('2', nodes=[(('1',), '', ())])
3927+ ((b'1',), b'', ()),
3928+ ((b'2',), b'', ())])
3929+ idx2 = self.make_index('2', nodes=[((b'1',), b'', ())])
3930 idx = _mod_index.CombinedGraphIndex([idx1, idx2])
3931 self.assertEqual(3, idx.key_count())
3932
3933 def test_validate_bad_child_index_errors(self):
3934 trans = self.get_transport()
3935- trans.put_bytes('name', "not an index\n")
3936+ trans.put_bytes('name', b"not an index\n")
3937 idx1 = _mod_index.GraphIndex(trans, 'name', 13)
3938 idx = _mod_index.CombinedGraphIndex([idx1])
3939 self.assertRaises(_mod_index.BadIndexFormatSignature, idx.validate)
3940@@ -1327,15 +1328,15 @@
3941 # still fail. This is mostly to test we don't get stuck in an infinite
3942 # loop trying to reload
3943 idx, reload_counter = self.make_combined_index_with_missing(
3944- ['1', '2', '3'])
3945+ [b'1', b'2', b'3'])
3946 self.assertRaises(errors.NoSuchFile, idx.key_count)
3947 self.assertEqual([2, 1, 1], reload_counter)
3948
3949 def test_iter_entries_reloads(self):
3950 index, reload_counter = self.make_combined_index_with_missing()
3951- result = list(index.iter_entries([('1',), ('2',), ('3',)]))
3952+ result = list(index.iter_entries([(b'1',), (b'2',), (b'3',)]))
3953 index3 = index._indices[0]
3954- self.assertEqual([(index3, ('1',), ''), (index3, ('2',), '')],
3955+ self.assertEqual([(index3, (b'1',), b''), (index3, (b'2',), b'')],
3956 result)
3957 self.assertEqual([1, 1, 0], reload_counter)
3958
3959@@ -1344,11 +1345,11 @@
3960 # through
3961 index, reload_counter = self.make_combined_index_with_missing(['2'])
3962 index1, index2 = index._indices
3963- result = list(index.iter_entries([('1',), ('2',), ('3',)]))
3964+ result = list(index.iter_entries([(b'1',), (b'2',), (b'3',)]))
3965 index3 = index._indices[0]
3966- # We had already yielded '1', so we just go on to the next, we should
3967- # not yield '1' twice.
3968- self.assertEqual([(index1, ('1',), ''), (index3, ('2',), '')],
3969+ # We had already yielded b'1', so we just go on to the next, we should
3970+ # not yield b'1' twice.
3971+ self.assertEqual([(index1, (b'1',), b''), (index3, (b'2',), b'')],
3972 result)
3973 self.assertEqual([1, 1, 0], reload_counter)
3974
3975@@ -1368,7 +1369,7 @@
3976 index, reload_counter = self.make_combined_index_with_missing()
3977 result = list(index.iter_all_entries())
3978 index3 = index._indices[0]
3979- self.assertEqual([(index3, ('1',), ''), (index3, ('2',), '')],
3980+ self.assertEqual([(index3, (b'1',), b''), (index3, (b'2',), b'')],
3981 result)
3982 self.assertEqual([1, 1, 0], reload_counter)
3983
3984@@ -1379,7 +1380,7 @@
3985 index3 = index._indices[0]
3986 # We had already yielded '1', so we just go on to the next, we should
3987 # not yield '1' twice.
3988- self.assertEqual([(index1, ('1',), ''), (index3, ('2',), '')],
3989+ self.assertEqual([(index1, (b'1',), b''), (index3, (b'2',), b'')],
3990 result)
3991 self.assertEqual([1, 1, 0], reload_counter)
3992
3993@@ -1390,37 +1391,37 @@
3994
3995 def test_iter_all_entries_reloads_and_fails(self):
3996 index, reload_counter = self.make_combined_index_with_missing(
3997- ['1', '2', '3'])
3998+ [b'1', b'2', b'3'])
3999 self.assertListRaises(errors.NoSuchFile, index.iter_all_entries)
4000
4001 def test_iter_entries_prefix_reloads(self):
4002 index, reload_counter = self.make_combined_index_with_missing()
4003- result = list(index.iter_entries_prefix([('1',)]))
4004+ result = list(index.iter_entries_prefix([(b'1',)]))
4005 index3 = index._indices[0]
4006- self.assertEqual([(index3, ('1',), '')], result)
4007+ self.assertEqual([(index3, (b'1',), b'')], result)
4008 self.assertEqual([1, 1, 0], reload_counter)
4009
4010 def test_iter_entries_prefix_reloads_midway(self):
4011- index, reload_counter = self.make_combined_index_with_missing(['2'])
4012+ index, reload_counter = self.make_combined_index_with_missing([b'2'])
4013 index1, index2 = index._indices
4014- result = list(index.iter_entries_prefix([('1',)]))
4015+ result = list(index.iter_entries_prefix([(b'1',)]))
4016 index3 = index._indices[0]
4017- # We had already yielded '1', so we just go on to the next, we should
4018- # not yield '1' twice.
4019- self.assertEqual([(index1, ('1',), '')], result)
4020+ # We had already yielded b'1', so we just go on to the next, we should
4021+ # not yield b'1' twice.
4022+ self.assertEqual([(index1, (b'1',), b'')], result)
4023 self.assertEqual([1, 1, 0], reload_counter)
4024
4025 def test_iter_entries_prefix_no_reload(self):
4026 index, reload_counter = self.make_combined_index_with_missing()
4027 index._reload_func = None
4028 self.assertListRaises(errors.NoSuchFile, index.iter_entries_prefix,
4029- [('1',)])
4030+ [(b'1',)])
4031
4032 def test_iter_entries_prefix_reloads_and_fails(self):
4033 index, reload_counter = self.make_combined_index_with_missing(
4034- ['1', '2', '3'])
4035+ [b'1', b'2', b'3'])
4036 self.assertListRaises(errors.NoSuchFile, index.iter_entries_prefix,
4037- [('1',)])
4038+ [(b'1',)])
4039
4040
4041 def make_index_with_simple_nodes(self, name, num_nodes=1):
4042@@ -1429,7 +1430,7 @@
4043 Nodes will have a value of '' and no references.
4044 """
4045 nodes = [
4046- (('index-%s-key-%s' % (name, n),), '', ())
4047+ ((('index-%s-key-%s' % (name, n)).encode('ascii'),), b'', ())
4048 for n in range(1, num_nodes+1)]
4049 return self.make_index('index-%s' % name, 0, nodes=nodes)
4050
4051@@ -1437,18 +1438,18 @@
4052 # Four indices: [key1] in idx1, [key2,key3] in idx2, [] in idx3,
4053 # [key4] in idx4.
4054 idx = _mod_index.CombinedGraphIndex([])
4055- idx.insert_index(0, self.make_index_with_simple_nodes('1'), '1')
4056- idx.insert_index(1, self.make_index_with_simple_nodes('2'), '2')
4057- idx.insert_index(2, self.make_index_with_simple_nodes('3'), '3')
4058- idx.insert_index(3, self.make_index_with_simple_nodes('4'), '4')
4059+ idx.insert_index(0, self.make_index_with_simple_nodes('1'), b'1')
4060+ idx.insert_index(1, self.make_index_with_simple_nodes('2'), b'2')
4061+ idx.insert_index(2, self.make_index_with_simple_nodes('3'), b'3')
4062+ idx.insert_index(3, self.make_index_with_simple_nodes('4'), b'4')
4063 idx1, idx2, idx3, idx4 = idx._indices
4064 # Query a key from idx4 and idx2.
4065 self.assertLength(2, list(idx.iter_entries(
4066- [('index-4-key-1',), ('index-2-key-1',)])))
4067+ [(b'index-4-key-1',), (b'index-2-key-1',)])))
4068 # Now idx2 and idx4 should be moved to the front (and idx1 should
4069 # still be before idx3).
4070 self.assertEqual([idx2, idx4, idx1, idx3], idx._indices)
4071- self.assertEqual(['2', '4', '1', '3'], idx._index_names)
4072+ self.assertEqual([b'2', b'4', b'1', b'3'], idx._index_names)
4073
4074 def test_reorder_propagates_to_siblings(self):
4075 # Two CombinedGraphIndex objects, with the same number of indicies with
4076@@ -1486,17 +1487,17 @@
4077 self.assertRaises(errors.NoSuchFile, idx.validate)
4078
4079 def test_find_ancestors_across_indexes(self):
4080- key1 = ('key-1',)
4081- key2 = ('key-2',)
4082- key3 = ('key-3',)
4083- key4 = ('key-4',)
4084+ key1 = (b'key-1',)
4085+ key2 = (b'key-2',)
4086+ key3 = (b'key-3',)
4087+ key4 = (b'key-4',)
4088 index1 = self.make_index('12', ref_lists=1, nodes=[
4089- (key1, 'value', ([],)),
4090- (key2, 'value', ([key1],)),
4091+ (key1, b'value', ([],)),
4092+ (key2, b'value', ([key1],)),
4093 ])
4094 index2 = self.make_index('34', ref_lists=1, nodes=[
4095- (key3, 'value', ([key2],)),
4096- (key4, 'value', ([key3],)),
4097+ (key3, b'value', ([key2],)),
4098+ (key4, b'value', ([key3],)),
4099 ])
4100 c_index = _mod_index.CombinedGraphIndex([index1, index2])
4101 parent_map, missing_keys = c_index.find_ancestry([key1], 0)
4102@@ -1510,16 +1511,16 @@
4103 self.assertEqual(set(), missing_keys)
4104
4105 def test_find_ancestors_missing_keys(self):
4106- key1 = ('key-1',)
4107- key2 = ('key-2',)
4108- key3 = ('key-3',)
4109- key4 = ('key-4',)
4110+ key1 = (b'key-1',)
4111+ key2 = (b'key-2',)
4112+ key3 = (b'key-3',)
4113+ key4 = (b'key-4',)
4114 index1 = self.make_index('12', ref_lists=1, nodes=[
4115- (key1, 'value', ([],)),
4116- (key2, 'value', ([key1],)),
4117+ (key1, b'value', ([],)),
4118+ (key2, b'value', ([key1],)),
4119 ])
4120 index2 = self.make_index('34', ref_lists=1, nodes=[
4121- (key3, 'value', ([key2],)),
4122+ (key3, b'value', ([key2],)),
4123 ])
4124 c_index = _mod_index.CombinedGraphIndex([index1, index2])
4125 # Searching for a key which is actually not present at all should
4126@@ -1530,22 +1531,22 @@
4127
4128 def test_find_ancestors_no_indexes(self):
4129 c_index = _mod_index.CombinedGraphIndex([])
4130- key1 = ('key-1',)
4131+ key1 = (b'key-1',)
4132 parent_map, missing_keys = c_index.find_ancestry([key1], 0)
4133 self.assertEqual({}, parent_map)
4134 self.assertEqual({key1}, missing_keys)
4135
4136 def test_find_ancestors_ghost_parent(self):
4137- key1 = ('key-1',)
4138- key2 = ('key-2',)
4139- key3 = ('key-3',)
4140- key4 = ('key-4',)
4141+ key1 = (b'key-1',)
4142+ key2 = (b'key-2',)
4143+ key3 = (b'key-3',)
4144+ key4 = (b'key-4',)
4145 index1 = self.make_index('12', ref_lists=1, nodes=[
4146- (key1, 'value', ([],)),
4147- (key2, 'value', ([key1],)),
4148+ (key1, b'value', ([],)),
4149+ (key2, b'value', ([key1],)),
4150 ])
4151 index2 = self.make_index('34', ref_lists=1, nodes=[
4152- (key4, 'value', ([key2, key3],)),
4153+ (key4, b'value', ([key2, key3],)),
4154 ])
4155 c_index = _mod_index.CombinedGraphIndex([index1, index2])
4156 # Searching for a key which is actually not present at all should
4157@@ -1559,11 +1560,11 @@
4158 idx = self.make_index('test', ref_lists=1, key_elements=1, nodes=[])
4159 parent_map = {}
4160 missing_keys = set()
4161- search_keys = idx._find_ancestors([('one',), ('two',)], 0, parent_map,
4162+ search_keys = idx._find_ancestors([(b'one',), (b'two',)], 0, parent_map,
4163 missing_keys)
4164 self.assertEqual(set(), search_keys)
4165 self.assertEqual({}, parent_map)
4166- self.assertEqual({('one',), ('two',)}, missing_keys)
4167+ self.assertEqual({(b'one',), (b'two',)}, missing_keys)
4168
4169
4170 class TestInMemoryGraphIndex(tests.TestCaseWithMemoryTransport):
4171@@ -1575,22 +1576,22 @@
4172
4173 def test_add_nodes_no_refs(self):
4174 index = self.make_index(0)
4175- index.add_nodes([(('name', ), 'data')])
4176- index.add_nodes([(('name2', ), ''), (('name3', ), '')])
4177+ index.add_nodes([((b'name', ), b'data')])
4178+ index.add_nodes([((b'name2', ), b''), ((b'name3', ), b'')])
4179 self.assertEqual({
4180- (index, ('name', ), 'data'),
4181- (index, ('name2', ), ''),
4182- (index, ('name3', ), ''),
4183+ (index, (b'name', ), b'data'),
4184+ (index, (b'name2', ), b''),
4185+ (index, (b'name3', ), b''),
4186 }, set(index.iter_all_entries()))
4187
4188 def test_add_nodes(self):
4189 index = self.make_index(1)
4190- index.add_nodes([(('name', ), 'data', ([],))])
4191- index.add_nodes([(('name2', ), '', ([],)), (('name3', ), '', ([('r', )],))])
4192+ index.add_nodes([((b'name', ), b'data', ([],))])
4193+ index.add_nodes([((b'name2', ), b'', ([],)), ((b'name3', ), b'', ([(b'r', )],))])
4194 self.assertEqual({
4195- (index, ('name', ), 'data', ((),)),
4196- (index, ('name2', ), '', ((),)),
4197- (index, ('name3', ), '', ((('r', ), ), )),
4198+ (index, (b'name', ), b'data', ((),)),
4199+ (index, (b'name2', ), b'', ((),)),
4200+ (index, (b'name3', ), b'', (((b'r', ), ), )),
4201 }, set(index.iter_all_entries()))
4202
4203 def test_iter_all_entries_empty(self):
4204@@ -1598,74 +1599,74 @@
4205 self.assertEqual([], list(index.iter_all_entries()))
4206
4207 def test_iter_all_entries_simple(self):
4208- index = self.make_index(nodes=[(('name', ), 'data')])
4209- self.assertEqual([(index, ('name', ), 'data')],
4210+ index = self.make_index(nodes=[((b'name', ), b'data')])
4211+ self.assertEqual([(index, (b'name', ), b'data')],
4212 list(index.iter_all_entries()))
4213
4214 def test_iter_all_entries_references(self):
4215 index = self.make_index(1, nodes=[
4216- (('name', ), 'data', ([('ref', )], )),
4217- (('ref', ), 'refdata', ([], ))])
4218- self.assertEqual({(index, ('name', ), 'data', ((('ref', ),),)),
4219- (index, ('ref', ), 'refdata', ((), ))},
4220+ ((b'name', ), b'data', ([(b'ref', )], )),
4221+ ((b'ref', ), b'refdata', ([], ))])
4222+ self.assertEqual({(index, (b'name', ), b'data', (((b'ref', ),),)),
4223+ (index, (b'ref', ), b'refdata', ((), ))},
4224 set(index.iter_all_entries()))
4225
4226 def test_iteration_absent_skipped(self):
4227 index = self.make_index(1, nodes=[
4228- (('name', ), 'data', ([('ref', )], ))])
4229- self.assertEqual({(index, ('name', ), 'data', ((('ref',),),))},
4230+ ((b'name', ), b'data', ([(b'ref', )], ))])
4231+ self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),))},
4232 set(index.iter_all_entries()))
4233- self.assertEqual({(index, ('name', ), 'data', ((('ref',),),))},
4234- set(index.iter_entries([('name', )])))
4235- self.assertEqual([], list(index.iter_entries([('ref', )])))
4236+ self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),))},
4237+ set(index.iter_entries([(b'name', )])))
4238+ self.assertEqual([], list(index.iter_entries([(b'ref', )])))
4239
4240 def test_iter_all_keys(self):
4241 index = self.make_index(1, nodes=[
4242- (('name', ), 'data', ([('ref', )], )),
4243- (('ref', ), 'refdata', ([], ))])
4244- self.assertEqual({(index, ('name', ), 'data', ((('ref',),),)),
4245- (index, ('ref', ), 'refdata', ((), ))},
4246- set(index.iter_entries([('name', ), ('ref', )])))
4247+ ((b'name', ), b'data', ([(b'ref', )], )),
4248+ ((b'ref', ), b'refdata', ([], ))])
4249+ self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),)),
4250+ (index, (b'ref', ), b'refdata', ((), ))},
4251+ set(index.iter_entries([(b'name', ), (b'ref', )])))
4252
4253 def test_iter_key_prefix_1_key_element_no_refs(self):
4254 index = self.make_index( nodes=[
4255- (('name', ), 'data'),
4256- (('ref', ), 'refdata')])
4257- self.assertEqual({(index, ('name', ), 'data'),
4258- (index, ('ref', ), 'refdata')},
4259- set(index.iter_entries_prefix([('name', ), ('ref', )])))
4260+ ((b'name', ), b'data'),
4261+ ((b'ref', ), b'refdata')])
4262+ self.assertEqual({(index, (b'name', ), b'data'),
4263+ (index, (b'ref', ), b'refdata')},
4264+ set(index.iter_entries_prefix([(b'name', ), (b'ref', )])))
4265
4266 def test_iter_key_prefix_1_key_element_refs(self):
4267 index = self.make_index(1, nodes=[
4268- (('name', ), 'data', ([('ref', )], )),
4269- (('ref', ), 'refdata', ([], ))])
4270- self.assertEqual({(index, ('name', ), 'data', ((('ref',),),)),
4271- (index, ('ref', ), 'refdata', ((), ))},
4272- set(index.iter_entries_prefix([('name', ), ('ref', )])))
4273+ ((b'name', ), b'data', ([(b'ref', )], )),
4274+ ((b'ref', ), b'refdata', ([], ))])
4275+ self.assertEqual({(index, (b'name', ), b'data', (((b'ref',),),)),
4276+ (index, (b'ref', ), b'refdata', ((), ))},
4277+ set(index.iter_entries_prefix([(b'name', ), (b'ref', )])))
4278
4279 def test_iter_key_prefix_2_key_element_no_refs(self):
4280 index = self.make_index(key_elements=2, nodes=[
4281- (('name', 'fin1'), 'data'),
4282- (('name', 'fin2'), 'beta'),
4283- (('ref', 'erence'), 'refdata')])
4284- self.assertEqual({(index, ('name', 'fin1'), 'data'),
4285- (index, ('ref', 'erence'), 'refdata')},
4286- set(index.iter_entries_prefix([('name', 'fin1'), ('ref', 'erence')])))
4287- self.assertEqual({(index, ('name', 'fin1'), 'data'),
4288- (index, ('name', 'fin2'), 'beta')},
4289- set(index.iter_entries_prefix([('name', None)])))
4290+ ((b'name', b'fin1'), b'data'),
4291+ ((b'name', b'fin2'), b'beta'),
4292+ ((b'ref', b'erence'), b'refdata')])
4293+ self.assertEqual({(index, (b'name', b'fin1'), b'data'),
4294+ (index, (b'ref', b'erence'), b'refdata')},
4295+ set(index.iter_entries_prefix([(b'name', b'fin1'), (b'ref', b'erence')])))
4296+ self.assertEqual({(index, (b'name', b'fin1'), b'data'),
4297+ (index, (b'name', b'fin2'), b'beta')},
4298+ set(index.iter_entries_prefix([(b'name', None)])))
4299
4300 def test_iter_key_prefix_2_key_element_refs(self):
4301 index = self.make_index(1, key_elements=2, nodes=[
4302- (('name', 'fin1'), 'data', ([('ref', 'erence')], )),
4303- (('name', 'fin2'), 'beta', ([], )),
4304- (('ref', 'erence'), 'refdata', ([], ))])
4305- self.assertEqual({(index, ('name', 'fin1'), 'data', ((('ref', 'erence'),),)),
4306- (index, ('ref', 'erence'), 'refdata', ((), ))},
4307- set(index.iter_entries_prefix([('name', 'fin1'), ('ref', 'erence')])))
4308- self.assertEqual({(index, ('name', 'fin1'), 'data', ((('ref', 'erence'),),)),
4309- (index, ('name', 'fin2'), 'beta', ((), ))},
4310- set(index.iter_entries_prefix([('name', None)])))
4311+ ((b'name', b'fin1'), b'data', ([(b'ref', b'erence')], )),
4312+ ((b'name', b'fin2'), b'beta', ([], )),
4313+ ((b'ref', b'erence'), b'refdata', ([], ))])
4314+ self.assertEqual({(index, (b'name', b'fin1'), b'data', (((b'ref', b'erence'),),)),
4315+ (index, (b'ref', b'erence'), b'refdata', ((), ))},
4316+ set(index.iter_entries_prefix([(b'name', b'fin1'), (b'ref', b'erence')])))
4317+ self.assertEqual({(index, (b'name', b'fin1'), b'data', (((b'ref', b'erence'),),)),
4318+ (index, (b'name', b'fin2'), b'beta', ((), ))},
4319+ set(index.iter_entries_prefix([(b'name', None)])))
4320
4321 def test_iter_nothing_empty(self):
4322 index = self.make_index()
4323@@ -1673,18 +1674,18 @@
4324
4325 def test_iter_missing_entry_empty(self):
4326 index = self.make_index()
4327- self.assertEqual([], list(index.iter_entries(['a'])))
4328+ self.assertEqual([], list(index.iter_entries([b'a'])))
4329
4330 def test_key_count_empty(self):
4331 index = self.make_index()
4332 self.assertEqual(0, index.key_count())
4333
4334 def test_key_count_one(self):
4335- index = self.make_index(nodes=[(('name', ), '')])
4336+ index = self.make_index(nodes=[((b'name', ), b'')])
4337 self.assertEqual(1, index.key_count())
4338
4339 def test_key_count_two(self):
4340- index = self.make_index(nodes=[(('name', ), ''), (('foo', ), '')])
4341+ index = self.make_index(nodes=[((b'name', ), b''), ((b'foo', ), b'')])
4342 self.assertEqual(2, index.key_count())
4343
4344 def test_validate_empty(self):
4345@@ -1692,7 +1693,7 @@
4346 index.validate()
4347
4348 def test_validate_no_refs_content(self):
4349- index = self.make_index(nodes=[(('key', ), 'value')])
4350+ index = self.make_index(nodes=[((b'key', ), b'value')])
4351 index.validate()
4352
4353
4354@@ -1707,88 +1708,88 @@
4355 else:
4356 add_nodes_callback = None
4357 adapter = _mod_index.GraphIndexPrefixAdapter(
4358- result, ('prefix', ), key_elements - 1,
4359+ result, (b'prefix', ), key_elements - 1,
4360 add_nodes_callback=add_nodes_callback)
4361 return result, adapter
4362
4363 def test_add_node(self):
4364 index, adapter = self.make_index(add_callback=True)
4365- adapter.add_node(('key',), 'value', ((('ref',),),))
4366- self.assertEqual({(index, ('prefix', 'key'), 'value',
4367- ((('prefix', 'ref'),),))},
4368+ adapter.add_node((b'key',), b'value', (((b'ref',),),))
4369+ self.assertEqual({(index, (b'prefix', b'key'), b'value',
4370+ (((b'prefix', b'ref'),),))},
4371 set(index.iter_all_entries()))
4372
4373 def test_add_nodes(self):
4374 index, adapter = self.make_index(add_callback=True)
4375 adapter.add_nodes((
4376- (('key',), 'value', ((('ref',),),)),
4377- (('key2',), 'value2', ((),)),
4378+ ((b'key',), b'value', (((b'ref',),),)),
4379+ ((b'key2',), b'value2', ((),)),
4380 ))
4381 self.assertEqual({
4382- (index, ('prefix', 'key2'), 'value2', ((),)),
4383- (index, ('prefix', 'key'), 'value', ((('prefix', 'ref'),),))
4384+ (index, (b'prefix', b'key2'), b'value2', ((),)),
4385+ (index, (b'prefix', b'key'), b'value', (((b'prefix', b'ref'),),))
4386 },
4387 set(index.iter_all_entries()))
4388
4389 def test_construct(self):
4390 idx = _mod_index.InMemoryGraphIndex()
4391- adapter = _mod_index.GraphIndexPrefixAdapter(idx, ('prefix', ), 1)
4392+ adapter = _mod_index.GraphIndexPrefixAdapter(idx, (b'prefix', ), 1)
4393
4394 def test_construct_with_callback(self):
4395 idx = _mod_index.InMemoryGraphIndex()
4396- adapter = _mod_index.GraphIndexPrefixAdapter(idx, ('prefix', ), 1,
4397+ adapter = _mod_index.GraphIndexPrefixAdapter(idx, (b'prefix', ), 1,
4398 idx.add_nodes)
4399
4400 def test_iter_all_entries_cross_prefix_map_errors(self):
4401 index, adapter = self.make_index(nodes=[
4402- (('prefix', 'key1'), 'data1', ((('prefixaltered', 'key2'),),))])
4403+ ((b'prefix', b'key1'), b'data1', (((b'prefixaltered', b'key2'),),))])
4404 self.assertRaises(_mod_index.BadIndexData, list, adapter.iter_all_entries())
4405
4406 def test_iter_all_entries(self):
4407 index, adapter = self.make_index(nodes=[
4408- (('notprefix', 'key1'), 'data', ((), )),
4409- (('prefix', 'key1'), 'data1', ((), )),
4410- (('prefix', 'key2'), 'data2', ((('prefix', 'key1'),),))])
4411- self.assertEqual({(index, ('key1', ), 'data1', ((),)),
4412- (index, ('key2', ), 'data2', ((('key1',),),))},
4413+ ((b'notprefix', b'key1'), b'data', ((), )),
4414+ ((b'prefix', b'key1'), b'data1', ((), )),
4415+ ((b'prefix', b'key2'), b'data2', (((b'prefix', b'key1'),),))])
4416+ self.assertEqual({(index, (b'key1', ), b'data1', ((),)),
4417+ (index, (b'key2', ), b'data2', (((b'key1',),),))},
4418 set(adapter.iter_all_entries()))
4419
4420 def test_iter_entries(self):
4421 index, adapter = self.make_index(nodes=[
4422- (('notprefix', 'key1'), 'data', ((), )),
4423- (('prefix', 'key1'), 'data1', ((), )),
4424- (('prefix', 'key2'), 'data2', ((('prefix', 'key1'),),))])
4425+ ((b'notprefix', b'key1'), b'data', ((), )),
4426+ ((b'prefix', b'key1'), b'data1', ((), )),
4427+ ((b'prefix', b'key2'), b'data2', (((b'prefix', b'key1'),),))])
4428 # ask for many - get all
4429- self.assertEqual({(index, ('key1', ), 'data1', ((),)),
4430- (index, ('key2', ), 'data2', ((('key1', ),),))},
4431- set(adapter.iter_entries([('key1', ), ('key2', )])))
4432+ self.assertEqual({(index, (b'key1', ), b'data1', ((),)),
4433+ (index, (b'key2', ), b'data2', (((b'key1', ),),))},
4434+ set(adapter.iter_entries([(b'key1', ), (b'key2', )])))
4435 # ask for one, get one
4436- self.assertEqual({(index, ('key1', ), 'data1', ((),))},
4437- set(adapter.iter_entries([('key1', )])))
4438+ self.assertEqual({(index, (b'key1', ), b'data1', ((),))},
4439+ set(adapter.iter_entries([(b'key1', )])))
4440 # ask for missing, get none
4441 self.assertEqual(set(),
4442- set(adapter.iter_entries([('key3', )])))
4443+ set(adapter.iter_entries([(b'key3', )])))
4444
4445 def test_iter_entries_prefix(self):
4446 index, adapter = self.make_index(key_elements=3, nodes=[
4447- (('notprefix', 'foo', 'key1'), 'data', ((), )),
4448- (('prefix', 'prefix2', 'key1'), 'data1', ((), )),
4449- (('prefix', 'prefix2', 'key2'), 'data2', ((('prefix', 'prefix2', 'key1'),),))])
4450+ ((b'notprefix', b'foo', b'key1'), b'data', ((), )),
4451+ ((b'prefix', b'prefix2', b'key1'), b'data1', ((), )),
4452+ ((b'prefix', b'prefix2', b'key2'), b'data2', (((b'prefix', b'prefix2', b'key1'),),))])
4453 # ask for a prefix, get the results for just that prefix, adjusted.
4454- self.assertEqual({(index, ('prefix2', 'key1', ), 'data1', ((),)),
4455- (index, ('prefix2', 'key2', ), 'data2', ((('prefix2', 'key1', ),),))},
4456- set(adapter.iter_entries_prefix([('prefix2', None)])))
4457+ self.assertEqual({(index, (b'prefix2', b'key1', ), b'data1', ((),)),
4458+ (index, (b'prefix2', b'key2', ), b'data2', (((b'prefix2', b'key1', ),),))},
4459+ set(adapter.iter_entries_prefix([(b'prefix2', None)])))
4460
4461 def test_key_count_no_matching_keys(self):
4462 index, adapter = self.make_index(nodes=[
4463- (('notprefix', 'key1'), 'data', ((), ))])
4464+ ((b'notprefix', b'key1'), b'data', ((), ))])
4465 self.assertEqual(0, adapter.key_count())
4466
4467 def test_key_count_some_keys(self):
4468 index, adapter = self.make_index(nodes=[
4469- (('notprefix', 'key1'), 'data', ((), )),
4470- (('prefix', 'key1'), 'data1', ((), )),
4471- (('prefix', 'key2'), 'data2', ((('prefix', 'key1'),),))])
4472+ ((b'notprefix', b'key1'), b'data', ((), )),
4473+ ((b'prefix', b'key1'), b'data1', ((), )),
4474+ ((b'prefix', b'key2'), b'data2', (((b'prefix', b'key1'),),))])
4475 self.assertEqual(2, adapter.key_count())
4476
4477 def test_validate(self):
4478
4479=== modified file 'breezy/tests/test_inv.py'
4480--- breezy/tests/test_inv.py 2018-06-30 14:29:46 +0000
4481+++ breezy/tests/test_inv.py 2018-06-30 17:35:22 +0000
4482@@ -88,7 +88,7 @@
4483 def create_texts_for_inv(repo, inv):
4484 for path, ie in inv.iter_entries():
4485 if ie.text_size:
4486- lines = ['a' * ie.text_size]
4487+ lines = [b'a' * ie.text_size]
4488 else:
4489 lines = []
4490 repo.texts.add_lines((ie.file_id, ie.revision), [], lines)
4491@@ -142,11 +142,11 @@
4492 def _create_repo_revisions(repo, basis, delta, invalid_delta):
4493 repo.start_write_group()
4494 try:
4495- rev = revision.Revision('basis', timestamp=0, timezone=None,
4496+ rev = revision.Revision(b'basis', timestamp=0, timezone=None,
4497 message="", committer="foo@example.com")
4498- basis.revision_id = 'basis'
4499+ basis.revision_id = b'basis'
4500 create_texts_for_inv(repo, basis)
4501- repo.add_revision('basis', rev, basis)
4502+ repo.add_revision(b'basis', rev, basis)
4503 if invalid_delta:
4504 # We don't want to apply the delta to the basis, because we expect
4505 # the delta is invalid.
4506@@ -154,12 +154,12 @@
4507 result_inv.revision_id = b'result'
4508 target_entries = None
4509 else:
4510- result_inv = basis.create_by_apply_delta(delta, 'result')
4511+ result_inv = basis.create_by_apply_delta(delta, b'result')
4512 create_texts_for_inv(repo, result_inv)
4513 target_entries = list(result_inv.iter_entries_by_dir())
4514- rev = revision.Revision('result', timestamp=0, timezone=None,
4515+ rev = revision.Revision(b'result', timestamp=0, timezone=None,
4516 message="", committer="foo@example.com")
4517- repo.add_revision('result', rev, result_inv)
4518+ repo.add_revision(b'result', rev, result_inv)
4519 repo.commit_write_group()
4520 except:
4521 repo.abort_write_group()
4522@@ -222,7 +222,7 @@
4523 tree.unlock()
4524 # Fresh lock, reads disk again.
4525 with tree.lock_write():
4526- tree.update_basis_by_delta('result', delta)
4527+ tree.update_basis_by_delta(b'result', delta)
4528 if not invalid_delta:
4529 tree._validate()
4530 # reload tree - ensure we get what was written.
4531@@ -251,8 +251,7 @@
4532 format = self.format()
4533 control = self.make_controldir('tree', format=format._matchingcontroldir)
4534 repo = format.initialize(control)
4535- repo.lock_write()
4536- try:
4537+ with repo.lock_write():
4538 repo.start_write_group()
4539 try:
4540 rev = revision.Revision(b'basis', timestamp=0, timezone=None,
4541@@ -264,10 +263,7 @@
4542 except:
4543 repo.abort_write_group()
4544 raise
4545- finally:
4546- repo.unlock()
4547- repo.lock_write()
4548- try:
4549+ with repo.lock_write():
4550 repo.start_write_group()
4551 try:
4552 inv_sha1 = repo.add_inventory_by_delta(b'basis', delta,
4553@@ -277,8 +273,6 @@
4554 raise
4555 else:
4556 repo.commit_write_group()
4557- finally:
4558- repo.unlock()
4559 # Fresh lock, reads disk again.
4560 repo = repo.controldir.open_repository()
4561 repo.lock_read()
4562@@ -345,7 +339,7 @@
4563 inv.add(InventoryFile(b'a-id', u'\u1234', b'tree-root'))
4564 e = self.assertRaises(errors.InconsistentDelta, inv.add,
4565 InventoryFile(b'b-id', u'\u1234', b'tree-root'))
4566- self.assertContainsRe(str(e), r'\\u1234')
4567+ self.assertContainsRe(str(e), '\\u1234')
4568
4569 def test_add_recursive(self):
4570 parent = InventoryDirectory(b'src-id', 'src', b'tree-root')
4571@@ -377,14 +371,14 @@
4572 if reference_inv is not None:
4573 inv.root.revision = reference_inv.root.revision
4574 else:
4575- inv.root.revision = 'basis'
4576+ inv.root.revision = b'basis'
4577 return inv
4578
4579 def make_file_ie(self, file_id=b'file-id', name='name', parent_id=None):
4580 ie_file = inventory.InventoryFile(file_id, name, parent_id)
4581 ie_file.revision = b'result'
4582 ie_file.text_size = 0
4583- ie_file.text_sha1 = ''
4584+ ie_file.text_sha1 = b''
4585 return ie_file
4586
4587 def test_empty_delta(self):
4588@@ -396,7 +390,8 @@
4589
4590 def test_None_file_id(self):
4591 inv = self.get_empty_inventory()
4592- dir1 = inventory.InventoryDirectory(None, 'dir1', inv.root.file_id)
4593+ dir1 = inventory.InventoryDirectory(b'dirid', 'dir1', inv.root.file_id)
4594+ dir1.file_id = None
4595 dir1.revision = b'result'
4596 delta = [(None, u'dir1', None, dir1)]
4597 self.assertRaises(errors.InconsistentDelta, self.apply_delta, self,
4598@@ -404,7 +399,8 @@
4599
4600 def test_unicode_file_id(self):
4601 inv = self.get_empty_inventory()
4602- dir1 = inventory.InventoryDirectory(u'dirid', 'dir1', inv.root.file_id)
4603+ dir1 = inventory.InventoryDirectory(b'dirid', 'dir1', inv.root.file_id)
4604+ dir1.file_id = u'dirid'
4605 dir1.revision = b'result'
4606 delta = [(None, u'dir1', dir1.file_id, dir1)]
4607 self.assertRaises(errors.InconsistentDelta, self.apply_delta, self,
4608@@ -412,152 +408,152 @@
4609
4610 def test_repeated_file_id(self):
4611 inv = self.get_empty_inventory()
4612- file1 = inventory.InventoryFile('id', 'path1', inv.root.file_id)
4613+ file1 = inventory.InventoryFile(b'id', 'path1', inv.root.file_id)
4614 file1.revision = b'result'
4615 file1.text_size = 0
4616- file1.text_sha1 = ""
4617+ file1.text_sha1 = b""
4618 file2 = file1.copy()
4619 file2.name = 'path2'
4620- delta = [(None, u'path1', 'id', file1), (None, u'path2', 'id', file2)]
4621+ delta = [(None, u'path1', b'id', file1), (None, u'path2', b'id', file2)]
4622 self.assertRaises(errors.InconsistentDelta, self.apply_delta, self,
4623 inv, delta)
4624
4625 def test_repeated_new_path(self):
4626 inv = self.get_empty_inventory()
4627- file1 = inventory.InventoryFile('id1', 'path', inv.root.file_id)
4628+ file1 = inventory.InventoryFile(b'id1', 'path', inv.root.file_id)
4629 file1.revision = b'result'
4630 file1.text_size = 0
4631- file1.text_sha1 = ""
4632+ file1.text_sha1 = b""
4633 file2 = file1.copy()
4634- file2.file_id = 'id2'
4635- delta = [(None, u'path', 'id1', file1), (None, u'path', 'id2', file2)]
4636+ file2.file_id = b'id2'
4637+ delta = [(None, u'path', b'id1', file1), (None, u'path', b'id2', file2)]
4638 self.assertRaises(errors.InconsistentDelta, self.apply_delta, self,
4639 inv, delta)
4640
4641 def test_repeated_old_path(self):
4642 inv = self.get_empty_inventory()
4643- file1 = inventory.InventoryFile('id1', 'path', inv.root.file_id)
4644+ file1 = inventory.InventoryFile(b'id1', 'path', inv.root.file_id)
4645 file1.revision = b'result'
4646 file1.text_size = 0
4647- file1.text_sha1 = ""
4648+ file1.text_sha1 = b""
4649 # We can't *create* a source inventory with the same path, but
4650 # a badly generated partial delta might claim the same source twice.
4651 # This would be buggy in two ways: the path is repeated in the delta,
4652 # And the path for one of the file ids doesn't match the source
4653 # location. Alternatively, we could have a repeated fileid, but that
4654 # is separately checked for.
4655- file2 = inventory.InventoryFile('id2', 'path2', inv.root.file_id)
4656+ file2 = inventory.InventoryFile(b'id2', 'path2', inv.root.file_id)
4657 file2.revision = b'result'
4658 file2.text_size = 0
4659- file2.text_sha1 = ""
4660+ file2.text_sha1 = b""
4661 inv.add(file1)
4662 inv.add(file2)
4663- delta = [(u'path', None, 'id1', None), (u'path', None, 'id2', None)]
4664+ delta = [(u'path', None, b'id1', None), (u'path', None, b'id2', None)]
4665 self.assertRaises(errors.InconsistentDelta, self.apply_delta, self,
4666 inv, delta)
4667
4668 def test_mismatched_id_entry_id(self):
4669 inv = self.get_empty_inventory()
4670- file1 = inventory.InventoryFile('id1', 'path', inv.root.file_id)
4671+ file1 = inventory.InventoryFile(b'id1', 'path', inv.root.file_id)
4672 file1.revision = b'result'
4673 file1.text_size = 0
4674- file1.text_sha1 = ""
4675- delta = [(None, u'path', 'id', file1)]
4676+ file1.text_sha1 = b""
4677+ delta = [(None, u'path', b'id', file1)]
4678 self.assertRaises(errors.InconsistentDelta, self.apply_delta, self,
4679 inv, delta)
4680
4681 def test_mismatched_new_path_entry_None(self):
4682 inv = self.get_empty_inventory()
4683- delta = [(None, u'path', 'id', None)]
4684+ delta = [(None, u'path', b'id', None)]
4685 self.assertRaises(errors.InconsistentDelta, self.apply_delta, self,
4686 inv, delta)
4687
4688 def test_mismatched_new_path_None_entry(self):
4689 inv = self.get_empty_inventory()
4690- file1 = inventory.InventoryFile('id1', 'path', inv.root.file_id)
4691+ file1 = inventory.InventoryFile(b'id1', 'path', inv.root.file_id)
4692 file1.revision = b'result'
4693 file1.text_size = 0
4694- file1.text_sha1 = ""
4695- delta = [(u"path", None, 'id1', file1)]
4696+ file1.text_sha1 = b""
4697+ delta = [(u"path", None, b'id1', file1)]
4698 self.assertRaises(errors.InconsistentDelta, self.apply_delta, self,
4699 inv, delta)
4700
4701 def test_parent_is_not_directory(self):
4702 inv = self.get_empty_inventory()
4703- file1 = inventory.InventoryFile('id1', 'path', inv.root.file_id)
4704+ file1 = inventory.InventoryFile(b'id1', 'path', inv.root.file_id)
4705 file1.revision = b'result'
4706 file1.text_size = 0
4707- file1.text_sha1 = ""
4708- file2 = inventory.InventoryFile('id2', 'path2', 'id1')
4709+ file1.text_sha1 = b""
4710+ file2 = inventory.InventoryFile(b'id2', 'path2', b'id1')
4711 file2.revision = b'result'
4712 file2.text_size = 0
4713- file2.text_sha1 = ""
4714+ file2.text_sha1 = b""
4715 inv.add(file1)
4716- delta = [(None, u'path/path2', 'id2', file2)]
4717+ delta = [(None, u'path/path2', b'id2', file2)]
4718 self.assertRaises(errors.InconsistentDelta, self.apply_delta, self,
4719 inv, delta)
4720
4721 def test_parent_is_missing(self):
4722 inv = self.get_empty_inventory()
4723- file2 = inventory.InventoryFile('id2', 'path2', 'missingparent')
4724+ file2 = inventory.InventoryFile(b'id2', 'path2', b'missingparent')
4725 file2.revision = b'result'
4726 file2.text_size = 0
4727- file2.text_sha1 = ""
4728- delta = [(None, u'path/path2', 'id2', file2)]
4729+ file2.text_sha1 = b""
4730+ delta = [(None, u'path/path2', b'id2', file2)]
4731 self.assertRaises(errors.InconsistentDelta, self.apply_delta, self,
4732 inv, delta)
4733
4734 def test_new_parent_path_has_wrong_id(self):
4735 inv = self.get_empty_inventory()
4736- parent1 = inventory.InventoryDirectory('p-1', 'dir', inv.root.file_id)
4737+ parent1 = inventory.InventoryDirectory(b'p-1', 'dir', inv.root.file_id)
4738 parent1.revision = b'result'
4739- parent2 = inventory.InventoryDirectory('p-2', 'dir2', inv.root.file_id)
4740+ parent2 = inventory.InventoryDirectory(b'p-2', 'dir2', inv.root.file_id)
4741 parent2.revision = b'result'
4742- file1 = inventory.InventoryFile('id', 'path', 'p-2')
4743+ file1 = inventory.InventoryFile(b'id', 'path', b'p-2')
4744 file1.revision = b'result'
4745 file1.text_size = 0
4746- file1.text_sha1 = ""
4747+ file1.text_sha1 = b""
4748 inv.add(parent1)
4749 inv.add(parent2)
4750 # This delta claims that file1 is at dir/path, but actually its at
4751 # dir2/path if you follow the inventory parent structure.
4752- delta = [(None, u'dir/path', 'id', file1)]
4753+ delta = [(None, u'dir/path', b'id', file1)]
4754 self.assertRaises(errors.InconsistentDelta, self.apply_delta, self,
4755 inv, delta)
4756
4757 def test_old_parent_path_is_wrong(self):
4758 inv = self.get_empty_inventory()
4759- parent1 = inventory.InventoryDirectory('p-1', 'dir', inv.root.file_id)
4760+ parent1 = inventory.InventoryDirectory(b'p-1', 'dir', inv.root.file_id)
4761 parent1.revision = b'result'
4762- parent2 = inventory.InventoryDirectory('p-2', 'dir2', inv.root.file_id)
4763+ parent2 = inventory.InventoryDirectory(b'p-2', 'dir2', inv.root.file_id)
4764 parent2.revision = b'result'
4765- file1 = inventory.InventoryFile('id', 'path', 'p-2')
4766+ file1 = inventory.InventoryFile(b'id', 'path', b'p-2')
4767 file1.revision = b'result'
4768 file1.text_size = 0
4769- file1.text_sha1 = ""
4770+ file1.text_sha1 = b""
4771 inv.add(parent1)
4772 inv.add(parent2)
4773 inv.add(file1)
4774 # This delta claims that file1 was at dir/path, but actually it was at
4775 # dir2/path if you follow the inventory parent structure.
4776- delta = [(u'dir/path', None, 'id', None)]
4777+ delta = [(u'dir/path', None, b'id', None)]
4778 self.assertRaises(errors.InconsistentDelta, self.apply_delta, self,
4779 inv, delta)
4780
4781 def test_old_parent_path_is_for_other_id(self):
4782 inv = self.get_empty_inventory()
4783- parent1 = inventory.InventoryDirectory('p-1', 'dir', inv.root.file_id)
4784+ parent1 = inventory.InventoryDirectory(b'p-1', 'dir', inv.root.file_id)
4785 parent1.revision = b'result'
4786- parent2 = inventory.InventoryDirectory('p-2', 'dir2', inv.root.file_id)
4787+ parent2 = inventory.InventoryDirectory(b'p-2', 'dir2', inv.root.file_id)
4788 parent2.revision = b'result'
4789- file1 = inventory.InventoryFile('id', 'path', 'p-2')
4790+ file1 = inventory.InventoryFile(b'id', 'path', b'p-2')
4791 file1.revision = b'result'
4792 file1.text_size = 0
4793- file1.text_sha1 = ""
4794- file2 = inventory.InventoryFile('id2', 'path', 'p-1')
4795+ file1.text_sha1 = b""
4796+ file2 = inventory.InventoryFile(b'id2', 'path', b'p-1')
4797 file2.revision = b'result'
4798 file2.text_size = 0
4799- file2.text_sha1 = ""
4800+ file2.text_sha1 = b""
4801 inv.add(parent1)
4802 inv.add(parent2)
4803 inv.add(file1)
4804@@ -565,45 +561,45 @@
4805 # This delta claims that file1 was at dir/path, but actually it was at
4806 # dir2/path if you follow the inventory parent structure. At dir/path
4807 # is another entry we should not delete.
4808- delta = [(u'dir/path', None, 'id', None)]
4809+ delta = [(u'dir/path', None, b'id', None)]
4810 self.assertRaises(errors.InconsistentDelta, self.apply_delta, self,
4811 inv, delta)
4812
4813 def test_add_existing_id_new_path(self):
4814 inv = self.get_empty_inventory()
4815- parent1 = inventory.InventoryDirectory('p-1', 'dir1', inv.root.file_id)
4816+ parent1 = inventory.InventoryDirectory(b'p-1', 'dir1', inv.root.file_id)
4817 parent1.revision = b'result'
4818- parent2 = inventory.InventoryDirectory('p-1', 'dir2', inv.root.file_id)
4819+ parent2 = inventory.InventoryDirectory(b'p-1', 'dir2', inv.root.file_id)
4820 parent2.revision = b'result'
4821 inv.add(parent1)
4822- delta = [(None, u'dir2', 'p-1', parent2)]
4823+ delta = [(None, u'dir2', b'p-1', parent2)]
4824 self.assertRaises(errors.InconsistentDelta, self.apply_delta, self,
4825 inv, delta)
4826
4827 def test_add_new_id_existing_path(self):
4828 inv = self.get_empty_inventory()
4829- parent1 = inventory.InventoryDirectory('p-1', 'dir1', inv.root.file_id)
4830+ parent1 = inventory.InventoryDirectory(b'p-1', 'dir1', inv.root.file_id)
4831 parent1.revision = b'result'
4832- parent2 = inventory.InventoryDirectory('p-2', 'dir1', inv.root.file_id)
4833+ parent2 = inventory.InventoryDirectory(b'p-2', 'dir1', inv.root.file_id)
4834 parent2.revision = b'result'
4835 inv.add(parent1)
4836- delta = [(None, u'dir1', 'p-2', parent2)]
4837+ delta = [(None, u'dir1', b'p-2', parent2)]
4838 self.assertRaises(errors.InconsistentDelta, self.apply_delta, self,
4839 inv, delta)
4840
4841 def test_remove_dir_leaving_dangling_child(self):
4842 inv = self.get_empty_inventory()
4843- dir1 = inventory.InventoryDirectory('p-1', 'dir1', inv.root.file_id)
4844+ dir1 = inventory.InventoryDirectory(b'p-1', 'dir1', inv.root.file_id)
4845 dir1.revision = b'result'
4846- dir2 = inventory.InventoryDirectory('p-2', 'child1', 'p-1')
4847+ dir2 = inventory.InventoryDirectory(b'p-2', 'child1', b'p-1')
4848 dir2.revision = b'result'
4849- dir3 = inventory.InventoryDirectory('p-3', 'child2', 'p-1')
4850+ dir3 = inventory.InventoryDirectory(b'p-3', 'child2', b'p-1')
4851 dir3.revision = b'result'
4852 inv.add(dir1)
4853 inv.add(dir2)
4854 inv.add(dir3)
4855- delta = [(u'dir1', None, 'p-1', None),
4856- (u'dir1/child2', None, 'p-3', None)]
4857+ delta = [(u'dir1', None, b'p-1', None),
4858+ (u'dir1/child2', None, b'p-3', None)]
4859 self.assertRaises(errors.InconsistentDelta, self.apply_delta, self,
4860 inv, delta)
4861
4862@@ -612,7 +608,7 @@
4863 file1 = inventory.InventoryFile(b'file-id', 'path', inv.root.file_id)
4864 file1.revision = b'result'
4865 file1.text_size = 0
4866- file1.text_sha1 = ''
4867+ file1.text_sha1 = b''
4868 delta = [(None, u'path', b'file-id', file1)]
4869 res_inv = self.apply_delta(self, inv, delta, invalid_delta=False)
4870 self.assertEqual(b'file-id', res_inv.get_entry(b'file-id').file_id)
4871@@ -622,7 +618,7 @@
4872 file1 = inventory.InventoryFile(b'file-id', 'path', inv.root.file_id)
4873 file1.revision = b'result'
4874 file1.text_size = 0
4875- file1.text_sha1 = ''
4876+ file1.text_sha1 = b''
4877 inv.add(file1)
4878 delta = [(u'path', None, b'file-id', None)]
4879 res_inv = self.apply_delta(self, inv, delta, invalid_delta=False)
4880@@ -685,16 +681,16 @@
4881
4882 def test_is_root(self):
4883 """Ensure our root-checking code is accurate."""
4884- inv = inventory.Inventory('TREE_ROOT')
4885- self.assertTrue(inv.is_root('TREE_ROOT'))
4886- self.assertFalse(inv.is_root('booga'))
4887- inv.root.file_id = 'booga'
4888- self.assertFalse(inv.is_root('TREE_ROOT'))
4889- self.assertTrue(inv.is_root('booga'))
4890+ inv = inventory.Inventory(b'TREE_ROOT')
4891+ self.assertTrue(inv.is_root(b'TREE_ROOT'))
4892+ self.assertFalse(inv.is_root(b'booga'))
4893+ inv.root.file_id = b'booga'
4894+ self.assertFalse(inv.is_root(b'TREE_ROOT'))
4895+ self.assertTrue(inv.is_root(b'booga'))
4896 # works properly even if no root is set
4897 inv.root = None
4898- self.assertFalse(inv.is_root('TREE_ROOT'))
4899- self.assertFalse(inv.is_root('booga'))
4900+ self.assertFalse(inv.is_root(b'TREE_ROOT'))
4901+ self.assertFalse(inv.is_root(b'booga'))
4902
4903 def test_entries_for_empty_inventory(self):
4904 """Test that entries() will not fail for an empty inventory"""
4905@@ -706,34 +702,34 @@
4906
4907 def test_file_invalid_entry_name(self):
4908 self.assertRaises(errors.InvalidEntryName, inventory.InventoryFile,
4909- '123', 'a/hello.c', ROOT_ID)
4910+ b'123', 'a/hello.c', ROOT_ID)
4911
4912 def test_file_backslash(self):
4913- file = inventory.InventoryFile('123', 'h\\ello.c', ROOT_ID)
4914+ file = inventory.InventoryFile(b'123', 'h\\ello.c', ROOT_ID)
4915 self.assertEquals(file.name, 'h\\ello.c')
4916
4917 def test_file_kind_character(self):
4918- file = inventory.InventoryFile('123', 'hello.c', ROOT_ID)
4919+ file = inventory.InventoryFile(b'123', 'hello.c', ROOT_ID)
4920 self.assertEqual(file.kind_character(), '')
4921
4922 def test_dir_kind_character(self):
4923- dir = inventory.InventoryDirectory('123', 'hello.c', ROOT_ID)
4924+ dir = inventory.InventoryDirectory(b'123', 'hello.c', ROOT_ID)
4925 self.assertEqual(dir.kind_character(), '/')
4926
4927 def test_link_kind_character(self):
4928- dir = inventory.InventoryLink('123', 'hello.c', ROOT_ID)
4929+ dir = inventory.InventoryLink(b'123', 'hello.c', ROOT_ID)
4930 self.assertEqual(dir.kind_character(), '')
4931
4932 def test_dir_detect_changes(self):
4933- left = inventory.InventoryDirectory('123', 'hello.c', ROOT_ID)
4934- right = inventory.InventoryDirectory('123', 'hello.c', ROOT_ID)
4935+ left = inventory.InventoryDirectory(b'123', 'hello.c', ROOT_ID)
4936+ right = inventory.InventoryDirectory(b'123', 'hello.c', ROOT_ID)
4937 self.assertEqual((False, False), left.detect_changes(right))
4938 self.assertEqual((False, False), right.detect_changes(left))
4939
4940 def test_file_detect_changes(self):
4941- left = inventory.InventoryFile('123', 'hello.c', ROOT_ID)
4942+ left = inventory.InventoryFile(b'123', 'hello.c', ROOT_ID)
4943 left.text_sha1 = 123
4944- right = inventory.InventoryFile('123', 'hello.c', ROOT_ID)
4945+ right = inventory.InventoryFile(b'123', 'hello.c', ROOT_ID)
4946 right.text_sha1 = 123
4947 self.assertEqual((False, False), left.detect_changes(right))
4948 self.assertEqual((False, False), right.detect_changes(left))
4949@@ -745,9 +741,9 @@
4950 self.assertEqual((True, True), right.detect_changes(left))
4951
4952 def test_symlink_detect_changes(self):
4953- left = inventory.InventoryLink('123', 'hello.c', ROOT_ID)
4954+ left = inventory.InventoryLink(b'123', 'hello.c', ROOT_ID)
4955 left.symlink_target='foo'
4956- right = inventory.InventoryLink('123', 'hello.c', ROOT_ID)
4957+ right = inventory.InventoryLink(b'123', 'hello.c', ROOT_ID)
4958 right.symlink_target='foo'
4959 self.assertEqual((False, False), left.detect_changes(right))
4960 self.assertEqual((False, False), right.detect_changes(left))
4961@@ -756,15 +752,15 @@
4962 self.assertEqual((True, False), right.detect_changes(left))
4963
4964 def test_file_has_text(self):
4965- file = inventory.InventoryFile('123', 'hello.c', ROOT_ID)
4966+ file = inventory.InventoryFile(b'123', 'hello.c', ROOT_ID)
4967 self.assertTrue(file.has_text())
4968
4969 def test_directory_has_text(self):
4970- dir = inventory.InventoryDirectory('123', 'hello.c', ROOT_ID)
4971+ dir = inventory.InventoryDirectory(b'123', 'hello.c', ROOT_ID)
4972 self.assertFalse(dir.has_text())
4973
4974 def test_link_has_text(self):
4975- link = inventory.InventoryLink('123', 'hello.c', ROOT_ID)
4976+ link = inventory.InventoryLink(b'123', 'hello.c', ROOT_ID)
4977 self.assertFalse(link.has_text())
4978
4979 def test_make_entry(self):
4980@@ -803,17 +799,17 @@
4981 # renamed/reparented and modified
4982 # change kind (perhaps can't be done yet?)
4983 # also, merged in combination with all of these?
4984- old_a = InventoryFile('a-id', 'a_file', ROOT_ID)
4985- old_a.text_sha1 = '123132'
4986+ old_a = InventoryFile(b'a-id', 'a_file', ROOT_ID)
4987+ old_a.text_sha1 = b'123132'
4988 old_a.text_size = 0
4989- new_a = InventoryFile('a-id', 'a_file', ROOT_ID)
4990- new_a.text_sha1 = '123132'
4991+ new_a = InventoryFile(b'a-id', 'a_file', ROOT_ID)
4992+ new_a.text_sha1 = b'123132'
4993 new_a.text_size = 0
4994
4995 self.assertChangeDescription('unchanged', old_a, new_a)
4996
4997 new_a.text_size = 10
4998- new_a.text_sha1 = 'abcabc'
4999+ new_a.text_sha1 = b'abcabc'
5000 self.assertChangeDescription('modified', old_a, new_a)
The diff has been truncated for viewing.

Subscribers

People subscribed via source and target branches