Merge lp:~gz/brz/next_up_next into lp:brz

Proposed by Martin Packman
Status: Merged
Approved by: Martin Packman
Approved revision: no longer in the source branch.
Merge reported by: The Breezy Bot
Merged at revision: not available
Proposed branch: lp:~gz/brz/next_up_next
Merge into: lp:brz
Diff against target: 2154 lines (+254/-249)
71 files modified
breezy/_annotator_py.py (+1/-1)
breezy/_dirstate_helpers_py.py (+5/-1)
breezy/annotate.py (+1/-1)
breezy/branch.py (+1/-1)
breezy/btree_index.py (+8/-8)
breezy/builtins.py (+1/-1)
breezy/bundle/bundle_data.py (+1/-1)
breezy/bundle/serializer/v08.py (+2/-2)
breezy/bundle/serializer/v4.py (+1/-1)
breezy/cmdline.py (+7/-3)
breezy/config.py (+1/-1)
breezy/dirstate.py (+7/-7)
breezy/export/__init__.py (+1/-1)
breezy/graph.py (+8/-6)
breezy/groupcompress.py (+2/-2)
breezy/index.py (+2/-2)
breezy/inventory_delta.py (+1/-1)
breezy/iterablefile.py (+10/-8)
breezy/knit.py (+11/-13)
breezy/log.py (+2/-2)
breezy/merge_directive.py (+1/-1)
breezy/multiparent.py (+6/-6)
breezy/mutabletree.py (+2/-2)
breezy/pack.py (+1/-1)
breezy/patches.py (+5/-5)
breezy/plugins/fastimport/revision_store.py (+3/-3)
breezy/plugins/weave_fmt/bzrdir.py (+1/-1)
breezy/remote.py (+3/-3)
breezy/repository.py (+2/-2)
breezy/revisiontree.py (+1/-1)
breezy/shelf.py (+1/-1)
breezy/smart/protocol.py (+1/-1)
breezy/smart/repository.py (+1/-1)
breezy/status.py (+1/-1)
breezy/tests/blackbox/test_export.py (+2/-2)
breezy/tests/per_intertree/test_compare.py (+1/-1)
breezy/tests/per_pack_repository.py (+2/-2)
breezy/tests/per_repository_vf/test_write_group.py (+4/-4)
breezy/tests/per_versionedfile.py (+8/-8)
breezy/tests/per_workingtree/test_inv.py (+1/-1)
breezy/tests/per_workingtree/test_nested_specifics.py (+1/-1)
breezy/tests/test__annotator.py (+1/-1)
breezy/tests/test__simple_set.py (+3/-3)
breezy/tests/test_bundle.py (+7/-7)
breezy/tests/test_chk_map.py (+1/-1)
breezy/tests/test_fetch.py (+4/-4)
breezy/tests/test_graph.py (+24/-24)
breezy/tests/test_groupcompress.py (+2/-2)
breezy/tests/test_http.py (+9/-9)
breezy/tests/test_knit.py (+11/-11)
breezy/tests/test_pack.py (+2/-2)
breezy/tests/test_patches.py (+2/-7)
breezy/tests/test_repository.py (+3/-3)
breezy/tests/test_revisiontree.py (+1/-1)
breezy/tests/test_shelf.py (+3/-3)
breezy/tests/test_smart_transport.py (+5/-5)
breezy/tests/test_tree.py (+9/-9)
breezy/tests/test_ui.py (+1/-1)
breezy/tests/test_versionedfile.py (+3/-3)
breezy/transform.py (+5/-5)
breezy/transport/__init__.py (+2/-2)
breezy/transport/http/__init__.py (+3/-3)
breezy/transport/remote.py (+3/-3)
breezy/transport/sftp.py (+3/-3)
breezy/tree.py (+3/-3)
breezy/vf_repository.py (+6/-6)
breezy/vf_search.py (+1/-1)
breezy/weavefile.py (+5/-5)
breezy/workingtree.py (+8/-8)
breezy/workingtree_4.py (+2/-2)
breezy/xml_serializer.py (+1/-1)
To merge this branch: bzr merge lp:~gz/brz/next_up_next
Reviewer Review Type Date Requested Status
Jelmer Vernooij Approve
Review via email: mp+324586@code.launchpad.net

Commit message

Make iterator objects and use of next Python 3 compatible

Description of the change

Most of the changes are from the 2to3 fixer.

What I changed on top after:

* Pick an appropriate next in _dirstate_helpers_py
* Make all iterator objects alias the __next__ method as next for Python 2
* Change a bunch of assertRaises tests from `... iterator.__next__)` to `... next, iterator)`
* Fixed doctest in breezy.iterablefile

To post a comment you must log in.
Revision history for this message
Jelmer Vernooij (jelmer) :
review: Approve
Revision history for this message
The Breezy Bot (the-breezy-bot) wrote :

Running landing tests failed
http://10.242.247.184:8080/job/brz-dev/13/

Revision history for this message
Martin Packman (gz) wrote :

Fixed single failing test and made it 90% less horrible.

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
=== modified file 'breezy/_annotator_py.py'
--- breezy/_annotator_py.py 2017-05-22 00:56:52 +0000
+++ breezy/_annotator_py.py 2017-05-26 09:27:07 +0000
@@ -281,7 +281,7 @@
281 # Backwards compatibility, break up the heads into pairs and281 # Backwards compatibility, break up the heads into pairs and
282 # resolve the result282 # resolve the result
283 next_head = iter(the_heads)283 next_head = iter(the_heads)
284 head = next_head.next()284 head = next(next_head)
285 for possible_head in next_head:285 for possible_head in next_head:
286 annotated_lines = ((head, line), (possible_head, line))286 annotated_lines = ((head, line), (possible_head, line))
287 head = tiebreaker(annotated_lines)[0]287 head = tiebreaker(annotated_lines)[0]
288288
=== modified file 'breezy/_dirstate_helpers_py.py'
--- breezy/_dirstate_helpers_py.py 2017-05-22 00:56:52 +0000
+++ breezy/_dirstate_helpers_py.py 2017-05-26 09:27:07 +0000
@@ -262,7 +262,11 @@
262 # them. Grab an straight iterator over the fields. (We use an262 # them. Grab an straight iterator over the fields. (We use an
263 # iterator because we don't want to do a lot of additions, nor263 # iterator because we don't want to do a lot of additions, nor
264 # do we want to do a lot of slicing)264 # do we want to do a lot of slicing)
265 next = iter(fields).next265 _iter = iter(fields)
266 # Get a local reference to the compatible next method
267 next = getattr(_iter, '__next__', None)
268 if next is None:
269 next = _iter.next
266 # Move the iterator to the current position270 # Move the iterator to the current position
267 for x in xrange(cur):271 for x in xrange(cur):
268 next()272 next()
269273
=== modified file 'breezy/annotate.py'
--- breezy/annotate.py 2017-05-22 00:56:52 +0000
+++ breezy/annotate.py 2017-05-26 09:27:07 +0000
@@ -367,7 +367,7 @@
367 else:367 else:
368 heads = heads_provider.heads((left[0], right[0]))368 heads = heads_provider.heads((left[0], right[0]))
369 if len(heads) == 1:369 if len(heads) == 1:
370 output_append((iter(heads).next(), left[1]))370 output_append((next(iter(heads)), left[1]))
371 else:371 else:
372 # Both claim different origins, get a stable result.372 # Both claim different origins, get a stable result.
373 # If the result is not stable, there is a risk a373 # If the result is not stable, there is a risk a
374374
=== modified file 'breezy/branch.py'
--- breezy/branch.py 2017-05-22 00:56:52 +0000
+++ breezy/branch.py 2017-05-26 09:27:07 +0000
@@ -621,7 +621,7 @@
621 # ancestry. Given the order guaranteed by the merge sort, we will see621 # ancestry. Given the order guaranteed by the merge sort, we will see
622 # uninteresting descendants of the first parent of our tip before the622 # uninteresting descendants of the first parent of our tip before the
623 # tip itself.623 # tip itself.
624 first = rev_iter.next()624 first = next(rev_iter)
625 (rev_id, merge_depth, revno, end_of_merge) = first625 (rev_id, merge_depth, revno, end_of_merge) = first
626 yield first626 yield first
627 if not merge_depth:627 if not merge_depth:
628628
=== modified file 'breezy/btree_index.py'
--- breezy/btree_index.py 2017-05-25 21:59:11 +0000
+++ breezy/btree_index.py 2017-05-26 09:27:07 +0000
@@ -265,7 +265,7 @@
265 current_values = []265 current_values = []
266 for iterator in iterators_to_combine:266 for iterator in iterators_to_combine:
267 try:267 try:
268 current_values.append(iterator.next())268 current_values.append(next(iterator))
269 except StopIteration:269 except StopIteration:
270 current_values.append(None)270 current_values.append(None)
271 last = None271 last = None
@@ -285,7 +285,7 @@
285 yield (self,) + selected[1][1:]285 yield (self,) + selected[1][1:]
286 pos = selected[0]286 pos = selected[0]
287 try:287 try:
288 current_values[pos] = iterators_to_combine[pos].next()288 current_values[pos] = next(iterators_to_combine[pos])
289 except StopIteration:289 except StopIteration:
290 current_values[pos] = None290 current_values[pos] = None
291291
@@ -576,7 +576,7 @@
576 while dicts:576 while dicts:
577 key_dict = dicts.pop(-1)577 key_dict = dicts.pop(-1)
578 # can't be empty or would not exist578 # can't be empty or would not exist
579 item, value = key_dict.iteritems().next()579 item, value = next(key_dict.iteritems())
580 if isinstance(value, dict):580 if isinstance(value, dict):
581 # push keys581 # push keys
582 dicts.extend(key_dict.itervalues())582 dicts.extend(key_dict.itervalues())
@@ -1071,8 +1071,8 @@
1071 # return [(o, offsets[o]) for o in sorted(offsets)]1071 # return [(o, offsets[o]) for o in sorted(offsets)]
1072 in_keys_iter = iter(in_keys)1072 in_keys_iter = iter(in_keys)
1073 fixed_keys_iter = enumerate(fixed_keys)1073 fixed_keys_iter = enumerate(fixed_keys)
1074 cur_in_key = in_keys_iter.next()1074 cur_in_key = next(in_keys_iter)
1075 cur_fixed_offset, cur_fixed_key = fixed_keys_iter.next()1075 cur_fixed_offset, cur_fixed_key = next(fixed_keys_iter)
10761076
1077 class InputDone(Exception): pass1077 class InputDone(Exception): pass
1078 class FixedDone(Exception): pass1078 class FixedDone(Exception): pass
@@ -1094,7 +1094,7 @@
1094 while cur_in_key < cur_fixed_key:1094 while cur_in_key < cur_fixed_key:
1095 cur_keys.append(cur_in_key)1095 cur_keys.append(cur_in_key)
1096 try:1096 try:
1097 cur_in_key = in_keys_iter.next()1097 cur_in_key = next(in_keys_iter)
1098 except StopIteration:1098 except StopIteration:
1099 raise InputDone1099 raise InputDone
1100 # At this point cur_in_key must be >= cur_fixed_key1100 # At this point cur_in_key must be >= cur_fixed_key
@@ -1102,7 +1102,7 @@
1102 # the end1102 # the end
1103 while cur_in_key >= cur_fixed_key:1103 while cur_in_key >= cur_fixed_key:
1104 try:1104 try:
1105 cur_fixed_offset, cur_fixed_key = fixed_keys_iter.next()1105 cur_fixed_offset, cur_fixed_key = next(fixed_keys_iter)
1106 except StopIteration:1106 except StopIteration:
1107 raise FixedDone1107 raise FixedDone
1108 except InputDone:1108 except InputDone:
@@ -1430,7 +1430,7 @@
1430 while dicts:1430 while dicts:
1431 key_dict = dicts.pop(-1)1431 key_dict = dicts.pop(-1)
1432 # can't be empty or would not exist1432 # can't be empty or would not exist
1433 item, value = key_dict.iteritems().next()1433 item, value = next(key_dict.iteritems())
1434 if isinstance(value, dict):1434 if isinstance(value, dict):
1435 # push keys1435 # push keys
1436 dicts.extend(key_dict.itervalues())1436 dicts.extend(key_dict.itervalues())
14371437
=== modified file 'breezy/builtins.py'
--- breezy/builtins.py 2017-05-22 00:56:52 +0000
+++ breezy/builtins.py 2017-05-26 09:27:07 +0000
@@ -432,7 +432,7 @@
432432
433 def print_revision(self, revisions, revid):433 def print_revision(self, revisions, revid):
434 stream = revisions.get_record_stream([(revid,)], 'unordered', True)434 stream = revisions.get_record_stream([(revid,)], 'unordered', True)
435 record = stream.next()435 record = next(stream)
436 if record.storage_kind == 'absent':436 if record.storage_kind == 'absent':
437 raise errors.NoSuchRevision(revisions, revid)437 raise errors.NoSuchRevision(revisions, revid)
438 revtext = record.get_bytes_as('fulltext')438 revtext = record.get_bytes_as('fulltext')
439439
=== modified file 'breezy/bundle/bundle_data.py'
--- breezy/bundle/bundle_data.py 2017-05-22 00:56:52 +0000
+++ breezy/bundle/bundle_data.py 2017-05-26 09:27:07 +0000
@@ -766,7 +766,7 @@
766 entries = inv.iter_entries(from_dir=from_dir_id, recursive=recursive)766 entries = inv.iter_entries(from_dir=from_dir_id, recursive=recursive)
767 if inv.root is not None and not include_root and from_dir is None:767 if inv.root is not None and not include_root and from_dir is None:
768 # skip the root for compatability with the current apis.768 # skip the root for compatability with the current apis.
769 entries.next()769 next(entries)
770 for path, entry in entries:770 for path, entry in entries:
771 yield path, 'V', entry.kind, entry.file_id, entry771 yield path, 'V', entry.kind, entry.file_id, entry
772772
773773
=== modified file 'breezy/bundle/serializer/v08.py'
--- breezy/bundle/serializer/v08.py 2017-05-21 18:10:28 +0000
+++ breezy/bundle/serializer/v08.py 2017-05-26 09:27:07 +0000
@@ -360,7 +360,7 @@
360 return BundleInfo08()360 return BundleInfo08()
361361
362 def _read(self):362 def _read(self):
363 self._next().next()363 next(self._next())
364 while self._next_line is not None:364 while self._next_line is not None:
365 if not self._read_revision_header():365 if not self._read_revision_header():
366 break366 break
@@ -537,7 +537,7 @@
537 break537 break
538 if not self._next_line.startswith('#'):538 if not self._next_line.startswith('#'):
539 # Consume the trailing \n and stop processing539 # Consume the trailing \n and stop processing
540 self._next().next()540 next(self._next())
541 break541 break
542542
543class BundleInfo08(BundleInfo):543class BundleInfo08(BundleInfo):
544544
=== modified file 'breezy/bundle/serializer/v4.py'
--- breezy/bundle/serializer/v4.py 2017-05-22 00:56:52 +0000
+++ breezy/bundle/serializer/v4.py 2017-05-26 09:27:07 +0000
@@ -258,7 +258,7 @@
258 if metadata['storage_kind'] == 'header':258 if metadata['storage_kind'] == 'header':
259 bytes = None259 bytes = None
260 else:260 else:
261 _unused, bytes = iterator.next()261 _unused, bytes = next(iterator)
262 yield (bytes, metadata) + self.decode_name(names[0][0])262 yield (bytes, metadata) + self.decode_name(names[0][0])
263263
264264
265265
=== modified file 'breezy/cmdline.py'
--- breezy/cmdline.py 2013-05-27 10:22:27 +0000
+++ breezy/cmdline.py 2017-05-26 09:27:07 +0000
@@ -33,11 +33,13 @@
33 self._iter = iter(orig)33 self._iter = iter(orig)
34 self._pushback_buffer = []34 self._pushback_buffer = []
3535
36 def next(self):36 def __next__(self):
37 if len(self._pushback_buffer) > 0:37 if len(self._pushback_buffer) > 0:
38 return self._pushback_buffer.pop()38 return self._pushback_buffer.pop()
39 else:39 else:
40 return self._iter.next()40 return next(self._iter)
41
42 next = __next__
4143
42 def pushback(self, char):44 def pushback(self, char):
43 self._pushback_buffer.append(char)45 self._pushback_buffer.append(char)
@@ -140,12 +142,14 @@
140 def __iter__(self):142 def __iter__(self):
141 return self143 return self
142144
143 def next(self):145 def __next__(self):
144 quoted, token = self._get_token()146 quoted, token = self._get_token()
145 if token is None:147 if token is None:
146 raise StopIteration148 raise StopIteration
147 return quoted, token149 return quoted, token
148150
151 next = __next__
152
149 def _get_token(self):153 def _get_token(self):
150 self.quoted = False154 self.quoted = False
151 self.token = []155 self.token = []
152156
=== modified file 'breezy/config.py'
--- breezy/config.py 2017-05-24 16:21:50 +0000
+++ breezy/config.py 2017-05-26 09:27:07 +0000
@@ -3691,7 +3691,7 @@
3691 # sections are part of 'all_sections' and will always be found3691 # sections are part of 'all_sections' and will always be found
3692 # there.3692 # there.
3693 while True:3693 while True:
3694 section = iter_all_sections.next()3694 section = next(iter_all_sections)
3695 if section_id == section.id:3695 if section_id == section.id:
3696 section = LocationSection(section, extra_path,3696 section = LocationSection(section, extra_path,
3697 self.branch_name)3697 self.branch_name)
36983698
=== modified file 'breezy/dirstate.py'
--- breezy/dirstate.py 2017-05-24 19:44:00 +0000
+++ breezy/dirstate.py 2017-05-26 09:27:07 +0000
@@ -2703,7 +2703,7 @@
2703 new_details.append(DirState.NULL_PARENT_DETAILS)2703 new_details.append(DirState.NULL_PARENT_DETAILS)
2704 else:2704 else:
2705 # grab any one entry, use it to find the right path.2705 # grab any one entry, use it to find the right path.
2706 a_key = iter(entry_keys).next()2706 a_key = next(iter(entry_keys))
2707 if by_path[a_key][lookup_index][0] in ('r', 'a'):2707 if by_path[a_key][lookup_index][0] in ('r', 'a'):
2708 # its a pointer or missing statement, use it as2708 # its a pointer or missing statement, use it as
2709 # is.2709 # is.
@@ -2783,11 +2783,11 @@
2783 # underlying dirstate.2783 # underlying dirstate.
2784 old_iterator = iter(list(self._iter_entries()))2784 old_iterator = iter(list(self._iter_entries()))
2785 # both must have roots so this is safe:2785 # both must have roots so this is safe:
2786 current_new = new_iterator.next()2786 current_new = next(new_iterator)
2787 current_old = old_iterator.next()2787 current_old = next(old_iterator)
2788 def advance(iterator):2788 def advance(iterator):
2789 try:2789 try:
2790 return iterator.next()2790 return next(iterator)
2791 except StopIteration:2791 except StopIteration:
2792 return None2792 return None
2793 while current_new or current_old:2793 while current_new or current_old:
@@ -3906,7 +3906,7 @@
3906 else:3906 else:
3907 dir_iterator = osutils._walkdirs_utf8(root_abspath, prefix=current_root)3907 dir_iterator = osutils._walkdirs_utf8(root_abspath, prefix=current_root)
3908 try:3908 try:
3909 current_dir_info = dir_iterator.next()3909 current_dir_info = next(dir_iterator)
3910 except OSError as e:3910 except OSError as e:
3911 # on win32, python2.4 has e.errno == ERROR_DIRECTORY, but3911 # on win32, python2.4 has e.errno == ERROR_DIRECTORY, but
3912 # python 2.5 has e.errno == EINVAL,3912 # python 2.5 has e.errno == EINVAL,
@@ -3982,7 +3982,7 @@
39823982
3983 # This dir info has been handled, go to the next3983 # This dir info has been handled, go to the next
3984 try:3984 try:
3985 current_dir_info = dir_iterator.next()3985 current_dir_info = next(dir_iterator)
3986 except StopIteration:3986 except StopIteration:
3987 current_dir_info = None3987 current_dir_info = None
3988 else:3988 else:
@@ -4134,7 +4134,7 @@
4134 current_block = None4134 current_block = None
4135 if current_dir_info is not None:4135 if current_dir_info is not None:
4136 try:4136 try:
4137 current_dir_info = dir_iterator.next()4137 current_dir_info = next(dir_iterator)
4138 except StopIteration:4138 except StopIteration:
4139 current_dir_info = None4139 current_dir_info = None
4140 for result in self._iter_specific_file_parents():4140 for result in self._iter_specific_file_parents():
41414141
=== modified file 'breezy/export/__init__.py'
--- breezy/export/__init__.py 2017-05-22 00:56:52 +0000
+++ breezy/export/__init__.py 2017-05-26 09:27:07 +0000
@@ -205,7 +205,7 @@
205 if subdir is not None:205 if subdir is not None:
206 subdir = subdir.rstrip('/')206 subdir = subdir.rstrip('/')
207 entries = tree.iter_entries_by_dir()207 entries = tree.iter_entries_by_dir()
208 entries.next() # skip root208 next(entries) # skip root
209 for path, entry in entries:209 for path, entry in entries:
210 # The .bzr* namespace is reserved for "magic" files like210 # The .bzr* namespace is reserved for "magic" files like
211 # .bzrignore and .bzrrules - do not export these211 # .bzrignore and .bzrrules - do not export these
212212
=== modified file 'breezy/graph.py'
--- breezy/graph.py 2017-05-22 00:56:52 +0000
+++ breezy/graph.py 2017-05-26 09:27:07 +0000
@@ -481,7 +481,7 @@
481 unique_searcher = self._make_breadth_first_searcher(unique_revisions)481 unique_searcher = self._make_breadth_first_searcher(unique_revisions)
482 # we know that unique_revisions aren't in common_revisions, so skip482 # we know that unique_revisions aren't in common_revisions, so skip
483 # past them.483 # past them.
484 unique_searcher.next()484 next(unique_searcher)
485 common_searcher = self._make_breadth_first_searcher(common_revisions)485 common_searcher = self._make_breadth_first_searcher(common_revisions)
486486
487 # As long as we are still finding unique nodes, keep searching487 # As long as we are still finding unique nodes, keep searching
@@ -836,7 +836,7 @@
836 active_searchers = dict(searchers)836 active_searchers = dict(searchers)
837 # skip over the actual candidate for each searcher837 # skip over the actual candidate for each searcher
838 for searcher in active_searchers.itervalues():838 for searcher in active_searchers.itervalues():
839 searcher.next()839 next(searcher)
840 # The common walker finds nodes that are common to two or more of the840 # The common walker finds nodes that are common to two or more of the
841 # input keys, so that we don't access all history when a currently841 # input keys, so that we don't access all history when a currently
842 # uncommon search point actually meets up with something behind a842 # uncommon search point actually meets up with something behind a
@@ -848,7 +848,7 @@
848 ancestors = set()848 ancestors = set()
849 # advance searches849 # advance searches
850 try:850 try:
851 common_walker.next()851 next(common_walker)
852 except StopIteration:852 except StopIteration:
853 # No common points being searched at this time.853 # No common points being searched at this time.
854 pass854 pass
@@ -861,7 +861,7 @@
861 # a descendant of another candidate.861 # a descendant of another candidate.
862 continue862 continue
863 try:863 try:
864 ancestors.update(searcher.next())864 ancestors.update(next(searcher))
865 except StopIteration:865 except StopIteration:
866 del active_searchers[candidate]866 del active_searchers[candidate]
867 continue867 continue
@@ -1384,11 +1384,11 @@
13841384
1385 def step(self):1385 def step(self):
1386 try:1386 try:
1387 return self.next()1387 return next(self)
1388 except StopIteration:1388 except StopIteration:
1389 return ()1389 return ()
13901390
1391 def next(self):1391 def __next__(self):
1392 """Return the next ancestors of this revision.1392 """Return the next ancestors of this revision.
13931393
1394 Ancestors are returned in the order they are seen in a breadth-first1394 Ancestors are returned in the order they are seen in a breadth-first
@@ -1414,6 +1414,8 @@
1414 self.seen.update(self._next_query)1414 self.seen.update(self._next_query)
1415 return self._next_query1415 return self._next_query
14161416
1417 next = __next__
1418
1417 def next_with_ghosts(self):1419 def next_with_ghosts(self):
1418 """Return the next found ancestors, with ghosts split out.1420 """Return the next found ancestors, with ghosts split out.
14191421
14201422
=== modified file 'breezy/groupcompress.py'
--- breezy/groupcompress.py 2017-05-25 21:59:11 +0000
+++ breezy/groupcompress.py 2017-05-26 09:27:07 +0000
@@ -1170,7 +1170,7 @@
1170 if memos_to_get_stack and memos_to_get_stack[-1] == read_memo:1170 if memos_to_get_stack and memos_to_get_stack[-1] == read_memo:
1171 # The next block from _get_blocks will be the block we1171 # The next block from _get_blocks will be the block we
1172 # need.1172 # need.
1173 block_read_memo, block = blocks.next()1173 block_read_memo, block = next(blocks)
1174 if block_read_memo != read_memo:1174 if block_read_memo != read_memo:
1175 raise AssertionError(1175 raise AssertionError(
1176 "block_read_memo out of sync with read_memo"1176 "block_read_memo out of sync with read_memo"
@@ -1412,7 +1412,7 @@
1412 yield read_memo, cached[read_memo]1412 yield read_memo, cached[read_memo]
1413 except KeyError:1413 except KeyError:
1414 # Read the block, and cache it.1414 # Read the block, and cache it.
1415 zdata = raw_records.next()1415 zdata = next(raw_records)
1416 block = GroupCompressBlock.from_bytes(zdata)1416 block = GroupCompressBlock.from_bytes(zdata)
1417 self._group_cache[read_memo] = block1417 self._group_cache[read_memo] = block
1418 cached[read_memo] = block1418 cached[read_memo] = block
14191419
=== modified file 'breezy/index.py'
--- breezy/index.py 2017-05-24 16:21:50 +0000
+++ breezy/index.py 2017-05-26 09:27:07 +0000
@@ -750,7 +750,7 @@
750 while dicts:750 while dicts:
751 key_dict = dicts.pop(-1)751 key_dict = dicts.pop(-1)
752 # can't be empty or would not exist752 # can't be empty or would not exist
753 item, value = key_dict.iteritems().next()753 item, value = next(key_dict.iteritems())
754 if isinstance(value, dict):754 if isinstance(value, dict):
755 # push keys755 # push keys
756 dicts.extend(key_dict.itervalues())756 dicts.extend(key_dict.itervalues())
@@ -1726,7 +1726,7 @@
1726 while dicts:1726 while dicts:
1727 key_dict = dicts.pop(-1)1727 key_dict = dicts.pop(-1)
1728 # can't be empty or would not exist1728 # can't be empty or would not exist
1729 item, value = key_dict.iteritems().next()1729 item, value = next(key_dict.iteritems())
1730 if isinstance(value, dict):1730 if isinstance(value, dict):
1731 # push keys1731 # push keys
1732 dicts.extend(key_dict.itervalues())1732 dicts.extend(key_dict.itervalues())
17331733
=== modified file 'breezy/inventory_delta.py'
--- breezy/inventory_delta.py 2017-05-22 00:56:52 +0000
+++ breezy/inventory_delta.py 2017-05-26 09:27:07 +0000
@@ -303,7 +303,7 @@
303 seen_ids = set()303 seen_ids = set()
304 line_iter = iter(lines)304 line_iter = iter(lines)
305 for i in range(5):305 for i in range(5):
306 line_iter.next()306 next(line_iter)
307 for line in line_iter:307 for line in line_iter:
308 (oldpath_utf8, newpath_utf8, file_id, parent_id, last_modified,308 (oldpath_utf8, newpath_utf8, file_id, parent_id, last_modified,
309 content) = line.split('\x00', 5)309 content) = line.split('\x00', 5)
310310
=== modified file 'breezy/iterablefile.py'
--- breezy/iterablefile.py 2011-12-18 15:28:38 +0000
+++ breezy/iterablefile.py 2017-05-26 09:27:07 +0000
@@ -67,7 +67,7 @@
67 result = self._buffer67 result = self._buffer
68 while result_length(result) is None:68 while result_length(result) is None:
69 try:69 try:
70 result += self._iter.next()70 result += next(self._iter)
71 except StopIteration:71 except StopIteration:
72 self.done = True72 self.done = True
73 self._buffer = ""73 self._buffer = ""
@@ -142,27 +142,29 @@
142 """142 """
143 self._check_closed()143 self._check_closed()
144144
145 def next(self):145 def __next__(self):
146 """Implementation of the iterator protocol's next()146 """Implementation of the iterator protocol's next()
147147
148 >>> f = IterableFile(['This \\n', 'is ', 'a ', 'test.'])148 >>> f = IterableFile(['This \\n', 'is ', 'a ', 'test.'])
149 >>> f.next()149 >>> next(f)
150 'This \\n'150 'This \\n'
151 >>> f.close()151 >>> f.close()
152 >>> f.next()152 >>> next(f)
153 Traceback (most recent call last):153 Traceback (most recent call last):
154 ValueError: File is closed.154 ValueError: File is closed.
155 >>> f = IterableFile(['This \\n', 'is ', 'a ', 'test.\\n'])155 >>> f = IterableFile(['This \\n', 'is ', 'a ', 'test.\\n'])
156 >>> f.next()156 >>> next(f)
157 'This \\n'157 'This \\n'
158 >>> f.next()158 >>> next(f)
159 'is a test.\\n'159 'is a test.\\n'
160 >>> f.next()160 >>> next(f)
161 Traceback (most recent call last):161 Traceback (most recent call last):
162 StopIteration162 StopIteration
163 """163 """
164 self._check_closed()164 self._check_closed()
165 return self._iter.next()165 return next(self._iter)
166
167 next = __next__
166168
167 def __iter__(self):169 def __iter__(self):
168 """170 """
169171
=== modified file 'breezy/knit.py'
--- breezy/knit.py 2017-05-24 16:33:08 +0000
+++ breezy/knit.py 2017-05-26 09:27:07 +0000
@@ -191,8 +191,8 @@
191 delta = self._annotate_factory.parse_line_delta(contents, rec[1],191 delta = self._annotate_factory.parse_line_delta(contents, rec[1],
192 plain=True)192 plain=True)
193 compression_parent = factory.parents[0]193 compression_parent = factory.parents[0]
194 basis_entry = self._basis_vf.get_record_stream(194 basis_entry = next(self._basis_vf.get_record_stream(
195 [compression_parent], 'unordered', True).next()195 [compression_parent], 'unordered', True))
196 if basis_entry.storage_kind == 'absent':196 if basis_entry.storage_kind == 'absent':
197 raise errors.RevisionNotPresent(compression_parent, self._basis_vf)197 raise errors.RevisionNotPresent(compression_parent, self._basis_vf)
198 basis_chunks = basis_entry.get_bytes_as('chunked')198 basis_chunks = basis_entry.get_bytes_as('chunked')
@@ -227,8 +227,8 @@
227 delta = self._plain_factory.parse_line_delta(contents, rec[1])227 delta = self._plain_factory.parse_line_delta(contents, rec[1])
228 compression_parent = factory.parents[0]228 compression_parent = factory.parents[0]
229 # XXX: string splitting overhead.229 # XXX: string splitting overhead.
230 basis_entry = self._basis_vf.get_record_stream(230 basis_entry = next(self._basis_vf.get_record_stream(
231 [compression_parent], 'unordered', True).next()231 [compression_parent], 'unordered', True))
232 if basis_entry.storage_kind == 'absent':232 if basis_entry.storage_kind == 'absent':
233 raise errors.RevisionNotPresent(compression_parent, self._basis_vf)233 raise errors.RevisionNotPresent(compression_parent, self._basis_vf)
234 basis_chunks = basis_entry.get_bytes_as('chunked')234 basis_chunks = basis_entry.get_bytes_as('chunked')
@@ -619,7 +619,6 @@
619 """619 """
620 result = []620 result = []
621 lines = iter(lines)621 lines = iter(lines)
622 next = lines.next
623622
624 cache = {}623 cache = {}
625 def cache_and_return(line):624 def cache_and_return(line):
@@ -632,12 +631,13 @@
632 if plain:631 if plain:
633 for header in lines:632 for header in lines:
634 start, end, count = [int(n) for n in header.split(',')]633 start, end, count = [int(n) for n in header.split(',')]
635 contents = [next().split(' ', 1)[1] for i in xrange(count)]634 contents = [next(lines).split(' ', 1)[1] for _ in range(count)]
636 result.append((start, end, count, contents))635 result.append((start, end, count, contents))
637 else:636 else:
638 for header in lines:637 for header in lines:
639 start, end, count = [int(n) for n in header.split(',')]638 start, end, count = [int(n) for n in header.split(',')]
640 contents = [tuple(next().split(' ', 1)) for i in xrange(count)]639 contents = [tuple(next(lines).split(' ', 1))
640 for _ in range(count)]
641 result.append((start, end, count, contents))641 result.append((start, end, count, contents))
642 return result642 return result
643643
@@ -652,12 +652,11 @@
652 Only the actual content lines.652 Only the actual content lines.
653 """653 """
654 lines = iter(lines)654 lines = iter(lines)
655 next = lines.next
656 for header in lines:655 for header in lines:
657 header = header.split(',')656 header = header.split(',')
658 count = int(header[2])657 count = int(header[2])
659 for i in xrange(count):658 for i in xrange(count):
660 origin, text = next().split(' ', 1)659 origin, text = next(lines).split(' ', 1)
661 yield text660 yield text
662661
663 def lower_fulltext(self, content):662 def lower_fulltext(self, content):
@@ -738,12 +737,11 @@
738 Only the actual content lines.737 Only the actual content lines.
739 """738 """
740 lines = iter(lines)739 lines = iter(lines)
741 next = lines.next
742 for header in lines:740 for header in lines:
743 header = header.split(',')741 header = header.split(',')
744 count = int(header[2])742 count = int(header[2])
745 for i in xrange(count):743 for i in xrange(count):
746 yield next()744 yield next(lines)
747745
748 def lower_fulltext(self, content):746 def lower_fulltext(self, content):
749 return content.text()747 return content.text()
@@ -1967,7 +1965,7 @@
1967 raw_records = self._access.get_raw_records(needed_offsets)1965 raw_records = self._access.get_raw_records(needed_offsets)
19681966
1969 for key, index_memo in records:1967 for key, index_memo in records:
1970 data = raw_records.next()1968 data = next(raw_records)
1971 yield key, data1969 yield key, data
19721970
1973 def _record_to_data(self, key, digest, lines, dense_lines=None):1971 def _record_to_data(self, key, digest, lines, dense_lines=None):
@@ -2024,7 +2022,7 @@
2024 # Note that _get_content is only called when the _ContentMapGenerator2022 # Note that _get_content is only called when the _ContentMapGenerator
2025 # has been constructed with just one key requested for reconstruction.2023 # has been constructed with just one key requested for reconstruction.
2026 if key in self.nonlocal_keys:2024 if key in self.nonlocal_keys:
2027 record = self.get_record_stream().next()2025 record = next(self.get_record_stream())
2028 # Create a content object on the fly2026 # Create a content object on the fly
2029 lines = osutils.chunks_to_lines(record.get_bytes_as('chunked'))2027 lines = osutils.chunks_to_lines(record.get_bytes_as('chunked'))
2030 return PlainKnitContent(lines, record.key)2028 return PlainKnitContent(lines, record.key)
20312029
=== modified file 'breezy/log.py'
--- breezy/log.py 2017-05-25 22:09:31 +0000
+++ breezy/log.py 2017-05-26 09:27:07 +0000
@@ -1923,7 +1923,7 @@
1923 while do_new or do_old:1923 while do_new or do_old:
1924 if do_new:1924 if do_new:
1925 try:1925 try:
1926 new_revision = new_iter.next()1926 new_revision = next(new_iter)
1927 except StopIteration:1927 except StopIteration:
1928 do_new = False1928 do_new = False
1929 else:1929 else:
@@ -1934,7 +1934,7 @@
1934 break1934 break
1935 if do_old:1935 if do_old:
1936 try:1936 try:
1937 old_revision = old_iter.next()1937 old_revision = next(old_iter)
1938 except StopIteration:1938 except StopIteration:
1939 do_old = False1939 do_old = False
1940 else:1940 else:
19411941
=== modified file 'breezy/merge_directive.py'
--- breezy/merge_directive.py 2017-05-22 00:56:52 +0000
+++ breezy/merge_directive.py 2017-05-26 09:27:07 +0000
@@ -516,7 +516,7 @@
516 patch = None516 patch = None
517 bundle = None517 bundle = None
518 try:518 try:
519 start = line_iter.next()519 start = next(line_iter)
520 except StopIteration:520 except StopIteration:
521 pass521 pass
522 else:522 else:
523523
=== modified file 'breezy/multiparent.py'
--- breezy/multiparent.py 2017-05-22 00:56:52 +0000
+++ breezy/multiparent.py 2017-05-26 09:27:07 +0000
@@ -117,7 +117,7 @@
117 diff = MultiParent([])117 diff = MultiParent([])
118 def next_block(p):118 def next_block(p):
119 try:119 try:
120 return block_iter[p].next()120 return next(block_iter[p])
121 except StopIteration:121 except StopIteration:
122 return None122 return None
123 cur_block = [next_block(p) for p, i in enumerate(block_iter)]123 cur_block = [next_block(p) for p, i in enumerate(block_iter)]
@@ -203,12 +203,12 @@
203 cur_line = None203 cur_line = None
204 while(True):204 while(True):
205 try:205 try:
206 cur_line = line_iter.next()206 cur_line = next(line_iter)
207 except StopIteration:207 except StopIteration:
208 break208 break
209 if cur_line[0] == 'i':209 if cur_line[0] == 'i':
210 num_lines = int(cur_line.split(' ')[1])210 num_lines = int(cur_line.split(' ')[1])
211 hunk_lines = [line_iter.next() for x in xrange(num_lines)]211 hunk_lines = [next(line_iter) for x in xrange(num_lines)]
212 hunk_lines[-1] = hunk_lines[-1][:-1]212 hunk_lines[-1] = hunk_lines[-1][:-1]
213 hunks.append(NewText(hunk_lines))213 hunks.append(NewText(hunk_lines))
214 elif cur_line[0] == '\n':214 elif cur_line[0] == '\n':
@@ -646,14 +646,14 @@
646 start, end, kind, data, iterator = self.cursor[req_version_id]646 start, end, kind, data, iterator = self.cursor[req_version_id]
647 except KeyError:647 except KeyError:
648 iterator = self.diffs.get_diff(req_version_id).range_iterator()648 iterator = self.diffs.get_diff(req_version_id).range_iterator()
649 start, end, kind, data = iterator.next()649 start, end, kind, data = next(iterator)
650 if start > req_start:650 if start > req_start:
651 iterator = self.diffs.get_diff(req_version_id).range_iterator()651 iterator = self.diffs.get_diff(req_version_id).range_iterator()
652 start, end, kind, data = iterator.next()652 start, end, kind, data = next(iterator)
653653
654 # find the first hunk relevant to the request654 # find the first hunk relevant to the request
655 while end <= req_start:655 while end <= req_start:
656 start, end, kind, data = iterator.next()656 start, end, kind, data = next(iterator)
657 self.cursor[req_version_id] = start, end, kind, data, iterator657 self.cursor[req_version_id] = start, end, kind, data, iterator
658 # if the hunk can't satisfy the whole request, split it in two,658 # if the hunk can't satisfy the whole request, split it in two,
659 # and leave the second half for later.659 # and leave the second half for later.
660660
=== modified file 'breezy/mutabletree.py'
--- breezy/mutabletree.py 2017-05-24 19:44:00 +0000
+++ breezy/mutabletree.py 2017-05-26 09:27:07 +0000
@@ -234,10 +234,10 @@
234 _from_tree = self.basis_tree()234 _from_tree = self.basis_tree()
235 changes = self.iter_changes(_from_tree)235 changes = self.iter_changes(_from_tree)
236 try:236 try:
237 change = changes.next()237 change = next(changes)
238 # Exclude root (talk about black magic... --vila 20090629)238 # Exclude root (talk about black magic... --vila 20090629)
239 if change[4] == (None, None):239 if change[4] == (None, None):
240 change = changes.next()240 change = next(changes)
241 return True241 return True
242 except StopIteration:242 except StopIteration:
243 # No changes243 # No changes
244244
=== modified file 'breezy/pack.py'
--- breezy/pack.py 2017-05-22 00:56:52 +0000
+++ breezy/pack.py 2017-05-26 09:27:07 +0000
@@ -194,7 +194,7 @@
194 def _next(self):194 def _next(self):
195 if (self._string is None or195 if (self._string is None or
196 self._string.tell() == self._string_length):196 self._string.tell() == self._string_length):
197 offset, data = self.readv_result.next()197 offset, data = next(self.readv_result)
198 self._string_length = len(data)198 self._string_length = len(data)
199 self._string = BytesIO(data)199 self._string = BytesIO(data)
200200
201201
=== modified file 'breezy/patches.py'
--- breezy/patches.py 2017-05-22 00:56:52 +0000
+++ breezy/patches.py 2017-05-26 09:27:07 +0000
@@ -33,7 +33,7 @@
3333
3434
35def get_patch_names(iter_lines):35def get_patch_names(iter_lines):
36 line = iter_lines.next()36 line = next(iter_lines)
37 try:37 try:
38 match = re.match(binary_files_re, line)38 match = re.match(binary_files_re, line)
39 if match is not None:39 if match is not None:
@@ -45,7 +45,7 @@
45 except StopIteration:45 except StopIteration:
46 raise MalformedPatchHeader("No orig line", "")46 raise MalformedPatchHeader("No orig line", "")
47 try:47 try:
48 line = iter_lines.next()48 line = next(iter_lines)
49 if not line.startswith("+++ "):49 if not line.startswith("+++ "):
50 raise PatchSyntax("No mod name")50 raise PatchSyntax("No mod name")
51 else:51 else:
@@ -244,7 +244,7 @@
244 orig_size = 0244 orig_size = 0
245 mod_size = 0245 mod_size = 0
246 while orig_size < hunk.orig_range or mod_size < hunk.mod_range:246 while orig_size < hunk.orig_range or mod_size < hunk.mod_range:
247 hunk_line = parse_line(iter_lines.next())247 hunk_line = parse_line(next(iter_lines))
248 hunk.lines.append(hunk_line)248 hunk.lines.append(hunk_line)
249 if isinstance(hunk_line, (RemoveLine, ContextLine)):249 if isinstance(hunk_line, (RemoveLine, ContextLine)):
250 orig_size += 1250 orig_size += 1
@@ -483,7 +483,7 @@
483 orig_lines = iter(orig_lines)483 orig_lines = iter(orig_lines)
484 for hunk in hunks:484 for hunk in hunks:
485 while line_no < hunk.orig_pos:485 while line_no < hunk.orig_pos:
486 orig_line = orig_lines.next()486 orig_line = next(orig_lines)
487 yield orig_line487 yield orig_line
488 line_no += 1488 line_no += 1
489 for hunk_line in hunk.lines:489 for hunk_line in hunk.lines:
@@ -491,7 +491,7 @@
491 if isinstance(hunk_line, InsertLine):491 if isinstance(hunk_line, InsertLine):
492 yield hunk_line.contents492 yield hunk_line.contents
493 elif isinstance(hunk_line, (ContextLine, RemoveLine)):493 elif isinstance(hunk_line, (ContextLine, RemoveLine)):
494 orig_line = orig_lines.next()494 orig_line = next(orig_lines)
495 if orig_line != hunk_line.contents:495 if orig_line != hunk_line.contents:
496 raise PatchConflict(line_no, orig_line, "".join(seen_patch))496 raise PatchConflict(line_no, orig_line, "".join(seen_patch))
497 if isinstance(hunk_line, ContextLine):497 if isinstance(hunk_line, ContextLine):
498498
=== modified file 'breezy/plugins/fastimport/revision_store.py'
--- breezy/plugins/fastimport/revision_store.py 2017-05-23 23:21:16 +0000
+++ breezy/plugins/fastimport/revision_store.py 2017-05-26 09:27:07 +0000
@@ -436,7 +436,7 @@
436 path_entries = inv.iter_entries()436 path_entries = inv.iter_entries()
437 # Backwards compatibility hack: skip the root id.437 # Backwards compatibility hack: skip the root id.
438 if not self.repo.supports_rich_root():438 if not self.repo.supports_rich_root():
439 path, root = path_entries.next()439 path, root = next(path_entries)
440 if root.revision != revision_id:440 if root.revision != revision_id:
441 raise errors.IncompatibleRevision(repr(self.repo))441 raise errors.IncompatibleRevision(repr(self.repo))
442 entries = iter([ie for path, ie in path_entries])442 entries = iter([ie for path, ie in path_entries])
@@ -602,8 +602,8 @@
602 self.repo.texts.add_lines(text_key, text_parents, lines)602 self.repo.texts.add_lines(text_key, text_parents, lines)
603603
604 def get_file_lines(self, revision_id, file_id):604 def get_file_lines(self, revision_id, file_id):
605 record = self.repo.texts.get_record_stream([(file_id, revision_id)],605 record = next(self.repo.texts.get_record_stream([(file_id, revision_id)],
606 'unordered', True).next()606 'unordered', True))
607 if record.storage_kind == 'absent':607 if record.storage_kind == 'absent':
608 raise errors.RevisionNotPresent(record.key, self.repo)608 raise errors.RevisionNotPresent(record.key, self.repo)
609 return osutils.split_lines(record.get_bytes_as('fulltext'))609 return osutils.split_lines(record.get_bytes_as('fulltext'))
610610
=== modified file 'breezy/plugins/weave_fmt/bzrdir.py'
--- breezy/plugins/weave_fmt/bzrdir.py 2017-05-24 19:44:00 +0000
+++ breezy/plugins/weave_fmt/bzrdir.py 2017-05-26 09:27:07 +0000
@@ -417,7 +417,7 @@
417 trace.mutter('converting texts of revision {%s}', rev_id)417 trace.mutter('converting texts of revision {%s}', rev_id)
418 parent_invs = list(map(self._load_updated_inventory, present_parents))418 parent_invs = list(map(self._load_updated_inventory, present_parents))
419 entries = inv.iter_entries()419 entries = inv.iter_entries()
420 entries.next()420 next(entries)
421 for path, ie in entries:421 for path, ie in entries:
422 self._convert_file_version(rev, ie, parent_invs)422 self._convert_file_version(rev, ie, parent_invs)
423423
424424
=== modified file 'breezy/remote.py'
--- breezy/remote.py 2017-05-22 00:56:52 +0000
+++ breezy/remote.py 2017-05-26 09:27:07 +0000
@@ -1948,7 +1948,7 @@
1948 prev_inv = Inventory(root_id=None,1948 prev_inv = Inventory(root_id=None,
1949 revision_id=_mod_revision.NULL_REVISION)1949 revision_id=_mod_revision.NULL_REVISION)
1950 # there should be just one substream, with inventory deltas1950 # there should be just one substream, with inventory deltas
1951 substream_kind, substream = stream.next()1951 substream_kind, substream = next(stream)
1952 if substream_kind != "inventory-deltas":1952 if substream_kind != "inventory-deltas":
1953 raise AssertionError(1953 raise AssertionError(
1954 "Unexpected stream %r received" % substream_kind)1954 "Unexpected stream %r received" % substream_kind)
@@ -2190,7 +2190,7 @@
2190 yield decompressor.decompress(start)2190 yield decompressor.decompress(start)
2191 while decompressor.unused_data == "":2191 while decompressor.unused_data == "":
2192 try:2192 try:
2193 data = byte_stream.next()2193 data = next(byte_stream)
2194 except StopIteration:2194 except StopIteration:
2195 break2195 break
2196 yield decompressor.decompress(data)2196 yield decompressor.decompress(data)
@@ -2199,7 +2199,7 @@
2199 unused = ""2199 unused = ""
2200 while True:2200 while True:
2201 while not "\n" in unused:2201 while not "\n" in unused:
2202 unused += byte_stream.next()2202 unused += next(byte_stream)
2203 header, rest = unused.split("\n", 1)2203 header, rest = unused.split("\n", 1)
2204 args = header.split("\0")2204 args = header.split("\0")
2205 if args[0] == "absent":2205 if args[0] == "absent":
22062206
=== modified file 'breezy/repository.py'
--- breezy/repository.py 2017-05-24 19:44:00 +0000
+++ breezy/repository.py 2017-05-26 09:27:07 +0000
@@ -1788,14 +1788,14 @@
1788 (_mod_revision.NULL_REVISION,))1788 (_mod_revision.NULL_REVISION,))
1789 try:1789 try:
1790 # skip the last revision in the list1790 # skip the last revision in the list
1791 iterator.next()1791 next(iterator)
1792 while True:1792 while True:
1793 if (stop_index is not None and1793 if (stop_index is not None and
1794 len(partial_history_cache) > stop_index):1794 len(partial_history_cache) > stop_index):
1795 break1795 break
1796 if partial_history_cache[-1] == stop_revision:1796 if partial_history_cache[-1] == stop_revision:
1797 break1797 break
1798 revision_id = iterator.next()1798 revision_id = next(iterator)
1799 partial_history_cache.append(revision_id)1799 partial_history_cache.append(revision_id)
1800 except StopIteration:1800 except StopIteration:
1801 # No more history1801 # No more history
18021802
=== modified file 'breezy/revisiontree.py'
--- breezy/revisiontree.py 2017-05-22 00:56:52 +0000
+++ breezy/revisiontree.py 2017-05-26 09:27:07 +0000
@@ -151,7 +151,7 @@
151 entries = inv.iter_entries(from_dir=from_dir_id, recursive=recursive)151 entries = inv.iter_entries(from_dir=from_dir_id, recursive=recursive)
152 if inv.root is not None and not include_root and from_dir is None:152 if inv.root is not None and not include_root and from_dir is None:
153 # skip the root for compatability with the current apis.153 # skip the root for compatability with the current apis.
154 entries.next()154 next(entries)
155 for path, entry in entries:155 for path, entry in entries:
156 yield path, 'V', entry.kind, entry.file_id, entry156 yield path, 'V', entry.kind, entry.file_id, entry
157157
158158
=== modified file 'breezy/shelf.py'
--- breezy/shelf.py 2017-05-22 00:56:52 +0000
+++ breezy/shelf.py 2017-05-26 09:27:07 +0000
@@ -314,7 +314,7 @@
314314
315 @staticmethod315 @staticmethod
316 def parse_metadata(records):316 def parse_metadata(records):
317 names, metadata_bytes = records.next()317 names, metadata_bytes = next(records)
318 if names[0] != ('metadata',):318 if names[0] != ('metadata',):
319 raise errors.ShelfCorrupt319 raise errors.ShelfCorrupt
320 metadata = bencode.bdecode(metadata_bytes)320 metadata = bencode.bdecode(metadata_bytes)
321321
=== modified file 'breezy/smart/protocol.py'
--- breezy/smart/protocol.py 2017-05-22 00:56:52 +0000
+++ breezy/smart/protocol.py 2017-05-26 09:27:07 +0000
@@ -1278,7 +1278,7 @@
1278 iterator = iter(iterable)1278 iterator = iter(iterable)
1279 while True:1279 while True:
1280 try:1280 try:
1281 yield None, iterator.next()1281 yield None, next(iterator)
1282 except StopIteration:1282 except StopIteration:
1283 return1283 return
1284 except (KeyboardInterrupt, SystemExit):1284 except (KeyboardInterrupt, SystemExit):
12851285
=== modified file 'breezy/smart/repository.py'
--- breezy/smart/repository.py 2017-05-24 19:44:00 +0000
+++ breezy/smart/repository.py 2017-05-26 09:27:07 +0000
@@ -127,7 +127,7 @@
127 start_keys)127 start_keys)
128 while True:128 while True:
129 try:129 try:
130 next_revs = search.next()130 next_revs = next(search)
131 except StopIteration:131 except StopIteration:
132 break132 break
133 search.stop_searching_any(exclude_keys.intersection(next_revs))133 search.stop_searching_any(exclude_keys.intersection(next_revs))
134134
=== modified file 'breezy/status.py'
--- breezy/status.py 2017-05-22 00:56:52 +0000
+++ breezy/status.py 2017-05-26 09:27:07 +0000
@@ -334,7 +334,7 @@
334 rev_id_iterator = _get_sorted_revisions(merge, merge_extra,334 rev_id_iterator = _get_sorted_revisions(merge, merge_extra,
335 branch.repository.get_parent_map(merge_extra))335 branch.repository.get_parent_map(merge_extra))
336 # Skip the first node336 # Skip the first node
337 num, first, depth, eom = rev_id_iterator.next()337 num, first, depth, eom = next(rev_id_iterator)
338 if first != merge:338 if first != merge:
339 raise AssertionError('Somehow we misunderstood how'339 raise AssertionError('Somehow we misunderstood how'
340 ' iter_topo_order works %s != %s' % (first, merge))340 ' iter_topo_order works %s != %s' % (first, merge))
341341
=== modified file 'breezy/tests/blackbox/test_export.py'
--- breezy/tests/blackbox/test_export.py 2017-05-22 00:56:52 +0000
+++ breezy/tests/blackbox/test_export.py 2017-05-26 09:27:07 +0000
@@ -163,7 +163,7 @@
163163
164 def assertTarANameAndContent(self, ball, root=''):164 def assertTarANameAndContent(self, ball, root=''):
165 fname = root + 'a'165 fname = root + 'a'
166 tar_info = ball.next()166 tar_info = next(ball)
167 self.assertEqual(fname, tar_info.name)167 self.assertEqual(fname, tar_info.name)
168 self.assertEqual(tarfile.REGTYPE, tar_info.type)168 self.assertEqual(tarfile.REGTYPE, tar_info.type)
169 self.assertEqual(len(self._file_content), tar_info.size)169 self.assertEqual(len(self._file_content), tar_info.size)
@@ -172,7 +172,7 @@
172 self.fail('File content has been corrupted.'172 self.fail('File content has been corrupted.'
173 ' Check that all streams are handled in binary mode.')173 ' Check that all streams are handled in binary mode.')
174 # There should be no other files in the tarball174 # There should be no other files in the tarball
175 self.assertIs(None, ball.next())175 self.assertIs(None, next(ball))
176176
177 def run_tar_export_disk_and_stdout(self, extension, tarfile_flags):177 def run_tar_export_disk_and_stdout(self, extension, tarfile_flags):
178 tree = self.make_basic_tree()178 tree = self.make_basic_tree()
179179
=== modified file 'breezy/tests/per_intertree/test_compare.py'
--- breezy/tests/per_intertree/test_compare.py 2017-05-21 18:10:28 +0000
+++ breezy/tests/per_intertree/test_compare.py 2017-05-26 09:27:07 +0000
@@ -515,7 +515,7 @@
515 @staticmethod515 @staticmethod
516 def get_path_entry(tree, file_id):516 def get_path_entry(tree, file_id):
517 iterator = tree.iter_entries_by_dir(specific_file_ids=[file_id])517 iterator = tree.iter_entries_by_dir(specific_file_ids=[file_id])
518 return iterator.next()518 return next(iterator)
519519
520 def content_changed(self, tree, file_id):520 def content_changed(self, tree, file_id):
521 path, entry = self.get_path_entry(tree, file_id)521 path, entry = self.get_path_entry(tree, file_id)
522522
=== modified file 'breezy/tests/per_pack_repository.py'
--- breezy/tests/per_pack_repository.py 2017-05-24 19:44:00 +0000
+++ breezy/tests/per_pack_repository.py 2017-05-26 09:27:07 +0000
@@ -328,7 +328,7 @@
328 repo.lock_write()328 repo.lock_write()
329 self.addCleanup(repo.unlock)329 self.addCleanup(repo.unlock)
330 repo.fetch(b.repository, revision_id='B-id')330 repo.fetch(b.repository, revision_id='B-id')
331 inv = b.repository.iter_inventories(['C-id']).next()331 inv = next(b.repository.iter_inventories(['C-id']))
332 repo.start_write_group()332 repo.start_write_group()
333 repo.add_inventory('C-id', inv, ['B-id'])333 repo.add_inventory('C-id', inv, ['B-id'])
334 repo.commit_write_group()334 repo.commit_write_group()
@@ -338,7 +338,7 @@
338 self.assertEqual([('A-id',), ('B-id',), ('C-id',)],338 self.assertEqual([('A-id',), ('B-id',), ('C-id',)],
339 sorted(repo.inventories.keys()))339 sorted(repo.inventories.keys()))
340 # Content should be preserved as well340 # Content should be preserved as well
341 self.assertEqual(inv, repo.iter_inventories(['C-id']).next())341 self.assertEqual(inv, next(repo.iter_inventories(['C-id'])))
342342
343 def test_pack_layout(self):343 def test_pack_layout(self):
344 # Test that the ordering of revisions in pack repositories is344 # Test that the ordering of revisions in pack repositories is
345345
=== modified file 'breezy/tests/per_repository_vf/test_write_group.py'
--- breezy/tests/per_repository_vf/test_write_group.py 2017-05-22 00:56:52 +0000
+++ breezy/tests/per_repository_vf/test_write_group.py 2017-05-26 09:27:07 +0000
@@ -563,8 +563,8 @@
563 else:563 else:
564 same_repo = self.reopen_repo(repo)564 same_repo = self.reopen_repo(repo)
565 same_repo.lock_read()565 same_repo.lock_read()
566 record = same_repo.texts.get_record_stream([key_delta],566 record = next(same_repo.texts.get_record_stream([key_delta],
567 'unordered', True).next()567 'unordered', True))
568 self.assertEqual('more\nlines\n', record.get_bytes_as('fulltext'))568 self.assertEqual('more\nlines\n', record.get_bytes_as('fulltext'))
569 return569 return
570 # Merely suspending and resuming doesn't make it commitable either.570 # Merely suspending and resuming doesn't make it commitable either.
@@ -607,8 +607,8 @@
607 # insert_record_stream already gave it a fulltext.607 # insert_record_stream already gave it a fulltext.
608 same_repo = self.reopen_repo(repo)608 same_repo = self.reopen_repo(repo)
609 same_repo.lock_read()609 same_repo.lock_read()
610 record = same_repo.texts.get_record_stream([key_delta],610 record = next(same_repo.texts.get_record_stream([key_delta],
611 'unordered', True).next()611 'unordered', True))
612 self.assertEqual('more\nlines\n', record.get_bytes_as('fulltext'))612 self.assertEqual('more\nlines\n', record.get_bytes_as('fulltext'))
613 return613 return
614 same_repo.abort_write_group()614 same_repo.abort_write_group()
615615
=== modified file 'breezy/tests/per_versionedfile.py'
--- breezy/tests/per_versionedfile.py 2017-05-25 00:04:21 +0000
+++ breezy/tests/per_versionedfile.py 2017-05-26 09:27:07 +0000
@@ -890,8 +890,8 @@
890 def test_get_record_stream(self):890 def test_get_record_stream(self):
891 self.setup_abcde()891 self.setup_abcde()
892 def get_record(suffix):892 def get_record(suffix):
893 return self.plan_merge_vf.get_record_stream(893 return next(self.plan_merge_vf.get_record_stream(
894 [('root', suffix)], 'unordered', True).next()894 [('root', suffix)], 'unordered', True))
895 self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))895 self.assertEqual('a', get_record('A').get_bytes_as('fulltext'))
896 self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))896 self.assertEqual('c', get_record('C').get_bytes_as('fulltext'))
897 self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))897 self.assertEqual('e', get_record('E:').get_bytes_as('fulltext'))
@@ -1225,11 +1225,11 @@
1225 """Grab the interested adapted texts for tests."""1225 """Grab the interested adapted texts for tests."""
1226 # origin is a fulltext1226 # origin is a fulltext
1227 entries = f.get_record_stream([('origin',)], 'unordered', False)1227 entries = f.get_record_stream([('origin',)], 'unordered', False)
1228 base = entries.next()1228 base = next(entries)
1229 ft_data = ft_adapter.get_bytes(base)1229 ft_data = ft_adapter.get_bytes(base)
1230 # merged is both a delta and multiple parents.1230 # merged is both a delta and multiple parents.
1231 entries = f.get_record_stream([('merged',)], 'unordered', False)1231 entries = f.get_record_stream([('merged',)], 'unordered', False)
1232 merged = entries.next()1232 merged = next(entries)
1233 delta_data = delta_adapter.get_bytes(merged)1233 delta_data = delta_adapter.get_bytes(merged)
1234 return ft_data, delta_data1234 return ft_data, delta_data
12351235
@@ -1637,7 +1637,7 @@
1637 vf._add_text, new_key, [], ''.join(lines),1637 vf._add_text, new_key, [], ''.join(lines),
1638 nostore_sha=sha)1638 nostore_sha=sha)
1639 # and no new version should have been added.1639 # and no new version should have been added.
1640 record = vf.get_record_stream([new_key], 'unordered', True).next()1640 record = next(vf.get_record_stream([new_key], 'unordered', True))
1641 self.assertEqual('absent', record.storage_kind)1641 self.assertEqual('absent', record.storage_kind)
16421642
1643 def test_add_lines_nostoresha(self):1643 def test_add_lines_nostoresha(self):
@@ -2002,7 +2002,7 @@
2002 key = self.get_simple_key('foo')2002 key = self.get_simple_key('foo')
2003 files.add_lines(key, (), ['my text\n', 'content'])2003 files.add_lines(key, (), ['my text\n', 'content'])
2004 stream = files.get_record_stream([key], 'unordered', False)2004 stream = files.get_record_stream([key], 'unordered', False)
2005 record = stream.next()2005 record = next(stream)
2006 if record.storage_kind in ('chunked', 'fulltext'):2006 if record.storage_kind in ('chunked', 'fulltext'):
2007 # chunked and fulltext representations are for direct use not wire2007 # chunked and fulltext representations are for direct use not wire
2008 # serialisation: check they are able to be used directly. To send2008 # serialisation: check they are able to be used directly. To send
@@ -2785,14 +2785,14 @@
2785 def test_get_record_stream(self):2785 def test_get_record_stream(self):
2786 self._lines["A"] = ["FOO", "BAR"]2786 self._lines["A"] = ["FOO", "BAR"]
2787 it = self.texts.get_record_stream([("A",)], "unordered", True)2787 it = self.texts.get_record_stream([("A",)], "unordered", True)
2788 record = it.next()2788 record = next(it)
2789 self.assertEqual("chunked", record.storage_kind)2789 self.assertEqual("chunked", record.storage_kind)
2790 self.assertEqual("FOOBAR", record.get_bytes_as("fulltext"))2790 self.assertEqual("FOOBAR", record.get_bytes_as("fulltext"))
2791 self.assertEqual(["FOO", "BAR"], record.get_bytes_as("chunked"))2791 self.assertEqual(["FOO", "BAR"], record.get_bytes_as("chunked"))
27922792
2793 def test_get_record_stream_absent(self):2793 def test_get_record_stream_absent(self):
2794 it = self.texts.get_record_stream([("A",)], "unordered", True)2794 it = self.texts.get_record_stream([("A",)], "unordered", True)
2795 record = it.next()2795 record = next(it)
2796 self.assertEqual("absent", record.storage_kind)2796 self.assertEqual("absent", record.storage_kind)
27972797
2798 def test_iter_lines_added_or_present_in_keys(self):2798 def test_iter_lines_added_or_present_in_keys(self):
27992799
=== modified file 'breezy/tests/per_workingtree/test_inv.py'
--- breezy/tests/per_workingtree/test_inv.py 2017-05-21 18:10:28 +0000
+++ breezy/tests/per_workingtree/test_inv.py 2017-05-26 09:27:07 +0000
@@ -177,6 +177,6 @@
177 # wt.current_dirstate()'s idea about what files are where.177 # wt.current_dirstate()'s idea about what files are where.
178 ie = base.inventory['subdir-id']178 ie = base.inventory['subdir-id']
179 self.assertEqual('directory', ie.kind)179 self.assertEqual('directory', ie.kind)
180 path, ie = base.iter_entries_by_dir(['subdir-id']).next()180 path, ie = next(base.iter_entries_by_dir(['subdir-id']))
181 self.assertEqual('subdir', path)181 self.assertEqual('subdir', path)
182 self.assertEqual('tree-reference', ie.kind)182 self.assertEqual('tree-reference', ie.kind)
183183
=== modified file 'breezy/tests/per_workingtree/test_nested_specifics.py'
--- breezy/tests/per_workingtree/test_nested_specifics.py 2017-05-21 18:10:28 +0000
+++ breezy/tests/per_workingtree/test_nested_specifics.py 2017-05-26 09:27:07 +0000
@@ -79,5 +79,5 @@
7979
80 def test_iter_entries_by_dir_autodetects_subtree(self):80 def test_iter_entries_by_dir_autodetects_subtree(self):
81 tree = self.prepare_with_subtree()81 tree = self.prepare_with_subtree()
82 path, ie = tree.iter_entries_by_dir(['subtree-id']).next()82 path, ie = next(tree.iter_entries_by_dir(['subtree-id']))
83 self.assertEqual('tree-reference', ie.kind)83 self.assertEqual('tree-reference', ie.kind)
8484
=== modified file 'breezy/tests/test__annotator.py'
--- breezy/tests/test__annotator.py 2017-05-23 14:08:03 +0000
+++ breezy/tests/test__annotator.py 2017-05-26 09:27:07 +0000
@@ -137,7 +137,7 @@
137 annotation, lines = self.ann.annotate(key)137 annotation, lines = self.ann.annotate(key)
138 self.assertEqual(expected_annotation, annotation)138 self.assertEqual(expected_annotation, annotation)
139 if exp_text is None:139 if exp_text is None:
140 record = self.vf.get_record_stream([key], 'unordered', True).next()140 record = next(self.vf.get_record_stream([key], 'unordered', True))
141 exp_text = record.get_bytes_as('fulltext')141 exp_text = record.get_bytes_as('fulltext')
142 self.assertEqualDiff(exp_text, ''.join(lines))142 self.assertEqualDiff(exp_text, ''.join(lines))
143143
144144
=== modified file 'breezy/tests/test__simple_set.py'
--- breezy/tests/test__simple_set.py 2017-05-21 18:10:28 +0000
+++ breezy/tests/test__simple_set.py 2017-05-26 09:27:07 +0000
@@ -373,13 +373,13 @@
373 all.add(key)373 all.add(key)
374 self.assertEqual(sorted([k1, k2, k3]), sorted(all))374 self.assertEqual(sorted([k1, k2, k3]), sorted(all))
375 iterator = iter(obj)375 iterator = iter(obj)
376 iterator.next()376 next(iterator)
377 obj.add(('foo',))377 obj.add(('foo',))
378 # Set changed size378 # Set changed size
379 self.assertRaises(RuntimeError, iterator.next)379 self.assertRaises(RuntimeError, next, iterator)
380 # And even removing an item still causes it to fail380 # And even removing an item still causes it to fail
381 obj.discard(k2)381 obj.discard(k2)
382 self.assertRaises(RuntimeError, iterator.next)382 self.assertRaises(RuntimeError, next, iterator)
383383
384 def test__sizeof__(self):384 def test__sizeof__(self):
385 # SimpleSet needs a custom sizeof implementation, because it allocates385 # SimpleSet needs a custom sizeof implementation, because it allocates
386386
=== modified file 'breezy/tests/test_bundle.py'
--- breezy/tests/test_bundle.py 2017-05-22 00:56:52 +0000
+++ breezy/tests/test_bundle.py 2017-05-26 09:27:07 +0000
@@ -56,7 +56,7 @@
56def get_text(vf, key):56def get_text(vf, key):
57 """Get the fulltext for a given revision id that is present in the vf"""57 """Get the fulltext for a given revision id that is present in the vf"""
58 stream = vf.get_record_stream([key], 'unordered', True)58 stream = vf.get_record_stream([key], 'unordered', True)
59 record = stream.next()59 record = next(stream)
60 return record.get_bytes_as('fulltext')60 return record.get_bytes_as('fulltext')
6161
6262
@@ -1764,10 +1764,10 @@
1764 fileobj.seek(0)1764 fileobj.seek(0)
1765 reader = v4.BundleReader(fileobj, stream_input=True)1765 reader = v4.BundleReader(fileobj, stream_input=True)
1766 record_iter = reader.iter_records()1766 record_iter = reader.iter_records()
1767 record = record_iter.next()1767 record = next(record_iter)
1768 self.assertEqual((None, {'foo': 'bar', 'storage_kind': 'header'},1768 self.assertEqual((None, {'foo': 'bar', 'storage_kind': 'header'},
1769 'info', None, None), record)1769 'info', None, None), record)
1770 record = record_iter.next()1770 record = next(record_iter)
1771 self.assertEqual(("Record body", {'storage_kind': 'fulltext',1771 self.assertEqual(("Record body", {'storage_kind': 'fulltext',
1772 'parents': ['1', '3']}, 'file', 'revid', 'fileid'),1772 'parents': ['1', '3']}, 'file', 'revid', 'fileid'),
1773 record)1773 record)
@@ -1783,10 +1783,10 @@
1783 fileobj.seek(0)1783 fileobj.seek(0)
1784 reader = v4.BundleReader(fileobj, stream_input=False)1784 reader = v4.BundleReader(fileobj, stream_input=False)
1785 record_iter = reader.iter_records()1785 record_iter = reader.iter_records()
1786 record = record_iter.next()1786 record = next(record_iter)
1787 self.assertEqual((None, {'foo': 'bar', 'storage_kind': 'header'},1787 self.assertEqual((None, {'foo': 'bar', 'storage_kind': 'header'},
1788 'info', None, None), record)1788 'info', None, None), record)
1789 record = record_iter.next()1789 record = next(record_iter)
1790 self.assertEqual(("Record body", {'storage_kind': 'fulltext',1790 self.assertEqual(("Record body", {'storage_kind': 'fulltext',
1791 'parents': ['1', '3']}, 'file', 'revid', 'fileid'),1791 'parents': ['1', '3']}, 'file', 'revid', 'fileid'),
1792 record)1792 record)
@@ -1816,10 +1816,10 @@
1816 writer.end()1816 writer.end()
1817 fileobj.seek(0)1817 fileobj.seek(0)
1818 record_iter = v4.BundleReader(fileobj).iter_records()1818 record_iter = v4.BundleReader(fileobj).iter_records()
1819 record = record_iter.next()1819 record = next(record_iter)
1820 self.assertEqual((None, {'foo': 'bar', 'storage_kind': 'header'},1820 self.assertEqual((None, {'foo': 'bar', 'storage_kind': 'header'},
1821 'info', None, None), record)1821 'info', None, None), record)
1822 self.assertRaises(errors.BadBundle, record_iter.next)1822 self.assertRaises(errors.BadBundle, next, record_iter)
18231823
18241824
1825class TestReadMergeableFromUrl(tests.TestCaseWithTransport):1825class TestReadMergeableFromUrl(tests.TestCaseWithTransport):
18261826
=== modified file 'breezy/tests/test_chk_map.py'
--- breezy/tests/test_chk_map.py 2017-05-22 00:56:52 +0000
+++ breezy/tests/test_chk_map.py 2017-05-26 09:27:07 +0000
@@ -85,7 +85,7 @@
8585
86 def read_bytes(self, chk_bytes, key):86 def read_bytes(self, chk_bytes, key):
87 stream = chk_bytes.get_record_stream([key], 'unordered', True)87 stream = chk_bytes.get_record_stream([key], 'unordered', True)
88 record = stream.next()88 record = next(stream)
89 if record.storage_kind == 'absent':89 if record.storage_kind == 'absent':
90 self.fail('Store does not contain the key %s' % (key,))90 self.fail('Store does not contain the key %s' % (key,))
91 return record.get_bytes_as("fulltext")91 return record.get_bytes_as("fulltext")
9292
=== modified file 'breezy/tests/test_fetch.py'
--- breezy/tests/test_fetch.py 2017-05-22 00:56:52 +0000
+++ breezy/tests/test_fetch.py 2017-05-26 09:27:07 +0000
@@ -368,15 +368,15 @@
368 # Ensure that we stored a delta368 # Ensure that we stored a delta
369 source.lock_read()369 source.lock_read()
370 self.addCleanup(source.unlock)370 self.addCleanup(source.unlock)
371 record = source.revisions.get_record_stream([('rev-two',)],371 record = next(source.revisions.get_record_stream([('rev-two',)],
372 'unordered', False).next()372 'unordered', False))
373 self.assertEqual('knit-delta-gz', record.storage_kind)373 self.assertEqual('knit-delta-gz', record.storage_kind)
374 target.fetch(tree.branch.repository, revision_id='rev-two')374 target.fetch(tree.branch.repository, revision_id='rev-two')
375 # The record should get expanded back to a fulltext375 # The record should get expanded back to a fulltext
376 target.lock_read()376 target.lock_read()
377 self.addCleanup(target.unlock)377 self.addCleanup(target.unlock)
378 record = target.revisions.get_record_stream([('rev-two',)],378 record = next(target.revisions.get_record_stream([('rev-two',)],
379 'unordered', False).next()379 'unordered', False))
380 self.assertEqual('knit-ft-gz', record.storage_kind)380 self.assertEqual('knit-ft-gz', record.storage_kind)
381381
382 def test_fetch_with_fallback_and_merge(self):382 def test_fetch_with_fallback_and_merge(self):
383383
=== modified file 'breezy/tests/test_graph.py'
--- breezy/tests/test_graph.py 2017-05-22 00:56:52 +0000
+++ breezy/tests/test_graph.py 2017-05-26 09:27:07 +0000
@@ -934,8 +934,8 @@
934 self.assertRaises(StopIteration, search.next_with_ghosts)934 self.assertRaises(StopIteration, search.next_with_ghosts)
935 # next includes them935 # next includes them
936 search = graph._make_breadth_first_searcher(['a-ghost'])936 search = graph._make_breadth_first_searcher(['a-ghost'])
937 self.assertEqual({'a-ghost'}, search.next())937 self.assertEqual({'a-ghost'}, next(search))
938 self.assertRaises(StopIteration, search.next)938 self.assertRaises(StopIteration, next, search)
939939
940 def test_breadth_first_search_deep_ghosts(self):940 def test_breadth_first_search_deep_ghosts(self):
941 graph = self.make_graph({941 graph = self.make_graph({
@@ -952,11 +952,11 @@
952 self.assertRaises(StopIteration, search.next_with_ghosts)952 self.assertRaises(StopIteration, search.next_with_ghosts)
953 # next includes them953 # next includes them
954 search = graph._make_breadth_first_searcher(['head'])954 search = graph._make_breadth_first_searcher(['head'])
955 self.assertEqual({'head'}, search.next())955 self.assertEqual({'head'}, next(search))
956 self.assertEqual({'present'}, search.next())956 self.assertEqual({'present'}, next(search))
957 self.assertEqual({'child', 'ghost'},957 self.assertEqual({'child', 'ghost'},
958 search.next())958 next(search))
959 self.assertRaises(StopIteration, search.next)959 self.assertRaises(StopIteration, next, search)
960960
961 def test_breadth_first_search_change_next_to_next_with_ghosts(self):961 def test_breadth_first_search_change_next_to_next_with_ghosts(self):
962 # To make the API robust, we allow calling both next() and962 # To make the API robust, we allow calling both next() and
@@ -969,16 +969,16 @@
969 # start with next_with_ghosts969 # start with next_with_ghosts
970 search = graph._make_breadth_first_searcher(['head'])970 search = graph._make_breadth_first_searcher(['head'])
971 self.assertEqual(({'head'}, set()), search.next_with_ghosts())971 self.assertEqual(({'head'}, set()), search.next_with_ghosts())
972 self.assertEqual({'present'}, search.next())972 self.assertEqual({'present'}, next(search))
973 self.assertEqual(({'child'}, {'ghost'}),973 self.assertEqual(({'child'}, {'ghost'}),
974 search.next_with_ghosts())974 search.next_with_ghosts())
975 self.assertRaises(StopIteration, search.next)975 self.assertRaises(StopIteration, next, search)
976 # start with next976 # start with next
977 search = graph._make_breadth_first_searcher(['head'])977 search = graph._make_breadth_first_searcher(['head'])
978 self.assertEqual({'head'}, search.next())978 self.assertEqual({'head'}, next(search))
979 self.assertEqual(({'present'}, set()), search.next_with_ghosts())979 self.assertEqual(({'present'}, set()), search.next_with_ghosts())
980 self.assertEqual({'child', 'ghost'},980 self.assertEqual({'child', 'ghost'},
981 search.next())981 next(search))
982 self.assertRaises(StopIteration, search.next_with_ghosts)982 self.assertRaises(StopIteration, search.next_with_ghosts)
983983
984 def test_breadth_first_change_search(self):984 def test_breadth_first_change_search(self):
@@ -1000,13 +1000,13 @@
1000 self.assertRaises(StopIteration, search.next_with_ghosts)1000 self.assertRaises(StopIteration, search.next_with_ghosts)
1001 # next includes them1001 # next includes them
1002 search = graph._make_breadth_first_searcher(['head'])1002 search = graph._make_breadth_first_searcher(['head'])
1003 self.assertEqual({'head'}, search.next())1003 self.assertEqual({'head'}, next(search))
1004 self.assertEqual({'present'}, search.next())1004 self.assertEqual({'present'}, next(search))
1005 self.assertEqual({'present'},1005 self.assertEqual({'present'},
1006 search.stop_searching_any(['present']))1006 search.stop_searching_any(['present']))
1007 search.start_searching(['other', 'other_ghost'])1007 search.start_searching(['other', 'other_ghost'])
1008 self.assertEqual({'other_2'}, search.next())1008 self.assertEqual({'other_2'}, next(search))
1009 self.assertRaises(StopIteration, search.next)1009 self.assertRaises(StopIteration, next, search)
10101010
1011 def assertSeenAndResult(self, instructions, search, next):1011 def assertSeenAndResult(self, instructions, search, next):
1012 """Check the results of .seen and get_result() for a seach.1012 """Check the results of .seen and get_result() for a seach.
@@ -1054,7 +1054,7 @@
1054 ({'head', 'child', NULL_REVISION}, ({'head'}, set(), 3),1054 ({'head', 'child', NULL_REVISION}, ({'head'}, set(), 3),
1055 ['head', 'child', NULL_REVISION], None, None),1055 ['head', 'child', NULL_REVISION], None, None),
1056 ]1056 ]
1057 self.assertSeenAndResult(expected, search, search.next)1057 self.assertSeenAndResult(expected, search, search.__next__)
1058 # using next_with_ghosts:1058 # using next_with_ghosts:
1059 search = graph._make_breadth_first_searcher(['head'])1059 search = graph._make_breadth_first_searcher(['head'])
1060 self.assertSeenAndResult(expected, search, search.next_with_ghosts)1060 self.assertSeenAndResult(expected, search, search.next_with_ghosts)
@@ -1092,7 +1092,7 @@
1092 ({'head', 'otherhead'}, {'child', 'excluded'}, 3),1092 ({'head', 'otherhead'}, {'child', 'excluded'}, 3),
1093 ['head', 'otherhead', 'otherchild'], None, ['excluded']),1093 ['head', 'otherhead', 'otherchild'], None, ['excluded']),
1094 ]1094 ]
1095 self.assertSeenAndResult(expected, search, search.next)1095 self.assertSeenAndResult(expected, search, search.__next__)
1096 # using next_with_ghosts:1096 # using next_with_ghosts:
1097 search = graph._make_breadth_first_searcher([])1097 search = graph._make_breadth_first_searcher([])
1098 search.start_searching(['head'])1098 search.start_searching(['head'])
@@ -1118,7 +1118,7 @@
1118 ({'head'}, {'ghost1', NULL_REVISION}, 2),1118 ({'head'}, {'ghost1', NULL_REVISION}, 2),
1119 ['head', 'child'], None, [NULL_REVISION, 'ghost1']),1119 ['head', 'child'], None, [NULL_REVISION, 'ghost1']),
1120 ]1120 ]
1121 self.assertSeenAndResult(expected, search, search.next)1121 self.assertSeenAndResult(expected, search, search.__next__)
1122 # using next_with_ghosts:1122 # using next_with_ghosts:
1123 search = graph._make_breadth_first_searcher(['head'])1123 search = graph._make_breadth_first_searcher(['head'])
1124 self.assertSeenAndResult(expected, search, search.next_with_ghosts)1124 self.assertSeenAndResult(expected, search, search.next_with_ghosts)
@@ -1145,7 +1145,7 @@
1145 ({'head'}, {'middle', 'child'}, 1),1145 ({'head'}, {'middle', 'child'}, 1),
1146 ['head'], None, ['middle', 'child']),1146 ['head'], None, ['middle', 'child']),
1147 ]1147 ]
1148 self.assertSeenAndResult(expected, search, search.next)1148 self.assertSeenAndResult(expected, search, search.__next__)
1149 # using next_with_ghosts:1149 # using next_with_ghosts:
1150 search = graph._make_breadth_first_searcher(['head'])1150 search = graph._make_breadth_first_searcher(['head'])
1151 self.assertSeenAndResult(expected, search, search.next_with_ghosts)1151 self.assertSeenAndResult(expected, search, search.next_with_ghosts)
@@ -1166,7 +1166,7 @@
1166 ({'head'}, {NULL_REVISION, 'ghost'}, 2),1166 ({'head'}, {NULL_REVISION, 'ghost'}, 2),
1167 ['head', 'child'], None, None),1167 ['head', 'child'], None, None),
1168 ]1168 ]
1169 self.assertSeenAndResult(expected, search, search.next)1169 self.assertSeenAndResult(expected, search, search.__next__)
1170 # using next_with_ghosts:1170 # using next_with_ghosts:
1171 search = graph._make_breadth_first_searcher(['head'])1171 search = graph._make_breadth_first_searcher(['head'])
1172 self.assertSeenAndResult(expected, search, search.next_with_ghosts)1172 self.assertSeenAndResult(expected, search, search.next_with_ghosts)
@@ -1187,7 +1187,7 @@
1187 ({'head', 'ghost'}, {NULL_REVISION, 'ghost'}, 2),1187 ({'head', 'ghost'}, {NULL_REVISION, 'ghost'}, 2),
1188 ['head', 'child'], None, None),1188 ['head', 'child'], None, None),
1189 ]1189 ]
1190 self.assertSeenAndResult(expected, search, search.next)1190 self.assertSeenAndResult(expected, search, search.__next__)
1191 # using next_with_ghosts:1191 # using next_with_ghosts:
1192 search = graph._make_breadth_first_searcher(['head'])1192 search = graph._make_breadth_first_searcher(['head'])
1193 self.assertSeenAndResult(expected, search, search.next_with_ghosts)1193 self.assertSeenAndResult(expected, search, search.next_with_ghosts)
@@ -1207,7 +1207,7 @@
1207 ({'head'}, set([]), 2),1207 ({'head'}, set([]), 2),
1208 ['head', NULL_REVISION], None, None),1208 ['head', NULL_REVISION], None, None),
1209 ]1209 ]
1210 self.assertSeenAndResult(expected, search, search.next)1210 self.assertSeenAndResult(expected, search, search.__next__)
1211 # using next_with_ghosts:1211 # using next_with_ghosts:
1212 search = graph._make_breadth_first_searcher(['head'])1212 search = graph._make_breadth_first_searcher(['head'])
1213 self.assertSeenAndResult(expected, search, search.next_with_ghosts)1213 self.assertSeenAndResult(expected, search, search.next_with_ghosts)
@@ -1228,8 +1228,8 @@
1228 ({'head', 'ghost'}, {'ghost'}, 2),1228 ({'head', 'ghost'}, {'ghost'}, 2),
1229 ['head', NULL_REVISION], ['ghost'], None),1229 ['head', NULL_REVISION], ['ghost'], None),
1230 ]1230 ]
1231 self.assertSeenAndResult(expected, search, search.next)1231 self.assertSeenAndResult(expected, search, search.__next__)
1232 self.assertRaises(StopIteration, search.next)1232 self.assertRaises(StopIteration, next, search)
1233 self.assertEqual({'head', 'ghost', NULL_REVISION}, search.seen)1233 self.assertEqual({'head', 'ghost', NULL_REVISION}, search.seen)
1234 state = search.get_state()1234 state = search.get_state()
1235 self.assertEqual(1235 self.assertEqual(
@@ -1239,7 +1239,7 @@
1239 # using next_with_ghosts:1239 # using next_with_ghosts:
1240 search = graph._make_breadth_first_searcher(['head'])1240 search = graph._make_breadth_first_searcher(['head'])
1241 self.assertSeenAndResult(expected, search, search.next_with_ghosts)1241 self.assertSeenAndResult(expected, search, search.next_with_ghosts)
1242 self.assertRaises(StopIteration, search.next)1242 self.assertRaises(StopIteration, next, search)
1243 self.assertEqual({'head', 'ghost', NULL_REVISION}, search.seen)1243 self.assertEqual({'head', 'ghost', NULL_REVISION}, search.seen)
1244 state = search.get_state()1244 state = search.get_state()
1245 self.assertEqual(1245 self.assertEqual(
12461246
=== modified file 'breezy/tests/test_groupcompress.py'
--- breezy/tests/test_groupcompress.py 2017-05-22 00:56:52 +0000
+++ breezy/tests/test_groupcompress.py 2017-05-26 09:27:07 +0000
@@ -557,7 +557,7 @@
557 vf = self.make_test_vf(True, dir='source')557 vf = self.make_test_vf(True, dir='source')
558 vf.add_lines(('a',), (), ['lines\n'])558 vf.add_lines(('a',), (), ['lines\n'])
559 vf.writer.end()559 vf.writer.end()
560 record = vf.get_record_stream([('a',)], 'unordered', True).next()560 record = next(vf.get_record_stream([('a',)], 'unordered', True))
561 self.assertEqual(vf._DEFAULT_COMPRESSOR_SETTINGS,561 self.assertEqual(vf._DEFAULT_COMPRESSOR_SETTINGS,
562 record._manager._get_compressor_settings())562 record._manager._get_compressor_settings())
563563
@@ -566,7 +566,7 @@
566 vf.add_lines(('a',), (), ['lines\n'])566 vf.add_lines(('a',), (), ['lines\n'])
567 vf.writer.end()567 vf.writer.end()
568 vf._max_bytes_to_index = 1234568 vf._max_bytes_to_index = 1234
569 record = vf.get_record_stream([('a',)], 'unordered', True).next()569 record = next(vf.get_record_stream([('a',)], 'unordered', True))
570 self.assertEqual(dict(max_bytes_to_index=1234),570 self.assertEqual(dict(max_bytes_to_index=1234),
571 record._manager._get_compressor_settings())571 record._manager._get_compressor_settings())
572572
573573
=== modified file 'breezy/tests/test_http.py'
--- breezy/tests/test_http.py 2017-05-24 19:44:00 +0000
+++ breezy/tests/test_http.py 2017-05-26 09:27:07 +0000
@@ -889,7 +889,7 @@
889 # Don't collapse readv results into a list so that we leave unread889 # Don't collapse readv results into a list so that we leave unread
890 # bytes on the socket890 # bytes on the socket
891 ireadv = iter(t.readv('a', ((0, 1), (1, 1), (2, 4), (6, 4))))891 ireadv = iter(t.readv('a', ((0, 1), (1, 1), (2, 4), (6, 4))))
892 self.assertEqual((0, '0'), ireadv.next())892 self.assertEqual((0, '0'), next(ireadv))
893 # The server should have issued one request so far893 # The server should have issued one request so far
894 self.assertEqual(1, server.GET_request_nb)894 self.assertEqual(1, server.GET_request_nb)
895 self.assertEqual('0123456789', t.get_bytes('a'))895 self.assertEqual('0123456789', t.get_bytes('a'))
@@ -1045,14 +1045,14 @@
1045 # Force separate ranges for each offset1045 # Force separate ranges for each offset
1046 t._bytes_to_read_before_seek = 01046 t._bytes_to_read_before_seek = 0
1047 ireadv = iter(t.readv('a', ((0, 1), (2, 1), (4, 2), (9, 1))))1047 ireadv = iter(t.readv('a', ((0, 1), (2, 1), (4, 2), (9, 1))))
1048 self.assertEqual((0, '0'), ireadv.next())1048 self.assertEqual((0, '0'), next(ireadv))
1049 self.assertEqual((2, '2'), ireadv.next())1049 self.assertEqual((2, '2'), next(ireadv))
1050 if not self._testing_pycurl():1050 if not self._testing_pycurl():
1051 # Only one request have been issued so far (except for pycurl that1051 # Only one request have been issued so far (except for pycurl that
1052 # try to read the whole response at once)1052 # try to read the whole response at once)
1053 self.assertEqual(1, server.GET_request_nb)1053 self.assertEqual(1, server.GET_request_nb)
1054 self.assertEqual((4, '45'), ireadv.next())1054 self.assertEqual((4, '45'), next(ireadv))
1055 self.assertEqual((9, '9'), ireadv.next())1055 self.assertEqual((9, '9'), next(ireadv))
1056 # Both implementations issue 3 requests but:1056 # Both implementations issue 3 requests but:
1057 # - urllib does two multiple (4 ranges, then 2 ranges) then a single1057 # - urllib does two multiple (4 ranges, then 2 ranges) then a single
1058 # range,1058 # range,
@@ -1123,10 +1123,10 @@
1123 # Force separate ranges for each offset1123 # Force separate ranges for each offset
1124 t._bytes_to_read_before_seek = 01124 t._bytes_to_read_before_seek = 0
1125 ireadv = iter(t.readv('a', ((0, 1), (2, 1), (4, 2), (9, 1))))1125 ireadv = iter(t.readv('a', ((0, 1), (2, 1), (4, 2), (9, 1))))
1126 self.assertEqual((0, '0'), ireadv.next())1126 self.assertEqual((0, '0'), next(ireadv))
1127 self.assertEqual((2, '2'), ireadv.next())1127 self.assertEqual((2, '2'), next(ireadv))
1128 self.assertEqual((4, '45'), ireadv.next())1128 self.assertEqual((4, '45'), next(ireadv))
1129 self.assertEqual((9, '9'), ireadv.next())1129 self.assertEqual((9, '9'), next(ireadv))
11301130
11311131
1132class LimitedRangeRequestHandler(http_server.TestingHTTPRequestHandler):1132class LimitedRangeRequestHandler(http_server.TestingHTTPRequestHandler):
11331133
=== modified file 'breezy/tests/test_knit.py'
--- breezy/tests/test_knit.py 2017-05-22 00:56:52 +0000
+++ breezy/tests/test_knit.py 2017-05-26 09:27:07 +0000
@@ -205,8 +205,8 @@
205 content1 = self._make_content([("", "a"), ("", "b")])205 content1 = self._make_content([("", "a"), ("", "b")])
206 content2 = self._make_content([("", "a"), ("", "a"), ("", "c")])206 content2 = self._make_content([("", "a"), ("", "a"), ("", "c")])
207 it = content1.line_delta_iter(content2)207 it = content1.line_delta_iter(content2)
208 self.assertEqual(it.next(), (1, 2, 2, ["a", "c"]))208 self.assertEqual(next(it), (1, 2, 2, ["a", "c"]))
209 self.assertRaises(StopIteration, it.next)209 self.assertRaises(StopIteration, next, it)
210210
211211
212class TestAnnotatedKnitContent(TestCase, KnitContentTestsMixin):212class TestAnnotatedKnitContent(TestCase, KnitContentTestsMixin):
@@ -232,8 +232,8 @@
232 content1 = self._make_content([("", "a"), ("", "b")])232 content1 = self._make_content([("", "a"), ("", "b")])
233 content2 = self._make_content([("", "a"), ("", "a"), ("", "c")])233 content2 = self._make_content([("", "a"), ("", "a"), ("", "c")])
234 it = content1.line_delta_iter(content2)234 it = content1.line_delta_iter(content2)
235 self.assertEqual(it.next(), (1, 2, 2, [("", "a"), ("", "c")]))235 self.assertEqual(next(it), (1, 2, 2, [("", "a"), ("", "c")]))
236 self.assertRaises(StopIteration, it.next)236 self.assertRaises(StopIteration, next, it)
237237
238238
239class MockTransport(object):239class MockTransport(object):
@@ -648,13 +648,13 @@
648 vf, reload_counter = self.make_vf_for_retrying()648 vf, reload_counter = self.make_vf_for_retrying()
649 keys = [('rev-1',), ('rev-2',), ('rev-3',)]649 keys = [('rev-1',), ('rev-2',), ('rev-3',)]
650 record_stream = vf.get_record_stream(keys, 'topological', False)650 record_stream = vf.get_record_stream(keys, 'topological', False)
651 record = record_stream.next()651 record = next(record_stream)
652 self.assertEqual(('rev-1',), record.key)652 self.assertEqual(('rev-1',), record.key)
653 self.assertEqual([0, 0, 0], reload_counter)653 self.assertEqual([0, 0, 0], reload_counter)
654 record = record_stream.next()654 record = next(record_stream)
655 self.assertEqual(('rev-2',), record.key)655 self.assertEqual(('rev-2',), record.key)
656 self.assertEqual([1, 1, 0], reload_counter)656 self.assertEqual([1, 1, 0], reload_counter)
657 record = record_stream.next()657 record = next(record_stream)
658 self.assertEqual(('rev-3',), record.key)658 self.assertEqual(('rev-3',), record.key)
659 self.assertEqual([1, 1, 0], reload_counter)659 self.assertEqual([1, 1, 0], reload_counter)
660 # Now delete all pack files, and see that we raise the right error660 # Now delete all pack files, and see that we raise the right error
@@ -2338,8 +2338,8 @@
2338 source = test2338 source = test
2339 else:2339 else:
2340 source = basis2340 source = basis
2341 record = source.get_record_stream([result[0]], 'unordered',2341 record = next(source.get_record_stream([result[0]], 'unordered',
2342 True).next()2342 True))
2343 self.assertEqual(record.key, result[0])2343 self.assertEqual(record.key, result[0])
2344 self.assertEqual(record.sha1, result[1])2344 self.assertEqual(record.sha1, result[1])
2345 # We used to check that the storage kind matched, but actually it2345 # We used to check that the storage kind matched, but actually it
@@ -2425,8 +2425,8 @@
2425 source = test2425 source = test
2426 else:2426 else:
2427 source = basis2427 source = basis
2428 record = source.get_record_stream([result[0]], 'unordered',2428 record = next(source.get_record_stream([result[0]], 'unordered',
2429 False).next()2429 False))
2430 self.assertEqual(record.key, result[0])2430 self.assertEqual(record.key, result[0])
2431 self.assertEqual(record.sha1, result[1])2431 self.assertEqual(record.sha1, result[1])
2432 self.assertEqual(record.storage_kind, result[2])2432 self.assertEqual(record.storage_kind, result[2])
24332433
=== modified file 'breezy/tests/test_pack.py'
--- breezy/tests/test_pack.py 2017-05-22 00:56:52 +0000
+++ breezy/tests/test_pack.py 2017-05-26 09:27:07 +0000
@@ -265,7 +265,7 @@
265 "Bazaar pack format 1 (introduced in 0.18)\n")265 "Bazaar pack format 1 (introduced in 0.18)\n")
266 iterator = reader.iter_records()266 iterator = reader.iter_records()
267 self.assertRaises(267 self.assertRaises(
268 errors.UnexpectedEndOfContainerError, iterator.next)268 errors.UnexpectedEndOfContainerError, next, iterator)
269269
270 def test_unknown_record_type(self):270 def test_unknown_record_type(self):
271 """Unknown record types cause UnknownRecordTypeError to be raised."""271 """Unknown record types cause UnknownRecordTypeError to be raised."""
@@ -273,7 +273,7 @@
273 "Bazaar pack format 1 (introduced in 0.18)\nX")273 "Bazaar pack format 1 (introduced in 0.18)\nX")
274 iterator = reader.iter_records()274 iterator = reader.iter_records()
275 self.assertRaises(275 self.assertRaises(
276 errors.UnknownRecordTypeError, iterator.next)276 errors.UnknownRecordTypeError, next, iterator)
277277
278 def test_container_with_one_unnamed_record(self):278 def test_container_with_one_unnamed_record(self):
279 """Read a container with one Bytes record.279 """Read a container with one Bytes record.
280280
=== modified file 'breezy/tests/test_patches.py'
--- breezy/tests/test_patches.py 2017-05-22 00:56:52 +0000
+++ breezy/tests/test_patches.py 2017-05-26 09:27:07 +0000
@@ -211,7 +211,6 @@
211 self.compare_parsed(patchtext)211 self.compare_parsed(patchtext)
212212
213 def testLineLookup(self):213 def testLineLookup(self):
214 import sys
215 """Make sure we can accurately look up mod line from orig"""214 """Make sure we can accurately look up mod line from orig"""
216 patch = parse_patch(self.datafile("diff"))215 patch = parse_patch(self.datafile("diff"))
217 orig = list(self.datafile("orig"))216 orig = list(self.datafile("orig"))
@@ -227,12 +226,8 @@
227 for hunk in patch.hunks:226 for hunk in patch.hunks:
228 for line in hunk.lines:227 for line in hunk.lines:
229 if isinstance(line, RemoveLine):228 if isinstance(line, RemoveLine):
230 next = rem_iter.next()229 self.assertEqual(line.contents, next(rem_iter))
231 if line.contents != next:230 self.assertRaises(StopIteration, next, rem_iter)
232 sys.stdout.write(" orig:%spatch:%s" % (next,
233 line.contents))
234 self.assertEqual(line.contents, next)
235 self.assertRaises(StopIteration, rem_iter.next)
236231
237 def testPatching(self):232 def testPatching(self):
238 """Test a few patch files, and make sure they work."""233 """Test a few patch files, and make sure they work."""
239234
=== modified file 'breezy/tests/test_repository.py'
--- breezy/tests/test_repository.py 2017-05-22 00:56:52 +0000
+++ breezy/tests/test_repository.py 2017-05-26 09:27:07 +0000
@@ -968,8 +968,8 @@
968 return968 return
969 empty_repo.lock_read()969 empty_repo.lock_read()
970 self.addCleanup(empty_repo.unlock)970 self.addCleanup(empty_repo.unlock)
971 text = empty_repo.texts.get_record_stream(971 text = next(empty_repo.texts.get_record_stream(
972 [('file2-id', 'rev3')], 'topological', True).next()972 [('file2-id', 'rev3')], 'topological', True))
973 self.assertEqual('line\n', text.get_bytes_as('fulltext'))973 self.assertEqual('line\n', text.get_bytes_as('fulltext'))
974974
975975
@@ -1275,7 +1275,7 @@
1275 # and remove another pack (via _remove_pack_from_memory)1275 # and remove another pack (via _remove_pack_from_memory)
1276 orig_names = packs.names()1276 orig_names = packs.names()
1277 orig_at_load = packs._packs_at_load1277 orig_at_load = packs._packs_at_load
1278 to_remove_name = iter(orig_names).next()1278 to_remove_name = next(iter(orig_names))
1279 r.start_write_group()1279 r.start_write_group()
1280 self.addCleanup(r.abort_write_group)1280 self.addCleanup(r.abort_write_group)
1281 r.texts.insert_record_stream([versionedfile.FulltextContentFactory(1281 r.texts.insert_record_stream([versionedfile.FulltextContentFactory(
12821282
=== modified file 'breezy/tests/test_revisiontree.py'
--- breezy/tests/test_revisiontree.py 2017-05-21 18:10:28 +0000
+++ breezy/tests/test_revisiontree.py 2017-05-26 09:27:07 +0000
@@ -74,7 +74,7 @@
74 tree.get_file_revision(tree.path2id('a')))74 tree.get_file_revision(tree.path2id('a')))
7575
76 def test_get_file_mtime_ghost(self):76 def test_get_file_mtime_ghost(self):
77 file_id = iter(self.rev_tree.all_file_ids()).next()77 file_id = next(iter(self.rev_tree.all_file_ids()))
78 self.rev_tree.root_inventory[file_id].revision = 'ghostrev'78 self.rev_tree.root_inventory[file_id].revision = 'ghostrev'
79 self.assertRaises(errors.FileTimestampUnavailable, 79 self.assertRaises(errors.FileTimestampUnavailable,
80 self.rev_tree.get_file_mtime, file_id)80 self.rev_tree.get_file_mtime, file_id)
8181
=== modified file 'breezy/tests/test_shelf.py'
--- breezy/tests/test_shelf.py 2017-05-21 18:10:28 +0000
+++ breezy/tests/test_shelf.py 2017-05-26 09:27:07 +0000
@@ -190,7 +190,7 @@
190190
191 def check_shelve_creation(self, creator, tree):191 def check_shelve_creation(self, creator, tree):
192 self.assertRaises(StopIteration,192 self.assertRaises(StopIteration,
193 tree.iter_entries_by_dir(['foo-id']).next)193 next, tree.iter_entries_by_dir(['foo-id']))
194 s_trans_id = creator.shelf_transform.trans_id_file_id('foo-id')194 s_trans_id = creator.shelf_transform.trans_id_file_id('foo-id')
195 self.assertEqual('foo-id',195 self.assertEqual('foo-id',
196 creator.shelf_transform.final_file_id(s_trans_id))196 creator.shelf_transform.final_file_id(s_trans_id))
@@ -308,7 +308,7 @@
308 creator.shelve_creation('foo-id')308 creator.shelve_creation('foo-id')
309 creator.transform()309 creator.transform()
310 self.assertRaises(StopIteration,310 self.assertRaises(StopIteration,
311 tree.iter_entries_by_dir(['foo-id']).next)311 next, tree.iter_entries_by_dir(['foo-id']))
312 self.assertShelvedFileEqual('', creator, 'foo-id')312 self.assertShelvedFileEqual('', creator, 'foo-id')
313 s_trans_id = creator.shelf_transform.trans_id_file_id('foo-id')313 s_trans_id = creator.shelf_transform.trans_id_file_id('foo-id')
314 self.assertEqual('foo-id',314 self.assertEqual('foo-id',
@@ -465,7 +465,7 @@
465 self.addCleanup(tt.finalize)465 self.addCleanup(tt.finalize)
466 records = iter(parser.read_pending_records())466 records = iter(parser.read_pending_records())
467 #skip revision-id467 #skip revision-id
468 records.next()468 next(records)
469 tt.deserialize(records)469 tt.deserialize(records)
470470
471 def test_shelve_unversioned(self):471 def test_shelve_unversioned(self):
472472
=== modified file 'breezy/tests/test_smart_transport.py'
--- breezy/tests/test_smart_transport.py 2017-05-22 00:56:52 +0000
+++ breezy/tests/test_smart_transport.py 2017-05-26 09:27:07 +0000
@@ -2703,7 +2703,7 @@
2703 smart_protocol.call('foo')2703 smart_protocol.call('foo')
2704 smart_protocol.read_response_tuple(True)2704 smart_protocol.read_response_tuple(True)
2705 stream = smart_protocol.read_streamed_body()2705 stream = smart_protocol.read_streamed_body()
2706 self.assertRaises(errors.ConnectionReset, stream.next)2706 self.assertRaises(errors.ConnectionReset, next, stream)
27072707
2708 def test_client_read_response_tuple_sets_response_status(self):2708 def test_client_read_response_tuple_sets_response_status(self):
2709 server_bytes = protocol.RESPONSE_VERSION_TWO + "success\nok\n"2709 server_bytes = protocol.RESPONSE_VERSION_TWO + "success\nok\n"
@@ -2917,9 +2917,9 @@
2917 def test_interrupted_by_error(self):2917 def test_interrupted_by_error(self):
2918 response_handler = self.make_response_handler(interrupted_body_stream)2918 response_handler = self.make_response_handler(interrupted_body_stream)
2919 stream = response_handler.read_streamed_body()2919 stream = response_handler.read_streamed_body()
2920 self.assertEqual('aaa', stream.next())2920 self.assertEqual('aaa', next(stream))
2921 self.assertEqual('bbb', stream.next())2921 self.assertEqual('bbb', next(stream))
2922 exc = self.assertRaises(errors.ErrorFromSmartServer, stream.next)2922 exc = self.assertRaises(errors.ErrorFromSmartServer, next, stream)
2923 self.assertEqual(('error', 'Exception', 'Boom!'), exc.error_tuple)2923 self.assertEqual(('error', 'Exception', 'Boom!'), exc.error_tuple)
29242924
2925 def test_interrupted_by_connection_lost(self):2925 def test_interrupted_by_connection_lost(self):
@@ -2929,7 +2929,7 @@
2929 'b\0\0\xff\xffincomplete chunk')2929 'b\0\0\xff\xffincomplete chunk')
2930 response_handler = self.make_response_handler(interrupted_body_stream)2930 response_handler = self.make_response_handler(interrupted_body_stream)
2931 stream = response_handler.read_streamed_body()2931 stream = response_handler.read_streamed_body()
2932 self.assertRaises(errors.ConnectionReset, stream.next)2932 self.assertRaises(errors.ConnectionReset, next, stream)
29332933
2934 def test_read_body_bytes_interrupted_by_connection_lost(self):2934 def test_read_body_bytes_interrupted_by_connection_lost(self):
2935 interrupted_body_stream = (2935 interrupted_body_stream = (
29362936
=== modified file 'breezy/tests/test_tree.py'
--- breezy/tests/test_tree.py 2017-05-21 18:10:28 +0000
+++ breezy/tests/test_tree.py 2017-05-26 09:27:07 +0000
@@ -192,7 +192,7 @@
192 :param exp_other_paths: A list of other_path values.192 :param exp_other_paths: A list of other_path values.
193 :param iterator: The iterator to step193 :param iterator: The iterator to step
194 """194 """
195 path, file_id, master_ie, other_values = iterator.next()195 path, file_id, master_ie, other_values = next(iterator)
196 self.assertEqual((exp_path, exp_file_id), (path, file_id),196 self.assertEqual((exp_path, exp_file_id), (path, file_id),
197 'Master entry did not match')197 'Master entry did not match')
198 if master_has_node:198 if master_has_node:
@@ -244,7 +244,7 @@
244 self.assertWalkerNext(u'a', 'a-id', True, [u'a'], iterator)244 self.assertWalkerNext(u'a', 'a-id', True, [u'a'], iterator)
245 self.assertWalkerNext(u'b', 'b-id', True, [u'b'], iterator)245 self.assertWalkerNext(u'b', 'b-id', True, [u'b'], iterator)
246 self.assertWalkerNext(u'b/c', 'c-id', True, [u'b/c'], iterator)246 self.assertWalkerNext(u'b/c', 'c-id', True, [u'b/c'], iterator)
247 self.assertRaises(StopIteration, iterator.next)247 self.assertRaises(StopIteration, next, iterator)
248248
249 def test_master_has_extra(self):249 def test_master_has_extra(self):
250 tree = self.make_branch_and_tree('tree')250 tree = self.make_branch_and_tree('tree')
@@ -263,7 +263,7 @@
263 self.assertWalkerNext(u'b', 'b-id', True, [u'b'], iterator)263 self.assertWalkerNext(u'b', 'b-id', True, [u'b'], iterator)
264 self.assertWalkerNext(u'c', 'c-id', True, [None], iterator)264 self.assertWalkerNext(u'c', 'c-id', True, [None], iterator)
265 self.assertWalkerNext(u'd', 'd-id', True, [u'd'], iterator)265 self.assertWalkerNext(u'd', 'd-id', True, [u'd'], iterator)
266 self.assertRaises(StopIteration, iterator.next)266 self.assertRaises(StopIteration, next, iterator)
267267
268 def test_master_renamed_to_earlier(self):268 def test_master_renamed_to_earlier(self):
269 """The record is still present, it just shows up early."""269 """The record is still present, it just shows up early."""
@@ -281,7 +281,7 @@
281 self.assertWalkerNext(u'a', 'a-id', True, [u'a'], iterator)281 self.assertWalkerNext(u'a', 'a-id', True, [u'a'], iterator)
282 self.assertWalkerNext(u'b', 'd-id', True, [u'd'], iterator)282 self.assertWalkerNext(u'b', 'd-id', True, [u'd'], iterator)
283 self.assertWalkerNext(u'c', 'c-id', True, [u'c'], iterator)283 self.assertWalkerNext(u'c', 'c-id', True, [u'c'], iterator)
284 self.assertRaises(StopIteration, iterator.next)284 self.assertRaises(StopIteration, next, iterator)
285285
286 def test_master_renamed_to_later(self):286 def test_master_renamed_to_later(self):
287 tree = self.make_branch_and_tree('tree')287 tree = self.make_branch_and_tree('tree')
@@ -298,7 +298,7 @@
298 self.assertWalkerNext(u'a', 'a-id', True, [u'a'], iterator)298 self.assertWalkerNext(u'a', 'a-id', True, [u'a'], iterator)
299 self.assertWalkerNext(u'd', 'd-id', True, [u'd'], iterator)299 self.assertWalkerNext(u'd', 'd-id', True, [u'd'], iterator)
300 self.assertWalkerNext(u'e', 'b-id', True, [u'b'], iterator)300 self.assertWalkerNext(u'e', 'b-id', True, [u'b'], iterator)
301 self.assertRaises(StopIteration, iterator.next)301 self.assertRaises(StopIteration, next, iterator)
302302
303 def test_other_extra_in_middle(self):303 def test_other_extra_in_middle(self):
304 tree = self.make_branch_and_tree('tree')304 tree = self.make_branch_and_tree('tree')
@@ -314,7 +314,7 @@
314 self.assertWalkerNext(u'a', 'a-id', True, [u'a'], iterator)314 self.assertWalkerNext(u'a', 'a-id', True, [u'a'], iterator)
315 self.assertWalkerNext(u'd', 'd-id', True, [u'd'], iterator)315 self.assertWalkerNext(u'd', 'd-id', True, [u'd'], iterator)
316 self.assertWalkerNext(u'b', 'b-id', False, [u'b'], iterator)316 self.assertWalkerNext(u'b', 'b-id', False, [u'b'], iterator)
317 self.assertRaises(StopIteration, iterator.next)317 self.assertRaises(StopIteration, next, iterator)
318318
319 def test_other_extra_at_end(self):319 def test_other_extra_at_end(self):
320 tree = self.make_branch_and_tree('tree')320 tree = self.make_branch_and_tree('tree')
@@ -330,7 +330,7 @@
330 self.assertWalkerNext(u'a', 'a-id', True, [u'a'], iterator)330 self.assertWalkerNext(u'a', 'a-id', True, [u'a'], iterator)
331 self.assertWalkerNext(u'b', 'b-id', True, [u'b'], iterator)331 self.assertWalkerNext(u'b', 'b-id', True, [u'b'], iterator)
332 self.assertWalkerNext(u'd', 'd-id', False, [u'd'], iterator)332 self.assertWalkerNext(u'd', 'd-id', False, [u'd'], iterator)
333 self.assertRaises(StopIteration, iterator.next)333 self.assertRaises(StopIteration, next, iterator)
334334
335 def test_others_extra_at_end(self):335 def test_others_extra_at_end(self):
336 tree = self.make_branch_and_tree('tree')336 tree = self.make_branch_and_tree('tree')
@@ -356,7 +356,7 @@
356 self.assertWalkerNext(u'c', 'c-id', False, [u'c', u'c', u'c'], iterator)356 self.assertWalkerNext(u'c', 'c-id', False, [u'c', u'c', u'c'], iterator)
357 self.assertWalkerNext(u'd', 'd-id', False, [None, u'd', u'd'], iterator)357 self.assertWalkerNext(u'd', 'd-id', False, [None, u'd', u'd'], iterator)
358 self.assertWalkerNext(u'e', 'e-id', False, [None, u'e', None], iterator)358 self.assertWalkerNext(u'e', 'e-id', False, [None, u'e', None], iterator)
359 self.assertRaises(StopIteration, iterator.next)359 self.assertRaises(StopIteration, next, iterator)
360360
361 def test_different_file_id_in_others(self):361 def test_different_file_id_in_others(self):
362 tree = self.make_branch_and_tree('tree')362 tree = self.make_branch_and_tree('tree')
@@ -384,7 +384,7 @@
384 self.assertWalkerNext(u'c', 'c-id', True, [u'c', u'c'], iterator)384 self.assertWalkerNext(u'c', 'c-id', True, [u'c', u'c'], iterator)
385 self.assertWalkerNext(u'c/d', 'b-id', True, [u'c/d', u'b'], iterator)385 self.assertWalkerNext(u'c/d', 'b-id', True, [u'c/d', u'b'], iterator)
386 self.assertWalkerNext(u'c/e', 'a-id', True, [u'a', u'a'], iterator)386 self.assertWalkerNext(u'c/e', 'a-id', True, [u'a', u'a'], iterator)
387 self.assertRaises(StopIteration, iterator.next)387 self.assertRaises(StopIteration, next, iterator)
388388
389 def assertCmpByDirblock(self, cmp_val, path1, path2):389 def assertCmpByDirblock(self, cmp_val, path1, path2):
390 self.assertEqual(cmp_val,390 self.assertEqual(cmp_val,
391391
=== modified file 'breezy/tests/test_ui.py'
--- breezy/tests/test_ui.py 2017-05-22 00:56:52 +0000
+++ breezy/tests/test_ui.py 2017-05-26 09:27:07 +0000
@@ -39,7 +39,7 @@
39class TestUIConfiguration(tests.TestCase):39class TestUIConfiguration(tests.TestCase):
4040
41 def test_output_encoding_configuration(self):41 def test_output_encoding_configuration(self):
42 enc = fixtures.generate_unicode_encodings().next()42 enc = next(fixtures.generate_unicode_encodings())
43 config.GlobalStack().set('output_encoding', enc)43 config.GlobalStack().set('output_encoding', enc)
44 IO = ui_testing.BytesIOWithEncoding44 IO = ui_testing.BytesIOWithEncoding
45 ui = _mod_ui.make_ui_for_terminal(IO(), IO(), IO())45 ui = _mod_ui.make_ui_for_terminal(IO(), IO(), IO())
4646
=== modified file 'breezy/tests/test_versionedfile.py'
--- breezy/tests/test_versionedfile.py 2017-05-22 00:56:52 +0000
+++ breezy/tests/test_versionedfile.py 2017-05-26 09:27:07 +0000
@@ -88,7 +88,7 @@
88 self.assertEqual(sorted([('one',), ('two',), ('three',)]),88 self.assertEqual(sorted([('one',), ('two',), ('three',)]),
89 sorted(gen.needed_keys))89 sorted(gen.needed_keys))
90 stream = vf.get_record_stream(gen.needed_keys, 'topological', True)90 stream = vf.get_record_stream(gen.needed_keys, 'topological', True)
91 record = stream.next()91 record = next(stream)
92 self.assertEqual(('one',), record.key)92 self.assertEqual(('one',), record.key)
93 # one is not needed in the output, but it is needed by children. As93 # one is not needed in the output, but it is needed by children. As
94 # such, it should end up in the various caches94 # such, it should end up in the various caches
@@ -99,7 +99,7 @@
99 self.assertEqual([], gen.diffs.keys())99 self.assertEqual([], gen.diffs.keys())
100 # Next we get 'two', which is something we output, but also needed for100 # Next we get 'two', which is something we output, but also needed for
101 # three101 # three
102 record = stream.next()102 record = next(stream)
103 self.assertEqual(('two',), record.key)103 self.assertEqual(('two',), record.key)
104 gen._process_one_record(record.key, record.get_bytes_as('chunked'))104 gen._process_one_record(record.key, record.get_bytes_as('chunked'))
105 # Both are now cached, and the diff for two has been extracted, and105 # Both are now cached, and the diff for two has been extracted, and
@@ -113,7 +113,7 @@
113 gen.parent_map)113 gen.parent_map)
114 # Finally 'three', which allows us to remove all parents from the114 # Finally 'three', which allows us to remove all parents from the
115 # caches115 # caches
116 record = stream.next()116 record = next(stream)
117 self.assertEqual(('three',), record.key)117 self.assertEqual(('three',), record.key)
118 gen._process_one_record(record.key, record.get_bytes_as('chunked'))118 gen._process_one_record(record.key, record.get_bytes_as('chunked'))
119 # Both are now cached, and the diff for two has been extracted, and119 # Both are now cached, and the diff for two has been extracted, and
120120
=== modified file 'breezy/transform.py'
--- breezy/transform.py 2017-05-22 00:56:52 +0000
+++ breezy/transform.py 2017-05-26 09:27:07 +0000
@@ -305,7 +305,7 @@
305 return self._r_new_id[file_id]305 return self._r_new_id[file_id]
306 else:306 else:
307 try:307 try:
308 self._tree.iter_entries_by_dir([file_id]).next()308 next(self._tree.iter_entries_by_dir([file_id]))
309 except StopIteration:309 except StopIteration:
310 if file_id in self._non_present_ids:310 if file_id in self._non_present_ids:
311 return self._non_present_ids[file_id]311 return self._non_present_ids[file_id]
@@ -1138,7 +1138,7 @@
1138 :param records: An iterable of (names, content) tuples, as per1138 :param records: An iterable of (names, content) tuples, as per
1139 pack.ContainerPushParser.1139 pack.ContainerPushParser.
1140 """1140 """
1141 names, content = records.next()1141 names, content = next(records)
1142 attribs = bencode.bdecode(content)1142 attribs = bencode.bdecode(content)
1143 self._id_number = attribs['_id_number']1143 self._id_number = attribs['_id_number']
1144 self._new_name = dict((k, v.decode('utf-8'))1144 self._new_name = dict((k, v.decode('utf-8'))
@@ -2675,7 +2675,7 @@
2675 in iter if not (c or e[0] != e[1])]2675 in iter if not (c or e[0] != e[1])]
2676 if accelerator_tree.supports_content_filtering():2676 if accelerator_tree.supports_content_filtering():
2677 unchanged = [(f, p) for (f, p) in unchanged2677 unchanged = [(f, p) for (f, p) in unchanged
2678 if not accelerator_tree.iter_search_rules([p]).next()]2678 if not next(accelerator_tree.iter_search_rules([p]))]
2679 unchanged = dict(unchanged)2679 unchanged = dict(unchanged)
2680 new_desired_files = []2680 new_desired_files = []
2681 count = 02681 count = 0
@@ -3091,8 +3091,8 @@
3091 file_id = tt.final_file_id(trans_id)3091 file_id = tt.final_file_id(trans_id)
3092 if file_id is None:3092 if file_id is None:
3093 file_id = tt.inactive_file_id(trans_id)3093 file_id = tt.inactive_file_id(trans_id)
3094 _, entry = path_tree.iter_entries_by_dir(3094 _, entry = next(path_tree.iter_entries_by_dir(
3095 [file_id]).next()3095 [file_id]))
3096 # special-case the other tree root (move its3096 # special-case the other tree root (move its
3097 # children to current root)3097 # children to current root)
3098 if entry.parent_id is None:3098 if entry.parent_id is None:
30993099
=== modified file 'breezy/transport/__init__.py'
--- breezy/transport/__init__.py 2017-05-22 00:56:52 +0000
+++ breezy/transport/__init__.py 2017-05-26 09:27:07 +0000
@@ -703,7 +703,7 @@
703703
704 # turn the list of offsets into a stack704 # turn the list of offsets into a stack
705 offset_stack = iter(offsets)705 offset_stack = iter(offsets)
706 cur_offset_and_size = offset_stack.next()706 cur_offset_and_size = next(offset_stack)
707 coalesced = self._coalesce_offsets(sorted_offsets,707 coalesced = self._coalesce_offsets(sorted_offsets,
708 limit=self._max_readv_combine,708 limit=self._max_readv_combine,
709 fudge_factor=self._bytes_to_read_before_seek)709 fudge_factor=self._bytes_to_read_before_seek)
@@ -729,7 +729,7 @@
729 this_data = data_map.pop(cur_offset_and_size)729 this_data = data_map.pop(cur_offset_and_size)
730 this_offset = cur_offset_and_size[0]730 this_offset = cur_offset_and_size[0]
731 try:731 try:
732 cur_offset_and_size = offset_stack.next()732 cur_offset_and_size = next(offset_stack)
733 except StopIteration:733 except StopIteration:
734 fp.close()734 fp.close()
735 cur_offset_and_size = None735 cur_offset_and_size = None
736736
=== modified file 'breezy/transport/http/__init__.py'
--- breezy/transport/http/__init__.py 2017-05-22 00:56:52 +0000
+++ breezy/transport/http/__init__.py 2017-05-26 09:27:07 +0000
@@ -194,7 +194,7 @@
194 # serve the corresponding offsets respecting the initial order. We194 # serve the corresponding offsets respecting the initial order. We
195 # need an offset iterator for that.195 # need an offset iterator for that.
196 iter_offsets = iter(offsets)196 iter_offsets = iter(offsets)
197 cur_offset_and_size = iter_offsets.next()197 cur_offset_and_size = next(iter_offsets)
198198
199 try:199 try:
200 for cur_coal, rfile in self._coalesce_readv(relpath, coalesced):200 for cur_coal, rfile in self._coalesce_readv(relpath, coalesced):
@@ -211,7 +211,7 @@
211 # The offset requested are sorted as the coalesced211 # The offset requested are sorted as the coalesced
212 # ones, no need to cache. Win !212 # ones, no need to cache. Win !
213 yield cur_offset_and_size[0], data213 yield cur_offset_and_size[0], data
214 cur_offset_and_size = iter_offsets.next()214 cur_offset_and_size = next(iter_offsets)
215 else:215 else:
216 # Different sorting. We need to cache.216 # Different sorting. We need to cache.
217 data_map[(start, size)] = data217 data_map[(start, size)] = data
@@ -223,7 +223,7 @@
223 # vila20071129223 # vila20071129
224 this_data = data_map.pop(cur_offset_and_size)224 this_data = data_map.pop(cur_offset_and_size)
225 yield cur_offset_and_size[0], this_data225 yield cur_offset_and_size[0], this_data
226 cur_offset_and_size = iter_offsets.next()226 cur_offset_and_size = next(iter_offsets)
227227
228 except (errors.ShortReadvError, errors.InvalidRange,228 except (errors.ShortReadvError, errors.InvalidRange,
229 errors.InvalidHttpRange, errors.HttpBoundaryMissing) as e:229 errors.InvalidHttpRange, errors.HttpBoundaryMissing) as e:
230230
=== modified file 'breezy/transport/remote.py'
--- breezy/transport/remote.py 2017-05-22 00:56:52 +0000
+++ breezy/transport/remote.py 2017-05-26 09:27:07 +0000
@@ -354,7 +354,7 @@
354 # turn the list of offsets into a single stack to iterate354 # turn the list of offsets into a single stack to iterate
355 offset_stack = iter(offsets)355 offset_stack = iter(offsets)
356 # using a list so it can be modified when passing down and coming back356 # using a list so it can be modified when passing down and coming back
357 next_offset = [offset_stack.next()]357 next_offset = [next(offset_stack)]
358 for cur_request in requests:358 for cur_request in requests:
359 try:359 try:
360 result = self._client.call_with_body_readv_array(360 result = self._client.call_with_body_readv_array(
@@ -398,7 +398,7 @@
398 # not have a real string.398 # not have a real string.
399 if key == cur_offset_and_size:399 if key == cur_offset_and_size:
400 yield cur_offset_and_size[0], this_data400 yield cur_offset_and_size[0], this_data
401 cur_offset_and_size = next_offset[0] = offset_stack.next()401 cur_offset_and_size = next_offset[0] = next(offset_stack)
402 else:402 else:
403 data_map[key] = this_data403 data_map[key] = this_data
404 data_offset += c_offset.length404 data_offset += c_offset.length
@@ -407,7 +407,7 @@
407 while cur_offset_and_size in data_map:407 while cur_offset_and_size in data_map:
408 this_data = data_map.pop(cur_offset_and_size)408 this_data = data_map.pop(cur_offset_and_size)
409 yield cur_offset_and_size[0], this_data409 yield cur_offset_and_size[0], this_data
410 cur_offset_and_size = next_offset[0] = offset_stack.next()410 cur_offset_and_size = next_offset[0] = next(offset_stack)
411411
412 def rename(self, rel_from, rel_to):412 def rename(self, rel_from, rel_to):
413 self._call('rename',413 self._call('rename',
414414
=== modified file 'breezy/transport/sftp.py'
--- breezy/transport/sftp.py 2017-05-25 00:04:21 +0000
+++ breezy/transport/sftp.py 2017-05-26 09:27:07 +0000
@@ -188,7 +188,7 @@
188 """188 """
189 requests = self._get_requests()189 requests = self._get_requests()
190 offset_iter = iter(self.original_offsets)190 offset_iter = iter(self.original_offsets)
191 cur_offset, cur_size = offset_iter.next()191 cur_offset, cur_size = next(offset_iter)
192 # paramiko .readv() yields strings that are in the order of the requests192 # paramiko .readv() yields strings that are in the order of the requests
193 # So we track the current request to know where the next data is193 # So we track the current request to know where the next data is
194 # being returned from.194 # being returned from.
@@ -262,7 +262,7 @@
262 input_start += cur_size262 input_start += cur_size
263 # Yield the requested data263 # Yield the requested data
264 yield cur_offset, cur_data264 yield cur_offset, cur_data
265 cur_offset, cur_size = offset_iter.next()265 cur_offset, cur_size = next(offset_iter)
266 # at this point, we've consumed as much of buffered as we can,266 # at this point, we've consumed as much of buffered as we can,
267 # so break off the portion that we consumed267 # so break off the portion that we consumed
268 if buffered_offset == len(buffered_data):268 if buffered_offset == len(buffered_data):
@@ -311,7 +311,7 @@
311 ' We expected %d bytes, but only found %d'311 ' We expected %d bytes, but only found %d'
312 % (cur_size, len(data)))312 % (cur_size, len(data)))
313 yield cur_offset, data313 yield cur_offset, data
314 cur_offset, cur_size = offset_iter.next()314 cur_offset, cur_size = next(offset_iter)
315315
316316
317class SFTPTransport(ConnectedTransport):317class SFTPTransport(ConnectedTransport):
318318
=== modified file 'breezy/tree.py'
--- breezy/tree.py 2017-05-22 00:56:52 +0000
+++ breezy/tree.py 2017-05-26 09:27:07 +0000
@@ -641,7 +641,7 @@
641 return []641 return []
642 if path is None:642 if path is None:
643 path = self.id2path(file_id)643 path = self.id2path(file_id)
644 prefs = self.iter_search_rules([path], filter_pref_names).next()644 prefs = next(self.iter_search_rules([path], filter_pref_names))
645 stk = filters._get_filter_stack_for(prefs)645 stk = filters._get_filter_stack_for(prefs)
646 if 'filters' in debug.debug_flags:646 if 'filters' in debug.debug_flags:
647 trace.note(gettext("*** {0} content-filter: {1} => {2!r}").format(path,prefs,stk))647 trace.note(gettext("*** {0} content-filter: {1} => {2!r}").format(path,prefs,stk))
@@ -731,7 +731,7 @@
731 :return: The input path adjusted to account for existing elements731 :return: The input path adjusted to account for existing elements
732 that match case insensitively.732 that match case insensitively.
733 """733 """
734 return self._yield_canonical_inventory_paths([path]).next()734 return next(self._yield_canonical_inventory_paths([path]))
735735
736 def _yield_canonical_inventory_paths(self, paths):736 def _yield_canonical_inventory_paths(self, paths):
737 for path in paths:737 for path in paths:
@@ -1439,7 +1439,7 @@
1439 If has_more is False, path and ie will be None.1439 If has_more is False, path and ie will be None.
1440 """1440 """
1441 try:1441 try:
1442 path, ie = iterator.next()1442 path, ie = next(iterator)
1443 except StopIteration:1443 except StopIteration:
1444 return False, None, None1444 return False, None, None
1445 else:1445 else:
14461446
=== modified file 'breezy/vf_repository.py'
--- breezy/vf_repository.py 2017-05-24 19:44:00 +0000
+++ breezy/vf_repository.py 2017-05-26 09:27:07 +0000
@@ -1728,7 +1728,7 @@
1728 @needs_read_lock1728 @needs_read_lock
1729 def get_inventory(self, revision_id):1729 def get_inventory(self, revision_id):
1730 """Get Inventory object by revision id."""1730 """Get Inventory object by revision id."""
1731 return self.iter_inventories([revision_id]).next()1731 return next(self.iter_inventories([revision_id]))
17321732
1733 def iter_inventories(self, revision_ids, ordering=None):1733 def iter_inventories(self, revision_ids, ordering=None):
1734 """Get many inventories by revision_ids.1734 """Get many inventories by revision_ids.
@@ -1771,7 +1771,7 @@
1771 return1771 return
1772 if order_as_requested:1772 if order_as_requested:
1773 key_iter = iter(keys)1773 key_iter = iter(keys)
1774 next_key = key_iter.next()1774 next_key = next(key_iter)
1775 stream = self.inventories.get_record_stream(keys, ordering, True)1775 stream = self.inventories.get_record_stream(keys, ordering, True)
1776 text_chunks = {}1776 text_chunks = {}
1777 for record in stream:1777 for record in stream:
@@ -1789,7 +1789,7 @@
1789 chunks = text_chunks.pop(next_key)1789 chunks = text_chunks.pop(next_key)
1790 yield ''.join(chunks), next_key[-1]1790 yield ''.join(chunks), next_key[-1]
1791 try:1791 try:
1792 next_key = key_iter.next()1792 next_key = next(key_iter)
1793 except StopIteration:1793 except StopIteration:
1794 # We still want to fully consume the get_record_stream,1794 # We still want to fully consume the get_record_stream,
1795 # just in case it is not actually finished at this point1795 # just in case it is not actually finished at this point
@@ -1817,7 +1817,7 @@
1817 def _get_inventory_xml(self, revision_id):1817 def _get_inventory_xml(self, revision_id):
1818 """Get serialized inventory as a string."""1818 """Get serialized inventory as a string."""
1819 texts = self._iter_inventory_xmls([revision_id], 'unordered')1819 texts = self._iter_inventory_xmls([revision_id], 'unordered')
1820 text, revision_id = texts.next()1820 text, revision_id = next(texts)
1821 if text is None:1821 if text is None:
1822 raise errors.NoSuchRevision(self, revision_id)1822 raise errors.NoSuchRevision(self, revision_id)
1823 return text1823 return text
@@ -1943,7 +1943,7 @@
1943 """Return the text for a signature."""1943 """Return the text for a signature."""
1944 stream = self.signatures.get_record_stream([(revision_id,)],1944 stream = self.signatures.get_record_stream([(revision_id,)],
1945 'unordered', True)1945 'unordered', True)
1946 record = stream.next()1946 record = next(stream)
1947 if record.storage_kind == 'absent':1947 if record.storage_kind == 'absent':
1948 raise errors.NoSuchRevision(self, revision_id)1948 raise errors.NoSuchRevision(self, revision_id)
1949 return record.get_bytes_as('fulltext')1949 return record.get_bytes_as('fulltext')
@@ -3149,7 +3149,7 @@
3149 entries = inv.iter_entries()3149 entries = inv.iter_entries()
3150 # backwards compatibility hack: skip the root id.3150 # backwards compatibility hack: skip the root id.
3151 if not repository.supports_rich_root():3151 if not repository.supports_rich_root():
3152 path, root = entries.next()3152 path, root = next(entries)
3153 if root.revision != rev.revision_id:3153 if root.revision != rev.revision_id:
3154 raise errors.IncompatibleRevision(repr(repository))3154 raise errors.IncompatibleRevision(repr(repository))
3155 text_keys = {}3155 text_keys = {}
31563156
=== modified file 'breezy/vf_search.py'
--- breezy/vf_search.py 2017-05-22 00:56:52 +0000
+++ breezy/vf_search.py 2017-05-26 09:27:07 +0000
@@ -417,7 +417,7 @@
417 found_heads = set()417 found_heads = set()
418 while True:418 while True:
419 try:419 try:
420 next_revs = s.next()420 next_revs = next(s)
421 except StopIteration:421 except StopIteration:
422 break422 break
423 for parents in s._current_parents.itervalues():423 for parents in s._current_parents.itervalues():
424424
=== modified file 'breezy/weavefile.py'
--- breezy/weavefile.py 2017-05-24 19:44:00 +0000
+++ breezy/weavefile.py 2017-05-26 09:27:07 +0000
@@ -122,7 +122,7 @@
122 f.close()122 f.close()
123123
124 try:124 try:
125 l = lines.next()125 l = next(lines)
126 except StopIteration:126 except StopIteration:
127 raise WeaveFormatError('invalid weave file: no header')127 raise WeaveFormatError('invalid weave file: no header')
128128
@@ -132,7 +132,7 @@
132 ver = 0132 ver = 0
133 # read weave header.133 # read weave header.
134 while True:134 while True:
135 l = lines.next()135 l = next(lines)
136 if l[0] == 'i':136 if l[0] == 'i':
137 if len(l) > 2:137 if len(l) > 2:
138 w._parents.append(list(map(int, l[2:].split(' '))))138 w._parents.append(list(map(int, l[2:].split(' '))))
@@ -140,11 +140,11 @@
140 w._parents.append([])140 w._parents.append([])
141 l = lines.next()[:-1]141 l = lines.next()[:-1]
142 w._sha1s.append(l[2:])142 w._sha1s.append(l[2:])
143 l = lines.next()143 l = next(lines)
144 name = l[2:-1]144 name = l[2:-1]
145 w._names.append(name)145 w._names.append(name)
146 w._name_map[name] = ver146 w._name_map[name] = ver
147 l = lines.next()147 l = next(lines)
148 ver += 1148 ver += 1
149 elif l == 'w\n':149 elif l == 'w\n':
150 break150 break
@@ -153,7 +153,7 @@
153153
154 # read weave body154 # read weave body
155 while True:155 while True:
156 l = lines.next()156 l = next(lines)
157 if l == 'W\n':157 if l == 'W\n':
158 break158 break
159 elif '. ' == l[0:2]:159 elif '. ' == l[0:2]:
160160
=== modified file 'breezy/workingtree.py'
--- breezy/workingtree.py 2017-05-22 00:56:52 +0000
+++ breezy/workingtree.py 2017-05-26 09:27:07 +0000
@@ -1594,7 +1594,7 @@
1594 inventory_iterator = self._walkdirs(prefix)1594 inventory_iterator = self._walkdirs(prefix)
1595 disk_iterator = osutils.walkdirs(disk_top, prefix)1595 disk_iterator = osutils.walkdirs(disk_top, prefix)
1596 try:1596 try:
1597 current_disk = disk_iterator.next()1597 current_disk = next(disk_iterator)
1598 disk_finished = False1598 disk_finished = False
1599 except OSError as e:1599 except OSError as e:
1600 if not (e.errno == errno.ENOENT or1600 if not (e.errno == errno.ENOENT or
@@ -1603,7 +1603,7 @@
1603 current_disk = None1603 current_disk = None
1604 disk_finished = True1604 disk_finished = True
1605 try:1605 try:
1606 current_inv = inventory_iterator.next()1606 current_inv = next(inventory_iterator)
1607 inv_finished = False1607 inv_finished = False
1608 except StopIteration:1608 except StopIteration:
1609 current_inv = None1609 current_inv = None
@@ -1644,7 +1644,7 @@
1644 cur_disk_dir_content]1644 cur_disk_dir_content]
1645 yield (cur_disk_dir_relpath, None), dirblock1645 yield (cur_disk_dir_relpath, None), dirblock
1646 try:1646 try:
1647 current_disk = disk_iterator.next()1647 current_disk = next(disk_iterator)
1648 except StopIteration:1648 except StopIteration:
1649 disk_finished = True1649 disk_finished = True
1650 elif direction < 0:1650 elif direction < 0:
@@ -1654,7 +1654,7 @@
1654 current_inv[1]]1654 current_inv[1]]
1655 yield (current_inv[0][0], current_inv[0][1]), dirblock1655 yield (current_inv[0][0], current_inv[0][1]), dirblock
1656 try:1656 try:
1657 current_inv = inventory_iterator.next()1657 current_inv = next(inventory_iterator)
1658 except StopIteration:1658 except StopIteration:
1659 inv_finished = True1659 inv_finished = True
1660 else:1660 else:
@@ -1686,11 +1686,11 @@
1686 raise NotImplementedError('unreachable code')1686 raise NotImplementedError('unreachable code')
1687 yield current_inv[0], dirblock1687 yield current_inv[0], dirblock
1688 try:1688 try:
1689 current_inv = inventory_iterator.next()1689 current_inv = next(inventory_iterator)
1690 except StopIteration:1690 except StopIteration:
1691 inv_finished = True1691 inv_finished = True
1692 try:1692 try:
1693 current_disk = disk_iterator.next()1693 current_disk = next(disk_iterator)
1694 except StopIteration:1694 except StopIteration:
1695 disk_finished = True1695 disk_finished = True
16961696
@@ -2073,7 +2073,7 @@
2073 return _mod_conflicts.ConflictList()2073 return _mod_conflicts.ConflictList()
2074 try:2074 try:
2075 try:2075 try:
2076 if confile.next() != CONFLICT_HEADER_1 + '\n':2076 if next(confile) != CONFLICT_HEADER_1 + '\n':
2077 raise errors.ConflictFormatError()2077 raise errors.ConflictFormatError()
2078 except StopIteration:2078 except StopIteration:
2079 raise errors.ConflictFormatError()2079 raise errors.ConflictFormatError()
@@ -2370,7 +2370,7 @@
2370 try:2370 try:
2371 merge_hashes = {}2371 merge_hashes = {}
2372 try:2372 try:
2373 if hashfile.next() != MERGE_MODIFIED_HEADER_1 + '\n':2373 if next(hashfile) != MERGE_MODIFIED_HEADER_1 + '\n':
2374 raise errors.MergeModifiedFormatError()2374 raise errors.MergeModifiedFormatError()
2375 except StopIteration:2375 except StopIteration:
2376 raise errors.MergeModifiedFormatError()2376 raise errors.MergeModifiedFormatError()
23772377
=== modified file 'breezy/workingtree_4.py'
--- breezy/workingtree_4.py 2017-05-24 16:21:50 +0000
+++ breezy/workingtree_4.py 2017-05-26 09:27:07 +0000
@@ -1288,7 +1288,7 @@
1288 ids_to_unversion.remove(entry[0][2])1288 ids_to_unversion.remove(entry[0][2])
1289 block_index += 11289 block_index += 1
1290 if ids_to_unversion:1290 if ids_to_unversion:
1291 raise errors.NoSuchId(self, iter(ids_to_unversion).next())1291 raise errors.NoSuchId(self, next(iter(ids_to_unversion)))
1292 self._make_dirty(reset_inventory=False)1292 self._make_dirty(reset_inventory=False)
1293 # have to change the legacy inventory too.1293 # have to change the legacy inventory too.
1294 if self._inventory is not None:1294 if self._inventory is not None:
@@ -2014,7 +2014,7 @@
2014 # FIXME: Support nested trees2014 # FIXME: Support nested trees
2015 entries = inv.iter_entries(from_dir=from_dir_id, recursive=recursive)2015 entries = inv.iter_entries(from_dir=from_dir_id, recursive=recursive)
2016 if inv.root is not None and not include_root and from_dir is None:2016 if inv.root is not None and not include_root and from_dir is None:
2017 entries.next()2017 next(entries)
2018 for path, entry in entries:2018 for path, entry in entries:
2019 yield path, 'V', entry.kind, entry.file_id, entry2019 yield path, 'V', entry.kind, entry.file_id, entry
20202020
20212021
=== modified file 'breezy/xml_serializer.py'
--- breezy/xml_serializer.py 2017-05-22 00:56:52 +0000
+++ breezy/xml_serializer.py 2017-05-26 09:27:07 +0000
@@ -367,7 +367,7 @@
367 """367 """
368 entries = inv.iter_entries()368 entries = inv.iter_entries()
369 # Skip the root369 # Skip the root
370 root_path, root_ie = entries.next()370 root_path, root_ie = next(entries)
371 for path, ie in entries:371 for path, ie in entries:
372 if ie.parent_id != root_id:372 if ie.parent_id != root_id:
373 parent_str = ' parent_id="'373 parent_str = ' parent_id="'

Subscribers

People subscribed via source and target branches