Merge lp:~gz/brz/dict_viewing into lp:brz

Proposed by Martin Packman
Status: Merged
Approved by: Martin Packman
Approved revision: no longer in the source branch.
Merge reported by: The Breezy Bot
Merged at revision: not available
Proposed branch: lp:~gz/brz/dict_viewing
Merge into: lp:brz
Diff against target: 3844 lines (+524/-481)
93 files modified
breezy/_annotator_py.py (+2/-1)
breezy/_known_graph_py.py (+10/-6)
breezy/branch.py (+4/-4)
breezy/branchbuilder.py (+4/-1)
breezy/btree_index.py (+8/-4)
breezy/builtins.py (+10/-8)
breezy/bundle/bundle_data.py (+4/-3)
breezy/bundle/commands.py (+2/-1)
breezy/bundle/serializer/v4.py (+2/-1)
breezy/bzrdir.py (+4/-4)
breezy/check.py (+8/-7)
breezy/chk_map.py (+37/-38)
breezy/chk_serializer.py (+3/-3)
breezy/commands.py (+7/-9)
breezy/config.py (+7/-7)
breezy/controldir.py (+1/-1)
breezy/dirstate.py (+7/-5)
breezy/export_pot.py (+1/-1)
breezy/fetch.py (+7/-5)
breezy/fifo_cache.py (+4/-4)
breezy/gpg.py (+1/-1)
breezy/graph.py (+20/-16)
breezy/groupcompress.py (+7/-6)
breezy/hashcache.py (+4/-5)
breezy/help.py (+4/-4)
breezy/help_topics/__init__.py (+1/-1)
breezy/index.py (+12/-11)
breezy/inventory.py (+21/-17)
breezy/knit.py (+15/-13)
breezy/log.py (+2/-2)
breezy/lru_cache.py (+8/-2)
breezy/mail_client.py (+2/-2)
breezy/merge.py (+4/-1)
breezy/mutabletree.py (+9/-4)
breezy/plugins/bash_completion/bashcomp.py (+4/-4)
breezy/plugins/bisect/tests.py (+3/-3)
breezy/plugins/fastimport/branch_updater.py (+1/-1)
breezy/plugins/fastimport/cache_manager.py (+7/-20)
breezy/plugins/fastimport/exporter.py (+1/-1)
breezy/plugins/fastimport/helpers.py (+2/-2)
breezy/plugins/fastimport/idmapfile.py (+3/-7)
breezy/plugins/fastimport/marks_file.py (+2/-2)
breezy/plugins/fastimport/processors/info_processor.py (+10/-11)
breezy/plugins/fastimport/revision_store.py (+1/-1)
breezy/plugins/launchpad/lp_api_lite.py (+1/-1)
breezy/plugins/launchpad/lp_registration.py (+2/-2)
breezy/plugins/stats/cmds.py (+13/-8)
breezy/plugins/weave_fmt/bzrdir.py (+3/-3)
breezy/reconcile.py (+1/-1)
breezy/registry.py (+4/-5)
breezy/remote.py (+10/-7)
breezy/rename_map.py (+7/-6)
breezy/repofmt/groupcompress_repo.py (+8/-4)
breezy/repofmt/knitpack_repo.py (+4/-3)
breezy/repofmt/pack_repo.py (+3/-3)
breezy/repository.py (+10/-7)
breezy/revision.py (+1/-1)
breezy/smart/bzrdir.py (+1/-1)
breezy/smart/client.py (+1/-1)
breezy/smtp_connection.py (+1/-1)
breezy/tag.py (+2/-2)
breezy/tests/per_branch/test_check.py (+1/-1)
breezy/tests/per_bzrdir/test_bzrdir.py (+1/-1)
breezy/tests/per_controldir/test_controldir.py (+1/-1)
breezy/tests/per_controldir_colo/test_supported.py (+2/-2)
breezy/tests/per_controldir_colo/test_unsupported.py (+1/-2)
breezy/tests/per_foreign_vcs/__init__.py (+1/-1)
breezy/tests/per_pack_repository.py (+4/-3)
breezy/tests/per_workingtree/test_parents.py (+2/-2)
breezy/tests/test_btree_index.py (+3/-3)
breezy/tests/test_bundle.py (+1/-1)
breezy/tests/test_fifo_cache.py (+29/-32)
breezy/tests/test_graph.py (+1/-1)
breezy/tests/test_groupcompress.py (+2/-2)
breezy/tests/test_inv.py (+3/-3)
breezy/tests/test_knit.py (+4/-4)
breezy/tests/test_merge.py (+1/-1)
breezy/tests/test_merge_core.py (+3/-3)
breezy/tests/test_remote.py (+2/-2)
breezy/tests/test_versionedfile.py (+6/-8)
breezy/transform.py (+41/-38)
breezy/transport/http/_urllib2_wrappers.py (+3/-3)
breezy/transport/ssh.py (+2/-2)
breezy/tree.py (+6/-3)
breezy/tsort.py (+1/-1)
breezy/uncommit.py (+1/-1)
breezy/urlutils.py (+1/-1)
breezy/versionedfile.py (+17/-18)
breezy/vf_repository.py (+22/-25)
breezy/vf_search.py (+9/-6)
breezy/weave.py (+1/-1)
breezy/workingtree.py (+2/-2)
breezy/workingtree_4.py (+2/-1)
To merge this branch: bzr merge lp:~gz/brz/dict_viewing
Reviewer         Review Type    Date Requested    Status
Jelmer Vernooij                                   Approve
Review via email: mp+325108@code.launchpad.net

Commit message

Apply 2to3 dict fixer and clean up with sixish view methods

Description of the change

One of the few remaining big-bang 2to3 changes, this branch runs the dict fixer.

The basic scheme is that the iter(items|values) methods get their iter prefix stripped, plus other futzing to try to keep the semantics. The plain methods get wrapped in list(), which is often not the right thing, but the fixer is being conservative in case the dict changes during iteration.
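
For example, on a plain dict the fixer rewrites (illustrative lines, not taken from this branch):

    for key, value in d.iteritems():    # Python 2 only
        use(key, value)
    names = d.keys()

into:

    for key, value in d.items():        # the iter prefix is stripped
        use(key, value)
    names = list(d.keys())              # defensively copied into a list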

Unfortunately, we have a bunch of types that have some dict-like methods but aren't dicts:

* chk_map: CHKMap.iteritems LeafNode.iteritems
* lru_cache: LRUCache.keys
* registry: Registry methods keys iteritems items
* versionedfile: VersionedFiles.keys
* groupcompress: GroupCompressVersionedFiles.keys _GCGraphIndex.keys
* knit: _KndxIndex.keys _KnitGraphIndex.keys KnitVersionedFiles.keys

This led to a bunch of bogus changes.
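
For instance (a hypothetical call site, not a line from this diff): VersionedFiles.keys() returns a set rather than a dict view, so the fixer's defensive rewrite

    keys = self.texts.keys()
    # becomes
    keys = list(self.texts.keys())

only adds a pointless copy, while a call to a custom iteritems() method such as CHKMap.iteritems() can get renamed to items(), a method those classes do not define.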

For general sanity, I switched uses of items/values over to the sixish view helpers, which give the new Python 3 behaviour and have non-horrible 2.7 performance.
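
The view helpers amount to roughly the following (a sketch of the idea, not necessarily the exact sixish code):

    import sys

    if sys.version_info[0] >= 3:
        def viewitems(d, **kw):
            # Python 3: items() already returns a lightweight dynamic view
            return d.items(**kw)
    else:
        def viewitems(d, **kw):
            # Python 2.7: use the dedicated view method instead of
            # building a list copy
            return d.viewitems(**kw)

with viewkeys and viewvalues defined the same way, so call sites can write viewitems(d) on either interpreter without paying for a list copy.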

There's a small amount of other drive-by stuff, but mostly this branch can just be verified by the tests passing.

Revision history for this message
Jelmer Vernooij (jelmer):
review: Approve
Revision history for this message
The Breezy Bot (the-breezy-bot) wrote:

Running landing tests failed
http://10.242.247.184:8080/job/brz-dev/66/

Revision history for this message
The Breezy Bot (the-breezy-bot) wrote:

Running landing tests failed
http://10.242.247.184:8080/job/brz-dev/67/

Preview Diff

=== modified file 'breezy/_annotator_py.py'
--- breezy/_annotator_py.py 2017-06-04 18:09:30 +0000
+++ breezy/_annotator_py.py 2017-06-05 21:41:38 +0000
@@ -33,6 +33,7 @@
     )
 from .sixish import (
     range,
+    viewitems,
     )


@@ -92,7 +93,7 @@
                 vf_keys_needed.add(key)
             needed_keys = set()
             next_parent_map.update(self._vf.get_parent_map(parent_lookup))
-            for key, parent_keys in next_parent_map.iteritems():
+            for key, parent_keys in viewitems(next_parent_map):
                 if parent_keys is None: # No graph versionedfile
                     parent_keys = ()
                     next_parent_map[key] = ()

=== modified file 'breezy/_known_graph_py.py'
--- breezy/_known_graph_py.py 2017-05-22 00:56:52 +0000
+++ breezy/_known_graph_py.py 2017-06-05 21:41:38 +0000
@@ -24,6 +24,10 @@
     errors,
     revision,
     )
+from .sixish import (
+    viewitems,
+    viewvalues,
+    )


 class _KnownGraphNode(object):
@@ -81,7 +85,7 @@
             child_keys,
         """
         nodes = self._nodes
-        for key, parent_keys in parent_map.iteritems():
+        for key, parent_keys in viewitems(parent_map):
             if key in nodes:
                 node = nodes[key]
                 node.parent_keys = parent_keys
@@ -97,11 +101,11 @@
                 parent_node.child_keys.append(key)

     def _find_tails(self):
-        return [node for node in self._nodes.itervalues()
+        return [node for node in viewvalues(self._nodes)
                 if not node.parent_keys]

     def _find_tips(self):
-        return [node for node in self._nodes.itervalues()
+        return [node for node in viewvalues(self._nodes)
                 if not node.child_keys]

     def _find_gdfo(self):
@@ -234,7 +238,7 @@
         seen = set()
         pending = []
         min_gdfo = None
-        for node in candidate_nodes.values():
+        for node in viewvalues(candidate_nodes):
             if node.parent_keys:
                 pending.extend(node.parent_keys)
                 if min_gdfo is None or node.gdfo < min_gdfo:
@@ -261,7 +265,7 @@

         All parents must occur before all children.
         """
-        for node in self._nodes.itervalues():
+        for node in viewvalues(self._nodes):
             if node.gdfo is None:
                 raise errors.GraphCycleError(self._nodes)
         pending = self._find_tails()
@@ -339,7 +343,7 @@
         """Compute the merge sorted graph output."""
         from breezy import tsort
         as_parent_map = dict((node.key, node.parent_keys)
-                             for node in self._nodes.itervalues()
+                             for node in viewvalues(self._nodes)
                              if node.parent_keys is not None)
         # We intentionally always generate revnos and never force the
         # mainline_revisions

=== modified file 'breezy/branch.py'
--- breezy/branch.py 2017-05-30 19:32:13 +0000
+++ breezy/branch.py 2017-06-05 21:41:38 +0000
@@ -66,6 +66,7 @@
 from .lock import _RelockDebugMixin, LogicalLockResult
 from .sixish import (
     BytesIO,
+    viewitems,
     )
 from .trace import mutter, mutter_callsite, note, is_quiet

@@ -397,7 +398,7 @@
             return self.get_rev_id(revno[0])
         revision_id_to_revno = self.get_revision_id_to_revno_map()
         revision_ids = [revision_id for revision_id, this_revno
-                        in revision_id_to_revno.iteritems()
+                        in viewitems(revision_id_to_revno)
                         if revno == this_revno]
         if len(revision_ids) == 1:
             return revision_ids[0]
@@ -1330,8 +1331,7 @@
         old_base = self.base
         new_base = target.base
         target_reference_dict = target._get_all_reference_info()
-        for file_id, (tree_path, branch_location) in (
-            reference_dict.items()):
+        for file_id, (tree_path, branch_location) in viewitems(reference_dict):
             branch_location = urlutils.rebase_url(branch_location,
                                                   old_base, new_base)
             target_reference_dict.setdefault(
@@ -2735,7 +2735,7 @@
         """
         s = BytesIO()
         writer = rio.RioWriter(s)
-        for key, (tree_path, branch_location) in info_dict.iteritems():
+        for key, (tree_path, branch_location) in viewitems(info_dict):
             stanza = rio.Stanza(file_id=key, tree_path=tree_path,
                                 branch_location=branch_location)
             writer.write_stanza(stanza)

=== modified file 'breezy/branchbuilder.py'
--- breezy/branchbuilder.py 2017-05-22 00:56:52 +0000
+++ breezy/branchbuilder.py 2017-06-05 21:41:38 +0000
@@ -25,6 +25,9 @@
     memorytree,
     revision,
     )
+from .sixish import (
+    viewitems,
+    )


 class BranchBuilder(object):
@@ -277,7 +280,7 @@
         if pending.to_unversion_ids:
             tree.unversion(pending.to_unversion_ids)
         tree.add(pending.to_add_files, pending.to_add_file_ids, pending.to_add_kinds)
-        for file_id, content in pending.new_contents.iteritems():
+        for file_id, content in viewitems(pending.new_contents):
             tree.put_file_bytes_non_atomic(file_id, content)

     def get_branch(self):

=== modified file 'breezy/btree_index.py'
--- breezy/btree_index.py 2017-06-05 01:46:14 +0000
+++ breezy/btree_index.py 2017-06-05 21:41:38 +0000
@@ -44,6 +44,9 @@
     BytesIO,
     map,
     range,
+    viewitems,
+    viewkeys,
+    viewvalues,
     )


@@ -557,13 +560,13 @@
         if self._nodes_by_key is None:
             nodes_by_key = {}
             if self.reference_lists:
-                for key, (references, value) in self._nodes.iteritems():
+                for key, (references, value) in viewitems(self._nodes):
                     key_dict = nodes_by_key
                     for subkey in key[:-1]:
                         key_dict = key_dict.setdefault(subkey, {})
                     key_dict[key[-1]] = key, value, references
             else:
-                for key, (references, value) in self._nodes.iteritems():
+                for key, (references, value) in viewitems(self._nodes):
                     key_dict = nodes_by_key
                     for subkey in key[:-1]:
                         key_dict = key_dict.setdefault(subkey, {})
@@ -905,7 +908,8 @@

     def _get_offsets_to_cached_pages(self):
         """Determine what nodes we already have cached."""
-        cached_offsets = set(self._internal_node_cache.keys())
+        cached_offsets = set(self._internal_node_cache)
+        # cache may be dict or LRUCache, keys() is the common method
         cached_offsets.update(self._leaf_node_cache.keys())
         if self._root_node is not None:
             cached_offsets.add(0)
@@ -944,7 +948,7 @@
     def _cache_leaf_values(self, nodes):
         """Cache directly from key => value, skipping the btree."""
         if self._leaf_value_cache is not None:
-            for node in nodes.itervalues():
+            for node in viewvalues(nodes):
                 for key, value in node.all_items():
                     if key in self._leaf_value_cache:
                         # Don't add the rest of the keys, we've seen this node

=== modified file 'breezy/builtins.py'
--- breezy/builtins.py 2017-06-04 21:47:02 +0000
+++ breezy/builtins.py 2017-06-05 21:41:38 +0000
@@ -81,6 +81,8 @@
 from .sixish import (
     BytesIO,
     text_type,
+    viewitems,
+    viewvalues,
 )
 from .trace import mutter, note, warning, is_quiet, get_verbosity_level

@@ -201,7 +203,7 @@
         reference = control_dir.get_branch_reference()
     except errors.NotBranchError:
         # There is no active branch, just return the colocated branches.
-        for name, branch in control_dir.get_branches().iteritems():
+        for name, branch in viewitems(control_dir.get_branches()):
             yield name, branch
         return
     if reference is not None:
@@ -212,7 +214,7 @@
     if ref_branch is None or ref_branch.name:
         if ref_branch is not None:
             control_dir = ref_branch.bzrdir
-        for name, branch in control_dir.get_branches().iteritems():
+        for name, branch in viewitems(control_dir.get_branches()):
             yield name, branch
     else:
         repo = ref_branch.bzrdir.find_repository()
@@ -845,7 +847,7 @@
         self.cleanup_now()
         if len(ignored) > 0:
             if verbose:
-                for glob in sorted(ignored.keys()):
+                for glob in sorted(ignored):
                     for path in ignored[glob]:
                         self.outf.write(
                             gettext("ignored {0} matching \"{1}\"\n").format(
@@ -1583,9 +1585,9 @@
                 names[name] = active
             # Only mention the current branch explicitly if it's not
             # one of the colocated branches
-            if not any(names.values()) and active_branch is not None:
+            if not any(viewvalues(names)) and active_branch is not None:
                 self.outf.write("* %s\n" % gettext("(default)"))
-            for name in sorted(names.keys()):
+            for name in sorted(names):
                 active = names[name]
                 if active:
                     prefix = "*"
@@ -3963,7 +3965,7 @@
     def print_aliases(self):
         """Print out the defined aliases in a similar format to bash."""
         aliases = _mod_config.GlobalConfig().get_aliases()
-        for key, value in sorted(aliases.iteritems()):
+        for key, value in sorted(viewitems(aliases)):
             self.outf.write('brz alias %s="%s"\n' % (key, value))

     @display_command
@@ -6004,7 +6006,7 @@
         from .tag import tag_sort_methods
         branch, relpath = Branch.open_containing(directory)

-        tags = branch.tags.get_tag_dict().items()
+        tags = list(viewitems(branch.tags.get_tag_dict()))
         if not tags:
             return

@@ -6648,7 +6650,7 @@
         if tree is None:
             tree = branch.basis_tree()
         if path is None:
-            info = branch._get_all_reference_info().iteritems()
+            info = viewitems(branch._get_all_reference_info())
             self._display_reference_info(tree, branch, info)
         else:
             file_id = tree.path2id(path)

=== modified file 'breezy/bundle/bundle_data.py'
--- breezy/bundle/bundle_data.py 2017-05-30 19:32:13 +0000
+++ breezy/bundle/bundle_data.py 2017-06-05 21:41:38 +0000
@@ -42,6 +42,7 @@
 from ..revision import Revision, NULL_REVISION
 from ..sixish import (
     BytesIO,
+    viewitems,
     )
 from ..testament import StrictTestament
 from ..trace import mutter, warning
@@ -106,7 +107,7 @@
         revision_info.timestamp = revision.timestamp
         revision_info.message = revision.message.split('\n')
         revision_info.properties = [': '.join(p) for p in
-                                    revision.properties.iteritems()]
+                                    viewitems(revision.properties)]
         return revision_info


@@ -252,7 +253,7 @@

         count = 0
         missing = {}
-        for revision_id, sha1 in rev_to_sha.iteritems():
+        for revision_id, sha1 in viewitems(rev_to_sha):
             if repository.has_revision(revision_id):
                 testament = StrictTestament.from_revision(repository,
                                                           revision_id)
@@ -772,7 +773,7 @@

     def sorted_path_id(self):
         paths = []
-        for result in self._new_id.iteritems():
+        for result in viewitems(self._new_id):
             paths.append(result)
         for id in self.base_tree.all_file_ids():
             path = self.id2path(id)

=== modified file 'breezy/bundle/commands.py'
--- breezy/bundle/commands.py 2017-05-22 00:56:52 +0000
+++ breezy/bundle/commands.py 2017-06-05 21:41:38 +0000
@@ -39,6 +39,7 @@
 from ..commands import Command
 from ..sixish import (
     BytesIO,
+    viewitems,
     )


@@ -76,7 +77,7 @@
             if file_id is not None:
                 file_ids.add(file_id)
         self.outf.write(gettext('Records\n'))
-        for kind, records in sorted(by_kind.iteritems()):
+        for kind, records in sorted(viewitems(by_kind)):
             multiparent = sum(1 for b, m, k, r, f in records if
                               len(m.get('parents', [])) > 1)
             self.outf.write(gettext('{0}: {1} ({2} multiparent)\n').format(

=== modified file 'breezy/bundle/serializer/v4.py'
--- breezy/bundle/serializer/v4.py 2017-05-25 01:35:55 +0000
+++ breezy/bundle/serializer/v4.py 2017-06-05 21:41:38 +0000
@@ -37,6 +37,7 @@
 from ...i18n import ngettext
 from ...sixish import (
     BytesIO,
+    viewitems,
     )


@@ -352,7 +353,7 @@
         text_keys = []
         altered_fileids = self.repository.fileids_altered_by_revision_ids(
             self.revision_ids)
-        for file_id, revision_ids in altered_fileids.iteritems():
+        for file_id, revision_ids in viewitems(altered_fileids):
             for revision_id in revision_ids:
                 text_keys.append((file_id, revision_id))
         self._add_mp_records_keys('file', self.repository.texts, text_keys)

=== modified file 'breezy/bzrdir.py'
--- breezy/bzrdir.py 2017-05-30 19:16:23 +0000
+++ breezy/bzrdir.py 2017-06-05 21:41:38 +0000
@@ -1140,7 +1140,7 @@

     def check_support_status(self, allow_unsupported, recommend_upgrade=True,
             basedir=None):
-        for name, necessity in self.features.iteritems():
+        for name, necessity in self.features.items():
             if name in self._present_features:
                 continue
             if necessity == "optional":
@@ -1179,7 +1179,7 @@
         """
         lines = [self.get_format_string()]
         lines.extend([("%s %s\n" % (item[1], item[0])) for item in
-                      self.features.iteritems()])
+                      self.features.items()])
         return "".join(lines)

     @classmethod
@@ -1210,7 +1210,7 @@

         :param updated_flags: Updated feature flags
         """
-        for name, necessity in updated_flags.iteritems():
+        for name, necessity in updated_flags.items():
             if necessity is None:
                 try:
                     del self.features[name]
@@ -1246,7 +1246,7 @@
     @classmethod
     def known_formats(cls):
         result = set()
-        for name, format in cls.formats.iteritems():
+        for name, format in cls.formats.items():
             if callable(format):
                 format = format()
             result.add(format)

=== modified file 'breezy/check.py'
--- breezy/check.py 2017-05-22 00:56:52 +0000
+++ breezy/check.py 2017-06-05 21:41:38 +0000
@@ -56,6 +56,9 @@
 from .branch import Branch
 from .controldir import ControlDir
 from .revision import NULL_REVISION
+from .sixish import (
+    viewitems,
+    )
 from .trace import note
 from .workingtree import WorkingTree
 from .i18n import gettext
@@ -128,7 +131,7 @@
         #   landing].
         distances = set()
         existences = set()
-        for ref, wantlist in callback_refs.iteritems():
+        for ref, wantlist in viewitems(callback_refs):
             wanting_items.update(wantlist)
             kind, value = ref
             if kind == 'trees':
@@ -141,7 +144,7 @@
                 raise AssertionError(
                     'unknown ref kind for ref %s' % ref)
         node_distances = repo.get_graph().find_lefthand_distances(distances)
-        for key, distance in node_distances.iteritems():
+        for key, distance in viewitems(node_distances):
             refs[('lefthand-distance', key)] = distance
             if key in existences and distance > 0:
                 refs[('revision-existence', key)] = True
@@ -229,7 +232,7 @@
             note(gettext('%6d revisions missing parents in ancestry'),
                  len(self.missing_parent_links))
             if verbose:
-                for link, linkers in self.missing_parent_links.items():
+                for link, linkers in viewitems(self.missing_parent_links):
                     note(gettext(' %s should be in the ancestry for:'), link)
                     for linker in linkers:
                         note(' * %s', linker)
@@ -320,12 +323,10 @@
                 text_key_references=self.text_key_references,
                 ancestors=self.ancestors)
             storebar.update('file-graph', 1)
-            result = weave_checker.check_file_version_parents(
+            wrongs, unused_versions = weave_checker.check_file_version_parents(
                 self.repository.texts)
             self.checked_weaves = weave_checker.file_ids
-            bad_parents, unused_versions = result
-            bad_parents = bad_parents.items()
-            for text_key, (stored_parents, correct_parents) in bad_parents:
+            for text_key, (stored_parents, correct_parents) in viewitems(wrongs):
                 # XXX not ready for id join/split operations.
                 weave_id = text_key[0]
                 revision_id = text_key[-1]

=== modified file 'breezy/chk_map.py'
--- breezy/chk_map.py 2017-06-04 18:09:30 +0000
+++ breezy/chk_map.py 2017-06-05 21:41:38 +0000
@@ -56,6 +56,10 @@
     static_tuple,
     trace,
     )
+from .sixish import (
+    viewitems,
+    viewvalues,
+    )
 from .static_tuple import StaticTuple

 # approx 4MB
@@ -212,11 +216,11 @@
         if isinstance(node, InternalNode):
             # Trigger all child nodes to get loaded
             list(node._iter_nodes(self._store))
-            for prefix, sub in sorted(node._items.iteritems()):
+            for prefix, sub in sorted(viewitems(node._items)):
                 result.extend(self._dump_tree_node(sub, prefix, indent + ' ',
                     include_keys=include_keys))
         else:
-            for key, value in sorted(node._items.iteritems()):
+            for key, value in sorted(viewitems(node._items)):
                 # Don't use prefix nor indent here to line up when used in
                 # tests in conjunction with assertEqualDiff
                 result.append(' %r %r' % (tuple(key), value))
@@ -255,7 +259,7 @@
         result._root_node.set_maximum_size(maximum_size)
         result._root_node._key_width = key_width
         delta = []
-        for key, value in initial_value.items():
+        for key, value in viewitems(initial_value):
             delta.append((None, key, value))
         root_key = result.apply_delta(delta)
         return root_key
@@ -267,10 +271,10 @@
         node.set_maximum_size(maximum_size)
         node._key_width = key_width
         as_st = StaticTuple.from_sequence
-        node._items = dict([(as_st(key), val) for key, val
-                            in initial_value.iteritems()])
-        node._raw_size = sum([node._key_value_len(key, value)
-                              for key,value in node._items.iteritems()])
+        node._items = dict((as_st(key), val)
+                           for key, val in viewitems(initial_value))
+        node._raw_size = sum(node._key_value_len(key, value)
+                             for key, value in viewitems(node._items))
         node._len = len(node._items)
         node._compute_search_prefix()
         node._compute_serialised_prefix()
@@ -333,7 +337,7 @@
             node = a_map._get_node(node)
             if isinstance(node, LeafNode):
                 path = (node._key, path)
-                for key, value in node._items.items():
+                for key, value in viewitems(node._items):
                     # For a LeafNode, the key is a serialized_key, rather than
                     # a search_key, but the heap is using search_keys
                     search_key = node._search_key_func(key)
@@ -341,11 +345,11 @@
             else:
                 # type(node) == InternalNode
                 path = (node._key, path)
-                for prefix, child in node._items.items():
+                for prefix, child in viewitems(node._items):
                     heapq.heappush(pending, (prefix, None, child, path))
         def process_common_internal_nodes(self_node, basis_node):
-            self_items = set(self_node._items.items())
-            basis_items = set(basis_node._items.items())
+            self_items = set(viewitems(self_node._items))
+            basis_items = set(viewitems(basis_node._items))
             path = (self_node._key, None)
             for prefix, child in self_items - basis_items:
                 heapq.heappush(self_pending, (prefix, None, child, path))
@@ -353,8 +357,8 @@
             for prefix, child in basis_items - self_items:
                 heapq.heappush(basis_pending, (prefix, None, child, path))
         def process_common_leaf_nodes(self_node, basis_node):
-            self_items = set(self_node._items.items())
-            basis_items = set(basis_node._items.items())
+            self_items = set(viewitems(self_node._items))
+            basis_items = set(viewitems(basis_node._items))
             path = (self_node._key, None)
             for key, value in self_items - basis_items:
                 prefix = self._search_key_func(key)
@@ -766,17 +770,16 @@
                         pass
                 else:
                     # Short items, we need to match based on a prefix
-                    length_filter = filters.setdefault(len(key), set())
-                    length_filter.add(key)
+                    filters.setdefault(len(key), set()).add(key)
             if filters:
-                filters = filters.items()
-                for item in self._items.iteritems():
-                    for length, length_filter in filters:
+                filters_itemview = viewitems(filters)
+                for item in viewitems(self._items):
+                    for length, length_filter in filters_itemview:
                         if item[0][:length] in length_filter:
                             yield item
                             break
         else:
-            for item in self._items.iteritems():
+            for item in viewitems(self._items):
                 yield item

     def _key_value_len(self, key, value):
@@ -838,7 +841,7 @@
         common_prefix = self._search_prefix
         split_at = len(common_prefix) + 1
         result = {}
-        for key, value in self._items.iteritems():
+        for key, value in viewitems(self._items):
             search_key = self._search_key(key)
             prefix = search_key[:split_at]
             # TODO: Generally only 1 key can be exactly the right length,
@@ -871,7 +874,7 @@
                 for split, node in node_details:
                     new_node.add_node(split, node)
                 result[prefix] = new_node
-        return common_prefix, result.items()
+        return common_prefix, list(viewitems(result))

     def map(self, store, key, value):
         """Map key to value."""
@@ -906,7 +909,7 @@
         else:
             lines.append('%s\n' % (self._common_serialised_prefix,))
             prefix_len = len(self._common_serialised_prefix)
-        for key, value in sorted(self._items.items()):
+        for key, value in sorted(viewitems(self._items)):
             # Always add a final newline
             value_lines = osutils.chunks_to_lines([value + '\n'])
             serialized = "%s\x00%s\n" % (self._serialise_key(key),
@@ -1071,7 +1074,7 @@
             # yielding all nodes, yield whatever we have, and queue up a read
             # for whatever we are missing
             shortcut = True
-            for prefix, node in self._items.iteritems():
+            for prefix, node in viewitems(self._items):
                 if node.__class__ is StaticTuple:
                     keys[node] = (prefix, None)
                 else:
@@ -1147,10 +1150,10 @@
         else:
             # The slow way. We walk every item in self._items, and check to
             # see if there are any matches
-            length_filters = length_filters.items()
-            for prefix, node in self._items.iteritems():
+            length_filters_itemview = viewitems(length_filters)
+            for prefix, node in viewitems(self._items):
                 node_key_filter = []
-                for length, length_filter in length_filters:
+                for length, length_filter in length_filters_itemview:
                     sub_prefix = prefix[:length]
                     if sub_prefix in length_filter:
                         node_key_filter.extend(prefix_to_keys[sub_prefix])
@@ -1292,7 +1295,7 @@
         :param store: A VersionedFiles honouring the CHK extensions.
         :return: An iterable of the keys inserted by this operation.
         """
-        for node in self._items.itervalues():
+        for node in viewvalues(self._items):
             if isinstance(node, StaticTuple):
                 # Never deserialised.
                 continue
@@ -1309,7 +1312,7 @@
             raise AssertionError("_search_prefix should not be None")
         lines.append('%s\n' % (self._search_prefix,))
         prefix_len = len(self._search_prefix)
-        for prefix, node in sorted(self._items.items()):
+        for prefix, node in sorted(viewitems(self._items)):
             if isinstance(node, StaticTuple):
                 key = node[0]
             else:
@@ -1342,19 +1345,16 @@
             prefix for reaching node.
         """
         if offset >= self._node_width:
-            for node in self._items.values():
+            for node in viewvalues(self._items):
                 for result in node._split(offset):
                     yield result
-            return
-        for key, node in self._items.items():
-            pass

     def refs(self):
         """Return the references to other CHK's held by this node."""
         if self._key is None:
             raise AssertionError("unserialised nodes have no refs.")
         refs = []
-        for value in self._items.itervalues():
+        for value in viewvalues(self._items):
             if isinstance(value, StaticTuple):
                 refs.append(value)
             else:
@@ -1393,7 +1393,7 @@
             self._items[search_key] = unmapped
         if len(self._items) == 1:
             # this node is no longer needed:
-            return self._items.values()[0]
+            return list(viewvalues(self._items))[0]
         if isinstance(unmapped, InternalNode):
             return self
         if check_remap:
@@ -1443,7 +1443,7 @@
             if isinstance(node, InternalNode):
                 # Without looking at any leaf nodes, we are sure
                 return self
-            for key, value in node._items.iteritems():
+            for key, value in viewitems(node._items):
                 if new_leaf._map_no_split(key, value):
                     return self
         trace.mutter("remap generated a new LeafNode")
@@ -1532,15 +1532,14 @@
                 #       indicate that we keep 100k prefix_refs around while
                 #       processing. They *should* be shorter lived than that...
                 #       It does cost us ~10s of processing time
-                #prefix_refs = [as_st(item) for item in node._items.iteritems()]
-                prefix_refs = node._items.items()
+                prefix_refs = list(viewitems(node._items))
                 items = []
             else:
                 prefix_refs = []
                 # Note: We don't use a StaticTuple here. Profiling showed a
                 #       minor memory improvement (0.8MB out of 335MB peak 0.2%)
                 #       But a significant slowdown (15s / 145s, or 10%)
-                items = node._items.items()
+                items = list(viewitems(node._items))
             yield record, node, prefix_refs, items

     def _read_old_roots(self):

=== modified file 'breezy/chk_serializer.py'
--- breezy/chk_serializer.py 2017-05-22 00:56:52 +0000
+++ breezy/chk_serializer.py 2017-06-05 21:41:38 +0000
@@ -41,7 +41,7 @@
 def _validate_properties(props, _decode=cache_utf8._utf8_decode):
     # TODO: we really want an 'isascii' check for key
     # Cast the utf8 properties into Unicode 'in place'
-    for key, value in props.iteritems():
+    for key, value in props.items():
         props[key] = _decode(value)[0]
     return props

@@ -90,7 +90,7 @@
         # For bzr revisions, the most common property is just 'branch-nick'
         # which changes infrequently.
         revprops = {}
-        for key, value in rev.properties.iteritems():
+        for key, value in rev.properties.items():
             revprops[key] = encode_utf8(value)[0]
         ret.append(('properties', revprops))
         ret.extend([
@@ -131,7 +131,7 @@
             value = validator(value)
         bits[var_name] = value
     if len(bits) != len(schema):
-        missing = [key for key, (var_name, _, _) in schema.iteritems()
+        missing = [key for key, (var_name, _, _) in schema.items()
                    if var_name not in bits]
         raise ValueError('Revision text was missing expected keys %s.'
                          ' text %r' % (missing, text))

=== modified file 'breezy/commands.py'
--- breezy/commands.py 2017-06-02 23:50:41 +0000
+++ breezy/commands.py 2017-06-05 21:41:38 +0000
@@ -169,18 +169,16 @@
         # only load once
         return
     import breezy.builtins
-    for cmd_class in _scan_module_for_commands(breezy.builtins).values():
+    for cmd_class in _scan_module_for_commands(breezy.builtins):
         builtin_command_registry.register(cmd_class)
     breezy.builtins._register_lazy_builtins()


 def _scan_module_for_commands(module):
-    r = {}
-    for name, obj in module.__dict__.items():
+    module_dict = module.__dict__
+    for name in module_dict:
         if name.startswith("cmd_"):
-            real_name = _unsquish_command_name(name)
-            r[real_name] = obj
-    return r
+            yield module_dict[name]


 def _list_bzr_commands(names):
@@ -628,7 +626,7 @@

         Maps from long option name to option object."""
         r = Option.STD_OPTIONS.copy()
-        std_names = r.keys()
+        std_names = set(r)
         for o in self.takes_options:
             if isinstance(o, string_types):
                 o = option.Option.OPTIONS[o]
@@ -824,8 +822,8 @@
             raise errors.BzrCommandError(
                 gettext('Only ASCII permitted in option names'))

-    opts = dict([(k, v) for k, v in options.__dict__.items() if
-                 v is not option.OptionParser.DEFAULT_VALUE])
+    opts = dict((k, v) for k, v in options.__dict__.items() if
+                v is not option.OptionParser.DEFAULT_VALUE)
     return args, opts



=== modified file 'breezy/config.py'
--- breezy/config.py 2017-05-30 19:32:13 +0000
+++ breezy/config.py 2017-06-05 21:41:38 +0000
@@ -1722,7 +1722,7 @@
             certificate should be verified, False otherwise.
         """
         credentials = None
-        for auth_def_name, auth_def in self._get_config().items():
+        for auth_def_name, auth_def in self._get_config().iteritems():
             if not isinstance(auth_def, configobj.Section):
                 raise ValueError("%s defined outside a section" % auth_def_name)

@@ -1824,7 +1824,7 @@
             values['realm'] = realm
         config = self._get_config()
         for_deletion = []
-        for section, existing_values in config.items():
+        for section, existing_values in config.iteritems():
             for key in ('scheme', 'host', 'port', 'path', 'realm'):
                 if existing_values.get(key) != values.get(key):
                     break
@@ -2854,7 +2854,7 @@
         return self.options.get(name, default)

     def iter_option_names(self):
-        for k in self.options.iterkeys():
+        for k in self.options.keys():
             yield k

     def __repr__(self):
@@ -2901,7 +2901,7 @@

         :param store: the store containing the section
         """
-        for k, expected in dirty.orig.iteritems():
+        for k, expected in dirty.orig.items():
             actual = dirty.get(k, _DeletedOption)
             reloaded = self.get(k, _NewlyCreatedOption)
             if actual is _DeletedOption:
@@ -3009,7 +3009,7 @@
         # get_mutable_section() call below.
         self.unload()
         # Apply the changes from the preserved dirty sections
-        for section_id, dirty in dirty_sections.iteritems():
+        for section_id, dirty in dirty_sections.items():
             clean = self.get_mutable_section(section_id)
             clean.apply_changes(dirty, self)
         # Everything is clean now
@@ -3153,7 +3153,7 @@
         if not self._need_saving():
             return
         # Preserve the current version
-        dirty_sections = dict(self.dirty_sections.items())
+        dirty_sections = self.dirty_sections.copy()
         self.apply_changes(dirty_sections)
         # Save to the persistent storage
         self.save()
@@ -3780,7 +3780,7 @@
     global _shared_stores_at_exit_installed
     stores = _shared_stores
     def save_config_changes():
-        for k, store in stores.items():
+        for k, store in stores.iteritems():
             store.save_changes()
     if not _shared_stores_at_exit_installed:
         # FIXME: Ugly hack waiting for library_state to always be

=== modified file 'breezy/controldir.py'
--- breezy/controldir.py 2017-05-22 00:56:52 +0000
+++ breezy/controldir.py 2017-06-05 21:41:38 +0000
@@ -108,7 +108,7 @@
         """Return a sequence of all branches local to this control directory.

         """
-        return self.get_branches().values()
+        return list(self.get_branches().values())

     def get_branches(self):
         """Get all branches in this control directory, as a dictionary.

=== modified file 'breezy/dirstate.py'
--- breezy/dirstate.py 2017-06-04 18:09:30 +0000
+++ breezy/dirstate.py 2017-06-05 21:41:38 +0000
@@ -244,6 +244,8 @@
     )
 from .sixish import (
     range,
+    viewitems,
+    viewvalues,
     )


@@ -975,7 +977,7 @@
         # Directories that need to be read
         pending_dirs = set()
         paths_to_search = set()
-        for entry_list in newly_found.itervalues():
+        for entry_list in viewvalues(newly_found):
             for dir_name_id, trees_info in entry_list:
                 found[dir_name_id] = trees_info
                 found_dir_names.add(dir_name_id[:2])
@@ -1386,8 +1388,8 @@
                                 fingerprint, new_child_path)
         self._check_delta_ids_absent(new_ids, delta, 0)
         try:
-            self._apply_removals(removals.iteritems())
-            self._apply_insertions(insertions.values())
+            self._apply_removals(viewitems(removals))
+            self._apply_insertions(viewvalues(insertions))
             # Validate parents
             self._after_delta_check_parents(parents, 0)
         except errors.BzrError as e:
@@ -2723,7 +2725,7 @@
         # --- end generation of full tree mappings

         # sort and output all the entries
-        new_entries = self._sort_entries(by_path.items())
+        new_entries = self._sort_entries(viewitems(by_path))
         self._entries_to_current_state(new_entries)
         self._parents = [rev_id for rev_id, tree in trees]
         self._ghosts = list(ghosts)
@@ -3288,7 +3290,7 @@
                 raise AssertionError(
                     "entry %r has no data for any tree." % (entry,))
         if self._id_index is not None:
-            for file_id, entry_keys in self._id_index.iteritems():
+            for file_id, entry_keys in viewitems(self._id_index):
                 for entry_key in entry_keys:
                     # Check that the entry in the map is pointing to the same
                     # file_id

=== modified file 'breezy/export_pot.py'
--- breezy/export_pot.py 2017-06-05 01:21:55 +0000
+++ breezy/export_pot.py 2017-06-05 21:41:38 +0000
@@ -202,7 +202,7 @@
 def _standard_options(exporter):
     OPTIONS = option.Option.OPTIONS
     context = exporter.get_context(option)
-    for name in sorted(OPTIONS.keys()):
+    for name in sorted(OPTIONS):
         opt = OPTIONS[name]
         _write_option(exporter, context.from_string(name), opt, "option")


=== modified file 'breezy/fetch.py'
--- breezy/fetch.py 2017-05-22 00:56:52 +0000
+++ breezy/fetch.py 2017-06-05 21:41:38 +0000
@@ -40,6 +40,9 @@
     )
 from .i18n import gettext
 from .revision import NULL_REVISION
+from .sixish import (
+    viewvalues,
+    )
 from .trace import mutter


@@ -215,12 +218,11 @@
             revision_id = tree.get_file_revision(root_id, u"")
             revision_root[revision_id] = root_id
         # Find out which parents we don't already know root ids for
-        parents = set()
-        for revision_parents in parent_map.itervalues():
-            parents.update(revision_parents)
-        parents.difference_update(revision_root.keys() + [NULL_REVISION])
+        parents = set(viewvalues(parent_map))
+        parents.difference_update(revision_root)
+        parents.discard(NULL_REVISION)
         # Limit to revisions present in the versionedfile
-        parents = graph.get_parent_map(parents).keys()
+        parents = graph.get_parent_map(parents)
         for tree in self.iter_rev_trees(parents):
             root_id = tree.get_root_id()
             revision_root[tree.get_revision_id()] = root_id

=== modified file 'breezy/fifo_cache.py'
--- breezy/fifo_cache.py 2011-12-19 13:23:58 +0000
+++ breezy/fifo_cache.py 2017-06-05 21:41:38 +0000
@@ -158,8 +158,8 @@
         if len(args) == 1:
             arg = args[0]
             if isinstance(arg, dict):
-                for key, val in arg.iteritems():
-                    self.add(key, val)
+                for key in arg:
+                    self.add(key, arg[key])
             else:
                 for key, val in args[0]:
                     self.add(key, val)
@@ -167,8 +167,8 @@
             raise TypeError('update expected at most 1 argument, got %d'
                             % len(args))
         if kwargs:
-            for key, val in kwargs.iteritems():
-                self.add(key, val)
+            for key in kwargs:
+                self.add(key, kwargs[key])


 class FIFOSizeCache(FIFOCache):

=== modified file 'breezy/gpg.py'
--- breezy/gpg.py 2017-05-30 19:16:23 +0000
+++ breezy/gpg.py 2017-06-05 21:41:38 +0000
@@ -449,7 +449,7 @@
             signers.setdefault(fingerprint, 0)
             signers[fingerprint] += 1
         result = []
-        for fingerprint, number in signers.items():
+        for fingerprint, number in list(signers.items()):
             result.append(ngettext(u"Unknown key {0} signed {1} commit",
                           u"Unknown key {0} signed {1} commits",
                           number).format(fingerprint, number))

=== modified file 'breezy/graph.py'
--- breezy/graph.py 2017-05-25 01:35:55 +0000
+++ breezy/graph.py 2017-06-05 21:41:38 +0000
@@ -25,6 +25,10 @@
     revision,
     trace,
     )
+from .sixish import (
+    viewitems,
+    viewvalues,
+    )

 STEP_UNIQUE_SEARCHER_EVERY = 5

@@ -335,7 +339,7 @@
         """
         parent_map = self._parents_provider.get_parent_map(keys)
         parent_child = {}
-        for child, parents in sorted(parent_map.items()):
+        for child, parents in sorted(viewitems(parent_map)):
             for parent in parents:
                 parent_child.setdefault(parent, []).append(child)
         return parent_child
@@ -358,7 +362,7 @@
         NULL_REVISION = revision.NULL_REVISION
         known_revnos[NULL_REVISION] = 0
 
-        searching_known_tips = list(known_revnos.keys())
+        searching_known_tips = list(known_revnos)
 
         unknown_searched = {}
 
@@ -645,7 +649,7 @@
         # TODO: it might be possible to collapse searchers faster when they
         #       only have *some* search tips in common.
         next_unique_searchers = []
-        for searchers in unique_search_tips.itervalues():
+        for searchers in viewvalues(unique_search_tips):
             if len(searchers) == 1:
                 # Searching unique tips, go for it
                 next_unique_searchers.append(searchers[0])
@@ -835,7 +839,7 @@
                          for c in candidate_heads)
         active_searchers = dict(searchers)
         # skip over the actual candidate for each searcher
-        for searcher in active_searchers.itervalues():
+        for searcher in viewvalues(active_searchers):
             next(searcher)
         # The common walker finds nodes that are common to two or more of the
         # input keys, so that we don't access all history when a currently
@@ -852,7 +856,7 @@
             except StopIteration:
                 # No common points being searched at this time.
                 pass
-            for candidate in active_searchers.keys():
+            for candidate in list(active_searchers):
                 try:
                     searcher = active_searchers[candidate]
                 except KeyError:
@@ -878,11 +882,11 @@
                     # some searcher has encountered our known common nodes:
                     # just stop it
                     ancestor_set = {ancestor}
-                    for searcher in searchers.itervalues():
+                    for searcher in viewvalues(searchers):
                         searcher.stop_searching_any(ancestor_set)
                 else:
                     # or it may have been just reached by all the searchers:
-                    for searcher in searchers.itervalues():
+                    for searcher in viewvalues(searchers):
                         if ancestor not in searcher.seen:
                             break
                     else:
@@ -890,7 +894,7 @@
                         # making it be known as a descendant of all candidates,
                         # so we can stop searching it, and any seen ancestors
                         new_common.add(ancestor)
-                        for searcher in searchers.itervalues():
+                        for searcher in viewvalues(searchers):
                             seen_ancestors =\
                                 searcher.find_seen_ancestors([ancestor])
                             searcher.stop_searching_any(seen_ancestors)
@@ -1013,7 +1017,7 @@
             processed.update(pending)
             next_map = self.get_parent_map(pending)
             next_pending = set()
-            for item in next_map.iteritems():
+            for item in viewitems(next_map):
                 yield item
                 next_pending.update(p for p in item[1] if p not in processed)
             ghosts = pending.difference(next_map)
@@ -1249,7 +1253,7 @@
         ## for revision in revisions.intersection(descendants):
         ##   simple_ancestors.difference_update(descendants[revision])
         ## return simple_ancestors
-        for revision, parent_ids in parent_map.iteritems():
+        for revision, parent_ids in viewitems(parent_map):
             if parent_ids is None:
                 continue
             for parent_id in parent_ids:
@@ -1468,7 +1472,7 @@
         seen.update(revisions)
         parent_map = self._parents_provider.get_parent_map(revisions)
         found_revisions.update(parent_map)
-        for rev_id, parents in parent_map.iteritems():
+        for rev_id, parents in viewitems(parent_map):
             if parents is None:
                 continue
             new_found_parents = [p for p in parents if p not in seen]
@@ -1511,7 +1515,7 @@
             all_parents = []
             # We don't care if it is a ghost, since it can't be seen if it is
             # a ghost
-            for parent_ids in parent_map.itervalues():
+            for parent_ids in viewvalues(parent_map):
                 all_parents.extend(parent_ids)
             next_pending = all_seen.intersection(all_parents).difference(seen_ancestors)
             seen_ancestors.update(next_pending)
@@ -1556,14 +1560,14 @@
                     stop_rev_references[parent_id] += 1
         # if only the stopped revisions reference it, the ref count will be
         # 0 after this loop
-        for parents in self._current_parents.itervalues():
+        for parents in viewvalues(self._current_parents):
             for parent_id in parents:
                 try:
                     stop_rev_references[parent_id] -= 1
                 except KeyError:
                     pass
         stop_parents = set()
-        for rev_id, refs in stop_rev_references.iteritems():
+        for rev_id, refs in viewitems(stop_rev_references):
             if refs == 0:
                 stop_parents.add(rev_id)
         self._next_query.difference_update(stop_parents)
@@ -1599,7 +1603,7 @@
 def invert_parent_map(parent_map):
     """Given a map from child => parents, create a map of parent=>children"""
     child_map = {}
-    for child, parents in parent_map.iteritems():
+    for child, parents in viewitems(parent_map):
         for p in parents:
             # Any given parent is likely to have only a small handful
             # of children, many will have only one. So we avoid mem overhead of
@@ -1651,7 +1655,7 @@
     # Will not have any nodes removed, even though you do have an
     # 'uninteresting' linear D->B and E->C
     children = {}
-    for child, parents in parent_map.iteritems():
+    for child, parents in viewitems(parent_map):
         children.setdefault(child, [])
         for p in parents:
             children.setdefault(p, []).append(child)
 
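
Most of the graph.py changes swap dict.iteritems()/itervalues() for the
viewitems() and viewvalues() helpers imported from breezy.sixish at the top
of the file. A rough sketch of what such a compatibility shim can look like;
the real sixish implementation may differ in detail:

    import sys

    if sys.version_info[0] >= 3:
        def viewitems(d):
            return d.items()       # already a view object on Python 3
    else:
        def viewitems(d):
            return d.viewitems()   # dict view, added in Python 2.7

    parent_map = {'rev-2': ('rev-1',), 'rev-3': ('rev-1',)}
    for child, parents in sorted(viewitems(parent_map)):
        print(child, parents)
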
=== modified file 'breezy/groupcompress.py'
--- breezy/groupcompress.py 2017-06-04 18:09:30 +0000
+++ breezy/groupcompress.py 2017-06-05 21:41:38 +0000
@@ -45,6 +45,7 @@
 from .sixish import (
     map,
     range,
+    viewitems,
     )
 from .versionedfile import (
     _KeyRefs,
@@ -73,7 +74,7 @@
     # groupcompress ordering is approximately reverse topological,
     # properly grouped by file-id.
     per_prefix_map = {}
-    for key, value in parent_map.iteritems():
+    for key, value in viewitems(parent_map):
         if isinstance(key, str) or len(key) == 1:
             prefix = ''
         else:
@@ -1541,10 +1542,10 @@
                 # This is the group the bytes are stored in, followed by the
                 # location in the group
                 return locations[key][0]
-            present_keys = sorted(locations.iterkeys(), key=get_group)
             # We don't have an ordering for keys in the in-memory object, but
             # lets process the in-memory ones first.
-            present_keys = list(unadded_keys) + present_keys
+            present_keys = list(unadded_keys)
+            present_keys.extend(sorted(locations, key=get_group))
             # Now grab all of the ones from other sources
             source_keys = [(self, present_keys)]
             source_keys.extend(source_result)
@@ -1574,7 +1575,7 @@
             # start with one key, recurse to its oldest parent, then grab
             # everything in the same group, etc.
             parent_map = dict((key, details[2]) for key, details in
-                              locations.iteritems())
+                              viewitems(locations))
             for key in unadded_keys:
                 parent_map[key] = self._unadded_refs[key]
             parent_map.update(fallback_parent_map)
@@ -2032,10 +2033,10 @@
         if changed:
             result = []
             if self._parents:
-                for key, (value, node_refs) in keys.iteritems():
+                for key, (value, node_refs) in viewitems(keys):
                     result.append((key, value, node_refs))
             else:
-                for key, (value, node_refs) in keys.iteritems():
+                for key, (value, node_refs) in viewitems(keys):
                     result.append((key, value))
             records = result
         key_dependencies = self._key_dependencies
 
=== modified file 'breezy/hashcache.py'
--- breezy/hashcache.py 2017-05-22 00:56:52 +0000
+++ breezy/hashcache.py 2017-06-05 21:41:38 +0000
@@ -44,6 +44,7 @@
     )
 from .sixish import (
     text_type,
+    viewitems,
     )
 
 
@@ -132,14 +133,12 @@
         # Stat in inode order as optimisation for at least linux.
         def inode_order(path_and_cache):
             return path_and_cache[1][1][3]
-        for inum, path, cache_entry in sorted(self._cache, key=inode_order):
+        for path, cache_val in sorted(viewitems(self._cache), key=inode_order):
             abspath = osutils.pathjoin(self.root, path)
             fp = self._fingerprint(abspath)
             self.stat_count += 1
 
-            cache_fp = cache_entry[1]
-
-            if (not fp) or (cache_fp != fp):
+            if not fp or cache_val[1] != fp:
                 # not here or not a regular file anymore
                 self.removed_count += 1
                 self.needs_write = True
@@ -229,7 +228,7 @@
         try:
             outf.write(CACHE_HEADER)
 
-            for path, c in self._cache.iteritems():
+            for path, c in viewitems(self._cache):
                 line_info = [path.encode('utf-8'), '// ', c[0], ' ']
                 line_info.append(' '.join([str(fld) for fld in c[1]]))
                 line_info.append('\n')
 
=== modified file 'breezy/help.py'
--- breezy/help.py 2017-05-22 00:56:52 +0000
+++ breezy/help.py 2017-06-05 21:41:38 +0000
@@ -143,12 +143,12 @@
 
     def _check_prefix_uniqueness(self):
         """Ensure that the index collection is able to differentiate safely."""
-        prefixes = {}
+        prefixes = set()
         for index in self.search_path:
-            prefixes.setdefault(index.prefix, []).append(index)
-        for prefix, indices in prefixes.items():
-            if len(indices) > 1:
+            prefix = index.prefix
+            if prefix in prefixes:
                 raise errors.DuplicateHelpPrefix(prefix)
+            prefixes.add(prefix)
 
     def search(self, topic):
         """Search for topic across the help search path.
 
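
The _check_prefix_uniqueness() rewrite drops the dict-of-lists accumulation
and raises on the first repeated prefix instead. The underlying set-based
pattern, sketched with hypothetical data:

    def first_duplicate(items):
        seen = set()
        for item in items:
            if item in seen:
                return item       # duplicate found without counting lists
            seen.add(item)
        return None

    assert first_duplicate(['commands', 'topics', 'commands']) == 'commands'
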
=== modified file 'breezy/help_topics/__init__.py'
--- breezy/help_topics/__init__.py 2017-06-02 21:28:05 +0000
+++ breezy/help_topics/__init__.py 2017-06-05 21:41:38 +0000
@@ -233,7 +233,7 @@
 
         protl = []
         decl = []
-        protos = transport_list_registry.keys( )
+        protos = transport_list_registry.keys()
         protos.sort(sort_func)
         for proto in protos:
             shorthelp = transport_list_registry.get_help(proto)
 
=== modified file 'breezy/index.py'
--- breezy/index.py 2017-06-05 01:46:14 +0000
+++ breezy/index.py 2017-06-05 21:41:38 +0000
@@ -45,6 +45,7 @@
 from .sixish import (
     BytesIO,
     viewvalues,
+    viewitems,
     )
 from .static_tuple import StaticTuple
 
@@ -144,7 +145,7 @@
         if self._nodes_by_key is None:
             nodes_by_key = {}
             if self.reference_lists:
-                for key, (absent, references, value) in self._nodes.iteritems():
+                for key, (absent, references, value) in viewitems(self._nodes):
                     if absent:
                         continue
                     key_dict = nodes_by_key
@@ -152,7 +153,7 @@
                         key_dict = key_dict.setdefault(subkey, {})
                     key_dict[key[-1]] = key, value, references
             else:
-                for key, (absent, references, value) in self._nodes.iteritems():
+                for key, (absent, references, value) in viewitems(self._nodes):
                     if absent:
                         continue
                     key_dict = nodes_by_key
@@ -276,7 +277,7 @@
         # forward sorted by key. In future we may consider topological sorting,
         # at the cost of table scans for direct lookup, or a second index for
         # direct lookup
-        nodes = sorted(self._nodes.items())
+        nodes = sorted(viewitems(self._nodes))
         # if we do not prepass, we don't know how long it will be up front.
         expected_bytes = None
         # we only need to pre-pass if we have reference lists at all.
@@ -479,7 +480,7 @@
             stream.close()
             del lines[-1]
             _, _, _, trailers = self._parse_lines(lines, pos)
-        for key, absent, references, value in self._keys_by_offset.itervalues():
+        for key, absent, references, value in viewvalues(self._keys_by_offset):
             if absent:
                 continue
             # resolve references:
@@ -510,7 +511,7 @@
                 % (ref_list_num, self.node_ref_lists))
         refs = set()
         nodes = self._nodes
-        for key, (value, ref_lists) in nodes.iteritems():
+        for key, (value, ref_lists) in viewitems(nodes):
             ref_list = ref_lists[ref_list_num]
             refs.update([ref for ref in ref_list if ref not in nodes])
         return refs
@@ -519,13 +520,13 @@
         if self._nodes_by_key is None:
             nodes_by_key = {}
             if self.node_ref_lists:
-                for key, (value, references) in self._nodes.iteritems():
+                for key, (value, references) in viewitems(self._nodes):
                     key_dict = nodes_by_key
                     for subkey in key[:-1]:
                         key_dict = key_dict.setdefault(subkey, {})
                     key_dict[key[-1]] = key, value, references
             else:
-                for key, value in self._nodes.iteritems():
+                for key, value in viewitems(self._nodes):
                     key_dict = nodes_by_key
                     for subkey in key[:-1]:
                         key_dict = key_dict.setdefault(subkey, {})
@@ -548,10 +549,10 @@
         if self._nodes is None:
             self._buffer_all()
         if self.node_ref_lists:
-            for key, (value, node_ref_lists) in self._nodes.iteritems():
+            for key, (value, node_ref_lists) in viewitems(self._nodes):
                 yield self, key, value, node_ref_lists
         else:
-            for key, value in self._nodes.iteritems():
+            for key, value in viewitems(self._nodes):
                 yield self, key, value
 
     def _read_prefix(self, stream):
@@ -1599,11 +1600,11 @@
             trace.mutter_callsite(3,
                 "iter_all_entries scales with size of history.")
         if self.reference_lists:
-            for key, (absent, references, value) in self._nodes.iteritems():
+            for key, (absent, references, value) in viewitems(self._nodes):
                 if not absent:
                     yield self, key, value, references
         else:
-            for key, (absent, references, value) in self._nodes.iteritems():
+            for key, (absent, references, value) in viewitems(self._nodes):
                 if not absent:
                     yield self, key, value
 
 
=== modified file 'breezy/inventory.py'
--- breezy/inventory.py 2017-05-30 19:16:23 +0000
+++ breezy/inventory.py 2017-06-05 21:41:38 +0000
@@ -48,7 +48,10 @@
     lazy_regex,
     trace,
     )
-
+from .sixish import (
+    viewitems,
+    viewvalues,
+    )
 from .static_tuple import StaticTuple
 
 
@@ -227,9 +230,6 @@
 
     known_kinds = ('file', 'directory', 'symlink')
 
-    def sorted_children(self):
-        return sorted(self.children.items())
-
     @staticmethod
     def versionable_kind(kind):
         return (kind in ('file', 'directory', 'symlink', 'tree-reference'))
@@ -402,6 +402,9 @@
         super(InventoryDirectory, self).__init__(file_id, name, parent_id)
         self.children = {}
 
+    def sorted_children(self):
+        return sorted(viewitems(self.children))
+
     def kind_character(self):
         """See InventoryEntry.kind_character."""
         return '/'
@@ -665,7 +668,7 @@
 
         # unrolling the recursive called changed the time from
         # 440ms/663ms (inline/total) to 116ms/116ms
-        children = sorted(from_dir.children.items())
+        children = sorted(viewitems(from_dir.children))
         if not recursive:
             for name, ie in children:
                 yield name, ie
@@ -690,7 +693,7 @@
                     continue
 
                 # But do this child first
-                new_children = sorted(ie.children.items())
+                new_children = sorted(viewitems(ie.children))
                 new_children = collections.deque(new_children)
                 stack.append((path, new_children))
                 # Break out of inner loop, so that we start outer loop with child
@@ -771,7 +774,7 @@
             cur_relpath, cur_dir = stack.pop()
 
             child_dirs = []
-            for child_name, child_ie in sorted(cur_dir.children.iteritems()):
+            for child_name, child_ie in sorted(viewitems(cur_dir.children)):
 
                 child_relpath = cur_relpath + child_name
 
@@ -814,7 +817,7 @@
         """
         accum = []
         def descend(dir_ie, dir_path):
-            kids = sorted(dir_ie.children.items())
+            kids = sorted(viewitems(dir_ie.children))
             for name, ie in kids:
                 child_path = osutils.pathjoin(dir_path, name)
                 accum.append((child_path, ie))
@@ -1102,9 +1105,8 @@
         XXX: We may not want to merge this into bzr.dev.
         """
         if self.root is None:
-            return
-        for _, ie in self._byid.iteritems():
-            yield ie
+            return ()
+        return iter(viewvalues(self._byid))
 
     def __len__(self):
         """Returns number of entries."""
@@ -1138,8 +1140,10 @@
                 "inventory already contains entry with id {%s}" %
                 entry.file_id)
         self._byid[entry.file_id] = entry
-        for child in getattr(entry, 'children', {}).itervalues():
-            self._add_child(child)
+        children = getattr(entry, 'children', {})
+        if children is not None:
+            for child in viewvalues(children):
+                self._add_child(child)
         return entry
 
     def add(self, entry):
@@ -1288,7 +1292,7 @@
             ie = to_find_delete.pop()
             to_delete.append(ie.file_id)
             if ie.kind == 'directory':
-                to_find_delete.extend(ie.children.values())
+                to_find_delete.extend(viewvalues(ie.children))
         for file_id in reversed(to_delete):
             ie = self[file_id]
             del self._byid[file_id]
@@ -1589,7 +1593,7 @@
         result = CHKInventory(self._search_key_name)
         if propagate_caches:
             # Just propagate the path-to-fileid cache for now
-            result._path_to_fileid_cache = dict(self._path_to_fileid_cache.iteritems())
+            result._path_to_fileid_cache = self._path_to_fileid_cache.copy()
         search_key_func = chk_map.search_key_registry.get(self._search_key_name)
         self.id_to_entry._ensure_root()
         maximum_size = self.id_to_entry._root_node.maximum_size
@@ -1708,7 +1712,7 @@
                 continue
             # This loop could potentially be better by using the id_basename
             # map to just get the child file ids.
-            for child in entry.children.values():
+            for child in viewvalues(entry.children):
                 if child.file_id not in altered:
                     raise errors.InconsistentDelta(self.id2path(child.file_id),
                         child.file_id, "Child not deleted or reparented when "
@@ -1720,7 +1724,7 @@
             # re-keying, but its simpler to just output that as a delete+add
             # to spend less time calculating the delta.
             delta_list = []
-            for key, (old_key, value) in parent_id_basename_delta.iteritems():
+            for key, (old_key, value) in viewitems(parent_id_basename_delta):
                 if value is not None:
                     delta_list.append((old_key, key, value))
                 else:
 
=== modified file 'breezy/knit.py'
--- breezy/knit.py 2017-06-04 18:09:30 +0000
+++ breezy/knit.py 2017-06-05 21:41:38 +0000
@@ -99,6 +99,8 @@
 from .sixish import (
     BytesIO,
     range,
+    viewitems,
+    viewvalues,
     )
 from .versionedfile import (
     _KeyRefs,
@@ -843,8 +845,8 @@
             if compression_parent not in all_build_index_memos:
                 next_keys.add(compression_parent)
         build_keys = next_keys
-    return sum([index_memo[2] for index_memo
-                in all_build_index_memos.itervalues()])
+    return sum(index_memo[2]
+               for index_memo in viewvalues(all_build_index_memos))
 
 
 class KnitVersionedFiles(VersionedFilesWithFallbacks):
 
@@ -1173,7 +1175,7 @@
             build_details = self._index.get_build_details(pending_components)
             current_components = set(pending_components)
             pending_components = set()
-            for key, details in build_details.iteritems():
+            for key, details in viewitems(build_details):
                 (index_memo, compression_parent, parents,
                  record_details) = details
                 method = record_details[0]
@@ -1280,7 +1282,7 @@
         # key = component_id, r = record_details, i_m = index_memo,
         # n = next
         records = [(key, i_m) for key, (r, i_m, n)
-                   in position_map.iteritems()]
+                   in viewitems(position_map)]
         # Sort by the index memo, so that we request records from the
         # same pack file together, and in forward-sorted order
         records.sort(key=operator.itemgetter(1))
@@ -1411,7 +1413,7 @@
         # map from key to
         # (record_details, access_memo, compression_parent_key)
         positions = dict((key, self._build_details_to_components(details))
-            for key, details in build_details.iteritems())
+            for key, details in viewitems(build_details))
         absent_keys = keys.difference(set(positions))
         # There may be more absent keys : if we're missing the basis component
         # and are trying to include the delta closure.
@@ -1525,7 +1527,7 @@
         missing = set(keys)
         record_map = self._get_record_map(missing, allow_missing=True)
         result = {}
-        for key, details in record_map.iteritems():
+        for key, details in viewitems(record_map):
             if key not in missing:
                 continue
             # record entry 2 is the 'digest'.
@@ -1757,7 +1759,7 @@
         # we need key, position, length
         key_records = []
         build_details = self._index.get_build_details(keys)
-        for key, details in build_details.iteritems():
+        for key, details in viewitems(build_details):
             if key in keys:
                 key_records.append((key, details[0]))
         records_iter = enumerate(self._read_records_iter(key_records))
@@ -2165,8 +2167,8 @@
         # one line with next ('' for None)
         # one line with byte count of the record bytes
         # the record bytes
-        for key, (record_bytes, (method, noeol), next) in \
-            self._raw_record_map.iteritems():
+        for key, (record_bytes, (method, noeol), next) in viewitems(
+                self._raw_record_map):
             key_bytes = '\x00'.join(key)
             parents = self.global_map.get(key, None)
             if parents is None:
@@ -2890,10 +2892,10 @@
                 del keys[key]
         result = []
         if self._parents:
-            for key, (value, node_refs) in keys.iteritems():
+            for key, (value, node_refs) in viewitems(keys):
                 result.append((key, value, node_refs))
         else:
-            for key, (value, node_refs) in keys.iteritems():
+            for key, (value, node_refs) in viewitems(keys):
                 result.append((key, value))
         self._add_callback(result)
         if missing_compression_parents:
@@ -3269,7 +3271,7 @@
             self._all_build_details.update(build_details)
             # new_nodes = self._vf._index._get_entries(this_iteration)
             pending = set()
-            for key, details in build_details.iteritems():
+            for key, details in viewitems(build_details):
                 (index_memo, compression_parent, parent_keys,
                  record_details) = details
                 self._parent_map[key] = parent_keys
@@ -3290,7 +3292,7 @@
                 else:
                     self._num_compression_children[compression_parent] = 1
 
-            missing_versions = this_iteration.difference(build_details.keys())
+            missing_versions = this_iteration.difference(build_details)
             if missing_versions:
                 for key in missing_versions:
                     if key in self._parent_map and key in self._text_cache:
 
=== modified file 'breezy/log.py'
--- breezy/log.py 2017-06-04 18:09:30 +0000
+++ breezy/log.py 2017-06-05 21:41:38 +0000
@@ -864,7 +864,7 @@
     if match is None:
         return log_rev_iterator
     searchRE = [(k, [re.compile(x, re.IGNORECASE) for x in v])
-                for (k,v) in match.iteritems()]
+                for k, v in match.items()]
     return _filter_re(searchRE, log_rev_iterator)
 
 
@@ -881,7 +881,7 @@
                'author': (rev.get_apparent_authors()),
                'bugs': list(rev.iter_bugs())
                }
-    strings[''] = [item for inner_list in strings.itervalues()
+    strings[''] = [item for inner_list in strings.values()
                    for item in inner_list]
     for (k,v) in searchRE:
         if k in strings and not _match_any_filter(strings[k], v):
 
=== modified file 'breezy/lru_cache.py'
--- breezy/lru_cache.py 2017-05-30 19:16:23 +0000
+++ breezy/lru_cache.py 2017-06-05 21:41:38 +0000
@@ -21,6 +21,11 @@
 from . import (
     trace,
     )
+from .sixish import (
+    viewitems,
+    viewkeys,
+    )
+
 
 _null_key = object()
 
@@ -129,11 +134,12 @@
 
         :return: An unordered list of keys that are currently cached.
         """
-        return self._cache.keys()
+        # GZ 2016-06-04: Maybe just make this return the view?
+        return list(viewkeys(self._cache))
 
     def as_dict(self):
         """Get a new dict with the same key:value pairs as the cache"""
-        return dict((k, n.value) for k, n in self._cache.iteritems())
+        return dict((k, n.value) for k, n in viewitems(self._cache))
 
     def cleanup(self):
         """Clear the cache until it shrinks to the requested size.
 
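
The keys() change above makes the copy explicit: list(viewkeys(d)) snapshots
the keys, so callers keep a stable list even if the cache mutates afterwards,
whereas a live view would reflect the mutation. Illustrative only, using
Python 3 semantics:

    d = {'a': 1}
    snapshot = list(d.keys())    # the list(viewkeys(...)) behaviour
    view = d.keys()              # a live view on Python 3
    d['b'] = 2
    assert snapshot == ['a']
    assert sorted(view) == ['a', 'b']
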
=== modified file 'breezy/mail_client.py'
--- breezy/mail_client.py 2017-05-22 00:56:52 +0000
+++ breezy/mail_client.py 2017-06-05 21:41:38 +0000
@@ -252,7 +252,7 @@
         if body is not None:
             message_options['body'] = body
         options_list = ['%s=%s' % (k, urlutils.escape(v)) for (k, v) in
-                        sorted(message_options.iteritems())]
+                        sorted(message_options.items())]
         return ['mailto:%s?%s' % (self._encode_safe(to or ''),
                                   '&'.join(options_list))]
 mail_client_registry.register('evolution', Evolution,
@@ -316,7 +316,7 @@
         else:
             options_list = []
         options_list.extend(["%s='%s'" % (k, v) for k, v in
-                             sorted(message_options.iteritems())])
+                             sorted(message_options.items())])
         return ['-compose', ','.join(options_list)]
 mail_client_registry.register('thunderbird', Thunderbird,
                               help=Thunderbird.__doc__)
 
=== modified file 'breezy/merge.py'
--- breezy/merge.py 2017-05-30 19:16:23 +0000
+++ breezy/merge.py 2017-06-05 21:41:38 +0000
@@ -48,6 +48,9 @@
     hooks,
     registry,
     )
+from .sixish import (
+    viewitems,
+    )
 # TODO: Report back as changes are merged in
 
 
@@ -2289,7 +2292,7 @@
         filtered_parent_map = {}
         child_map = {}
         tails = []
-        for key, parent_keys in parent_map.iteritems():
+        for key, parent_keys in viewitems(parent_map):
             culled_parent_keys = [p for p in parent_keys if p in parent_map]
             if not culled_parent_keys:
                 tails.append(key)
 
=== modified file 'breezy/mutabletree.py'
--- breezy/mutabletree.py 2017-05-30 19:32:13 +0000
+++ breezy/mutabletree.py 2017-06-05 21:41:38 +0000
@@ -42,6 +42,9 @@
 """)
 
 from .decorators import needs_read_lock, needs_write_lock
+from .sixish import (
+    viewvalues,
+    )
 
 
 def needs_tree_write_lock(unbound):
@@ -551,7 +554,9 @@
     """Helper for MutableTree.smart_add."""
 
     def get_inventory_delta(self):
-        return self._invdelta.values()
+        # GZ 2016-06-05: Returning view would probably be fine but currently
+        # Inventory.apply_delta is documented as requiring a list of changes.
+        return list(viewvalues(self._invdelta))
 
     def _get_ie(self, inv_path):
         """Retrieve the most up to date inventory entry for a path.
@@ -633,12 +638,12 @@
         prev_dir = None
 
         is_inside = osutils.is_inside_or_parent_of_any
-        for path, (inv_path, this_ie) in sorted(
-                user_dirs.iteritems(), key=operator.itemgetter(0)):
+        for path in sorted(user_dirs):
             if (prev_dir is None or not is_inside([prev_dir], path)):
+                inv_path, this_ie = user_dirs[path]
                 yield (path, inv_path, this_ie, None)
             prev_dir = path
 
     def __init__(self, tree, action, conflicts_related=None):
         self.tree = tree
         if action is None:
 
=== modified file 'breezy/plugins/bash_completion/bashcomp.py'
--- breezy/plugins/bash_completion/bashcomp.py 2017-05-22 00:56:52 +0000
+++ breezy/plugins/bash_completion/bashcomp.py 2017-06-05 21:41:38 +0000
@@ -189,7 +189,7 @@
             brz_version += "."
         else:
             brz_version += " and the following plugins:"
-            for name, plugin in sorted(self.data.plugins.iteritems()):
+            for name, plugin in sorted(self.data.plugins.items()):
                 brz_version += "\n# %s" % plugin
         return brz_version
 
@@ -314,7 +314,7 @@
         self.data.global_options.add(short)
 
     def aliases(self):
-        for alias, expansion in config.GlobalConfig().get_aliases().iteritems():
+        for alias, expansion in config.GlobalConfig().get_aliases().items():
             for token in cmdline.split(expansion):
                 if not token.startswith("-"):
                     self.user_aliases.setdefault(token, set()).add(alias)
@@ -352,7 +352,7 @@
                               if useralias not in cmd_data.aliases]))
 
         opts = cmd.options()
-        for optname, opt in sorted(opts.iteritems()):
+        for optname, opt in sorted(opts.items()):
             cmd_data.options.extend(self.option(opt))
 
         if 'help' == name or 'help' in cmd.aliases:
@@ -471,7 +471,7 @@
     if args:
         parser.error("script does not take positional arguments")
     kwargs = dict()
-    for name, value in opts.__dict__.iteritems():
+    for name, value in opts.__dict__.items():
         if value is not None:
             kwargs[name] = value
 
 
=== modified file 'breezy/plugins/bisect/tests.py'
--- breezy/plugins/bisect/tests.py 2017-05-24 14:51:33 +0000
+++ breezy/plugins/bisect/tests.py 2017-06-05 21:41:38 +0000
@@ -44,10 +44,10 @@
                         1.3: "one dot three", 2: "two", 3: "three",
                         4: "four", 5: "five"}
 
-        test_file = open("test_file")
-        content = test_file.read().strip()
+        with open("test_file") as f:
+            content = f.read().strip()
         if content != rev_contents[rev]:
-            rev_ids = dict((rev_contents[k], k) for k in rev_contents.keys())
+            rev_ids = dict((rev_contents[k], k) for k in rev_contents)
             found_rev = rev_ids[content]
             raise AssertionError("expected rev %0.1f, found rev %0.1f"
                                  % (rev, found_rev))
 
=== modified file 'breezy/plugins/fastimport/branch_updater.py'
--- breezy/plugins/fastimport/branch_updater.py 2017-05-23 23:21:16 +0000
+++ breezy/plugins/fastimport/branch_updater.py 2017-06-05 21:41:38 +0000
@@ -77,7 +77,7 @@
         """
         branch_tips = []
         lost_heads = []
-        ref_names = self.heads_by_ref.keys()
+        ref_names = list(self.heads_by_ref)
         if self.branch is not None:
             trunk = self.select_trunk(ref_names)
             default_tip = self.heads_by_ref[trunk][0]
 
=== modified file 'breezy/plugins/fastimport/cache_manager.py'
--- breezy/plugins/fastimport/cache_manager.py 2017-05-23 23:21:16 +0000
+++ breezy/plugins/fastimport/cache_manager.py 2017-06-05 21:41:38 +0000
@@ -51,7 +51,7 @@
 
     def finalize(self):
         if self.disk_blobs is not None:
-            for info in self.disk_blobs.itervalues():
+            for info in self.disk_blobs.values():
                 if info[-1] is not None:
                     os.unlink(info[-1])
             self.disk_blobs = None
@@ -145,17 +145,17 @@
         #self._show_stats_for(self._blobs, "other blobs", note=note)
         #self.reftracker.dump_stats(note=note)
 
-    def _show_stats_for(self, dict, label, note=trace.note, tuple_key=False):
+    def _show_stats_for(self, a_dict, label, note, tuple_key=False):
         """Dump statistics about a given dictionary.
 
         By the key and value need to support len().
         """
-        count = len(dict)
+        count = len(a_dict)
         if tuple_key:
-            size = sum(map(len, (''.join(k) for k in dict.keys())))
+            size = sum(map(len, (''.join(k) for k in a_dict)))
         else:
-            size = sum(map(len, dict.keys()))
-        size += sum(map(len, dict.values()))
+            size = sum(map(len, a_dict))
+        size += sum(map(len, a_dict.values()))
         size = size * 1.0 / 1024
         unit = 'K'
         if size > 1024:
@@ -176,7 +176,7 @@
         self.inventories.clear()
 
     def _flush_blobs_to_disk(self):
-        blobs = self._sticky_blobs.keys()
+        blobs = list(self._sticky_blobs)
         sticky_blobs = self._sticky_blobs
         total_blobs = len(sticky_blobs)
         blobs.sort(key=lambda k:len(sticky_blobs[k]))
@@ -275,16 +275,3 @@
         if self._decref(id, self._sticky_blobs, None):
             self._sticky_memory_bytes -= len(content)
         return content
-
-
-def invert_dictset(d):
-    """Invert a dictionary with keys matching a set of values, turned into lists."""
-    # Based on recipe from ASPN
-    result = {}
-    for k, c in d.iteritems():
-        for v in c:
-            keys = result.setdefault(v, [])
-            keys.append(k)
-    return result
-
-
 
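
The invert_dictset() helper deleted here is not lost: an identical copy lives
in breezy/plugins/fastimport/helpers.py and is updated to d.items() later in
this diff. Its behaviour, sketched with made-up data:

    def invert_dictset(d):
        """Invert a dict of sets into a dict of lists of keys."""
        result = {}
        for k, c in d.items():
            for v in c:
                result.setdefault(v, []).append(k)
        return result

    heads = {'head-1': {'rev-a', 'rev-b'}}
    assert invert_dictset(heads) == {'rev-a': ['head-1'], 'rev-b': ['head-1']}
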
=== modified file 'breezy/plugins/fastimport/exporter.py'
--- breezy/plugins/fastimport/exporter.py 2017-05-23 23:37:18 +0000
+++ breezy/plugins/fastimport/exporter.py 2017-06-05 21:41:38 +0000
@@ -292,7 +292,7 @@
         if self.export_marks_file:
             revision_ids = dict((m, r) for r, m in self.revid_to_mark.items())
             marks_file.export_marks(self.export_marks_file, revision_ids)
- 
+
     def is_empty_dir(self, tree, path):
         path_id = tree.path2id(path)
         if path_id is None:
 
=== modified file 'breezy/plugins/fastimport/helpers.py'
--- breezy/plugins/fastimport/helpers.py 2017-05-23 23:21:16 +0000
+++ breezy/plugins/fastimport/helpers.py 2017-06-05 21:41:38 +0000
@@ -181,7 +181,7 @@
     """Invert a dictionary with keys matching a set of values, turned into lists."""
     # Based on recipe from ASPN
     result = {}
-    for k, c in d.iteritems():
+    for k, c in d.items():
         for v in c:
             keys = result.setdefault(v, [])
             keys.append(k)
@@ -192,7 +192,7 @@
     """Invert a dictionary with keys matching each value turned into a list."""
     # Based on recipe from ASPN
     result = {}
-    for k, v in d.iteritems():
+    for k, v in d.items():
         keys = result.setdefault(v, [])
         keys.append(k)
     return result
 
=== modified file 'breezy/plugins/fastimport/idmapfile.py'
--- breezy/plugins/fastimport/idmapfile.py 2017-05-23 23:21:16 +0000
+++ breezy/plugins/fastimport/idmapfile.py 2017-06-05 21:41:38 +0000
@@ -29,13 +29,9 @@
     :param filename: name of the file to save the data to
     :param revision_ids: a dictionary of commit ids to revision ids.
     """
-    f = open(filename, 'wb')
-    try:
-        for commit_id, rev_id in revision_ids.iteritems():
-            f.write("%s %s\n" % (commit_id, rev_id))
-        f.flush()
-    finally:
-        f.close()
+    with open(filename, 'wb') as f:
+        for commit_id in revision_ids:
+            f.write("%s %s\n" % (commit_id, revision_ids[commit_id]))
 
 
 def load_id_map(filename):
 
=== modified file 'breezy/plugins/fastimport/marks_file.py'
--- breezy/plugins/fastimport/marks_file.py 2017-05-23 23:21:16 +0000
+++ breezy/plugins/fastimport/marks_file.py 2017-06-05 21:41:38 +0000
@@ -74,6 +74,6 @@
         return
 
     # Write the revision info
-    for mark, revid in revision_ids.iteritems():
-        f.write(':%s %s\n' % (str(mark).lstrip(':'), revid))
+    for mark in revision_ids:
+        f.write(':%s %s\n' % (str(mark).lstrip(':'), revision_ids[mark]))
     f.close()
 
=== modified file 'breezy/plugins/fastimport/processors/info_processor.py'
--- breezy/plugins/fastimport/processors/info_processor.py 2017-06-04 18:09:30 +0000
+++ breezy/plugins/fastimport/processors/info_processor.py 2017-06-05 21:41:38 +0000
@@ -87,11 +87,11 @@
         # Commit stats
         if self.cmd_counts['commit']:
             p_items = []
-            for _ in range(self.max_parent_count + 1):
+            for i in range(self.max_parent_count + 1):
                 if i in self.parent_counts:
                     count = self.parent_counts[i]
                     p_items.append(("parents-%d" % i, count))
-            merges_count = len(self.merges.keys())
+            merges_count = len(self.merges)
             p_items.append(('total revisions merged', merges_count))
             flags = {
                 'separate authors found': self.separate_authors_found,
@@ -100,21 +100,21 @@
                 'blobs referenced by SHA': self.sha_blob_references,
                 }
             self._dump_stats_group("Parent counts", p_items, str)
-            self._dump_stats_group("Commit analysis", flags.iteritems(), _found)
+            self._dump_stats_group("Commit analysis", flags.items(), _found)
             heads = invert_dictset(self.reftracker.heads)
-            self._dump_stats_group("Head analysis", heads.iteritems(), None,
+            self._dump_stats_group("Head analysis", heads.items(), None,
                 _iterable_as_config_list)
             # note("\t%d\t%s" % (len(self.committers), 'unique committers'))
-            self._dump_stats_group("Merges", self.merges.iteritems(), None)
+            self._dump_stats_group("Merges", self.merges.items(), None)
             # We only show the rename old path and copy source paths when -vv
             # (verbose=2) is specified. The output here for mysql's data can't
             # be parsed currently so this bit of code needs more work anyhow ..
             if self.verbose >= 2:
                 self._dump_stats_group("Rename old paths",
-                    self.rename_old_paths.iteritems(), len,
+                    self.rename_old_paths.items(), len,
                     _iterable_as_config_list)
                 self._dump_stats_group("Copy source paths",
-                    self.copy_source_paths.iteritems(), len,
+                    self.copy_source_paths.items(), len,
                     _iterable_as_config_list)
 
             # Blob stats
@@ -123,11 +123,10 @@
             if self.verbose:
                 del self.blobs['used']
             self._dump_stats_group("Blob usage tracking",
-                self.blobs.iteritems(), len, _iterable_as_config_list)
+                self.blobs.items(), len, _iterable_as_config_list)
         if self.blob_ref_counts:
             blobs_by_count = invert_dict(self.blob_ref_counts)
-            blob_items = blobs_by_count.items()
-            blob_items.sort()
+            blob_items = sorted(blobs_by_count.items())
             self._dump_stats_group("Blob reference counts",
                 blob_items, len, _iterable_as_config_list)
 
@@ -136,7 +135,7 @@
             reset_stats = {
                 'lightweight tags': self.lightweight_tags,
                 }
-            self._dump_stats_group("Reset analysis", reset_stats.iteritems())
+            self._dump_stats_group("Reset analysis", reset_stats.items())
 
     def _dump_stats_group(self, title, items, normal_formatter=None,
                           verbose_formatter=None):
 
=== modified file 'breezy/plugins/fastimport/revision_store.py'
--- breezy/plugins/fastimport/revision_store.py 2017-05-30 23:57:10 +0000
+++ breezy/plugins/fastimport/revision_store.py 2017-06-05 21:41:38 +0000
@@ -269,7 +269,7 @@
         # repository.CommitBuilder.record_entry_contents().
         parent_candidate_entries = ie.parent_candidates(self._rev_parent_invs)
         head_set = self._commit_builder._heads(ie.file_id,
-            parent_candidate_entries.keys())
+            list(parent_candidate_entries))
         heads = []
         for inv in self._rev_parent_invs:
             if inv.has_id(ie.file_id):
 
=== modified file 'breezy/plugins/launchpad/lp_api_lite.py'
--- breezy/plugins/launchpad/lp_api_lite.py 2017-05-22 00:56:52 +0000
+++ breezy/plugins/launchpad/lp_api_lite.py 2017-06-05 21:41:38 +0000
@@ -194,7 +194,7 @@
     # Note: this assumes that a given rev won't get tagged multiple times. But
     # it should be valid for the package importer branches that we care
     # about
-    reverse_dict = dict((rev, tag) for tag, rev in tag_dict.iteritems())
+    reverse_dict = dict((rev, tag) for tag, rev in tag_dict.items())
     the_branch.lock_read()
     try:
         last_rev = the_branch.last_revision()
 
=== modified file 'breezy/plugins/launchpad/lp_registration.py'
--- breezy/plugins/launchpad/lp_registration.py 2017-06-02 01:01:21 +0000
+++ breezy/plugins/launchpad/lp_registration.py 2017-06-05 21:41:38 +0000
@@ -94,7 +94,7 @@
     # NB: these should always end in a slash to avoid xmlrpclib appending
     # '/RPC2'
     LAUNCHPAD_INSTANCE = {}
-    for instance, domain in LAUNCHPAD_DOMAINS.iteritems():
+    for instance, domain in LAUNCHPAD_DOMAINS.items():
         LAUNCHPAD_INSTANCE[instance] = 'https://xmlrpc.%s/bazaar/' % domain
 
     # We use production as the default because edge has been deprecated circa
@@ -197,7 +197,7 @@
         else:
             domains = (
                 'bazaar.%s' % domain
-                for domain in self.LAUNCHPAD_DOMAINS.itervalues())
+                for domain in self.LAUNCHPAD_DOMAINS.values())
         if hostinfo not in domains:
             raise NotLaunchpadBranch(branch_url)
         return path.lstrip('/')
 
=== modified file 'breezy/plugins/stats/cmds.py'
--- breezy/plugins/stats/cmds.py 2017-06-01 23:52:12 +0000
+++ breezy/plugins/stats/cmds.py 2017-06-05 21:41:38 +0000
@@ -60,7 +60,7 @@
         info[1][email] = info[1].setdefault(email, 0) + 1
         info[2][username] = info[2].setdefault(username, 0) + 1
     res = [(len(revs), revs, emails, fnames)
-           for revs, emails, fnames in committer_to_info.itervalues()]
+           for revs, emails, fnames in committer_to_info.values()]
     res.sort(reverse=True)
     return res
 
@@ -89,7 +89,7 @@
89 old_email_id = email_to_id[old_email]89 old_email_id = email_to_id[old_email]
90 assert old_email_id in (old_id, new_id)90 assert old_email_id in (old_id, new_id)
91 email_to_id[old_email] = cur_id91 email_to_id[old_email] = cur_id
92 for email, usernames in email_users.iteritems():92 for email, usernames in email_users.items():
93 assert email not in email_to_id93 assert email not in email_to_id
94 if not email:94 if not email:
95 # We use a different algorithm for usernames that have no email95 # We use a different algorithm for usernames that have no email
@@ -131,7 +131,7 @@
131 collapse_ids(user_id, cur_id, id_combos)131 collapse_ids(user_id, cur_id, id_combos)
132 username_to_id[low_user] = cur_id132 username_to_id[low_user] = cur_id
133 combo_to_best_combo = {}133 combo_to_best_combo = {}
134 for cur_id, combos in id_to_combos.iteritems():134 for cur_id, combos in id_to_combos.items():
135 best_combo = sorted(combos,135 best_combo = sorted(combos,
136 key=lambda x:combo_count[x],136 key=lambda x:combo_count[x],
137 reverse=True)[0]137 reverse=True)[0]
@@ -206,10 +206,10 @@
206 for count, revs, emails, fullnames in info:206 for count, revs, emails, fullnames in info:
207 # Get the most common email name207 # Get the most common email name
208 sorted_emails = sorted(((count, email)208 sorted_emails = sorted(((count, email)
209 for email,count in emails.iteritems()),209 for email, count in emails.items()),
210 reverse=True)210 reverse=True)
211 sorted_fullnames = sorted(((count, fullname)211 sorted_fullnames = sorted(((count, fullname)
212 for fullname,count in fullnames.iteritems()),212 for fullname, count in fullnames.items()),
213 reverse=True)213 reverse=True)
214 if sorted_fullnames[0][1] == '' and sorted_emails[0][1] == '':214 if sorted_fullnames[0][1] == '' and sorted_emails[0][1] == '':
215 to_file.write('%4d %s\n'215 to_file.write('%4d %s\n'
@@ -237,7 +237,7 @@
237 if gather_class_stats is not None:237 if gather_class_stats is not None:
238 to_file.write(' Contributions:\n')238 to_file.write(' Contributions:\n')
239 classes, total = gather_class_stats(revs)239 classes, total = gather_class_stats(revs)
240 for name,count in sorted(classes.items(), lambda x,y: cmp((x[1], x[0]), (y[1], y[0]))):240 for name, count in sorted(classes.items(), key=classify_key):
241 if name is None:241 if name is None:
242 name = "Unknown"242 name = "Unknown"
243 to_file.write(" %4.0f%% %s\n" % ((float(count) / total) * 100.0, name))243 to_file.write(" %4.0f%% %s\n" % ((float(count) / total) * 100.0, name))
@@ -342,6 +342,11 @@
342 return ret, total342 return ret, total
343343
344344
345def classify_key(item):
346 """Sort key for item of (author, count) from classify_delta."""
347 return -item[1], item[0]
348
349
345def display_credits(credits, to_file):350def display_credits(credits, to_file):
346 (coders, documenters, artists, translators) = credits351 (coders, documenters, artists, translators) = credits
347 def print_section(name, lst):352 def print_section(name, lst):
@@ -392,8 +397,8 @@
392 finally:397 finally:
393 repository.unlock()398 repository.unlock()
394 def sort_class(name):399 def sort_class(name):
395 return map(lambda (x,y): x,400 return [author
396 sorted(ret[name].items(), lambda x,y: cmp((x[1], x[0]), (y[1], y[0])), reverse=True))401 for author, _ in sorted(ret[name].items(), key=classify_key)]
397 return (sort_class("code"), sort_class("documentation"), sort_class("art"), sort_class("translation"))402 return (sort_class("code"), sort_class("documentation"), sort_class("art"), sort_class("translation"))
398403
399404
400405
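Note: the old code in this file relied on the comparator form of `sorted()` and on `cmp()`, both of which are gone in Python 3, plus a tuple-unpacking lambda (`lambda (x,y): ...`), which is a syntax error there. The new `classify_key` helper expresses the ordering as a key function: negating the count sorts the largest counts first, with names as an ascending tie-break (the old `reverse=True` variant broke ties on names the other way). A standalone sketch of how the key behaves, with invented counts:

    def classify_key(item):
        """Sort key for item of (author, count) from classify_delta."""
        return -item[1], item[0]

    classes = {'code': 10, 'documentation': 3, 'art': 3}  # hypothetical data
    assert sorted(classes.items(), key=classify_key) == [
        ('code', 10), ('art', 3), ('documentation', 3)]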
=== modified file 'breezy/plugins/weave_fmt/bzrdir.py'
--- breezy/plugins/weave_fmt/bzrdir.py 2017-05-30 19:32:13 +0000
+++ breezy/plugins/weave_fmt/bzrdir.py 2017-06-05 21:41:38 +0000
@@ -435,7 +435,7 @@
             self.text_weaves[file_id] = w
         text_changed = False
         parent_candiate_entries = ie.parent_candidates(parent_invs)
-        heads = graph.Graph(self).heads(parent_candiate_entries.keys())
+        heads = graph.Graph(self).heads(parent_candiate_entries)
         # XXX: Note that this is unordered - and this is tolerable because
         # the previous code was also unordered.
         previous_entries = dict((head, parent_candiate_entries[head]) for head
@@ -457,7 +457,7 @@
         # save against.
         #ie.snapshot(rev, PATH, previous_revisions, REVISION_TREE, InMemoryWeaveStore(self.text_weaves))
         if len(previous_revisions) == 1:
-            previous_ie = previous_revisions.values()[0]
+            previous_ie = next(iter(previous_revisions.values()))
             if ie._unchanged(previous_ie):
                 ie.revision = previous_ie.revision
                 return
@@ -479,7 +479,7 @@
         The order must be such that an revision is imported after all
         its (present) parents.
        """
-        todo = set(self.revisions.keys())
+        todo = set(self.revisions)
         done = self.absent_revisions.copy()
         order = []
         while todo:

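Note: `previous_revisions.values()[0]` indexes the result of `values()`, which is a list on Python 2 but an unindexable view on Python 3; `next(iter(...))` fetches an arbitrary (here, the only) value on both versions without materializing a list. A minimal standalone sketch, with a made-up single-entry dict:

    previous_revisions = {'rev-a': 'entry-a'}  # hypothetical contents
    # Python 2 only: previous_revisions.values()[0]
    # Python 2 and 3, no intermediate list:
    previous_ie = next(iter(previous_revisions.values()))
    assert previous_ie == 'entry-a'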
=== modified file 'breezy/reconcile.py'
--- breezy/reconcile.py 2017-05-22 00:56:52 +0000
+++ breezy/reconcile.py 2017-06-05 21:41:38 +0000
@@ -430,7 +430,7 @@
             # NB: This is really not needed, reconcile != pack.
             per_id_bad_parents[key[0]] = {}
         # Generate per-knit/weave data.
-        for key, details in bad_parents.iteritems():
+        for key, details in bad_parents.items():
             file_id = key[0]
             rev_id = key[1]
             knit_parents = tuple([parent[-1] for parent in details[0]])

=== modified file 'breezy/registry.py'
--- breezy/registry.py 2017-05-22 00:56:52 +0000
+++ breezy/registry.py 2017-06-05 21:41:38 +0000
@@ -227,17 +227,16 @@

     def keys(self):
         """Get a list of registered entries"""
-        return sorted(self._dict.keys())
+        return sorted(self._dict)

     def iteritems(self):
-        for key, getter in self._dict.iteritems():
-            yield key, getter.get_obj()
+        for key in self._dict:
+            yield key, self._dict[key].get_obj()

     def items(self):
         # We should not use the iteritems() implementation below (see bug
         # #430510)
-        return sorted([(key, getter.get_obj())
-                       for key, getter in self._dict.items()])
+        return [(key, self._dict[key].get_obj()) for key in self.keys()]

     def _set_default_key(self, key):
         if key not in self._dict:

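Note: the new `items()` sorts once by key (via `self.keys()`) instead of sorting `(key, obj)` pairs. With unique keys that yields the same ordering, and it also avoids comparing the object halves of the tuples, which Python 3 may refuse to do for mixed types. A standalone sketch of that assumption, using a plain dict as a stand-in for the registry:

    d = {'b': 2, 'a': 1}  # hypothetical registry contents
    # Key-only sort matches pair sort whenever keys are unique.
    assert [(k, d[k]) for k in sorted(d)] == sorted(d.items())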
=== modified file 'breezy/remote.py'
--- breezy/remote.py 2017-05-30 19:32:13 +0000
+++ breezy/remote.py 2017-06-05 21:41:38 +0000
@@ -51,6 +51,10 @@
 from .i18n import gettext
 from .inventory import Inventory
 from .lockable_files import LockableFiles
+from .sixish import (
+    viewitems,
+    viewvalues,
+    )
 from .smart import client, vfs, repository as smart_repo
 from .smart.client import _SmartClient
 from .revision import NULL_REVISION
@@ -698,7 +702,7 @@
             raise errors.UnexpectedSmartServerResponse(response)
         body = bencode.bdecode(handler.read_body_bytes())
         ret = {}
-        for (name, value) in body.iteritems():
+        for name, value in viewitems(body):
             ret[name] = self._open_branch(name, value[0], value[1],
                 possible_transports=possible_transports,
                 ignore_fallbacks=ignore_fallbacks)
@@ -2088,8 +2092,7 @@
     def revision_ids_to_search_result(self, result_set):
         """Convert a set of revision ids to a graph SearchResult."""
         result_parents = set()
-        for parents in self.get_graph().get_parent_map(
-                result_set).itervalues():
+        for parents in viewvalues(self.get_graph().get_parent_map(result_set)):
             result_parents.update(parents)
         included_keys = result_set.intersection(result_parents)
         start_keys = result_set.difference(included_keys)
@@ -2214,15 +2217,15 @@
         for fallback in self._fallback_repositories:
             if not absent:
                 break
-            desired_files = [(key[0], key[1], identifier) for
-                (identifier, key) in absent.iteritems()]
+            desired_files = [(key[0], key[1], identifier)
+                             for identifier, key in viewitems(absent)]
             for (identifier, bytes_iterator) in fallback.iter_files_bytes(desired_files):
                 del absent[identifier]
                 yield identifier, bytes_iterator
         if absent:
             # There may be more missing items, but raise an exception
             # for just one.
-            missing_identifier = absent.keys()[0]
+            missing_identifier = next(iter(absent))
             missing_key = absent[missing_identifier]
             raise errors.RevisionNotPresent(revision_id=missing_key[1],
                                             file_id=missing_key[0])
@@ -2262,7 +2265,7 @@
         # There is one other "bug" which is that ghosts in
         # get_revision_graph() are not returned at all. But we won't worry
         # about that for now.
-        for node_id, parent_ids in rg.iteritems():
+        for node_id, parent_ids in viewitems(rg):
             if parent_ids == ():
                 rg[node_id] = (NULL_REVISION,)
         rg[NULL_REVISION] = ()

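Note: `viewitems` and `viewvalues` come from `breezy.sixish`, the project's six-style compatibility module; this diff only imports them, it does not show their definitions. Modeled on `six.viewitems`/`six.viewvalues`, a minimal equivalent would look like the sketch below (an assumption about sixish's implementation, not code from this proposal):

    import sys

    if sys.version_info[0] >= 3:
        def viewitems(d):
            # Python 3: items() is already a dynamic, set-like view.
            return d.items()
        def viewvalues(d):
            return d.values()
    else:
        def viewitems(d):
            # Python 2: viewitems() gives the Python 3 view semantics.
            return d.viewitems()
        def viewvalues(d):
            return d.viewvalues()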
=== modified file 'breezy/rename_map.py'
--- breezy/rename_map.py 2017-05-22 00:56:52 +0000
+++ breezy/rename_map.py 2017-06-05 21:41:38 +0000
@@ -25,6 +25,7 @@
 from .i18n import gettext
 from .sixish import (
     BytesIO,
+    viewitems,
     )
 from .ui import ui_factory

@@ -109,7 +110,7 @@
                 task.update(gettext('Determining hash hits'), num, len(paths))
                 hits = self.hitcounts(self.tree.get_file_lines(None,
                                                                path=path))
-                all_hits.extend((v, path, k) for k, v in hits.items())
+                all_hits.extend((v, path, k) for k, v in viewitems(hits))
         finally:
             task.finished()
         return all_hits
@@ -150,7 +151,7 @@
                     break
             required_parents.setdefault(path, []).append(child)
         require_ids = {}
-        for parent, children in required_parents.iteritems():
+        for parent, children in viewitems(required_parents):
             child_file_ids = set()
             for child in children:
                 file_id = matches.get(child)
@@ -167,8 +168,8 @@
         parent directories.
         """
         all_hits = []
-        for file_id, file_id_children in missing_parents.iteritems():
-            for path, path_children in required_parents.iteritems():
+        for file_id, file_id_children in viewitems(missing_parents):
+            for path, path_children in viewitems(required_parents):
                 hits = len(path_children.intersection(file_id_children))
                 if hits > 0:
                     all_hits.append((hits, path, file_id))
@@ -250,8 +251,8 @@

     def _make_inventory_delta(self, matches):
         delta = []
-        file_id_matches = dict((f, p) for p, f in matches.items())
-        for old_path, entry in self.tree.iter_entries_by_dir(matches.values()):
+        file_id_matches = dict((f, p) for p, f in viewitems(matches))
+        for old_path, entry in self.tree.iter_entries_by_dir(file_id_matches):
             new_path = file_id_matches[entry.file_id]
             parent_path, new_name = osutils.split(new_path)
             parent_id = matches.get(parent_path)

=== modified file 'breezy/repofmt/groupcompress_repo.py'
--- breezy/repofmt/groupcompress_repo.py 2017-05-22 00:56:52 +0000
+++ breezy/repofmt/groupcompress_repo.py 2017-06-05 21:41:38 +0000
@@ -58,6 +58,10 @@
 from ..vf_repository import (
     StreamSource,
     )
+from ..sixish import (
+    viewitems,
+    viewvalues,
+    )
 from ..static_tuple import StaticTuple


@@ -276,7 +280,7 @@
             remaining_keys.difference_update(cur_keys)
             next_keys = set()
             def handle_internal_node(node):
-                for prefix, value in node._items.iteritems():
+                for prefix, value in viewitems(node._items):
                     # We don't want to request the same key twice, and we
                     # want to order it by the first time it is seen.
                     # Even further, we don't want to request a key which is
@@ -543,7 +547,7 @@
         ancestor_keys = revision_vf.get_parent_map(revision_vf.keys())
         # Strip keys back into revision_ids.
         ancestors = dict((k[0], tuple([p[0] for p in parents]))
-                         for k, parents in ancestor_keys.iteritems())
+                         for k, parents in viewitems(ancestor_keys))
         del ancestor_keys
         # TODO: _generate_text_key_index should be much cheaper to generate from
         # a chk repository, rather than the current implementation
@@ -665,7 +669,7 @@
         if search_key_name is None:
             # Find the name corresponding to the search_key_func
             search_key_reg = chk_map.search_key_registry
-            for search_key_name, func in search_key_reg.iteritems():
+            for search_key_name, func in viewitems(search_key_reg):
                 if func == chk_inv.id_to_entry._search_key_func:
                     break
         canonical_inv = inventory.CHKInventory.from_inventory(
@@ -741,7 +745,7 @@
         # any present parent inventories, which may be used when calculating
         # deltas for streaming.
         all_inv_keys = set(corresponding_invs)
-        for parent_inv_keys in inv_parent_map.itervalues():
+        for parent_inv_keys in viewvalues(inv_parent_map):
             all_inv_keys.update(parent_inv_keys)
         # Filter out ghost parents.
         all_inv_keys.intersection_update(

=== modified file 'breezy/repofmt/knitpack_repo.py'
--- breezy/repofmt/knitpack_repo.py 2017-05-25 21:59:11 +0000
+++ breezy/repofmt/knitpack_repo.py 2017-06-05 21:41:38 +0000
@@ -68,6 +68,7 @@
68 RepositoryPackCollection,68 RepositoryPackCollection,
69 )69 )
70from ..sixish import (70from ..sixish import (
71 viewitems,
71 zip72 zip
72 )73 )
73from ..vf_repository import (74from ..vf_repository import (
@@ -642,7 +643,7 @@
642 request_groups[index].append((key, value))643 request_groups[index].append((key, value))
643 record_index = 0644 record_index = 0
644 pb.update("Copied record", record_index, len(nodes))645 pb.update("Copied record", record_index, len(nodes))
645 for index, items in request_groups.iteritems():646 for index, items in viewitems(request_groups):
646 pack_readv_requests = []647 pack_readv_requests = []
647 for key, value in items:648 for key, value in items:
648 # ---- KnitGraphIndex.get_position649 # ---- KnitGraphIndex.get_position
@@ -740,7 +741,7 @@
740 fileid_revisions = repo._find_file_ids_from_xml_inventory_lines(741 fileid_revisions = repo._find_file_ids_from_xml_inventory_lines(
741 inv_lines, self.revision_keys)742 inv_lines, self.revision_keys)
742 text_filter = []743 text_filter = []
743 for fileid, file_revids in fileid_revisions.iteritems():744 for fileid, file_revids in viewitems(fileid_revisions):
744 text_filter.extend([(fileid, file_revid) for file_revid in file_revids])745 text_filter.extend([(fileid, file_revid) for file_revid in file_revids])
745 self._text_filter = text_filter746 self._text_filter = text_filter
746747
@@ -934,7 +935,7 @@
934 request_groups[index] = []935 request_groups[index] = []
935 request_groups[index].append((key, value, references))936 request_groups[index].append((key, value, references))
936 result = []937 result = []
937 for index, items in request_groups.iteritems():938 for index, items in viewitems(request_groups):
938 pack_readv_requests = []939 pack_readv_requests = []
939 for key, value, references in items:940 for key, value, references in items:
940 # ---- KnitGraphIndex.get_position941 # ---- KnitGraphIndex.get_position
941942
=== modified file 'breezy/repofmt/pack_repo.py'
--- breezy/repofmt/pack_repo.py 2017-05-22 00:56:52 +0000
+++ breezy/repofmt/pack_repo.py 2017-06-05 21:41:38 +0000
@@ -1288,10 +1288,10 @@
12881288
1289 def _remove_pack_indices(self, pack, ignore_missing=False):1289 def _remove_pack_indices(self, pack, ignore_missing=False):
1290 """Remove the indices for pack from the aggregated indices.1290 """Remove the indices for pack from the aggregated indices.
1291 1291
1292 :param ignore_missing: Suppress KeyErrors from calling remove_index.1292 :param ignore_missing: Suppress KeyErrors from calling remove_index.
1293 """1293 """
1294 for index_type in Pack.index_definitions.keys():1294 for index_type in Pack.index_definitions:
1295 attr_name = index_type + '_index'1295 attr_name = index_type + '_index'
1296 aggregate_index = getattr(self, attr_name)1296 aggregate_index = getattr(self, attr_name)
1297 if aggregate_index is not None:1297 if aggregate_index is not None:
@@ -1344,7 +1344,7 @@
13441344
1345 # do a two-way diff against our original content1345 # do a two-way diff against our original content
1346 current_nodes = set()1346 current_nodes = set()
1347 for name, sizes in self._names.iteritems():1347 for name, sizes in self._names.items():
1348 current_nodes.add(1348 current_nodes.add(
1349 ((name, ), ' '.join(str(size) for size in sizes)))1349 ((name, ), ' '.join(str(size) for size in sizes)))
13501350
13511351
=== modified file 'breezy/repository.py'
--- breezy/repository.py 2017-05-30 19:32:13 +0000
+++ breezy/repository.py 2017-06-05 21:41:38 +0000
@@ -48,6 +48,10 @@
48from .decorators import needs_read_lock, needs_write_lock, only_raises48from .decorators import needs_read_lock, needs_write_lock, only_raises
49from .inter import InterObject49from .inter import InterObject
50from .lock import _RelockDebugMixin, LogicalLockResult50from .lock import _RelockDebugMixin, LogicalLockResult
51from .sixish import (
52 viewitems,
53 viewvalues,
54 )
51from .trace import (55from .trace import (
52 log_exception_quietly, note, mutter, mutter_callsite, warning)56 log_exception_quietly, note, mutter, mutter_callsite, warning)
5357
@@ -141,7 +145,7 @@
141 raise ValueError('Invalid value for %s: %r' % (context, text))145 raise ValueError('Invalid value for %s: %r' % (context, text))
142146
143 def _validate_revprops(self, revprops):147 def _validate_revprops(self, revprops):
144 for key, value in revprops.iteritems():148 for key, value in viewitems(revprops):
145 # We know that the XML serializers do not round trip '\r'149 # We know that the XML serializers do not round trip '\r'
146 # correctly, so refuse to accept them150 # correctly, so refuse to accept them
147 if not isinstance(value, basestring):151 if not isinstance(value, basestring):
@@ -911,9 +915,8 @@
911 :return: set of revisions that are parents of revision_ids which are915 :return: set of revisions that are parents of revision_ids which are
912 not part of revision_ids themselves916 not part of revision_ids themselves
913 """917 """
914 parent_map = self.get_parent_map(revision_ids)918 parent_ids = set(itertools.chain.from_iterable(viewvalues(
915 parent_ids = set(itertools.chain.from_iterable(919 self.get_parent_map(revision_ids))))
916 parent_map.itervalues()))
917 parent_ids.difference_update(revision_ids)920 parent_ids.difference_update(revision_ids)
918 parent_ids.discard(_mod_revision.NULL_REVISION)921 parent_ids.discard(_mod_revision.NULL_REVISION)
919 return parent_ids922 return parent_ids
@@ -1053,8 +1056,8 @@
1053 else:1056 else:
1054 query_keys.append((revision_id ,))1057 query_keys.append((revision_id ,))
1055 vf = self.revisions.without_fallbacks()1058 vf = self.revisions.without_fallbacks()
1056 for ((revision_id,), parent_keys) in \1059 for (revision_id,), parent_keys in viewitems(
1057 vf.get_parent_map(query_keys).iteritems():1060 vf.get_parent_map(query_keys)):
1058 if parent_keys:1061 if parent_keys:
1059 result[revision_id] = tuple([parent_revid1062 result[revision_id] = tuple([parent_revid
1060 for (parent_revid,) in parent_keys])1063 for (parent_revid,) in parent_keys])
@@ -1747,7 +1750,7 @@
1747 # Filter ghosts, and null:1750 # Filter ghosts, and null:
1748 if _mod_revision.NULL_REVISION in revision_graph:1751 if _mod_revision.NULL_REVISION in revision_graph:
1749 del revision_graph[_mod_revision.NULL_REVISION]1752 del revision_graph[_mod_revision.NULL_REVISION]
1750 for key, parents in revision_graph.items():1753 for key, parents in viewitems(revision_graph):
1751 revision_graph[key] = tuple(parent for parent in parents if parent1754 revision_graph[key] = tuple(parent for parent in parents if parent
1752 in revision_graph)1755 in revision_graph)
1753 return revision_graph1756 return revision_graph
17541757
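Note: the parents-set rewrite above folds the whole computation into one expression: flatten every parent tuple from `get_parent_map()` into a set, then subtract the queried revisions themselves. A standalone sketch of the idiom with a hypothetical three-revision graph:

    import itertools

    parent_map = {'rev-c': ('rev-b',), 'rev-b': ('rev-a',)}  # made-up graph
    parent_ids = set(itertools.chain.from_iterable(parent_map.values()))
    parent_ids.difference_update(parent_map)  # drop the revisions themselves
    assert parent_ids == {'rev-a'}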
=== modified file 'breezy/revision.py'
--- breezy/revision.py 2017-05-30 19:16:23 +0000
+++ breezy/revision.py 2017-06-05 21:41:38 +0000
@@ -85,7 +85,7 @@
8585
86 def _check_properties(self):86 def _check_properties(self):
87 """Verify that all revision properties are OK."""87 """Verify that all revision properties are OK."""
88 for name, value in self.properties.iteritems():88 for name, value in self.properties.items():
89 if not isinstance(name, basestring) or contains_whitespace(name):89 if not isinstance(name, basestring) or contains_whitespace(name):
90 raise ValueError("invalid property name %r" % name)90 raise ValueError("invalid property name %r" % name)
91 if not isinstance(value, basestring):91 if not isinstance(value, basestring):
9292
=== modified file 'breezy/smart/bzrdir.py'
--- breezy/smart/bzrdir.py 2017-05-22 00:56:52 +0000
+++ breezy/smart/bzrdir.py 2017-06-05 21:41:38 +0000
@@ -441,7 +441,7 @@
441 """441 """
442 branches = self._bzrdir.get_branches()442 branches = self._bzrdir.get_branches()
443 ret = {}443 ret = {}
444 for name, b in branches.iteritems():444 for name, b in branches.items():
445 if name is None:445 if name is None:
446 name = ""446 name = ""
447 ret[name] = ("branch", b._format.network_name())447 ret[name] = ("branch", b._format.network_name())
448448
=== modified file 'breezy/smart/client.py'
--- breezy/smart/client.py 2017-05-22 00:56:52 +0000
+++ breezy/smart/client.py 2017-06-05 21:41:38 +0000
@@ -339,7 +339,7 @@
339 self.medium = medium339 self.medium = medium
340340
341 def __repr__(self):341 def __repr__(self):
342 attrs = dict((k, v) for (k, v) in self.__dict__.iteritems()342 attrs = dict((k, v) for k, v in self.__dict__.items()
343 if v is not None)343 if v is not None)
344 return '<%s %r>' % (self.__class__.__name__, attrs)344 return '<%s %r>' % (self.__class__.__name__, attrs)
345345
346346
=== modified file 'breezy/smtp_connection.py'
--- breezy/smtp_connection.py 2017-05-22 00:56:52 +0000
+++ breezy/smtp_connection.py 2017-06-05 21:41:38 +0000
@@ -183,7 +183,7 @@
183 message.as_string())183 message.as_string())
184 except smtplib.SMTPRecipientsRefused as e:184 except smtplib.SMTPRecipientsRefused as e:
185 raise SMTPError('server refused recipient: %d %s' %185 raise SMTPError('server refused recipient: %d %s' %
186 e.recipients.values()[0])186 next(iter(e.recipients.values())))
187 except smtplib.SMTPResponseException as e:187 except smtplib.SMTPResponseException as e:
188 raise SMTPError('%d %s' % (e.smtp_code, e.smtp_error))188 raise SMTPError('%d %s' % (e.smtp_code, e.smtp_error))
189 except smtplib.SMTPException as e:189 except smtplib.SMTPException as e:
190190
=== modified file 'breezy/tag.py'
--- breezy/tag.py 2017-05-30 19:16:23 +0000
+++ breezy/tag.py 2017-06-05 21:41:38 +0000
@@ -226,7 +226,7 @@
226226
227 def _serialize_tag_dict(self, tag_dict):227 def _serialize_tag_dict(self, tag_dict):
228 td = dict((k.encode('utf-8'), v)228 td = dict((k.encode('utf-8'), v)
229 for k,v in tag_dict.items())229 for k, v in tag_dict.items())
230 return bencode.bencode(td)230 return bencode.bencode(td)
231231
232 def _deserialize_tag_dict(self, tag_content):232 def _deserialize_tag_dict(self, tag_content):
@@ -324,7 +324,7 @@
324 :param rename_map: Dictionary mapping old revids to new revids324 :param rename_map: Dictionary mapping old revids to new revids
325 """325 """
326 reverse_tags = self.get_reverse_tag_dict()326 reverse_tags = self.get_reverse_tag_dict()
327 for revid, names in reverse_tags.iteritems():327 for revid, names in reverse_tags.items():
328 if revid in rename_map:328 if revid in rename_map:
329 for name in names:329 for name in names:
330 self.set_tag(name, rename_map[revid])330 self.set_tag(name, rename_map[revid])
331331
=== modified file 'breezy/tests/per_branch/test_check.py'
--- breezy/tests/per_branch/test_check.py 2017-05-30 19:16:23 +0000
+++ breezy/tests/per_branch/test_check.py 2017-06-05 21:41:38 +0000
@@ -96,7 +96,7 @@
96 'unknown ref kind for ref %s' % ref)96 'unknown ref kind for ref %s' % ref)
97 node_distances = branch.repository.get_graph().find_lefthand_distances(97 node_distances = branch.repository.get_graph().find_lefthand_distances(
98 distances)98 distances)
99 for key, distance in node_distances.iteritems():99 for key, distance in node_distances.items():
100 refs[('lefthand-distance', key)] = distance100 refs[('lefthand-distance', key)] = distance
101 if key in existences and distance > 0:101 if key in existences and distance > 0:
102 refs[('revision-existence', key)] = True102 refs[('revision-existence', key)] = True
103103
=== modified file 'breezy/tests/per_bzrdir/test_bzrdir.py'
--- breezy/tests/per_bzrdir/test_bzrdir.py 2017-05-22 00:56:52 +0000
+++ breezy/tests/per_bzrdir/test_bzrdir.py 2017-06-05 21:41:38 +0000
@@ -160,7 +160,7 @@
160 self.assertEqual(text_index,160 self.assertEqual(text_index,
161 right_repo._generate_text_key_index())161 right_repo._generate_text_key_index())
162 desired_files = []162 desired_files = []
163 for file_id, revision_id in text_index.iterkeys():163 for file_id, revision_id in text_index:
164 desired_files.append(164 desired_files.append(
165 (file_id, revision_id, (file_id, revision_id)))165 (file_id, revision_id, (file_id, revision_id)))
166 left_texts = [(identifier, "".join(bytes_iterator)) for166 left_texts = [(identifier, "".join(bytes_iterator)) for
167167
=== modified file 'breezy/tests/per_controldir/test_controldir.py'
--- breezy/tests/per_controldir/test_controldir.py 2017-05-24 16:33:08 +0000
+++ breezy/tests/per_controldir/test_controldir.py 2017-06-05 21:41:38 +0000
@@ -1250,7 +1250,7 @@
1250 def test_get_branches(self):1250 def test_get_branches(self):
1251 repo = self.make_repository('branch-1')1251 repo = self.make_repository('branch-1')
1252 target_branch = repo.bzrdir.create_branch()1252 target_branch = repo.bzrdir.create_branch()
1253 self.assertEqual([""], repo.bzrdir.get_branches().keys())1253 self.assertEqual([""], list(repo.bzrdir.get_branches()))
12541254
1255 def test_create_repository(self):1255 def test_create_repository(self):
1256 # a bzrdir can construct a repository for itself.1256 # a bzrdir can construct a repository for itself.
12571257
=== modified file 'breezy/tests/per_controldir_colo/test_supported.py'
--- breezy/tests/per_controldir_colo/test_supported.py 2017-05-21 18:10:28 +0000
+++ breezy/tests/per_controldir_colo/test_supported.py 2017-06-05 21:41:38 +0000
@@ -131,7 +131,7 @@
131 def test_get_branches(self):131 def test_get_branches(self):
132 repo = self.make_repository('branch-1')132 repo = self.make_repository('branch-1')
133 target_branch = repo.bzrdir.create_branch(name='foo')133 target_branch = repo.bzrdir.create_branch(name='foo')
134 self.assertEqual(['foo'], repo.bzrdir.get_branches().keys())134 self.assertEqual(['foo'], list(repo.bzrdir.get_branches()))
135 self.assertEqual(target_branch.base,135 self.assertEqual(target_branch.base,
136 repo.bzrdir.get_branches()['foo'].base)136 repo.bzrdir.get_branches()['foo'].base)
137137
@@ -142,7 +142,7 @@
142 except errors.InvalidBranchName:142 except errors.InvalidBranchName:
143 raise tests.TestNotApplicable(143 raise tests.TestNotApplicable(
144 "format does not support branches with / in their name")144 "format does not support branches with / in their name")
145 self.assertEqual(['foo/bar'], repo.bzrdir.get_branches().keys())145 self.assertEqual(['foo/bar'], list(repo.bzrdir.get_branches()))
146 self.assertEqual(146 self.assertEqual(
147 target_branch.base, repo.bzrdir.open_branch(name='foo/bar').base)147 target_branch.base, repo.bzrdir.open_branch(name='foo/bar').base)
148148
149149
=== modified file 'breezy/tests/per_controldir_colo/test_unsupported.py'
--- breezy/tests/per_controldir_colo/test_unsupported.py 2017-05-21 18:10:28 +0000
+++ breezy/tests/per_controldir_colo/test_unsupported.py 2017-06-05 21:41:38 +0000
@@ -79,5 +79,4 @@
79 def test_get_branches(self):79 def test_get_branches(self):
80 made_control = self.make_bzrdir_with_repo()80 made_control = self.make_bzrdir_with_repo()
81 made_control.create_branch()81 made_control.create_branch()
82 self.assertEqual(made_control.get_branches().keys(),82 self.assertEqual(list(made_control.get_branches()), [""])
83 [""])
8483
=== modified file 'breezy/tests/per_foreign_vcs/__init__.py'
--- breezy/tests/per_foreign_vcs/__init__.py 2017-05-23 14:08:03 +0000
+++ breezy/tests/per_foreign_vcs/__init__.py 2017-06-05 21:41:38 +0000
@@ -27,7 +27,7 @@
2727
28def vcs_scenarios():28def vcs_scenarios():
29 scenarios = []29 scenarios = []
30 for name, vcs in foreign.foreign_vcs_registry.iteritems():30 for name, vcs in foreign.foreign_vcs_registry.items():
31 scenarios.append((vcs.__class__.__name__, {31 scenarios.append((vcs.__class__.__name__, {
32 "branch_factory": vcs.branch_format.get_foreign_tests_branch_factory(),32 "branch_factory": vcs.branch_format.get_foreign_tests_branch_factory(),
33 "repository_factory": vcs.repository_format.get_foreign_tests_repository_factory(),33 "repository_factory": vcs.repository_format.get_foreign_tests_repository_factory(),
3434
=== modified file 'breezy/tests/per_pack_repository.py'
--- breezy/tests/per_pack_repository.py 2017-06-04 18:09:30 +0000
+++ breezy/tests/per_pack_repository.py 2017-06-05 21:41:38 +0000
@@ -253,10 +253,11 @@
253 repo.abort_write_group()253 repo.abort_write_group()
254 raise254 raise
255 else:255 else:
256 old_names = repo._pack_collection._names.keys()256 old_names = set(repo._pack_collection._names)
257 result = repo.commit_write_group()257 result = repo.commit_write_group()
258 cur_names = repo._pack_collection._names.keys()258 cur_names = set(repo._pack_collection._names)
259 new_names = list(set(cur_names) - set(old_names))259 # In this test, len(result) is always 1, so unordered is ok
260 new_names = list(cur_names - old_names)
260 self.assertEqual(new_names, result)261 self.assertEqual(new_names, result)
261 finally:262 finally:
262 repo.unlock()263 repo.unlock()
263264
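Note: taking the `set()` up front matters here beyond style. On Python 3, `_names.keys()` is a live view: a "snapshot" taken before `commit_write_group()` would silently track the mutation, and the before/after difference would come out empty. A standalone sketch of the hazard, with invented pack names:

    names = {'pack-1': [10]}          # hypothetical pack name -> sizes
    old_names = names.keys()          # Python 3: a live view, not a copy
    names['pack-2'] = [20]            # what commit_write_group() would do
    assert 'pack-2' in old_names      # the view changed underneath us
    old_snapshot = set(names)         # set() takes a genuine snapshot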
=== modified file 'breezy/tests/per_workingtree/test_parents.py'
--- breezy/tests/per_workingtree/test_parents.py 2017-05-22 00:56:52 +0000
+++ breezy/tests/per_workingtree/test_parents.py 2017-06-05 21:41:38 +0000
@@ -371,8 +371,8 @@
371371
372 def make_inv_delta(self, old, new):372 def make_inv_delta(self, old, new):
373 """Make an inventory delta from two inventories."""373 """Make an inventory delta from two inventories."""
374 old_ids = set(old._byid.iterkeys())374 old_ids = set(old._byid)
375 new_ids = set(new._byid.iterkeys())375 new_ids = set(new._byid)
376 adds = new_ids - old_ids376 adds = new_ids - old_ids
377 deletes = old_ids - new_ids377 deletes = old_ids - new_ids
378 common = old_ids.intersection(new_ids)378 common = old_ids.intersection(new_ids)
379379
=== modified file 'breezy/tests/test_btree_index.py'
--- breezy/tests/test_btree_index.py 2017-06-04 18:09:30 +0000
+++ breezy/tests/test_btree_index.py 2017-06-05 21:41:38 +0000
@@ -643,7 +643,7 @@
643 self.assertEqual(1, len(list(index.iter_entries([nodes[30][0]]))))643 self.assertEqual(1, len(list(index.iter_entries([nodes[30][0]]))))
644 self.assertEqual([1, 4], index._row_lengths)644 self.assertEqual([1, 4], index._row_lengths)
645 self.assertIsNot(None, index._root_node)645 self.assertIsNot(None, index._root_node)
646 internal_node_pre_clear = index._internal_node_cache.keys()646 internal_node_pre_clear = set(index._internal_node_cache)
647 self.assertTrue(len(index._leaf_node_cache) > 0)647 self.assertTrue(len(index._leaf_node_cache) > 0)
648 index.clear_cache()648 index.clear_cache()
649 # We don't touch _root_node or _internal_node_cache, both should be649 # We don't touch _root_node or _internal_node_cache, both should be
@@ -655,7 +655,7 @@
655 # becuase without a 3-level index, we don't have any internal655 # becuase without a 3-level index, we don't have any internal
656 # nodes cached.656 # nodes cached.
657 self.assertEqual(internal_node_pre_clear,657 self.assertEqual(internal_node_pre_clear,
658 index._internal_node_cache.keys())658 set(index._internal_node_cache))
659 self.assertEqual(0, len(index._leaf_node_cache))659 self.assertEqual(0, len(index._leaf_node_cache))
660660
661 def test_trivial_constructor(self):661 def test_trivial_constructor(self):
@@ -737,7 +737,7 @@
737 index = btree_index.BTreeGraphIndex(trans, 'index', None)737 index = btree_index.BTreeGraphIndex(trans, 'index', None)
738 del trans._activity[:]738 del trans._activity[:]
739 nodes = dict(index._read_nodes([0]))739 nodes = dict(index._read_nodes([0]))
740 self.assertEqual([0], nodes.keys())740 self.assertEqual({0}, set(nodes))
741 node = nodes[0]741 node = nodes[0]
742 self.assertEqual([('key',)], node.all_keys())742 self.assertEqual([('key',)], node.all_keys())
743 self.assertEqual([('get', 'index')], trans._activity)743 self.assertEqual([('get', 'index')], trans._activity)
744744
=== modified file 'breezy/tests/test_bundle.py'
--- breezy/tests/test_bundle.py 2017-05-25 01:35:55 +0000
+++ breezy/tests/test_bundle.py 2017-06-05 21:41:38 +0000
@@ -105,7 +105,7 @@
105 return self.ids[parent_dir]105 return self.ids[parent_dir]
106106
107 def iter_entries(self):107 def iter_entries(self):
108 for path, file_id in self.ids.iteritems():108 for path, file_id in self.ids.items():
109 yield path, self[file_id]109 yield path, self[file_id]
110110
111 def kind(self, file_id):111 def kind(self, file_id):
112112
=== modified file 'breezy/tests/test_fifo_cache.py'
--- breezy/tests/test_fifo_cache.py 2017-05-22 00:56:52 +0000
+++ breezy/tests/test_fifo_cache.py 2017-06-05 21:41:38 +0000
@@ -20,6 +20,11 @@
20 fifo_cache,20 fifo_cache,
21 tests,21 tests,
22 )22 )
23from ..sixish import (
24 viewitems,
25 viewkeys,
26 viewvalues,
27 )
2328
2429
25class TestFIFOCache(tests.TestCase):30class TestFIFOCache(tests.TestCase):
@@ -33,12 +38,10 @@
33 self.assertEqual(2, c[1])38 self.assertEqual(2, c[1])
34 self.assertEqual(2, c.get(1))39 self.assertEqual(2, c.get(1))
35 self.assertEqual(2, c.get(1, None))40 self.assertEqual(2, c.get(1, None))
36 self.assertEqual([1], c.keys())41 self.assertEqual([1], list(c))
37 self.assertEqual([1], list(c.iterkeys()))42 self.assertEqual({1}, viewkeys(c))
38 self.assertEqual([(1, 2)], c.items())43 self.assertEqual([(1, 2)], sorted(viewitems(c)))
39 self.assertEqual([(1, 2)], list(c.iteritems()))44 self.assertEqual([2], sorted(viewvalues(c)))
40 self.assertEqual([2], c.values())
41 self.assertEqual([2], list(c.itervalues()))
42 self.assertEqual({1: 2}, c)45 self.assertEqual({1: 2}, c)
4346
44 def test_cache_size(self):47 def test_cache_size(self):
@@ -54,12 +57,10 @@
54 self.assertEqual(0, len(c))57 self.assertEqual(0, len(c))
55 self.assertEqual(None, c.get(1))58 self.assertEqual(None, c.get(1))
56 self.assertEqual(None, c.get(1, None))59 self.assertEqual(None, c.get(1, None))
57 self.assertEqual([], c.keys())60 self.assertEqual([], list(c))
58 self.assertEqual([], list(c.iterkeys()))61 self.assertEqual(set(), viewkeys(c))
59 self.assertEqual([], c.items())62 self.assertEqual([], list(viewitems(c)))
60 self.assertEqual([], list(c.iteritems()))63 self.assertEqual([], list(viewvalues(c)))
61 self.assertEqual([], c.values())
62 self.assertEqual([], list(c.itervalues()))
63 self.assertEqual({}, c)64 self.assertEqual({}, c)
6465
65 def test_add_maintains_fifo(self):66 def test_add_maintains_fifo(self):
@@ -68,16 +69,16 @@
68 c[2] = 369 c[2] = 3
69 c[3] = 470 c[3] = 4
70 c[4] = 571 c[4] = 5
71 self.assertEqual([1, 2, 3, 4], sorted(c.keys()))72 self.assertEqual({1, 2, 3, 4}, viewkeys(c))
72 c[5] = 673 c[5] = 6
73 # This should pop out the oldest entry74 # This should pop out the oldest entry
74 self.assertEqual([2, 3, 4, 5], sorted(c.keys()))75 self.assertEqual({2, 3, 4, 5}, viewkeys(c))
75 # Replacing an item doesn't change the stored keys76 # Replacing an item doesn't change the stored keys
76 c[2] = 777 c[2] = 7
77 self.assertEqual([2, 3, 4, 5], sorted(c.keys()))78 self.assertEqual({2, 3, 4, 5}, viewkeys(c))
78 # But it does change the position in the FIFO79 # But it does change the position in the FIFO
79 c[6] = 780 c[6] = 7
80 self.assertEqual([2, 4, 5, 6], sorted(c.keys()))81 self.assertEqual({2, 4, 5, 6}, viewkeys(c))
81 self.assertEqual([4, 5, 2, 6], list(c._queue))82 self.assertEqual([4, 5, 2, 6], list(c._queue))
8283
83 def test_default_after_cleanup_count(self):84 def test_default_after_cleanup_count(self):
@@ -89,10 +90,10 @@
89 c[4] = 590 c[4] = 5
90 c[5] = 691 c[5] = 6
91 # So far, everything fits92 # So far, everything fits
92 self.assertEqual([1, 2, 3, 4, 5], sorted(c.keys()))93 self.assertEqual({1, 2, 3, 4, 5}, viewkeys(c))
93 c[6] = 794 c[6] = 7
94 # But adding one more should shrink down to after_cleanup_count95 # But adding one more should shrink down to after_cleanup_count
95 self.assertEqual([3, 4, 5, 6], sorted(c.keys()))96 self.assertEqual({3, 4, 5, 6}, viewkeys(c))
9697
97 def test_clear(self):98 def test_clear(self):
98 c = fifo_cache.FIFOCache(5)99 c = fifo_cache.FIFOCache(5)
@@ -102,9 +103,9 @@
102 c[4] = 5103 c[4] = 5
103 c[5] = 6104 c[5] = 6
104 c.cleanup()105 c.cleanup()
105 self.assertEqual([2, 3, 4, 5], sorted(c.keys()))106 self.assertEqual({2, 3, 4, 5}, viewkeys(c))
106 c.clear()107 c.clear()
107 self.assertEqual([], c.keys())108 self.assertEqual(set(), viewkeys(c))
108 self.assertEqual([], list(c._queue))109 self.assertEqual([], list(c._queue))
109 self.assertEqual({}, c)110 self.assertEqual({}, c)
110111
@@ -246,12 +247,10 @@
246 self.assertEqual('2', c[1])247 self.assertEqual('2', c[1])
247 self.assertEqual('2', c.get(1))248 self.assertEqual('2', c.get(1))
248 self.assertEqual('2', c.get(1, None))249 self.assertEqual('2', c.get(1, None))
249 self.assertEqual([1], c.keys())250 self.assertEqual([1], list(c))
250 self.assertEqual([1], list(c.iterkeys()))251 self.assertEqual({1}, viewkeys(c))
251 self.assertEqual([(1, '2')], c.items())252 self.assertEqual([(1, '2')], sorted(viewitems(c)))
252 self.assertEqual([(1, '2')], list(c.iteritems()))253 self.assertEqual(['2'], sorted(viewvalues(c)))
253 self.assertEqual(['2'], c.values())
254 self.assertEqual(['2'], list(c.itervalues()))
255 self.assertEqual({1: '2'}, c)254 self.assertEqual({1: '2'}, c)
256 self.assertEqual(1024*1024, c.cache_size())255 self.assertEqual(1024*1024, c.cache_size())
257256
@@ -262,12 +261,10 @@
262 self.assertEqual(0, len(c))261 self.assertEqual(0, len(c))
263 self.assertEqual(None, c.get(1))262 self.assertEqual(None, c.get(1))
264 self.assertEqual(None, c.get(1, None))263 self.assertEqual(None, c.get(1, None))
265 self.assertEqual([], c.keys())264 self.assertEqual([], list(c))
266 self.assertEqual([], list(c.iterkeys()))265 self.assertEqual(set(), viewkeys(c))
267 self.assertEqual([], c.items())266 self.assertEqual([], list(viewitems(c)))
268 self.assertEqual([], list(c.iteritems()))267 self.assertEqual([], list(viewvalues(c)))
269 self.assertEqual([], c.values())
270 self.assertEqual([], list(c.itervalues()))
271 self.assertEqual({}, c)268 self.assertEqual({}, c)
272269
273 def test_add_maintains_fifo(self):270 def test_add_maintains_fifo(self):
274271
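Note: the FIFO cache tests stop asserting on the order of `keys()`/`items()`/`values()` results entirely: `viewkeys(c)` compares like a set, and `sorted(viewitems(c))` pins an explicit order, so the assertions no longer depend on dict iteration order, which the tests should not rely on across Python versions. A standalone sketch of the comparison style, using a plain dict as a stand-in for the cache:

    c = {1: 2}                        # hypothetical stand-in for FIFOCache
    assert [1] == list(c)             # iterating a mapping yields its keys
    assert {1} == set(c.keys())       # order-free key comparison
    assert [(1, 2)] == sorted(c.items())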
=== modified file 'breezy/tests/test_graph.py'
--- breezy/tests/test_graph.py 2017-05-25 01:35:55 +0000
+++ breezy/tests/test_graph.py 2017-06-05 21:41:38 +0000
@@ -497,7 +497,7 @@
497 """497 """
498 pending = [NULL_REVISION]498 pending = [NULL_REVISION]
499 descendants = {}499 descendants = {}
500 for descendant, parents in ancestors.iteritems():500 for descendant, parents in ancestors.items():
501 for parent in parents:501 for parent in parents:
502 descendants.setdefault(parent, []).append(descendant)502 descendants.setdefault(parent, []).append(descendant)
503 while len(pending) > 0:503 while len(pending) > 0:
504504
=== modified file 'breezy/tests/test_groupcompress.py'
--- breezy/tests/test_groupcompress.py 2017-06-04 18:09:30 +0000
+++ breezy/tests/test_groupcompress.py 2017-06-05 21:41:38 +0000
@@ -304,7 +304,7 @@
304 for key in sorted(key_to_text):304 for key in sorted(key_to_text):
305 compressor.compress(key, key_to_text[key], None)305 compressor.compress(key, key_to_text[key], None)
306 locs = dict((key, (start, end)) for key, (start, _, end, _)306 locs = dict((key, (start, end)) for key, (start, _, end, _)
307 in compressor.labels_deltas.iteritems())307 in compressor.labels_deltas.items())
308 block = compressor.flush()308 block = compressor.flush()
309 raw_bytes = block.to_bytes()309 raw_bytes = block.to_bytes()
310 # Go through from_bytes(to_bytes()) so that we start with a compressed310 # Go through from_bytes(to_bytes()) so that we start with a compressed
@@ -961,7 +961,7 @@
961 for key in sorted(key_to_text):961 for key in sorted(key_to_text):
962 compressor.compress(key, key_to_text[key], None)962 compressor.compress(key, key_to_text[key], None)
963 locs = dict((key, (start, end)) for key, (start, _, end, _)963 locs = dict((key, (start, end)) for key, (start, _, end, _)
964 in compressor.labels_deltas.iteritems())964 in compressor.labels_deltas.items())
965 block = compressor.flush()965 block = compressor.flush()
966 raw_bytes = block.to_bytes()966 raw_bytes = block.to_bytes()
967 return locs, groupcompress.GroupCompressBlock.from_bytes(raw_bytes)967 return locs, groupcompress.GroupCompressBlock.from_bytes(raw_bytes)
968968
=== modified file 'breezy/tests/test_inv.py'
--- breezy/tests/test_inv.py 2017-05-22 00:56:52 +0000
+++ breezy/tests/test_inv.py 2017-06-05 21:41:38 +0000
@@ -939,7 +939,7 @@
939 new_inv = CHKInventory.deserialise(chk_bytes, bytes, ("revid",))939 new_inv = CHKInventory.deserialise(chk_bytes, bytes, ("revid",))
940 root_entry = new_inv[inv.root.file_id]940 root_entry = new_inv[inv.root.file_id]
941 self.assertEqual(None, root_entry._children)941 self.assertEqual(None, root_entry._children)
942 self.assertEqual(['file'], root_entry.children.keys())942 self.assertEqual({'file'}, set(root_entry.children))
943 file_direct = new_inv["fileid"]943 file_direct = new_inv["fileid"]
944 file_found = root_entry.children['file']944 file_found = root_entry.children['file']
945 self.assertEqual(file_direct.kind, file_found.kind)945 self.assertEqual(file_direct.kind, file_found.kind)
@@ -1472,9 +1472,9 @@
1472 s = expected_children.setdefault(entry.parent_id, [])1472 s = expected_children.setdefault(entry.parent_id, [])
1473 s.append(entry.file_id)1473 s.append(entry.file_id)
1474 val_children = dict((k, sorted(v)) for k, v1474 val_children = dict((k, sorted(v)) for k, v
1475 in val_children.iteritems())1475 in val_children.items())
1476 expected_children = dict((k, sorted(v)) for k, v1476 expected_children = dict((k, sorted(v)) for k, v
1477 in expected_children.iteritems())1477 in expected_children.items())
1478 self.assertEqual(expected_children, val_children)1478 self.assertEqual(expected_children, val_children)
14791479
1480 def test_make_simple_inventory(self):1480 def test_make_simple_inventory(self):
14811481
=== modified file 'breezy/tests/test_knit.py'
--- breezy/tests/test_knit.py 2017-05-25 01:35:55 +0000
+++ breezy/tests/test_knit.py 2017-06-05 21:41:38 +0000
@@ -626,7 +626,7 @@
626 self.fail('Annotation was not identical with reloading.')626 self.fail('Annotation was not identical with reloading.')
627 # Now delete the packs-in-use, which should trigger another reload, but627 # Now delete the packs-in-use, which should trigger another reload, but
628 # this time we just raise an exception because we can't recover628 # this time we just raise an exception because we can't recover
629 for trans, name in vf._access._indices.itervalues():629 for trans, name in vf._access._indices.values():
630 trans.delete(name)630 trans.delete(name)
631 self.assertRaises(errors.NoSuchFile, vf.annotate, key)631 self.assertRaises(errors.NoSuchFile, vf.annotate, key)
632 self.assertEqual([2, 1, 1], reload_counter)632 self.assertEqual([2, 1, 1], reload_counter)
@@ -639,7 +639,7 @@
639 self.assertEqual([1, 1, 0], reload_counter)639 self.assertEqual([1, 1, 0], reload_counter)
640 # Now delete the packs-in-use, which should trigger another reload, but640 # Now delete the packs-in-use, which should trigger another reload, but
641 # this time we just raise an exception because we can't recover641 # this time we just raise an exception because we can't recover
642 for trans, name in vf._access._indices.itervalues():642 for trans, name in vf._access._indices.values():
643 trans.delete(name)643 trans.delete(name)
644 self.assertRaises(errors.NoSuchFile, vf._get_record_map, keys)644 self.assertRaises(errors.NoSuchFile, vf._get_record_map, keys)
645 self.assertEqual([2, 1, 1], reload_counter)645 self.assertEqual([2, 1, 1], reload_counter)
@@ -658,7 +658,7 @@
658 self.assertEqual(('rev-3',), record.key)658 self.assertEqual(('rev-3',), record.key)
659 self.assertEqual([1, 1, 0], reload_counter)659 self.assertEqual([1, 1, 0], reload_counter)
660 # Now delete all pack files, and see that we raise the right error660 # Now delete all pack files, and see that we raise the right error
661 for trans, name in vf._access._indices.itervalues():661 for trans, name in vf._access._indices.values():
662 trans.delete(name)662 trans.delete(name)
663 self.assertListRaises(errors.NoSuchFile,663 self.assertListRaises(errors.NoSuchFile,
664 vf.get_record_stream, keys, 'topological', False)664 vf.get_record_stream, keys, 'topological', False)
@@ -682,7 +682,7 @@
682 self.assertEqual(plain_lines, reload_lines)682 self.assertEqual(plain_lines, reload_lines)
683 self.assertEqual(21, len(plain_lines))683 self.assertEqual(21, len(plain_lines))
684 # Now delete all pack files, and see that we raise the right error684 # Now delete all pack files, and see that we raise the right error
685 for trans, name in vf._access._indices.itervalues():685 for trans, name in vf._access._indices.values():
686 trans.delete(name)686 trans.delete(name)
687 self.assertListRaises(errors.NoSuchFile,687 self.assertListRaises(errors.NoSuchFile,
688 vf.iter_lines_added_or_present_in_keys, keys)688 vf.iter_lines_added_or_present_in_keys, keys)
689689
=== modified file 'breezy/tests/test_merge.py'
--- breezy/tests/test_merge.py 2017-05-22 00:56:52 +0000
+++ breezy/tests/test_merge.py 2017-06-05 21:41:38 +0000
@@ -1043,7 +1043,7 @@
10431043
1044 def assertPruneTails(self, pruned_map, tails, parent_map):1044 def assertPruneTails(self, pruned_map, tails, parent_map):
1045 child_map = {}1045 child_map = {}
1046 for key, parent_keys in parent_map.iteritems():1046 for key, parent_keys in parent_map.items():
1047 child_map.setdefault(key, [])1047 child_map.setdefault(key, [])
1048 for pkey in parent_keys:1048 for pkey in parent_keys:
1049 child_map.setdefault(pkey, []).append(key)1049 child_map.setdefault(pkey, []).append(key)
10501050
=== modified file 'breezy/tests/test_merge_core.py'
--- breezy/tests/test_merge_core.py 2017-05-22 00:56:52 +0000
+++ breezy/tests/test_merge_core.py 2017-06-05 21:41:38 +0000
@@ -179,7 +179,7 @@
179179
180 def apply_inv_change(self, inventory_change, orig_inventory):180 def apply_inv_change(self, inventory_change, orig_inventory):
181 orig_inventory_by_path = {}181 orig_inventory_by_path = {}
182 for file_id, path in orig_inventory.iteritems():182 for file_id, path in orig_inventory.items():
183 orig_inventory_by_path[path] = file_id183 orig_inventory_by_path[path] = file_id
184184
185 def parent_id(file_id):185 def parent_id(file_id):
@@ -203,13 +203,13 @@
203 return pathjoin(dirname, os.path.basename(orig_inventory[file_id]))203 return pathjoin(dirname, os.path.basename(orig_inventory[file_id]))
204204
205 new_inventory = {}205 new_inventory = {}
206 for file_id in orig_inventory.iterkeys():206 for file_id in orig_inventory:
207 path = new_path(file_id)207 path = new_path(file_id)
208 if path is None:208 if path is None:
209 continue209 continue
210 new_inventory[file_id] = path210 new_inventory[file_id] = path
211211
212 for file_id, path in inventory_change.iteritems():212 for file_id, path in inventory_change.items():
213 if file_id in orig_inventory:213 if file_id in orig_inventory:
214 continue214 continue
215 new_inventory[file_id] = path215 new_inventory[file_id] = path
216216
=== modified file 'breezy/tests/test_remote.py'
--- breezy/tests/test_remote.py 2017-05-30 19:16:23 +0000
+++ breezy/tests/test_remote.py 2017-06-05 21:41:38 +0000
@@ -3928,13 +3928,13 @@
3928 # the public implementation of get_parent_map obeys stacking3928 # the public implementation of get_parent_map obeys stacking
3929 _, branch = self.prepare_stacked_remote_branch()3929 _, branch = self.prepare_stacked_remote_branch()
3930 repo = branch.repository3930 repo = branch.repository
3931 self.assertEqual(['rev1'], repo.get_parent_map(['rev1']).keys())3931 self.assertEqual({'rev1'}, set(repo.get_parent_map(['rev1'])))
39323932
3933 def test_unstacked_get_parent_map(self):3933 def test_unstacked_get_parent_map(self):
3934 # _unstacked_provider.get_parent_map ignores stacking3934 # _unstacked_provider.get_parent_map ignores stacking
3935 _, branch = self.prepare_stacked_remote_branch()3935 _, branch = self.prepare_stacked_remote_branch()
3936 provider = branch.repository._unstacked_provider3936 provider = branch.repository._unstacked_provider
3937 self.assertEqual([], provider.get_parent_map(['rev1']).keys())3937 self.assertEqual(set(), set(provider.get_parent_map(['rev1'])))
39383938
3939 def fetch_stream_to_rev_order(self, stream):3939 def fetch_stream_to_rev_order(self, stream):
3940 result = []3940 result = []
39413941
=== modified file 'breezy/tests/test_versionedfile.py'
--- breezy/tests/test_versionedfile.py 2017-05-25 01:35:55 +0000
+++ breezy/tests/test_versionedfile.py 2017-06-05 21:41:38 +0000
@@ -94,9 +94,9 @@
94 # such, it should end up in the various caches94 # such, it should end up in the various caches
95 gen._process_one_record(record.key, record.get_bytes_as('chunked'))95 gen._process_one_record(record.key, record.get_bytes_as('chunked'))
96 # The chunks should be cached, the refcount untouched96 # The chunks should be cached, the refcount untouched
97 self.assertEqual([('one',)], gen.chunks.keys())97 self.assertEqual({('one',)}, set(gen.chunks))
98 self.assertEqual({('one',): 2, ('two',): 1}, gen.refcounts)98 self.assertEqual({('one',): 2, ('two',): 1}, gen.refcounts)
99 self.assertEqual([], gen.diffs.keys())99 self.assertEqual(set(), set(gen.diffs))
100 # Next we get 'two', which is something we output, but also needed for100 # Next we get 'two', which is something we output, but also needed for
101 # three101 # three
102 record = next(stream)102 record = next(stream)
@@ -105,10 +105,9 @@
105 # Both are now cached, and the diff for two has been extracted, and105 # Both are now cached, and the diff for two has been extracted, and
106 # one's refcount has been updated. two has been removed from the106 # one's refcount has been updated. two has been removed from the
107 # parent_map107 # parent_map
108 self.assertEqual(sorted([('one',), ('two',)]),108 self.assertEqual({('one',), ('two',)}, set(gen.chunks))
109 sorted(gen.chunks.keys()))
110 self.assertEqual({('one',): 1, ('two',): 1}, gen.refcounts)109 self.assertEqual({('one',): 1, ('two',): 1}, gen.refcounts)
111 self.assertEqual([('two',)], gen.diffs.keys())110 self.assertEqual({('two',)}, set(gen.diffs))
112 self.assertEqual({('three',): (('one',), ('two',))},111 self.assertEqual({('three',): (('one',), ('two',))},
113 gen.parent_map)112 gen.parent_map)
114 # Finally 'three', which allows us to remove all parents from the113 # Finally 'three', which allows us to remove all parents from the
@@ -118,10 +117,9 @@
118 gen._process_one_record(record.key, record.get_bytes_as('chunked'))117 gen._process_one_record(record.key, record.get_bytes_as('chunked'))
119 # Both are now cached, and the diff for two has been extracted, and118 # Both are now cached, and the diff for two has been extracted, and
120 # one's refcount has been updated119 # one's refcount has been updated
121 self.assertEqual([], gen.chunks.keys())120 self.assertEqual(set(), set(gen.chunks))
122 self.assertEqual({}, gen.refcounts)121 self.assertEqual({}, gen.refcounts)
123 self.assertEqual(sorted([('two',), ('three',)]),122 self.assertEqual({('two',), ('three',)}, set(gen.diffs))
124 sorted(gen.diffs.keys()))
125123
126 def test_compute_diffs(self):124 def test_compute_diffs(self):
127 vf = self.make_three_vf()125 vf = self.make_three_vf()
128126
=== modified file 'breezy/transform.py'
--- breezy/transform.py 2017-05-30 19:32:13 +0000
+++ breezy/transform.py 2017-06-05 21:41:38 +0000
@@ -61,6 +61,10 @@
     splitpath,
     )
 from .progress import ProgressPhase
+from .sixish import (
+    viewitems,
+    viewvalues,
+    )


 ROOT_PARENT = "root-parent"
@@ -228,8 +232,8 @@
         irrelevant.

         """
-        new_roots = [k for k, v in self._new_parent.iteritems() if v ==
-                     ROOT_PARENT]
+        new_roots = [k for k, v in viewitems(self._new_parent)
+                     if v == ROOT_PARENT]
         if len(new_roots) < 1:
             return
         if len(new_roots) != 1:
@@ -479,7 +483,7 @@
         file_id = self.tree_file_id(trans_id)
         if file_id is not None:
             return file_id
-        for key, value in self._non_present_ids.iteritems():
+        for key, value in viewitems(self._non_present_ids):
             if value == trans_id:
                 return key

@@ -509,9 +513,9 @@
         Only new paths and parents of tree files with assigned ids are used.
         """
         by_parent = {}
-        items = list(self._new_parent.iteritems())
-        items.extend((t, self.final_parent(t)) for t in
-                     self._tree_id_paths.keys())
+        items = list(viewitems(self._new_parent))
+        items.extend((t, self.final_parent(t))
+                     for t in list(self._tree_id_paths))
         for trans_id, parent_id in items:
             if parent_id not in by_parent:
                 by_parent[parent_id] = set()
@@ -555,7 +559,7 @@
         Active parents are those which gain children, and those which are
         removed. This is a necessary first step in detecting conflicts.
         """
-        parents = self.by_parent().keys()
+        parents = list(self.by_parent())
         parents.extend([t for t in self._removed_contents if
                         self.tree_kind(t) == 'directory'])
         for trans_id in self._removed_id:
@@ -634,7 +638,7 @@
     def _unversioned_parents(self, by_parent):
         """If parent directories are versioned, children must be versioned."""
         conflicts = []
-        for parent_id, children in by_parent.iteritems():
+        for parent_id, children in viewitems(by_parent):
             if parent_id == ROOT_PARENT:
                 continue
             if self.final_file_id(parent_id) is not None:
@@ -651,7 +655,7 @@
         However, existing entries with no contents are okay.
         """
         conflicts = []
-        for trans_id in self._new_id.iterkeys():
+        for trans_id in self._new_id:
             kind = self.final_kind(trans_id)
             if kind is None:
                 conflicts.append(('versioning no contents', trans_id))
@@ -693,7 +697,7 @@
         conflicts = []
         if (self._new_name, self._new_parent) == ({}, {}):
             return conflicts
-        for children in by_parent.itervalues():
+        for children in viewvalues(by_parent):
             name_ids = []
             for child_tid in children:
                 name = self.final_name(child_tid)
@@ -724,7 +728,7 @@
             self._removed_id))
         all_ids = self._tree.all_file_ids()
         active_tree_ids = all_ids.difference(removed_tree_ids)
-        for trans_id, file_id in self._new_id.iteritems():
+        for trans_id, file_id in viewitems(self._new_id):
             if file_id in active_tree_ids:
                 old_trans_id = self.trans_id_tree_file_id(file_id)
                 conflicts.append(('duplicate id', old_trans_id, trans_id))
@@ -733,7 +737,7 @@
     def _parent_type_conflicts(self, by_parent):
         """Children must have a directory parent"""
         conflicts = []
-        for parent_id, children in by_parent.iteritems():
+        for parent_id, children in viewitems(by_parent):
             if parent_id == ROOT_PARENT:
                 continue
             no_children = True
@@ -868,12 +872,12 @@
     def _affected_ids(self):
         """Return the set of transform ids affected by the transform"""
         trans_ids = set(self._removed_id)
-        trans_ids.update(self._new_id.keys())
+        trans_ids.update(self._new_id)
         trans_ids.update(self._removed_contents)
-        trans_ids.update(self._new_contents.keys())
-        trans_ids.update(self._new_executability.keys())
-        trans_ids.update(self._new_name.keys())
-        trans_ids.update(self._new_parent.keys())
+        trans_ids.update(self._new_contents)
+        trans_ids.update(self._new_executability)
+        trans_ids.update(self._new_name)
+        trans_ids.update(self._new_parent)
         return trans_ids

     def _get_file_id_maps(self):
@@ -953,7 +957,7 @@
         from_trans_ids, to_trans_ids = self._get_file_id_maps()
         results = []
         # Now iterate through all active file_ids
-        for file_id in set(from_trans_ids.keys() + to_trans_ids.keys()):
+        for file_id in set(from_trans_ids).union(to_trans_ids):
             modified = False
             from_trans_id = from_trans_ids.get(file_id)
             # find file ids, and determine versioning state
@@ -1096,11 +1100,11 @@
         :param serializer: A Serialiser like pack.ContainerSerializer.
         """
         new_name = dict((k, v.encode('utf-8')) for k, v in
-                        self._new_name.items())
+                        viewitems(self._new_name))
         new_executability = dict((k, int(v)) for k, v in
-                                 self._new_executability.items())
+                                 viewitems(self._new_executability))
         tree_path_ids = dict((k.encode('utf-8'), v)
-                             for k, v in self._tree_path_ids.items())
+                             for k, v in viewitems(self._tree_path_ids))
         attribs = {
             '_id_number': self._id_number,
             '_new_name': new_name,
@@ -1114,7 +1118,7 @@
             }
         yield serializer.bytes_record(bencode.bencode(attribs),
                                       (('attribs',),))
-        for trans_id, kind in self._new_contents.items():
+        for trans_id, kind in viewitems(self._new_contents):
             if kind == 'file':
                 lines = osutils.chunks_to_lines(
                     self._read_file_chunks(trans_id))
@@ -1137,15 +1141,15 @@
         attribs = bencode.bdecode(content)
         self._id_number = attribs['_id_number']
         self._new_name = dict((k, v.decode('utf-8'))
-                              for k, v in attribs['_new_name'].items())
+                              for k, v in viewitems(attribs['_new_name']))
         self._new_parent = attribs['_new_parent']
-        self._new_executability = dict((k, bool(v)) for k, v in
-            attribs['_new_executability'].items())
+        self._new_executability = dict((k, bool(v))
+            for k, v in viewitems(attribs['_new_executability']))
         self._new_id = attribs['_new_id']
-        self._r_new_id = dict((v, k) for k, v in self._new_id.items())
+        self._r_new_id = dict((v, k) for k, v in viewitems(self._new_id))
         self._tree_path_ids = {}
         self._tree_id_paths = {}
-        for bytepath, trans_id in attribs['_tree_path_ids'].items():
+        for bytepath, trans_id in viewitems(attribs['_tree_path_ids']):
             path = bytepath.decode('utf-8')
             self._tree_path_ids[path] = trans_id
             self._tree_id_paths[trans_id] = path
@@ -1201,9 +1205,9 @@
         if self._tree is None:
             return
         try:
-            limbo_paths = self._limbo_files.values() + list(
-                self._possibly_stale_limbo_files)
-            limbo_paths = sorted(limbo_paths, reverse=True)
+            limbo_paths = list(viewvalues(self._limbo_files))
+            limbo_paths.extend(self._possibly_stale_limbo_files)
+            limbo_paths.sort(reverse=True)
             for path in limbo_paths:
                 try:
                     delete_any(path)
@@ -1676,8 +1680,8 @@
                     in (trans_id, None)):
                 use_direct_path = True
         else:
-            for l_filename, l_trans_id in\
-                    self._limbo_children_names[parent].iteritems():
+            for l_filename, l_trans_id in viewitems(
+                    self._limbo_children_names[parent]):
                 if l_trans_id == trans_id:
                     continue
                 if l_filename.lower() == filename.lower():
@@ -1767,7 +1771,7 @@
         new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
                                  new_paths)
         entries = self._tree.iter_entries_by_dir(
-            new_path_file_ids.values())
+            viewvalues(new_path_file_ids))
         old_paths = dict((e.file_id, p) for p, e in entries)
         final_kinds = {}
         for num, (path, trans_id) in enumerate(new_paths):
@@ -1814,8 +1818,7 @@

         If inventory_delta is None, no inventory delta generation is performed.
         """
-        tree_paths = list(self._tree_path_ids.iteritems())
-        tree_paths.sort(reverse=True)
+        tree_paths = sorted(viewitems(self._tree_path_ids), reverse=True)
         child_pb = ui.ui_factory.nested_progress_bar()
         try:
             for num, (path, trans_id) in enumerate(tree_paths):
@@ -1907,7 +1910,7 @@
         # problems. (we could observe start time, and finish time, and if
         # it is less than eg 10% overhead, add a sleep call.)
         paths = FinalPaths(self)
-        for trans_id, observed in self._observed_sha1s.iteritems():
+        for trans_id, observed in viewitems(self._observed_sha1s):
             path = paths.get_path(trans_id)
             # We could get the file_id, but dirstate prefers to use the path
             # anyway, and it is 'cheaper' to determine.
@@ -2057,7 +2060,7 @@
         tree_ids = set(self._transform._tree.all_file_ids())
         tree_ids.difference_update(self._transform.tree_file_id(t)
                                    for t in self._transform._removed_id)
-        tree_ids.update(self._transform._new_id.values())
+        tree_ids.update(viewvalues(self._transform._new_id))
         return tree_ids

     def __iter__(self):
@@ -2120,7 +2123,7 @@
             return children
         children = set(self._transform.iter_tree_children(trans_id))
         # children in the _new_parent set are provided by _by_parent.
-        children.difference_update(self._transform._new_parent.keys())
+        children.difference_update(self._transform._new_parent)
         children.update(self._by_parent.get(trans_id, []))
         self._all_children_cache[trans_id] = children
         return children

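
transform.py now imports viewitems and viewvalues from breezy.sixish instead of calling the Python-2-only iteritems/itervalues methods. Presumably these helpers mirror six.viewitems/six.viewvalues: on Python 2 they return the dict's view objects, on Python 3 the plain methods already do. A sketch of what such a shim typically looks like, assuming that structure (the actual breezy.sixish implementation may differ in detail):

    import sys

    if sys.version_info[0] >= 3:
        # Python 3: items()/values() already return lightweight views.
        def viewitems(d):
            return d.items()
        def viewvalues(d):
            return d.values()
    else:
        # Python 2: use the dedicated view methods to avoid building lists.
        def viewitems(d):
            return d.viewitems()
        def viewvalues(d):
            return d.viewvalues()
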
=== modified file 'breezy/transport/http/_urllib2_wrappers.py'
--- breezy/transport/http/_urllib2_wrappers.py 2017-06-01 23:13:43 +0000
+++ breezy/transport/http/_urllib2_wrappers.py 2017-06-05 21:41:38 +0000
@@ -190,7 +190,7 @@
     def getheaders(self):
         if self.headers is None:
             raise httplib.ResponseNotReady()
-        return self.headers.items()
+        return list(self.headers.items())


 class _ReportingFileSocket(object):
@@ -761,7 +761,7 @@
         # before sending the request. And not all versions of python 2.5 do
         # that. Since we replace urllib2.AbstractHTTPHandler.do_open we do it
         # ourself below.
-        headers = dict((name.title(), val) for name, val in headers.iteritems())
+        headers = dict((name.title(), val) for name, val in headers.items())

         try:
             method = request.get_method()
@@ -774,7 +774,7 @@
         if 'http' in debug.debug_flags:
             trace.mutter('> %s %s' % (method, url))
             hdrs = []
-            for k,v in headers.iteritems():
+            for k,v in headers.items():
                 # People are often told to paste -Dhttp output to help
                 # debug. Don't compromise credentials.
                 if k in ('Authorization', 'Proxy-Authorization'):

=== modified file 'breezy/transport/ssh.py'
--- breezy/transport/ssh.py 2017-05-22 00:56:52 +0000
+++ breezy/transport/ssh.py 2017-06-05 21:41:38 +0000
@@ -629,8 +629,8 @@
     try:
         f = open(bzr_hostkey_path, 'w')
         f.write('# SSH host keys collected by bzr\n')
-        for hostname, keys in BRZ_HOSTKEYS.iteritems():
-            for keytype, key in keys.iteritems():
+        for hostname, keys in BRZ_HOSTKEYS.items():
+            for keytype, key in keys.items():
                 f.write('%s %s %s\n' % (hostname, keytype, key.get_base64()))
         f.close()
     except IOError as e:

=== modified file 'breezy/tree.py'
--- breezy/tree.py 2017-06-04 18:09:30 +0000
+++ breezy/tree.py 2017-06-05 21:41:38 +0000
@@ -42,6 +42,9 @@

 from .decorators import needs_read_lock
 from .inter import InterObject
+from .sixish import (
+    viewvalues,
+    )


 class Tree(object):
@@ -864,12 +867,12 @@
     @needs_read_lock
     def iter_child_entries(self, file_id, path=None):
         inv, inv_file_id = self._unpack_file_id(file_id)
-        return inv[inv_file_id].children.itervalues()
+        return iter(viewvalues(inv[inv_file_id].children))

     def iter_children(self, file_id, path=None):
         """See Tree.iter_children."""
         entry = self.iter_entries_by_dir([file_id]).next()[1]
-        for child in getattr(entry, 'children', {}).itervalues():
+        for child in viewvalues(getattr(entry, 'children', {})):
             yield child.file_id


@@ -1592,7 +1595,7 @@
             # might ensure better ordering, in case a caller strictly
             # requires parents before children.
             for idx, other_extra in enumerate(self._others_extra):
-                others = sorted(other_extra.itervalues(),
+                others = sorted(viewvalues(other_extra),
                                 key=lambda x: self._path_to_key(x[0]))
                 for other_path, other_ie in others:
                     file_id = other_ie.file_id

=== modified file 'breezy/tsort.py'
--- breezy/tsort.py 2017-05-22 00:56:52 +0000
+++ breezy/tsort.py 2017-06-05 21:41:38 +0000
@@ -363,7 +363,7 @@
         # we need to do a check late in the process to detect end-of-merges
         # which requires the parents to be accessible: its easier for now
         # to just keep the original graph around.
-        self._original_graph = dict(self._graph.items())
+        self._original_graph = self._graph.copy()
         # we need to know the revision numbers of revisions to determine
         # the revision numbers of their descendants
         # this is a graph from node to [revno_tuple, first_child]

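
The tsort change swaps dict(self._graph.items()) for self._graph.copy(). Both produce a shallow copy of the mapping; .copy() simply avoids materialising an intermediate item sequence. A small demonstration with made-up graph data:

    graph = {'rev-2': ['rev-1'], 'rev-3': ['rev-2']}
    a = dict(graph.items())              # shallow copy via an item sequence
    b = graph.copy()                     # shallow copy, no intermediate list
    assert a == b == graph
    assert b['rev-2'] is graph['rev-2']  # shallow: values are shared
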
=== modified file 'breezy/uncommit.py'
--- breezy/uncommit.py 2017-05-22 00:56:52 +0000
+++ breezy/uncommit.py 2017-06-05 21:41:38 +0000
@@ -40,7 +40,7 @@
     reverse_tags = branch.tags.get_reverse_tag_dict()
     ancestors = graph.find_unique_ancestors(old_tip, parents)
     removed_tags = []
-    for revid, tags in reverse_tags.iteritems():
+    for revid, tags in reverse_tags.items():
         if not revid in ancestors:
             continue
         for tag in tags:

=== modified file 'breezy/urlutils.py'
--- breezy/urlutils.py 2017-06-04 18:09:30 +0000
+++ breezy/urlutils.py 2017-06-05 21:41:38 +0000
@@ -578,7 +578,7 @@
     (base, existing_parameters) = split_segment_parameters(url)
     new_parameters = {}
     new_parameters.update(existing_parameters)
-    for key, value in parameters.iteritems():
+    for key, value in parameters.items():
         if not isinstance(key, str):
             raise TypeError("parameter key %r is not a bytestring" % key)
         if not isinstance(value, str):

=== modified file 'breezy/versionedfile.py'
--- breezy/versionedfile.py 2017-05-25 21:59:11 +0000
+++ breezy/versionedfile.py 2017-06-05 21:41:38 +0000
@@ -44,6 +44,8 @@
 from .registry import Registry
 from .sixish import (
     BytesIO,
+    viewitems,
+    viewvalues,
     zip,
     )
 from .textmerge import TextMerge
@@ -250,7 +252,7 @@
         refcounts = {}
         setdefault = refcounts.setdefault
         just_parents = set()
-        for child_key, parent_keys in parent_map.iteritems():
+        for child_key, parent_keys in viewitems(parent_map):
             if not parent_keys:
                 # parent_keys may be None if a given VersionedFile claims to
                 # not support graph operations.
@@ -491,7 +493,7 @@
         except KeyError:
             raise errors.RevisionNotPresent(version_id, self)
         # We need to filter out ghosts, because we can't diff against them.
-        knit_versions = set(self.get_parent_map(knit_versions).keys())
+        knit_versions = set(self.get_parent_map(knit_versions))
         lines = dict(zip(knit_versions,
                          self._get_lf_split_line_list(knit_versions)))
         diffs = []
@@ -535,7 +537,7 @@
         for version, parent_ids, expected_sha1, mpdiff in records:
             needed_parents.update(p for p in parent_ids
                                   if not mpvf.has_version(p))
-        present_parents = set(self.get_parent_map(needed_parents).keys())
+        present_parents = set(self.get_parent_map(needed_parents))
         for parent_id, lines in zip(present_parents,
                                     self._get_lf_split_line_list(present_parents)):
             mpvf.add_version(lines, parent_id, [])
@@ -1095,7 +1097,7 @@
             this_parent_map = self.get_parent_map(pending)
             parent_map.update(this_parent_map)
             pending = set(itertools.chain.from_iterable(
-                this_parent_map.itervalues()))
+                viewvalues(this_parent_map)))
             pending.difference_update(parent_map)
         kg = _mod_graph.KnownGraph(parent_map)
         return kg
@@ -1297,11 +1299,11 @@
         """
         prefixes = self._partition_keys(keys)
         result = {}
-        for prefix, suffixes in prefixes.items():
+        for prefix, suffixes in viewitems(prefixes):
             path = self._mapper.map(prefix)
             vf = self._get_vf(path)
             parent_map = vf.get_parent_map(suffixes)
-            for key, parents in parent_map.items():
+            for key, parents in viewitems(parent_map):
                 result[prefix + (key,)] = tuple(
                     prefix + (parent,) for parent in parents)
         return result
@@ -1353,7 +1355,7 @@
     def _iter_keys_vf(self, keys):
         prefixes = self._partition_keys(keys)
         sha1s = {}
-        for prefix, suffixes in prefixes.items():
+        for prefix, suffixes in viewitems(prefixes):
             path = self._mapper.map(prefix)
             vf = self._get_vf(path)
             yield prefix, suffixes, vf
@@ -1363,7 +1365,7 @@
         sha1s = {}
         for prefix,suffixes, vf in self._iter_keys_vf(keys):
             vf_sha1s = vf.get_sha1s(suffixes)
-            for suffix, sha1 in vf_sha1s.iteritems():
+            for suffix, sha1 in viewitems(vf_sha1s):
                 sha1s[prefix + (suffix,)] = sha1
         return sha1s

@@ -1553,7 +1555,7 @@
         result.update(
             _mod_graph.StackedParentsProvider(
                 self._providers).get_parent_map(keys))
-        for key, parents in result.iteritems():
+        for key, parents in viewitems(result):
             if parents == ():
                 result[key] = (revision.NULL_REVISION,)
         return result
@@ -1732,8 +1734,8 @@

     def get_parent_map(self, keys):
         """See VersionedFiles.get_parent_map."""
-        return dict([((k,), tuple([(p,) for p in v]))
-            for k,v in self._get_parent_map([k for (k,) in keys]).iteritems()])
+        parent_view = viewitems(self._get_parent_map(k for (k,) in keys))
+        return dict(((k,), tuple((p,) for p in v)) for k, v in parent_view)

     def get_sha1s(self, keys):
         """See VersionedFiles.get_sha1s."""
@@ -1889,7 +1891,7 @@
         # gc-optimal ordering is approximately reverse topological,
         # properly grouped by file-id.
         per_prefix_map = {}
-        for item in parent_map.iteritems():
+        for item in viewitems(parent_map):
             key = item[0]
             if isinstance(key, str) or len(key) == 1:
                 prefix = ''
@@ -1936,9 +1938,9 @@

     def get_new_keys(self):
         return self.new_keys
-    
+
     def get_unsatisfied_refs(self):
-        return self.refs.iterkeys()
+        return self.refs.keys()

     def _satisfy_refs_for_key(self, key):
         try:
@@ -1958,10 +1960,7 @@
             self._satisfy_refs_for_key(key)

     def get_referrers(self):
-        result = set()
-        for referrers in self.refs.itervalues():
-            result.update(referrers)
-        return result
+        return set(itertools.chain.from_iterable(viewvalues(self.refs)))



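
The get_referrers rewrite above collapses a hand-rolled accumulation loop into a single set(itertools.chain.from_iterable(...)) expression, a pattern this branch applies in several files. The two forms are equivalent, as this standalone example with made-up keys shows:

    import itertools

    refs = {('rev-1',): {('rev-2',), ('rev-3',)},
            ('rev-4',): {('rev-3',)}}

    # Old shape: accumulate each value collection by hand.
    result = set()
    for referrers in refs.values():
        result.update(referrers)

    # New shape: flatten all the value collections in one expression.
    flat = set(itertools.chain.from_iterable(refs.values()))
    assert flat == result == {('rev-2',), ('rev-3',)}
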
=== modified file 'breezy/vf_repository.py'
--- breezy/vf_repository.py 2017-06-04 18:09:30 +0000
+++ breezy/vf_repository.py 2017-06-05 21:41:38 +0000
@@ -75,6 +75,8 @@

 from .sixish import (
     range,
+    viewitems,
+    viewvalues,
     )

 from .trace import (
@@ -434,7 +436,7 @@
         # XXX: Friction: parent_candidates should return a list not a dict
         # so that we don't have to walk the inventories again.
         parent_candidate_entries = ie.parent_candidates(parent_invs)
-        head_set = self._heads(ie.file_id, parent_candidate_entries.keys())
+        head_set = self._heads(ie.file_id, parent_candidate_entries)
         heads = []
         for inv in parent_invs:
             if inv.has_id(ie.file_id):
@@ -692,12 +694,12 @@
         seen_root = False # Is the root in the basis delta?
         inv_delta = self._basis_delta
         modified_rev = self._new_revision_id
-        for change, head_candidates in changes.values():
+        for change, head_candidates in viewvalues(changes):
             if change[3][1]: # versioned in target.
                 # Several things may be happening here:
                 # We may have a fork in the per-file graph
                 #  - record a change with the content from tree
-                # We may have a change against < all trees 
+                # We may have a change against < all trees
                 #  - carry over the tree that hasn't changed
                 # We may have a change against all trees
                 #  - record the change with the content from tree
@@ -1196,7 +1198,7 @@
         graph = self.get_graph()
         parent_map = graph.get_parent_map(revision_ids)
         # The old API returned a list, should this actually be a set?
-        return parent_map.keys()
+        return list(parent_map)

     def __init__(self, _format, a_bzrdir, control_files):
         """Instantiate a VersionedFileRepository.
@@ -1351,7 +1353,7 @@
         referrers = frozenset(r[0] for r in key_deps.get_referrers())
         file_ids = self.fileids_altered_by_revision_ids(referrers)
         missing_texts = set()
-        for file_id, version_ids in file_ids.iteritems():
+        for file_id, version_ids in viewitems(file_ids):
             missing_texts.update(
                 (file_id, version_id) for version_id in version_ids)
         present_texts = self.texts.get_parent_map(missing_texts)
@@ -1499,8 +1501,7 @@
         revision_ids. Each altered file-ids has the exact revision_ids that
         altered it listed explicitly.
         """
-        seen = set(self._serializer._find_text_key_references(
-            line_iterator).iterkeys())
+        seen = set(self._serializer._find_text_key_references(line_iterator))
         parent_keys = self._find_parent_keys_of_revisions(revision_keys)
         parent_seen = set(self._serializer._find_text_key_references(
             self._inventory_xml_lines_for_keys(parent_keys)))
@@ -1520,7 +1521,7 @@
         """
         parent_map = self.revisions.get_parent_map(revision_keys)
         parent_keys = set(itertools.chain.from_iterable(
-            parent_map.itervalues()))
+            viewvalues(parent_map)))
         parent_keys.difference_update(revision_keys)
         parent_keys.discard(_mod_revision.NULL_REVISION)
         return parent_keys
@@ -1603,7 +1604,7 @@
         # a cache of the text keys to allow reuse; costs a dict of all the
         # keys, but saves a 2-tuple for every child of a given key.
         text_key_cache = {}
-        for text_key, valid in text_key_references.iteritems():
+        for text_key, valid in viewitems(text_key_references):
             if not valid:
                 invalid_keys.add(text_key)
             else:
@@ -1705,7 +1706,7 @@
         file_ids = self.fileids_altered_by_revision_ids(revision_ids, inv_w)
         count = 0
         num_file_ids = len(file_ids)
-        for file_id, altered_versions in file_ids.iteritems():
+        for file_id, altered_versions in viewitems(file_ids):
             if pb is not None:
                 pb.update(gettext("Fetch texts"), count, num_file_ids)
             count += 1
@@ -1880,8 +1881,8 @@
                 raise ValueError('get_parent_map(None) is not valid')
             else:
                 query_keys.append((revision_id ,))
-        for ((revision_id,), parent_keys) in \
-                self.revisions.get_parent_map(query_keys).iteritems():
+        for (revision_id,), parent_keys in viewitems(
+                self.revisions.get_parent_map(query_keys)):
             if parent_keys:
                 result[revision_id] = tuple([parent_revid
                     for (parent_revid,) in parent_keys])
@@ -1905,10 +1906,8 @@

     def revision_ids_to_search_result(self, result_set):
         """Convert a set of revision ids to a graph SearchResult."""
-        result_parents = set()
-        for parents in self.get_graph().get_parent_map(
-                result_set).itervalues():
-            result_parents.update(parents)
+        result_parents = set(itertools.chain.from_iterable(viewvalues(
+            self.get_graph().get_parent_map(result_set))))
         included_keys = result_set.intersection(result_parents)
         start_keys = result_set.difference(included_keys)
         exclude_keys = result_parents.difference(result_set)
@@ -2334,7 +2333,7 @@
             raise AssertionError(
                 'cannot copy revisions to fill in missing deltas %s' % (
                     keys['revisions'],))
-        for substream_kind, keys in keys.iteritems():
+        for substream_kind, keys in viewitems(keys):
             vf = getattr(self.from_repository, substream_kind)
             if vf is None and keys:
                 raise AssertionError(
@@ -2531,8 +2530,7 @@
     def _check_file_version_parents(self, texts, progress_bar):
         """See check_file_version_parents."""
        wrong_parents = {}
-        self.file_ids = {file_id for file_id, _ in
-            self.text_index.iterkeys()}
+        self.file_ids = {file_id for file_id, _ in self.text_index}
         # text keys is now grouped by file_id
         n_versions = len(self.text_index)
         progress_bar.update(gettext('loading text store'), 0, n_versions)
@@ -2540,7 +2538,7 @@
         # On unlistable transports this could well be empty/error...
         text_keys = self.repository.texts.keys()
         unused_keys = frozenset(text_keys) - set(self.text_index)
-        for num, key in enumerate(self.text_index.iterkeys()):
+        for num, key in enumerate(self.text_index):
             progress_bar.update(gettext('checking text graph'), num, n_versions)
             correct_parents = self.calculate_file_version_parents(key)
             try:
@@ -2814,11 +2812,10 @@
         source may be not have _fallback_repositories even though it is
         stacked.)
         """
-        parent_revs = set()
-        for parents in parent_map.values():
-            parent_revs.update(parents)
+        parent_revs = set(itertools.chain.from_iterable(viewvalues(
+            parent_map)))
         present_parents = self.source.get_parent_map(parent_revs)
-        absent_parents = set(parent_revs).difference(present_parents)
+        absent_parents = parent_revs.difference(present_parents)
         parent_invs_keys_for_stacking = self.source.inventories.get_parent_map(
             (rev_id,) for rev_id in absent_parents)
         parent_inv_ids = [key[-1] for key in parent_invs_keys_for_stacking]
@@ -3154,7 +3151,7 @@
         # commit to determine parents. There is a latent/real bug here where
         # the parents inserted are not those commit would do - in particular
         # they are not filtered by heads(). RBC, AB
-        for revision, tree in parent_trees.iteritems():
+        for revision, tree in viewitems(parent_trees):
             if not tree.has_id(ie.file_id):
                 continue
             parent_id = tree.get_file_revision(ie.file_id)

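
Several hunks above drop an explicit .keys() call, e.g. set(parent_map) in place of set(parent_map.keys()), and parent_candidate_entries is now passed straight to self._heads(). This relies on the fact that iterating a dict yields its keys on both Python 2 and 3, so any keys-as-iterable use works without the method call. For example:

    d = {'a': 1, 'b': 2}
    assert set(d) == {'a', 'b'}           # iteration yields keys
    assert sorted(d) == sorted(d.keys())  # list()/sorted() snapshot keys portably
    assert 'a' in d                       # membership also tests keys
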
=== modified file 'breezy/vf_search.py'
--- breezy/vf_search.py 2017-05-25 01:35:55 +0000
+++ breezy/vf_search.py 2017-06-05 21:41:38 +0000
@@ -18,6 +18,8 @@

 from __future__ import absolute_import

+import itertools
+
 from . import (
     debug,
     revision,
@@ -29,11 +31,14 @@
     Graph,
     invert_parent_map,
     )
+from .sixish import (
+    viewvalues,
+    )


 class AbstractSearchResult(object):
     """The result of a search, describing a set of keys.
-    
+
     Search results are typically used as the 'fetch_spec' parameter when
     fetching revisions.

@@ -380,9 +385,7 @@
     # start_set is all the keys in the cache
     start_set = set(parent_map)
     # result set is all the references to keys in the cache
-    result_parents = set()
-    for parents in parent_map.itervalues():
-        result_parents.update(parents)
+    result_parents = set(itertools.chain.from_iterable(viewvalues(parent_map)))
     stop_keys = result_parents.difference(start_set)
     # We don't need to send ghosts back to the server as a position to
    # stop either.
@@ -420,14 +423,14 @@
                 next_revs = next(s)
             except StopIteration:
                 break
-            for parents in s._current_parents.itervalues():
+            for parents in viewvalues(s._current_parents):
                 f_heads = heads.intersection(parents)
                 if f_heads:
                     found_heads.update(f_heads)
             stop_keys = exclude_keys.intersection(next_revs)
             if stop_keys:
                 s.stop_searching_any(stop_keys)
-            for parents in s._current_parents.itervalues():
+            for parents in viewvalues(s._current_parents):
                 f_heads = heads.intersection(parents)
                 if f_heads:
                     found_heads.update(f_heads)

=== modified file 'breezy/weave.py'
--- breezy/weave.py 2017-06-04 18:09:30 +0000
+++ breezy/weave.py 2017-06-05 21:41:38 +0000
@@ -990,7 +990,7 @@
     # map from version name -> all parent names
     combined_parents = _reweave_parent_graphs(wa, wb)
     mutter("combined parents: %r", combined_parents)
-    order = tsort.topo_sort(combined_parents.iteritems())
+    order = tsort.topo_sort(combined_parents.items())
     mutter("order to reweave: %r", order)

     if pb and not msg:

=== modified file 'breezy/workingtree.py'
--- breezy/workingtree.py 2017-05-30 19:32:13 +0000
+++ breezy/workingtree.py 2017-06-05 21:41:38 +0000
@@ -2343,9 +2343,9 @@
     @needs_tree_write_lock
     def set_merge_modified(self, modified_hashes):
         def iter_stanzas():
-            for file_id, hash in modified_hashes.iteritems():
+            for file_id in modified_hashes:
                 yield _mod_rio.Stanza(file_id=file_id.decode('utf8'),
-                                      hash=hash)
+                                      hash=modified_hashes[file_id])
         self._put_rio('merge-hashes', iter_stanzas(), MERGE_MODIFIED_HEADER_1)

     @needs_read_lock

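
The set_merge_modified hunk sidesteps iteritems() by iterating the mapping's keys and indexing back into it, rather than importing a view helper; both spellings yield the same pairs. An illustration with made-up values:

    modified_hashes = {b'file-a': 'sha1-aaa', b'file-b': 'sha1-bbb'}

    # Iterating a dict yields keys, so indexing recovers the old
    # iteritems() pairing without any Python-2-only method.
    pairs = [(fid, modified_hashes[fid]) for fid in modified_hashes]
    assert dict(pairs) == modified_hashes
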
=== modified file 'breezy/workingtree_4.py'
--- breezy/workingtree_4.py 2017-05-30 19:32:13 +0000
+++ breezy/workingtree_4.py 2017-06-05 21:41:38 +0000
@@ -71,6 +71,7 @@
     )
 from .sixish import (
     BytesIO,
+    viewitems,
     )
 from .transport.local import LocalTransport
 from .tree import (
@@ -1018,7 +1019,7 @@
             raise errors.PathsNotVersionedError(
                 [p.decode('utf-8') for p in paths])

-        for dir_name_id, trees_info in found.iteritems():
+        for dir_name_id, trees_info in viewitems(found):
            for index in search_indexes:
                if trees_info[index][0] not in ('r', 'a'):
                    found_ids.add(dir_name_id[2])
