Merge lp:~gz/brz/dict_viewing into lp:brz

Proposed by Martin Packman
Status: Merged
Approved by: Martin Packman
Approved revision: no longer in the source branch.
Merge reported by: The Breezy Bot
Merged at revision: not available
Proposed branch: lp:~gz/brz/dict_viewing
Merge into: lp:brz
Diff against target: 3844 lines (+524/-481)
93 files modified
breezy/_annotator_py.py (+2/-1)
breezy/_known_graph_py.py (+10/-6)
breezy/branch.py (+4/-4)
breezy/branchbuilder.py (+4/-1)
breezy/btree_index.py (+8/-4)
breezy/builtins.py (+10/-8)
breezy/bundle/bundle_data.py (+4/-3)
breezy/bundle/commands.py (+2/-1)
breezy/bundle/serializer/v4.py (+2/-1)
breezy/bzrdir.py (+4/-4)
breezy/check.py (+8/-7)
breezy/chk_map.py (+37/-38)
breezy/chk_serializer.py (+3/-3)
breezy/commands.py (+7/-9)
breezy/config.py (+7/-7)
breezy/controldir.py (+1/-1)
breezy/dirstate.py (+7/-5)
breezy/export_pot.py (+1/-1)
breezy/fetch.py (+7/-5)
breezy/fifo_cache.py (+4/-4)
breezy/gpg.py (+1/-1)
breezy/graph.py (+20/-16)
breezy/groupcompress.py (+7/-6)
breezy/hashcache.py (+4/-5)
breezy/help.py (+4/-4)
breezy/help_topics/__init__.py (+1/-1)
breezy/index.py (+12/-11)
breezy/inventory.py (+21/-17)
breezy/knit.py (+15/-13)
breezy/log.py (+2/-2)
breezy/lru_cache.py (+8/-2)
breezy/mail_client.py (+2/-2)
breezy/merge.py (+4/-1)
breezy/mutabletree.py (+9/-4)
breezy/plugins/bash_completion/bashcomp.py (+4/-4)
breezy/plugins/bisect/tests.py (+3/-3)
breezy/plugins/fastimport/branch_updater.py (+1/-1)
breezy/plugins/fastimport/cache_manager.py (+7/-20)
breezy/plugins/fastimport/exporter.py (+1/-1)
breezy/plugins/fastimport/helpers.py (+2/-2)
breezy/plugins/fastimport/idmapfile.py (+3/-7)
breezy/plugins/fastimport/marks_file.py (+2/-2)
breezy/plugins/fastimport/processors/info_processor.py (+10/-11)
breezy/plugins/fastimport/revision_store.py (+1/-1)
breezy/plugins/launchpad/lp_api_lite.py (+1/-1)
breezy/plugins/launchpad/lp_registration.py (+2/-2)
breezy/plugins/stats/cmds.py (+13/-8)
breezy/plugins/weave_fmt/bzrdir.py (+3/-3)
breezy/reconcile.py (+1/-1)
breezy/registry.py (+4/-5)
breezy/remote.py (+10/-7)
breezy/rename_map.py (+7/-6)
breezy/repofmt/groupcompress_repo.py (+8/-4)
breezy/repofmt/knitpack_repo.py (+4/-3)
breezy/repofmt/pack_repo.py (+3/-3)
breezy/repository.py (+10/-7)
breezy/revision.py (+1/-1)
breezy/smart/bzrdir.py (+1/-1)
breezy/smart/client.py (+1/-1)
breezy/smtp_connection.py (+1/-1)
breezy/tag.py (+2/-2)
breezy/tests/per_branch/test_check.py (+1/-1)
breezy/tests/per_bzrdir/test_bzrdir.py (+1/-1)
breezy/tests/per_controldir/test_controldir.py (+1/-1)
breezy/tests/per_controldir_colo/test_supported.py (+2/-2)
breezy/tests/per_controldir_colo/test_unsupported.py (+1/-2)
breezy/tests/per_foreign_vcs/__init__.py (+1/-1)
breezy/tests/per_pack_repository.py (+4/-3)
breezy/tests/per_workingtree/test_parents.py (+2/-2)
breezy/tests/test_btree_index.py (+3/-3)
breezy/tests/test_bundle.py (+1/-1)
breezy/tests/test_fifo_cache.py (+29/-32)
breezy/tests/test_graph.py (+1/-1)
breezy/tests/test_groupcompress.py (+2/-2)
breezy/tests/test_inv.py (+3/-3)
breezy/tests/test_knit.py (+4/-4)
breezy/tests/test_merge.py (+1/-1)
breezy/tests/test_merge_core.py (+3/-3)
breezy/tests/test_remote.py (+2/-2)
breezy/tests/test_versionedfile.py (+6/-8)
breezy/transform.py (+41/-38)
breezy/transport/http/_urllib2_wrappers.py (+3/-3)
breezy/transport/ssh.py (+2/-2)
breezy/tree.py (+6/-3)
breezy/tsort.py (+1/-1)
breezy/uncommit.py (+1/-1)
breezy/urlutils.py (+1/-1)
breezy/versionedfile.py (+17/-18)
breezy/vf_repository.py (+22/-25)
breezy/vf_search.py (+9/-6)
breezy/weave.py (+1/-1)
breezy/workingtree.py (+2/-2)
breezy/workingtree_4.py (+2/-1)
To merge this branch: bzr merge lp:~gz/brz/dict_viewing
Reviewer Review Type Date Requested Status
Jelmer Vernooij Approve
Review via email: mp+325108@code.launchpad.net

Commit message

Apply 2to3 dict fixer and clean up with sixish view methods

Description of the change

One of the few remaining big-bang 2to3 changes, this branch runs the dict fixer.

The basic scheme is that iteritems/itervalues/iterkeys calls get the iter prefix stripped, plus other futzing to try to keep the semantics. The plain methods get wrapped in list(), which is often not the right thing, but the fixer is conservative in case the dict changes during iteration.
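
For illustration, the transformation looks roughly like this (a made-up snippet, not lines from this diff):

    d = {'a': 1, 'b': 2}

    # Python 2 input:
    #     for k, v in d.iteritems():   # iter* methods: the "iter" is stripped
    #         print(k, v)
    #     ks = d.keys()                # plain methods: wrapped defensively
    #     del d[ks[0]]

    # 2to3 dict fixer output:
    for k, v in d.items():
        print(k, v)
    ks = list(d.keys())  # a real copy: safe if d is mutated afterwards,
    del d[ks[0]]         # but usually just a needless list allocation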

Unfortunately, we have a bunch of types that have some dict-like methods but aren't actually dicts:

* chk_map: CHKMap.iteritems LeafNode.iteritems
* lru_cache: LRUCache.keys
* registry: Registry methods keys iteritems items
* versionedfile: VersionedFiles.keys
* groupcompress: GroupCompressVersionedFiles.keys _GCGraphIndex.keys
* knit: _KndxIndex.keys _KnitGraphIndex.keys KnitVersionedFiles.keys

These led to a bunch of bogus changes, for example:
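
The fixer matches on method names alone and cannot know the receiver's type, so a non-dict gets rewritten too (a hypothetical toy class, not breezy's real Registry):

    class Registry(object):
        """Toy stand-in for a non-dict type with dict-like method names."""

        def __init__(self):
            self._dict = {}

        def keys(self):
            return sorted(self._dict)  # already returns a fresh list

    reg = Registry()
    # 2to3 only sees the name "keys", so
    #     names = reg.keys()
    # becomes the pointlessly copied
    names = list(reg.keys())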

For general sanity, I switched uses of items/values over to the sixish view helpers, which match the new Python 3 behaviour and have non-horrible performance on 2.7.
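
The view helpers behave roughly like this (a minimal sketch of the six-style definitions; breezy's sixish module is the actual source):

    import sys

    if sys.version_info[0] >= 3:
        def viewkeys(d):
            return d.keys()      # Python 3 methods already return views
        def viewvalues(d):
            return d.values()
        def viewitems(d):
            return d.items()
    else:
        def viewkeys(d):
            return d.viewkeys()  # Python 2.7 dict views: live, no list copy
        def viewvalues(d):
            return d.viewvalues()
        def viewitems(d):
            return d.viewitems()

So a loop like "for key, value in viewitems(mapping):" iterates without building an intermediate list on either version.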

There's a small amount of other drive-by stuff, but mostly this branch can just be verified by the tests passing.

Revision history for this message
Jelmer Vernooij (jelmer) :
review: Approve
Revision history for this message
The Breezy Bot (the-breezy-bot) wrote :

Running landing tests failed
http://10.242.247.184:8080/job/brz-dev/66/

Revision history for this message
The Breezy Bot (the-breezy-bot) wrote :

Running landing tests failed
http://10.242.247.184:8080/job/brz-dev/67/

Preview Diff

1=== modified file 'breezy/_annotator_py.py'
2--- breezy/_annotator_py.py 2017-06-04 18:09:30 +0000
3+++ breezy/_annotator_py.py 2017-06-05 21:41:38 +0000
4@@ -33,6 +33,7 @@
5 )
6 from .sixish import (
7 range,
8+ viewitems,
9 )
10
11
12@@ -92,7 +93,7 @@
13 vf_keys_needed.add(key)
14 needed_keys = set()
15 next_parent_map.update(self._vf.get_parent_map(parent_lookup))
16- for key, parent_keys in next_parent_map.iteritems():
17+ for key, parent_keys in viewitems(next_parent_map):
18 if parent_keys is None: # No graph versionedfile
19 parent_keys = ()
20 next_parent_map[key] = ()
21
22=== modified file 'breezy/_known_graph_py.py'
23--- breezy/_known_graph_py.py 2017-05-22 00:56:52 +0000
24+++ breezy/_known_graph_py.py 2017-06-05 21:41:38 +0000
25@@ -24,6 +24,10 @@
26 errors,
27 revision,
28 )
29+from .sixish import (
30+ viewitems,
31+ viewvalues,
32+ )
33
34
35 class _KnownGraphNode(object):
36@@ -81,7 +85,7 @@
37 child_keys,
38 """
39 nodes = self._nodes
40- for key, parent_keys in parent_map.iteritems():
41+ for key, parent_keys in viewitems(parent_map):
42 if key in nodes:
43 node = nodes[key]
44 node.parent_keys = parent_keys
45@@ -97,11 +101,11 @@
46 parent_node.child_keys.append(key)
47
48 def _find_tails(self):
49- return [node for node in self._nodes.itervalues()
50+ return [node for node in viewvalues(self._nodes)
51 if not node.parent_keys]
52
53 def _find_tips(self):
54- return [node for node in self._nodes.itervalues()
55+ return [node for node in viewvalues(self._nodes)
56 if not node.child_keys]
57
58 def _find_gdfo(self):
59@@ -234,7 +238,7 @@
60 seen = set()
61 pending = []
62 min_gdfo = None
63- for node in candidate_nodes.values():
64+ for node in viewvalues(candidate_nodes):
65 if node.parent_keys:
66 pending.extend(node.parent_keys)
67 if min_gdfo is None or node.gdfo < min_gdfo:
68@@ -261,7 +265,7 @@
69
70 All parents must occur before all children.
71 """
72- for node in self._nodes.itervalues():
73+ for node in viewvalues(self._nodes):
74 if node.gdfo is None:
75 raise errors.GraphCycleError(self._nodes)
76 pending = self._find_tails()
77@@ -339,7 +343,7 @@
78 """Compute the merge sorted graph output."""
79 from breezy import tsort
80 as_parent_map = dict((node.key, node.parent_keys)
81- for node in self._nodes.itervalues()
82+ for node in viewvalues(self._nodes)
83 if node.parent_keys is not None)
84 # We intentionally always generate revnos and never force the
85 # mainline_revisions
86
87=== modified file 'breezy/branch.py'
88--- breezy/branch.py 2017-05-30 19:32:13 +0000
89+++ breezy/branch.py 2017-06-05 21:41:38 +0000
90@@ -66,6 +66,7 @@
91 from .lock import _RelockDebugMixin, LogicalLockResult
92 from .sixish import (
93 BytesIO,
94+ viewitems,
95 )
96 from .trace import mutter, mutter_callsite, note, is_quiet
97
98@@ -397,7 +398,7 @@
99 return self.get_rev_id(revno[0])
100 revision_id_to_revno = self.get_revision_id_to_revno_map()
101 revision_ids = [revision_id for revision_id, this_revno
102- in revision_id_to_revno.iteritems()
103+ in viewitems(revision_id_to_revno)
104 if revno == this_revno]
105 if len(revision_ids) == 1:
106 return revision_ids[0]
107@@ -1330,8 +1331,7 @@
108 old_base = self.base
109 new_base = target.base
110 target_reference_dict = target._get_all_reference_info()
111- for file_id, (tree_path, branch_location) in (
112- reference_dict.items()):
113+ for file_id, (tree_path, branch_location) in viewitems(reference_dict):
114 branch_location = urlutils.rebase_url(branch_location,
115 old_base, new_base)
116 target_reference_dict.setdefault(
117@@ -2735,7 +2735,7 @@
118 """
119 s = BytesIO()
120 writer = rio.RioWriter(s)
121- for key, (tree_path, branch_location) in info_dict.iteritems():
122+ for key, (tree_path, branch_location) in viewitems(info_dict):
123 stanza = rio.Stanza(file_id=key, tree_path=tree_path,
124 branch_location=branch_location)
125 writer.write_stanza(stanza)
126
127=== modified file 'breezy/branchbuilder.py'
128--- breezy/branchbuilder.py 2017-05-22 00:56:52 +0000
129+++ breezy/branchbuilder.py 2017-06-05 21:41:38 +0000
130@@ -25,6 +25,9 @@
131 memorytree,
132 revision,
133 )
134+from .sixish import (
135+ viewitems,
136+ )
137
138
139 class BranchBuilder(object):
140@@ -277,7 +280,7 @@
141 if pending.to_unversion_ids:
142 tree.unversion(pending.to_unversion_ids)
143 tree.add(pending.to_add_files, pending.to_add_file_ids, pending.to_add_kinds)
144- for file_id, content in pending.new_contents.iteritems():
145+ for file_id, content in viewitems(pending.new_contents):
146 tree.put_file_bytes_non_atomic(file_id, content)
147
148 def get_branch(self):
149
150=== modified file 'breezy/btree_index.py'
151--- breezy/btree_index.py 2017-06-05 01:46:14 +0000
152+++ breezy/btree_index.py 2017-06-05 21:41:38 +0000
153@@ -44,6 +44,9 @@
154 BytesIO,
155 map,
156 range,
157+ viewitems,
158+ viewkeys,
159+ viewvalues,
160 )
161
162
163@@ -557,13 +560,13 @@
164 if self._nodes_by_key is None:
165 nodes_by_key = {}
166 if self.reference_lists:
167- for key, (references, value) in self._nodes.iteritems():
168+ for key, (references, value) in viewitems(self._nodes):
169 key_dict = nodes_by_key
170 for subkey in key[:-1]:
171 key_dict = key_dict.setdefault(subkey, {})
172 key_dict[key[-1]] = key, value, references
173 else:
174- for key, (references, value) in self._nodes.iteritems():
175+ for key, (references, value) in viewitems(self._nodes):
176 key_dict = nodes_by_key
177 for subkey in key[:-1]:
178 key_dict = key_dict.setdefault(subkey, {})
179@@ -905,7 +908,8 @@
180
181 def _get_offsets_to_cached_pages(self):
182 """Determine what nodes we already have cached."""
183- cached_offsets = set(self._internal_node_cache.keys())
184+ cached_offsets = set(self._internal_node_cache)
185+ # cache may be dict or LRUCache, keys() is the common method
186 cached_offsets.update(self._leaf_node_cache.keys())
187 if self._root_node is not None:
188 cached_offsets.add(0)
189@@ -944,7 +948,7 @@
190 def _cache_leaf_values(self, nodes):
191 """Cache directly from key => value, skipping the btree."""
192 if self._leaf_value_cache is not None:
193- for node in nodes.itervalues():
194+ for node in viewvalues(nodes):
195 for key, value in node.all_items():
196 if key in self._leaf_value_cache:
197 # Don't add the rest of the keys, we've seen this node
198
199=== modified file 'breezy/builtins.py'
200--- breezy/builtins.py 2017-06-04 21:47:02 +0000
201+++ breezy/builtins.py 2017-06-05 21:41:38 +0000
202@@ -81,6 +81,8 @@
203 from .sixish import (
204 BytesIO,
205 text_type,
206+ viewitems,
207+ viewvalues,
208 )
209 from .trace import mutter, note, warning, is_quiet, get_verbosity_level
210
211@@ -201,7 +203,7 @@
212 reference = control_dir.get_branch_reference()
213 except errors.NotBranchError:
214 # There is no active branch, just return the colocated branches.
215- for name, branch in control_dir.get_branches().iteritems():
216+ for name, branch in viewitems(control_dir.get_branches()):
217 yield name, branch
218 return
219 if reference is not None:
220@@ -212,7 +214,7 @@
221 if ref_branch is None or ref_branch.name:
222 if ref_branch is not None:
223 control_dir = ref_branch.bzrdir
224- for name, branch in control_dir.get_branches().iteritems():
225+ for name, branch in viewitems(control_dir.get_branches()):
226 yield name, branch
227 else:
228 repo = ref_branch.bzrdir.find_repository()
229@@ -845,7 +847,7 @@
230 self.cleanup_now()
231 if len(ignored) > 0:
232 if verbose:
233- for glob in sorted(ignored.keys()):
234+ for glob in sorted(ignored):
235 for path in ignored[glob]:
236 self.outf.write(
237 gettext("ignored {0} matching \"{1}\"\n").format(
238@@ -1583,9 +1585,9 @@
239 names[name] = active
240 # Only mention the current branch explicitly if it's not
241 # one of the colocated branches
242- if not any(names.values()) and active_branch is not None:
243+ if not any(viewvalues(names)) and active_branch is not None:
244 self.outf.write("* %s\n" % gettext("(default)"))
245- for name in sorted(names.keys()):
246+ for name in sorted(names):
247 active = names[name]
248 if active:
249 prefix = "*"
250@@ -3963,7 +3965,7 @@
251 def print_aliases(self):
252 """Print out the defined aliases in a similar format to bash."""
253 aliases = _mod_config.GlobalConfig().get_aliases()
254- for key, value in sorted(aliases.iteritems()):
255+ for key, value in sorted(viewitems(aliases)):
256 self.outf.write('brz alias %s="%s"\n' % (key, value))
257
258 @display_command
259@@ -6004,7 +6006,7 @@
260 from .tag import tag_sort_methods
261 branch, relpath = Branch.open_containing(directory)
262
263- tags = branch.tags.get_tag_dict().items()
264+ tags = list(viewitems(branch.tags.get_tag_dict()))
265 if not tags:
266 return
267
268@@ -6648,7 +6650,7 @@
269 if tree is None:
270 tree = branch.basis_tree()
271 if path is None:
272- info = branch._get_all_reference_info().iteritems()
273+ info = viewitems(branch._get_all_reference_info())
274 self._display_reference_info(tree, branch, info)
275 else:
276 file_id = tree.path2id(path)
277
278=== modified file 'breezy/bundle/bundle_data.py'
279--- breezy/bundle/bundle_data.py 2017-05-30 19:32:13 +0000
280+++ breezy/bundle/bundle_data.py 2017-06-05 21:41:38 +0000
281@@ -42,6 +42,7 @@
282 from ..revision import Revision, NULL_REVISION
283 from ..sixish import (
284 BytesIO,
285+ viewitems,
286 )
287 from ..testament import StrictTestament
288 from ..trace import mutter, warning
289@@ -106,7 +107,7 @@
290 revision_info.timestamp = revision.timestamp
291 revision_info.message = revision.message.split('\n')
292 revision_info.properties = [': '.join(p) for p in
293- revision.properties.iteritems()]
294+ viewitems(revision.properties)]
295 return revision_info
296
297
298@@ -252,7 +253,7 @@
299
300 count = 0
301 missing = {}
302- for revision_id, sha1 in rev_to_sha.iteritems():
303+ for revision_id, sha1 in viewitems(rev_to_sha):
304 if repository.has_revision(revision_id):
305 testament = StrictTestament.from_revision(repository,
306 revision_id)
307@@ -772,7 +773,7 @@
308
309 def sorted_path_id(self):
310 paths = []
311- for result in self._new_id.iteritems():
312+ for result in viewitems(self._new_id):
313 paths.append(result)
314 for id in self.base_tree.all_file_ids():
315 path = self.id2path(id)
316
317=== modified file 'breezy/bundle/commands.py'
318--- breezy/bundle/commands.py 2017-05-22 00:56:52 +0000
319+++ breezy/bundle/commands.py 2017-06-05 21:41:38 +0000
320@@ -39,6 +39,7 @@
321 from ..commands import Command
322 from ..sixish import (
323 BytesIO,
324+ viewitems,
325 )
326
327
328@@ -76,7 +77,7 @@
329 if file_id is not None:
330 file_ids.add(file_id)
331 self.outf.write(gettext('Records\n'))
332- for kind, records in sorted(by_kind.iteritems()):
333+ for kind, records in sorted(viewitems(by_kind)):
334 multiparent = sum(1 for b, m, k, r, f in records if
335 len(m.get('parents', [])) > 1)
336 self.outf.write(gettext('{0}: {1} ({2} multiparent)\n').format(
337
338=== modified file 'breezy/bundle/serializer/v4.py'
339--- breezy/bundle/serializer/v4.py 2017-05-25 01:35:55 +0000
340+++ breezy/bundle/serializer/v4.py 2017-06-05 21:41:38 +0000
341@@ -37,6 +37,7 @@
342 from ...i18n import ngettext
343 from ...sixish import (
344 BytesIO,
345+ viewitems,
346 )
347
348
349@@ -352,7 +353,7 @@
350 text_keys = []
351 altered_fileids = self.repository.fileids_altered_by_revision_ids(
352 self.revision_ids)
353- for file_id, revision_ids in altered_fileids.iteritems():
354+ for file_id, revision_ids in viewitems(altered_fileids):
355 for revision_id in revision_ids:
356 text_keys.append((file_id, revision_id))
357 self._add_mp_records_keys('file', self.repository.texts, text_keys)
358
359=== modified file 'breezy/bzrdir.py'
360--- breezy/bzrdir.py 2017-05-30 19:16:23 +0000
361+++ breezy/bzrdir.py 2017-06-05 21:41:38 +0000
362@@ -1140,7 +1140,7 @@
363
364 def check_support_status(self, allow_unsupported, recommend_upgrade=True,
365 basedir=None):
366- for name, necessity in self.features.iteritems():
367+ for name, necessity in self.features.items():
368 if name in self._present_features:
369 continue
370 if necessity == "optional":
371@@ -1179,7 +1179,7 @@
372 """
373 lines = [self.get_format_string()]
374 lines.extend([("%s %s\n" % (item[1], item[0])) for item in
375- self.features.iteritems()])
376+ self.features.items()])
377 return "".join(lines)
378
379 @classmethod
380@@ -1210,7 +1210,7 @@
381
382 :param updated_flags: Updated feature flags
383 """
384- for name, necessity in updated_flags.iteritems():
385+ for name, necessity in updated_flags.items():
386 if necessity is None:
387 try:
388 del self.features[name]
389@@ -1246,7 +1246,7 @@
390 @classmethod
391 def known_formats(cls):
392 result = set()
393- for name, format in cls.formats.iteritems():
394+ for name, format in cls.formats.items():
395 if callable(format):
396 format = format()
397 result.add(format)
398
399=== modified file 'breezy/check.py'
400--- breezy/check.py 2017-05-22 00:56:52 +0000
401+++ breezy/check.py 2017-06-05 21:41:38 +0000
402@@ -56,6 +56,9 @@
403 from .branch import Branch
404 from .controldir import ControlDir
405 from .revision import NULL_REVISION
406+from .sixish import (
407+ viewitems,
408+ )
409 from .trace import note
410 from .workingtree import WorkingTree
411 from .i18n import gettext
412@@ -128,7 +131,7 @@
413 # landing].
414 distances = set()
415 existences = set()
416- for ref, wantlist in callback_refs.iteritems():
417+ for ref, wantlist in viewitems(callback_refs):
418 wanting_items.update(wantlist)
419 kind, value = ref
420 if kind == 'trees':
421@@ -141,7 +144,7 @@
422 raise AssertionError(
423 'unknown ref kind for ref %s' % ref)
424 node_distances = repo.get_graph().find_lefthand_distances(distances)
425- for key, distance in node_distances.iteritems():
426+ for key, distance in viewitems(node_distances):
427 refs[('lefthand-distance', key)] = distance
428 if key in existences and distance > 0:
429 refs[('revision-existence', key)] = True
430@@ -229,7 +232,7 @@
431 note(gettext('%6d revisions missing parents in ancestry'),
432 len(self.missing_parent_links))
433 if verbose:
434- for link, linkers in self.missing_parent_links.items():
435+ for link, linkers in viewitems(self.missing_parent_links):
436 note(gettext(' %s should be in the ancestry for:'), link)
437 for linker in linkers:
438 note(' * %s', linker)
439@@ -320,12 +323,10 @@
440 text_key_references=self.text_key_references,
441 ancestors=self.ancestors)
442 storebar.update('file-graph', 1)
443- result = weave_checker.check_file_version_parents(
444+ wrongs, unused_versions = weave_checker.check_file_version_parents(
445 self.repository.texts)
446 self.checked_weaves = weave_checker.file_ids
447- bad_parents, unused_versions = result
448- bad_parents = bad_parents.items()
449- for text_key, (stored_parents, correct_parents) in bad_parents:
450+ for text_key, (stored_parents, correct_parents) in viewitems(wrongs):
451 # XXX not ready for id join/split operations.
452 weave_id = text_key[0]
453 revision_id = text_key[-1]
454
455=== modified file 'breezy/chk_map.py'
456--- breezy/chk_map.py 2017-06-04 18:09:30 +0000
457+++ breezy/chk_map.py 2017-06-05 21:41:38 +0000
458@@ -56,6 +56,10 @@
459 static_tuple,
460 trace,
461 )
462+from .sixish import (
463+ viewitems,
464+ viewvalues,
465+ )
466 from .static_tuple import StaticTuple
467
468 # approx 4MB
469@@ -212,11 +216,11 @@
470 if isinstance(node, InternalNode):
471 # Trigger all child nodes to get loaded
472 list(node._iter_nodes(self._store))
473- for prefix, sub in sorted(node._items.iteritems()):
474+ for prefix, sub in sorted(viewitems(node._items)):
475 result.extend(self._dump_tree_node(sub, prefix, indent + ' ',
476 include_keys=include_keys))
477 else:
478- for key, value in sorted(node._items.iteritems()):
479+ for key, value in sorted(viewitems(node._items)):
480 # Don't use prefix nor indent here to line up when used in
481 # tests in conjunction with assertEqualDiff
482 result.append(' %r %r' % (tuple(key), value))
483@@ -255,7 +259,7 @@
484 result._root_node.set_maximum_size(maximum_size)
485 result._root_node._key_width = key_width
486 delta = []
487- for key, value in initial_value.items():
488+ for key, value in viewitems(initial_value):
489 delta.append((None, key, value))
490 root_key = result.apply_delta(delta)
491 return root_key
492@@ -267,10 +271,10 @@
493 node.set_maximum_size(maximum_size)
494 node._key_width = key_width
495 as_st = StaticTuple.from_sequence
496- node._items = dict([(as_st(key), val) for key, val
497- in initial_value.iteritems()])
498- node._raw_size = sum([node._key_value_len(key, value)
499- for key,value in node._items.iteritems()])
500+ node._items = dict((as_st(key), val)
501+ for key, val in viewitems(initial_value))
502+ node._raw_size = sum(node._key_value_len(key, value)
503+ for key, value in viewitems(node._items))
504 node._len = len(node._items)
505 node._compute_search_prefix()
506 node._compute_serialised_prefix()
507@@ -333,7 +337,7 @@
508 node = a_map._get_node(node)
509 if isinstance(node, LeafNode):
510 path = (node._key, path)
511- for key, value in node._items.items():
512+ for key, value in viewitems(node._items):
513 # For a LeafNode, the key is a serialized_key, rather than
514 # a search_key, but the heap is using search_keys
515 search_key = node._search_key_func(key)
516@@ -341,11 +345,11 @@
517 else:
518 # type(node) == InternalNode
519 path = (node._key, path)
520- for prefix, child in node._items.items():
521+ for prefix, child in viewitems(node._items):
522 heapq.heappush(pending, (prefix, None, child, path))
523 def process_common_internal_nodes(self_node, basis_node):
524- self_items = set(self_node._items.items())
525- basis_items = set(basis_node._items.items())
526+ self_items = set(viewitems(self_node._items))
527+ basis_items = set(viewitems(basis_node._items))
528 path = (self_node._key, None)
529 for prefix, child in self_items - basis_items:
530 heapq.heappush(self_pending, (prefix, None, child, path))
531@@ -353,8 +357,8 @@
532 for prefix, child in basis_items - self_items:
533 heapq.heappush(basis_pending, (prefix, None, child, path))
534 def process_common_leaf_nodes(self_node, basis_node):
535- self_items = set(self_node._items.items())
536- basis_items = set(basis_node._items.items())
537+ self_items = set(viewitems(self_node._items))
538+ basis_items = set(viewitems(basis_node._items))
539 path = (self_node._key, None)
540 for key, value in self_items - basis_items:
541 prefix = self._search_key_func(key)
542@@ -766,17 +770,16 @@
543 pass
544 else:
545 # Short items, we need to match based on a prefix
546- length_filter = filters.setdefault(len(key), set())
547- length_filter.add(key)
548+ filters.setdefault(len(key), set()).add(key)
549 if filters:
550- filters = filters.items()
551- for item in self._items.iteritems():
552- for length, length_filter in filters:
553+ filters_itemview = viewitems(filters)
554+ for item in viewitems(self._items):
555+ for length, length_filter in filters_itemview:
556 if item[0][:length] in length_filter:
557 yield item
558 break
559 else:
560- for item in self._items.iteritems():
561+ for item in viewitems(self._items):
562 yield item
563
564 def _key_value_len(self, key, value):
565@@ -838,7 +841,7 @@
566 common_prefix = self._search_prefix
567 split_at = len(common_prefix) + 1
568 result = {}
569- for key, value in self._items.iteritems():
570+ for key, value in viewitems(self._items):
571 search_key = self._search_key(key)
572 prefix = search_key[:split_at]
573 # TODO: Generally only 1 key can be exactly the right length,
574@@ -871,7 +874,7 @@
575 for split, node in node_details:
576 new_node.add_node(split, node)
577 result[prefix] = new_node
578- return common_prefix, result.items()
579+ return common_prefix, list(viewitems(result))
580
581 def map(self, store, key, value):
582 """Map key to value."""
583@@ -906,7 +909,7 @@
584 else:
585 lines.append('%s\n' % (self._common_serialised_prefix,))
586 prefix_len = len(self._common_serialised_prefix)
587- for key, value in sorted(self._items.items()):
588+ for key, value in sorted(viewitems(self._items)):
589 # Always add a final newline
590 value_lines = osutils.chunks_to_lines([value + '\n'])
591 serialized = "%s\x00%s\n" % (self._serialise_key(key),
592@@ -1071,7 +1074,7 @@
593 # yielding all nodes, yield whatever we have, and queue up a read
594 # for whatever we are missing
595 shortcut = True
596- for prefix, node in self._items.iteritems():
597+ for prefix, node in viewitems(self._items):
598 if node.__class__ is StaticTuple:
599 keys[node] = (prefix, None)
600 else:
601@@ -1147,10 +1150,10 @@
602 else:
603 # The slow way. We walk every item in self._items, and check to
604 # see if there are any matches
605- length_filters = length_filters.items()
606- for prefix, node in self._items.iteritems():
607+ length_filters_itemview = viewitems(length_filters)
608+ for prefix, node in viewitems(self._items):
609 node_key_filter = []
610- for length, length_filter in length_filters:
611+ for length, length_filter in length_filters_itemview:
612 sub_prefix = prefix[:length]
613 if sub_prefix in length_filter:
614 node_key_filter.extend(prefix_to_keys[sub_prefix])
615@@ -1292,7 +1295,7 @@
616 :param store: A VersionedFiles honouring the CHK extensions.
617 :return: An iterable of the keys inserted by this operation.
618 """
619- for node in self._items.itervalues():
620+ for node in viewvalues(self._items):
621 if isinstance(node, StaticTuple):
622 # Never deserialised.
623 continue
624@@ -1309,7 +1312,7 @@
625 raise AssertionError("_search_prefix should not be None")
626 lines.append('%s\n' % (self._search_prefix,))
627 prefix_len = len(self._search_prefix)
628- for prefix, node in sorted(self._items.items()):
629+ for prefix, node in sorted(viewitems(self._items)):
630 if isinstance(node, StaticTuple):
631 key = node[0]
632 else:
633@@ -1342,19 +1345,16 @@
634 prefix for reaching node.
635 """
636 if offset >= self._node_width:
637- for node in self._items.values():
638+            for node in viewvalues(self._items):
639 for result in node._split(offset):
640 yield result
641- return
642- for key, node in self._items.items():
643- pass
644
645 def refs(self):
646 """Return the references to other CHK's held by this node."""
647 if self._key is None:
648 raise AssertionError("unserialised nodes have no refs.")
649 refs = []
650- for value in self._items.itervalues():
651+ for value in viewvalues(self._items):
652 if isinstance(value, StaticTuple):
653 refs.append(value)
654 else:
655@@ -1393,7 +1393,7 @@
656 self._items[search_key] = unmapped
657 if len(self._items) == 1:
658 # this node is no longer needed:
659- return self._items.values()[0]
660+ return list(viewvalues(self._items))[0]
661 if isinstance(unmapped, InternalNode):
662 return self
663 if check_remap:
664@@ -1443,7 +1443,7 @@
665 if isinstance(node, InternalNode):
666 # Without looking at any leaf nodes, we are sure
667 return self
668- for key, value in node._items.iteritems():
669+ for key, value in viewitems(node._items):
670 if new_leaf._map_no_split(key, value):
671 return self
672 trace.mutter("remap generated a new LeafNode")
673@@ -1532,15 +1532,14 @@
674 # indicate that we keep 100k prefix_refs around while
675 # processing. They *should* be shorter lived than that...
676 # It does cost us ~10s of processing time
677- #prefix_refs = [as_st(item) for item in node._items.iteritems()]
678- prefix_refs = node._items.items()
679+ prefix_refs = list(viewitems(node._items))
680 items = []
681 else:
682 prefix_refs = []
683 # Note: We don't use a StaticTuple here. Profiling showed a
684 # minor memory improvement (0.8MB out of 335MB peak 0.2%)
685 # But a significant slowdown (15s / 145s, or 10%)
686- items = node._items.items()
687+ items = list(viewitems(node._items))
688 yield record, node, prefix_refs, items
689
690 def _read_old_roots(self):
691
692=== modified file 'breezy/chk_serializer.py'
693--- breezy/chk_serializer.py 2017-05-22 00:56:52 +0000
694+++ breezy/chk_serializer.py 2017-06-05 21:41:38 +0000
695@@ -41,7 +41,7 @@
696 def _validate_properties(props, _decode=cache_utf8._utf8_decode):
697 # TODO: we really want an 'isascii' check for key
698 # Cast the utf8 properties into Unicode 'in place'
699- for key, value in props.iteritems():
700+ for key, value in props.items():
701 props[key] = _decode(value)[0]
702 return props
703
704@@ -90,7 +90,7 @@
705 # For bzr revisions, the most common property is just 'branch-nick'
706 # which changes infrequently.
707 revprops = {}
708- for key, value in rev.properties.iteritems():
709+ for key, value in rev.properties.items():
710 revprops[key] = encode_utf8(value)[0]
711 ret.append(('properties', revprops))
712 ret.extend([
713@@ -131,7 +131,7 @@
714 value = validator(value)
715 bits[var_name] = value
716 if len(bits) != len(schema):
717- missing = [key for key, (var_name, _, _) in schema.iteritems()
718+ missing = [key for key, (var_name, _, _) in schema.items()
719 if var_name not in bits]
720 raise ValueError('Revision text was missing expected keys %s.'
721 ' text %r' % (missing, text))
722
723=== modified file 'breezy/commands.py'
724--- breezy/commands.py 2017-06-02 23:50:41 +0000
725+++ breezy/commands.py 2017-06-05 21:41:38 +0000
726@@ -169,18 +169,16 @@
727 # only load once
728 return
729 import breezy.builtins
730- for cmd_class in _scan_module_for_commands(breezy.builtins).values():
731+ for cmd_class in _scan_module_for_commands(breezy.builtins):
732 builtin_command_registry.register(cmd_class)
733 breezy.builtins._register_lazy_builtins()
734
735
736 def _scan_module_for_commands(module):
737- r = {}
738- for name, obj in module.__dict__.items():
739+ module_dict = module.__dict__
740+ for name in module_dict:
741 if name.startswith("cmd_"):
742- real_name = _unsquish_command_name(name)
743- r[real_name] = obj
744- return r
745+ yield module_dict[name]
746
747
748 def _list_bzr_commands(names):
749@@ -628,7 +626,7 @@
750
751 Maps from long option name to option object."""
752 r = Option.STD_OPTIONS.copy()
753- std_names = r.keys()
754+ std_names = set(r)
755 for o in self.takes_options:
756 if isinstance(o, string_types):
757 o = option.Option.OPTIONS[o]
758@@ -824,8 +822,8 @@
759 raise errors.BzrCommandError(
760 gettext('Only ASCII permitted in option names'))
761
762- opts = dict([(k, v) for k, v in options.__dict__.items() if
763- v is not option.OptionParser.DEFAULT_VALUE])
764+ opts = dict((k, v) for k, v in options.__dict__.items() if
765+ v is not option.OptionParser.DEFAULT_VALUE)
766 return args, opts
767
768
769
770=== modified file 'breezy/config.py'
771--- breezy/config.py 2017-05-30 19:32:13 +0000
772+++ breezy/config.py 2017-06-05 21:41:38 +0000
773@@ -1722,7 +1722,7 @@
774 certificate should be verified, False otherwise.
775 """
776 credentials = None
777-        for auth_def_name, auth_def in self._get_config().iteritems():
778+        for auth_def_name, auth_def in self._get_config().items():
779 if not isinstance(auth_def, configobj.Section):
780 raise ValueError("%s defined outside a section" % auth_def_name)
781
782@@ -1824,7 +1824,7 @@
783 values['realm'] = realm
784 config = self._get_config()
785 for_deletion = []
786-        for section, existing_values in config.iteritems():
787+        for section, existing_values in config.items():
788 for key in ('scheme', 'host', 'port', 'path', 'realm'):
789 if existing_values.get(key) != values.get(key):
790 break
791@@ -2854,7 +2854,7 @@
792 return self.options.get(name, default)
793
794 def iter_option_names(self):
795- for k in self.options.iterkeys():
796+ for k in self.options.keys():
797 yield k
798
799 def __repr__(self):
800@@ -2901,7 +2901,7 @@
801
802 :param store: the store containing the section
803 """
804- for k, expected in dirty.orig.iteritems():
805+ for k, expected in dirty.orig.items():
806 actual = dirty.get(k, _DeletedOption)
807 reloaded = self.get(k, _NewlyCreatedOption)
808 if actual is _DeletedOption:
809@@ -3009,7 +3009,7 @@
810 # get_mutable_section() call below.
811 self.unload()
812 # Apply the changes from the preserved dirty sections
813- for section_id, dirty in dirty_sections.iteritems():
814+ for section_id, dirty in dirty_sections.items():
815 clean = self.get_mutable_section(section_id)
816 clean.apply_changes(dirty, self)
817 # Everything is clean now
818@@ -3153,7 +3153,7 @@
819 if not self._need_saving():
820 return
821 # Preserve the current version
822- dirty_sections = dict(self.dirty_sections.items())
823+ dirty_sections = self.dirty_sections.copy()
824 self.apply_changes(dirty_sections)
825 # Save to the persistent storage
826 self.save()
827@@ -3780,7 +3780,7 @@
828 global _shared_stores_at_exit_installed
829 stores = _shared_stores
830 def save_config_changes():
831-        for k, store in stores.iteritems():
832+        for k, store in stores.items():
833 store.save_changes()
834 if not _shared_stores_at_exit_installed:
835 # FIXME: Ugly hack waiting for library_state to always be
836
837=== modified file 'breezy/controldir.py'
838--- breezy/controldir.py 2017-05-22 00:56:52 +0000
839+++ breezy/controldir.py 2017-06-05 21:41:38 +0000
840@@ -108,7 +108,7 @@
841 """Return a sequence of all branches local to this control directory.
842
843 """
844- return self.get_branches().values()
845+ return list(self.get_branches().values())
846
847 def get_branches(self):
848 """Get all branches in this control directory, as a dictionary.
849
850=== modified file 'breezy/dirstate.py'
851--- breezy/dirstate.py 2017-06-04 18:09:30 +0000
852+++ breezy/dirstate.py 2017-06-05 21:41:38 +0000
853@@ -244,6 +244,8 @@
854 )
855 from .sixish import (
856 range,
857+ viewitems,
858+ viewvalues,
859 )
860
861
862@@ -975,7 +977,7 @@
863 # Directories that need to be read
864 pending_dirs = set()
865 paths_to_search = set()
866- for entry_list in newly_found.itervalues():
867+ for entry_list in viewvalues(newly_found):
868 for dir_name_id, trees_info in entry_list:
869 found[dir_name_id] = trees_info
870 found_dir_names.add(dir_name_id[:2])
871@@ -1386,8 +1388,8 @@
872 fingerprint, new_child_path)
873 self._check_delta_ids_absent(new_ids, delta, 0)
874 try:
875- self._apply_removals(removals.iteritems())
876- self._apply_insertions(insertions.values())
877+ self._apply_removals(viewitems(removals))
878+ self._apply_insertions(viewvalues(insertions))
879 # Validate parents
880 self._after_delta_check_parents(parents, 0)
881 except errors.BzrError as e:
882@@ -2723,7 +2725,7 @@
883 # --- end generation of full tree mappings
884
885 # sort and output all the entries
886- new_entries = self._sort_entries(by_path.items())
887+ new_entries = self._sort_entries(viewitems(by_path))
888 self._entries_to_current_state(new_entries)
889 self._parents = [rev_id for rev_id, tree in trees]
890 self._ghosts = list(ghosts)
891@@ -3288,7 +3290,7 @@
892 raise AssertionError(
893 "entry %r has no data for any tree." % (entry,))
894 if self._id_index is not None:
895- for file_id, entry_keys in self._id_index.iteritems():
896+ for file_id, entry_keys in viewitems(self._id_index):
897 for entry_key in entry_keys:
898 # Check that the entry in the map is pointing to the same
899 # file_id
900
901=== modified file 'breezy/export_pot.py'
902--- breezy/export_pot.py 2017-06-05 01:21:55 +0000
903+++ breezy/export_pot.py 2017-06-05 21:41:38 +0000
904@@ -202,7 +202,7 @@
905 def _standard_options(exporter):
906 OPTIONS = option.Option.OPTIONS
907 context = exporter.get_context(option)
908- for name in sorted(OPTIONS.keys()):
909+ for name in sorted(OPTIONS):
910 opt = OPTIONS[name]
911 _write_option(exporter, context.from_string(name), opt, "option")
912
913
914=== modified file 'breezy/fetch.py'
915--- breezy/fetch.py 2017-05-22 00:56:52 +0000
916+++ breezy/fetch.py 2017-06-05 21:41:38 +0000
917@@ -40,6 +40,9 @@
918 )
919 from .i18n import gettext
920 from .revision import NULL_REVISION
921+from .sixish import (
922+ viewvalues,
923+ )
924 from .trace import mutter
925
926
927@@ -215,12 +218,11 @@
928 revision_id = tree.get_file_revision(root_id, u"")
929 revision_root[revision_id] = root_id
930 # Find out which parents we don't already know root ids for
931- parents = set()
932- for revision_parents in parent_map.itervalues():
933- parents.update(revision_parents)
934- parents.difference_update(revision_root.keys() + [NULL_REVISION])
935+ parents = set(viewvalues(parent_map))
936+ parents.difference_update(revision_root)
937+ parents.discard(NULL_REVISION)
938 # Limit to revisions present in the versionedfile
939- parents = graph.get_parent_map(parents).keys()
940+ parents = graph.get_parent_map(parents)
941 for tree in self.iter_rev_trees(parents):
942 root_id = tree.get_root_id()
943 revision_root[tree.get_revision_id()] = root_id
944
945=== modified file 'breezy/fifo_cache.py'
946--- breezy/fifo_cache.py 2011-12-19 13:23:58 +0000
947+++ breezy/fifo_cache.py 2017-06-05 21:41:38 +0000
948@@ -158,8 +158,8 @@
949 if len(args) == 1:
950 arg = args[0]
951 if isinstance(arg, dict):
952- for key, val in arg.iteritems():
953- self.add(key, val)
954+ for key in arg:
955+ self.add(key, arg[key])
956 else:
957 for key, val in args[0]:
958 self.add(key, val)
959@@ -167,8 +167,8 @@
960 raise TypeError('update expected at most 1 argument, got %d'
961 % len(args))
962 if kwargs:
963- for key, val in kwargs.iteritems():
964- self.add(key, val)
965+ for key in kwargs:
966+ self.add(key, kwargs[key])
967
968
969 class FIFOSizeCache(FIFOCache):
970
971=== modified file 'breezy/gpg.py'
972--- breezy/gpg.py 2017-05-30 19:16:23 +0000
973+++ breezy/gpg.py 2017-06-05 21:41:38 +0000
974@@ -449,7 +449,7 @@
975 signers.setdefault(fingerprint, 0)
976 signers[fingerprint] += 1
977 result = []
978- for fingerprint, number in signers.items():
979+ for fingerprint, number in list(signers.items()):
980 result.append(ngettext(u"Unknown key {0} signed {1} commit",
981 u"Unknown key {0} signed {1} commits",
982 number).format(fingerprint, number))
983
984=== modified file 'breezy/graph.py'
985--- breezy/graph.py 2017-05-25 01:35:55 +0000
986+++ breezy/graph.py 2017-06-05 21:41:38 +0000
987@@ -25,6 +25,10 @@
988 revision,
989 trace,
990 )
991+from .sixish import (
992+ viewitems,
993+ viewvalues,
994+ )
995
996 STEP_UNIQUE_SEARCHER_EVERY = 5
997
998@@ -335,7 +339,7 @@
999 """
1000 parent_map = self._parents_provider.get_parent_map(keys)
1001 parent_child = {}
1002- for child, parents in sorted(parent_map.items()):
1003+ for child, parents in sorted(viewitems(parent_map)):
1004 for parent in parents:
1005 parent_child.setdefault(parent, []).append(child)
1006 return parent_child
1007@@ -358,7 +362,7 @@
1008 NULL_REVISION = revision.NULL_REVISION
1009 known_revnos[NULL_REVISION] = 0
1010
1011- searching_known_tips = list(known_revnos.keys())
1012+ searching_known_tips = list(known_revnos)
1013
1014 unknown_searched = {}
1015
1016@@ -645,7 +649,7 @@
1017 # TODO: it might be possible to collapse searchers faster when they
1018 # only have *some* search tips in common.
1019 next_unique_searchers = []
1020- for searchers in unique_search_tips.itervalues():
1021+ for searchers in viewvalues(unique_search_tips):
1022 if len(searchers) == 1:
1023 # Searching unique tips, go for it
1024 next_unique_searchers.append(searchers[0])
1025@@ -835,7 +839,7 @@
1026 for c in candidate_heads)
1027 active_searchers = dict(searchers)
1028 # skip over the actual candidate for each searcher
1029- for searcher in active_searchers.itervalues():
1030+ for searcher in viewvalues(active_searchers):
1031 next(searcher)
1032 # The common walker finds nodes that are common to two or more of the
1033 # input keys, so that we don't access all history when a currently
1034@@ -852,7 +856,7 @@
1035 except StopIteration:
1036 # No common points being searched at this time.
1037 pass
1038- for candidate in active_searchers.keys():
1039+ for candidate in list(active_searchers):
1040 try:
1041 searcher = active_searchers[candidate]
1042 except KeyError:
1043@@ -878,11 +882,11 @@
1044 # some searcher has encountered our known common nodes:
1045 # just stop it
1046 ancestor_set = {ancestor}
1047- for searcher in searchers.itervalues():
1048+ for searcher in viewvalues(searchers):
1049 searcher.stop_searching_any(ancestor_set)
1050 else:
1051 # or it may have been just reached by all the searchers:
1052- for searcher in searchers.itervalues():
1053+ for searcher in viewvalues(searchers):
1054 if ancestor not in searcher.seen:
1055 break
1056 else:
1057@@ -890,7 +894,7 @@
1058 # making it be known as a descendant of all candidates,
1059 # so we can stop searching it, and any seen ancestors
1060 new_common.add(ancestor)
1061- for searcher in searchers.itervalues():
1062+ for searcher in viewvalues(searchers):
1063 seen_ancestors =\
1064 searcher.find_seen_ancestors([ancestor])
1065 searcher.stop_searching_any(seen_ancestors)
1066@@ -1013,7 +1017,7 @@
1067 processed.update(pending)
1068 next_map = self.get_parent_map(pending)
1069 next_pending = set()
1070- for item in next_map.iteritems():
1071+ for item in viewitems(next_map):
1072 yield item
1073 next_pending.update(p for p in item[1] if p not in processed)
1074 ghosts = pending.difference(next_map)
1075@@ -1249,7 +1253,7 @@
1076 ## for revision in revisions.intersection(descendants):
1077 ## simple_ancestors.difference_update(descendants[revision])
1078 ## return simple_ancestors
1079- for revision, parent_ids in parent_map.iteritems():
1080+ for revision, parent_ids in viewitems(parent_map):
1081 if parent_ids is None:
1082 continue
1083 for parent_id in parent_ids:
1084@@ -1468,7 +1472,7 @@
1085 seen.update(revisions)
1086 parent_map = self._parents_provider.get_parent_map(revisions)
1087 found_revisions.update(parent_map)
1088- for rev_id, parents in parent_map.iteritems():
1089+ for rev_id, parents in viewitems(parent_map):
1090 if parents is None:
1091 continue
1092 new_found_parents = [p for p in parents if p not in seen]
1093@@ -1511,7 +1515,7 @@
1094 all_parents = []
1095 # We don't care if it is a ghost, since it can't be seen if it is
1096 # a ghost
1097- for parent_ids in parent_map.itervalues():
1098+ for parent_ids in viewvalues(parent_map):
1099 all_parents.extend(parent_ids)
1100 next_pending = all_seen.intersection(all_parents).difference(seen_ancestors)
1101 seen_ancestors.update(next_pending)
1102@@ -1556,14 +1560,14 @@
1103 stop_rev_references[parent_id] += 1
1104 # if only the stopped revisions reference it, the ref count will be
1105 # 0 after this loop
1106- for parents in self._current_parents.itervalues():
1107+ for parents in viewvalues(self._current_parents):
1108 for parent_id in parents:
1109 try:
1110 stop_rev_references[parent_id] -= 1
1111 except KeyError:
1112 pass
1113 stop_parents = set()
1114- for rev_id, refs in stop_rev_references.iteritems():
1115+ for rev_id, refs in viewitems(stop_rev_references):
1116 if refs == 0:
1117 stop_parents.add(rev_id)
1118 self._next_query.difference_update(stop_parents)
1119@@ -1599,7 +1603,7 @@
1120 def invert_parent_map(parent_map):
1121 """Given a map from child => parents, create a map of parent=>children"""
1122 child_map = {}
1123- for child, parents in parent_map.iteritems():
1124+ for child, parents in viewitems(parent_map):
1125 for p in parents:
1126 # Any given parent is likely to have only a small handful
1127 # of children, many will have only one. So we avoid mem overhead of
1128@@ -1651,7 +1655,7 @@
1129 # Will not have any nodes removed, even though you do have an
1130 # 'uninteresting' linear D->B and E->C
1131 children = {}
1132- for child, parents in parent_map.iteritems():
1133+ for child, parents in viewitems(parent_map):
1134 children.setdefault(child, [])
1135 for p in parents:
1136 children.setdefault(p, []).append(child)
1137
1138=== modified file 'breezy/groupcompress.py'
1139--- breezy/groupcompress.py 2017-06-04 18:09:30 +0000
1140+++ breezy/groupcompress.py 2017-06-05 21:41:38 +0000
1141@@ -45,6 +45,7 @@
1142 from .sixish import (
1143 map,
1144 range,
1145+ viewitems,
1146 )
1147 from .versionedfile import (
1148 _KeyRefs,
1149@@ -73,7 +74,7 @@
1150 # groupcompress ordering is approximately reverse topological,
1151 # properly grouped by file-id.
1152 per_prefix_map = {}
1153- for key, value in parent_map.iteritems():
1154+ for key, value in viewitems(parent_map):
1155 if isinstance(key, str) or len(key) == 1:
1156 prefix = ''
1157 else:
1158@@ -1541,10 +1542,10 @@
1159 # This is the group the bytes are stored in, followed by the
1160 # location in the group
1161 return locations[key][0]
1162- present_keys = sorted(locations.iterkeys(), key=get_group)
1163 # We don't have an ordering for keys in the in-memory object, but
1164 # lets process the in-memory ones first.
1165- present_keys = list(unadded_keys) + present_keys
1166+ present_keys = list(unadded_keys)
1167+ present_keys.extend(sorted(locations, key=get_group))
1168 # Now grab all of the ones from other sources
1169 source_keys = [(self, present_keys)]
1170 source_keys.extend(source_result)
1171@@ -1574,7 +1575,7 @@
1172 # start with one key, recurse to its oldest parent, then grab
1173 # everything in the same group, etc.
1174 parent_map = dict((key, details[2]) for key, details in
1175- locations.iteritems())
1176+ viewitems(locations))
1177 for key in unadded_keys:
1178 parent_map[key] = self._unadded_refs[key]
1179 parent_map.update(fallback_parent_map)
1180@@ -2032,10 +2033,10 @@
1181 if changed:
1182 result = []
1183 if self._parents:
1184- for key, (value, node_refs) in keys.iteritems():
1185+ for key, (value, node_refs) in viewitems(keys):
1186 result.append((key, value, node_refs))
1187 else:
1188- for key, (value, node_refs) in keys.iteritems():
1189+ for key, (value, node_refs) in viewitems(keys):
1190 result.append((key, value))
1191 records = result
1192 key_dependencies = self._key_dependencies
1193
1194=== modified file 'breezy/hashcache.py'
1195--- breezy/hashcache.py 2017-05-22 00:56:52 +0000
1196+++ breezy/hashcache.py 2017-06-05 21:41:38 +0000
1197@@ -44,6 +44,7 @@
1198 )
1199 from .sixish import (
1200 text_type,
1201+ viewitems,
1202 )
1203
1204
1205@@ -132,14 +133,12 @@
1206 # Stat in inode order as optimisation for at least linux.
1207 def inode_order(path_and_cache):
1208 return path_and_cache[1][1][3]
1209-        for path, cache_entry in sorted(self._cache.iteritems(), key=inode_order):
1210+ for path, cache_val in sorted(viewitems(self._cache), key=inode_order):
1211 abspath = osutils.pathjoin(self.root, path)
1212 fp = self._fingerprint(abspath)
1213 self.stat_count += 1
1214
1215- cache_fp = cache_entry[1]
1216-
1217- if (not fp) or (cache_fp != fp):
1218+ if not fp or cache_val[1] != fp:
1219 # not here or not a regular file anymore
1220 self.removed_count += 1
1221 self.needs_write = True
1222@@ -229,7 +228,7 @@
1223 try:
1224 outf.write(CACHE_HEADER)
1225
1226- for path, c in self._cache.iteritems():
1227+ for path, c in viewitems(self._cache):
1228 line_info = [path.encode('utf-8'), '// ', c[0], ' ']
1229 line_info.append(' '.join([str(fld) for fld in c[1]]))
1230 line_info.append('\n')
1231
1232=== modified file 'breezy/help.py'
1233--- breezy/help.py 2017-05-22 00:56:52 +0000
1234+++ breezy/help.py 2017-06-05 21:41:38 +0000
1235@@ -143,12 +143,12 @@
1236
1237 def _check_prefix_uniqueness(self):
1238 """Ensure that the index collection is able to differentiate safely."""
1239- prefixes = {}
1240+ prefixes = set()
1241 for index in self.search_path:
1242- prefixes.setdefault(index.prefix, []).append(index)
1243- for prefix, indices in prefixes.items():
1244- if len(indices) > 1:
1245+ prefix = index.prefix
1246+ if prefix in prefixes:
1247 raise errors.DuplicateHelpPrefix(prefix)
1248+ prefixes.add(prefix)
1249
1250 def search(self, topic):
1251 """Search for topic across the help search path.
1252
1253=== modified file 'breezy/help_topics/__init__.py'
1254--- breezy/help_topics/__init__.py 2017-06-02 21:28:05 +0000
1255+++ breezy/help_topics/__init__.py 2017-06-05 21:41:38 +0000
1256@@ -233,7 +233,7 @@
1257
1258 protl = []
1259 decl = []
1260- protos = transport_list_registry.keys( )
1261+ protos = transport_list_registry.keys()
1262 protos.sort(sort_func)
1263 for proto in protos:
1264 shorthelp = transport_list_registry.get_help(proto)
1265
1266=== modified file 'breezy/index.py'
1267--- breezy/index.py 2017-06-05 01:46:14 +0000
1268+++ breezy/index.py 2017-06-05 21:41:38 +0000
1269@@ -45,6 +45,7 @@
1270 from .sixish import (
1271 BytesIO,
1272 viewvalues,
1273+ viewitems,
1274 )
1275 from .static_tuple import StaticTuple
1276
1277@@ -144,7 +145,7 @@
1278 if self._nodes_by_key is None:
1279 nodes_by_key = {}
1280 if self.reference_lists:
1281- for key, (absent, references, value) in self._nodes.iteritems():
1282+ for key, (absent, references, value) in viewitems(self._nodes):
1283 if absent:
1284 continue
1285 key_dict = nodes_by_key
1286@@ -152,7 +153,7 @@
1287 key_dict = key_dict.setdefault(subkey, {})
1288 key_dict[key[-1]] = key, value, references
1289 else:
1290- for key, (absent, references, value) in self._nodes.iteritems():
1291+ for key, (absent, references, value) in viewitems(self._nodes):
1292 if absent:
1293 continue
1294 key_dict = nodes_by_key
1295@@ -276,7 +277,7 @@
1296 # forward sorted by key. In future we may consider topological sorting,
1297 # at the cost of table scans for direct lookup, or a second index for
1298 # direct lookup
1299- nodes = sorted(self._nodes.items())
1300+ nodes = sorted(viewitems(self._nodes))
1301 # if we do not prepass, we don't know how long it will be up front.
1302 expected_bytes = None
1303 # we only need to pre-pass if we have reference lists at all.
1304@@ -479,7 +480,7 @@
1305 stream.close()
1306 del lines[-1]
1307 _, _, _, trailers = self._parse_lines(lines, pos)
1308- for key, absent, references, value in self._keys_by_offset.itervalues():
1309+ for key, absent, references, value in viewvalues(self._keys_by_offset):
1310 if absent:
1311 continue
1312 # resolve references:
1313@@ -510,7 +511,7 @@
1314 % (ref_list_num, self.node_ref_lists))
1315 refs = set()
1316 nodes = self._nodes
1317- for key, (value, ref_lists) in nodes.iteritems():
1318+ for key, (value, ref_lists) in viewitems(nodes):
1319 ref_list = ref_lists[ref_list_num]
1320 refs.update([ref for ref in ref_list if ref not in nodes])
1321 return refs
1322@@ -519,13 +520,13 @@
1323 if self._nodes_by_key is None:
1324 nodes_by_key = {}
1325 if self.node_ref_lists:
1326- for key, (value, references) in self._nodes.iteritems():
1327+ for key, (value, references) in viewitems(self._nodes):
1328 key_dict = nodes_by_key
1329 for subkey in key[:-1]:
1330 key_dict = key_dict.setdefault(subkey, {})
1331 key_dict[key[-1]] = key, value, references
1332 else:
1333- for key, value in self._nodes.iteritems():
1334+ for key, value in viewitems(self._nodes):
1335 key_dict = nodes_by_key
1336 for subkey in key[:-1]:
1337 key_dict = key_dict.setdefault(subkey, {})
1338@@ -548,10 +549,10 @@
1339 if self._nodes is None:
1340 self._buffer_all()
1341 if self.node_ref_lists:
1342- for key, (value, node_ref_lists) in self._nodes.iteritems():
1343+ for key, (value, node_ref_lists) in viewitems(self._nodes):
1344 yield self, key, value, node_ref_lists
1345 else:
1346- for key, value in self._nodes.iteritems():
1347+ for key, value in viewitems(self._nodes):
1348 yield self, key, value
1349
1350 def _read_prefix(self, stream):
1351@@ -1599,11 +1600,11 @@
1352 trace.mutter_callsite(3,
1353 "iter_all_entries scales with size of history.")
1354 if self.reference_lists:
1355- for key, (absent, references, value) in self._nodes.iteritems():
1356+ for key, (absent, references, value) in viewitems(self._nodes):
1357 if not absent:
1358 yield self, key, value, references
1359 else:
1360- for key, (absent, references, value) in self._nodes.iteritems():
1361+ for key, (absent, references, value) in viewitems(self._nodes):
1362 if not absent:
1363 yield self, key, value
1364
1365
1366=== modified file 'breezy/inventory.py'
1367--- breezy/inventory.py 2017-05-30 19:16:23 +0000
1368+++ breezy/inventory.py 2017-06-05 21:41:38 +0000
1369@@ -48,7 +48,10 @@
1370 lazy_regex,
1371 trace,
1372 )
1373-
1374+from .sixish import (
1375+ viewitems,
1376+ viewvalues,
1377+ )
1378 from .static_tuple import StaticTuple
1379
1380
1381@@ -227,9 +230,6 @@
1382
1383 known_kinds = ('file', 'directory', 'symlink')
1384
1385- def sorted_children(self):
1386- return sorted(self.children.items())
1387-
1388 @staticmethod
1389 def versionable_kind(kind):
1390 return (kind in ('file', 'directory', 'symlink', 'tree-reference'))
1391@@ -402,6 +402,9 @@
1392 super(InventoryDirectory, self).__init__(file_id, name, parent_id)
1393 self.children = {}
1394
1395+ def sorted_children(self):
1396+ return sorted(viewitems(self.children))
1397+
1398 def kind_character(self):
1399 """See InventoryEntry.kind_character."""
1400 return '/'
1401@@ -665,7 +668,7 @@
1402
1403 # unrolling the recursive called changed the time from
1404 # 440ms/663ms (inline/total) to 116ms/116ms
1405- children = sorted(from_dir.children.items())
1406+ children = sorted(viewitems(from_dir.children))
1407 if not recursive:
1408 for name, ie in children:
1409 yield name, ie
1410@@ -690,7 +693,7 @@
1411 continue
1412
1413 # But do this child first
1414- new_children = sorted(ie.children.items())
1415+ new_children = sorted(viewitems(ie.children))
1416 new_children = collections.deque(new_children)
1417 stack.append((path, new_children))
1418 # Break out of inner loop, so that we start outer loop with child
1419@@ -771,7 +774,7 @@
1420 cur_relpath, cur_dir = stack.pop()
1421
1422 child_dirs = []
1423- for child_name, child_ie in sorted(cur_dir.children.iteritems()):
1424+ for child_name, child_ie in sorted(viewitems(cur_dir.children)):
1425
1426 child_relpath = cur_relpath + child_name
1427
1428@@ -814,7 +817,7 @@
1429 """
1430 accum = []
1431 def descend(dir_ie, dir_path):
1432- kids = sorted(dir_ie.children.items())
1433+ kids = sorted(viewitems(dir_ie.children))
1434 for name, ie in kids:
1435 child_path = osutils.pathjoin(dir_path, name)
1436 accum.append((child_path, ie))
1437@@ -1102,9 +1105,8 @@
1438 XXX: We may not want to merge this into bzr.dev.
1439 """
1440 if self.root is None:
1441- return
1442- for _, ie in self._byid.iteritems():
1443- yield ie
1444+ return ()
1445+ return iter(viewvalues(self._byid))
1446
1447 def __len__(self):
1448 """Returns number of entries."""
1449@@ -1138,8 +1140,10 @@
1450 "inventory already contains entry with id {%s}" %
1451 entry.file_id)
1452 self._byid[entry.file_id] = entry
1453- for child in getattr(entry, 'children', {}).itervalues():
1454- self._add_child(child)
1455+ children = getattr(entry, 'children', {})
1456+ if children is not None:
1457+ for child in viewvalues(children):
1458+ self._add_child(child)
1459 return entry
1460
1461 def add(self, entry):
1462@@ -1288,7 +1292,7 @@
1463 ie = to_find_delete.pop()
1464 to_delete.append(ie.file_id)
1465 if ie.kind == 'directory':
1466- to_find_delete.extend(ie.children.values())
1467+ to_find_delete.extend(viewvalues(ie.children))
1468 for file_id in reversed(to_delete):
1469 ie = self[file_id]
1470 del self._byid[file_id]
1471@@ -1589,7 +1593,7 @@
1472 result = CHKInventory(self._search_key_name)
1473 if propagate_caches:
1474 # Just propagate the path-to-fileid cache for now
1475- result._path_to_fileid_cache = dict(self._path_to_fileid_cache.iteritems())
1476+ result._path_to_fileid_cache = self._path_to_fileid_cache.copy()
1477 search_key_func = chk_map.search_key_registry.get(self._search_key_name)
1478 self.id_to_entry._ensure_root()
1479 maximum_size = self.id_to_entry._root_node.maximum_size
1480@@ -1708,7 +1712,7 @@
1481 continue
1482 # This loop could potentially be better by using the id_basename
1483 # map to just get the child file ids.
1484- for child in entry.children.values():
1485+ for child in viewvalues(entry.children):
1486 if child.file_id not in altered:
1487 raise errors.InconsistentDelta(self.id2path(child.file_id),
1488 child.file_id, "Child not deleted or reparented when "
1489@@ -1720,7 +1724,7 @@
1490 # re-keying, but its simpler to just output that as a delete+add
1491 # to spend less time calculating the delta.
1492 delta_list = []
1493- for key, (old_key, value) in parent_id_basename_delta.iteritems():
1494+ for key, (old_key, value) in viewitems(parent_id_basename_delta):
1495 if value is not None:
1496 delta_list.append((old_key, key, value))
1497 else:
1498
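For reference, the viewitems/viewvalues/viewkeys helpers imported from
breezy.sixish throughout this diff are assumed to behave like the minimal
sketch below (the real sixish module may spell this differently): on
Python 2 they return the lazy dict view objects, on Python 3 the plain
methods already do.

    # Illustrative sketch only, not the actual breezy.sixish code.
    try:
        viewkeys = dict.viewkeys      # Python 2: lazy view objects
        viewvalues = dict.viewvalues
        viewitems = dict.viewitems
    except AttributeError:            # Python 3: plain methods are views
        viewkeys = dict.keys
        viewvalues = dict.values
        viewitems = dict.items

    children = {'b': 2, 'a': 1}
    # sorted() accepts any iterable, so no intermediate list is built:
    assert sorted(viewitems(children)) == [('a', 1), ('b', 2)]
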
1499=== modified file 'breezy/knit.py'
1500--- breezy/knit.py 2017-06-04 18:09:30 +0000
1501+++ breezy/knit.py 2017-06-05 21:41:38 +0000
1502@@ -99,6 +99,8 @@
1503 from .sixish import (
1504 BytesIO,
1505 range,
1506+ viewitems,
1507+ viewvalues,
1508 )
1509 from .versionedfile import (
1510 _KeyRefs,
1511@@ -843,8 +845,8 @@
1512 if compression_parent not in all_build_index_memos:
1513 next_keys.add(compression_parent)
1514 build_keys = next_keys
1515- return sum([index_memo[2] for index_memo
1516- in all_build_index_memos.itervalues()])
1517+ return sum(index_memo[2]
1518+ for index_memo in viewvalues(all_build_index_memos))
1519
1520
1521 class KnitVersionedFiles(VersionedFilesWithFallbacks):
1522@@ -1173,7 +1175,7 @@
1523 build_details = self._index.get_build_details(pending_components)
1524 current_components = set(pending_components)
1525 pending_components = set()
1526- for key, details in build_details.iteritems():
1527+ for key, details in viewitems(build_details):
1528 (index_memo, compression_parent, parents,
1529 record_details) = details
1530 method = record_details[0]
1531@@ -1280,7 +1282,7 @@
1532 # key = component_id, r = record_details, i_m = index_memo,
1533 # n = next
1534 records = [(key, i_m) for key, (r, i_m, n)
1535- in position_map.iteritems()]
1536+ in viewitems(position_map)]
1537 # Sort by the index memo, so that we request records from the
1538 # same pack file together, and in forward-sorted order
1539 records.sort(key=operator.itemgetter(1))
1540@@ -1411,7 +1413,7 @@
1541 # map from key to
1542 # (record_details, access_memo, compression_parent_key)
1543 positions = dict((key, self._build_details_to_components(details))
1544- for key, details in build_details.iteritems())
1545+ for key, details in viewitems(build_details))
1546 absent_keys = keys.difference(set(positions))
1547 # There may be more absent keys : if we're missing the basis component
1548 # and are trying to include the delta closure.
1549@@ -1525,7 +1527,7 @@
1550 missing = set(keys)
1551 record_map = self._get_record_map(missing, allow_missing=True)
1552 result = {}
1553- for key, details in record_map.iteritems():
1554+ for key, details in viewitems(record_map):
1555 if key not in missing:
1556 continue
1557 # record entry 2 is the 'digest'.
1558@@ -1757,7 +1759,7 @@
1559 # we need key, position, length
1560 key_records = []
1561 build_details = self._index.get_build_details(keys)
1562- for key, details in build_details.iteritems():
1563+ for key, details in viewitems(build_details):
1564 if key in keys:
1565 key_records.append((key, details[0]))
1566 records_iter = enumerate(self._read_records_iter(key_records))
1567@@ -2165,8 +2167,8 @@
1568 # one line with next ('' for None)
1569 # one line with byte count of the record bytes
1570 # the record bytes
1571- for key, (record_bytes, (method, noeol), next) in \
1572- self._raw_record_map.iteritems():
1573+ for key, (record_bytes, (method, noeol), next) in viewitems(
1574+ self._raw_record_map):
1575 key_bytes = '\x00'.join(key)
1576 parents = self.global_map.get(key, None)
1577 if parents is None:
1578@@ -2890,10 +2892,10 @@
1579 del keys[key]
1580 result = []
1581 if self._parents:
1582- for key, (value, node_refs) in keys.iteritems():
1583+ for key, (value, node_refs) in viewitems(keys):
1584 result.append((key, value, node_refs))
1585 else:
1586- for key, (value, node_refs) in keys.iteritems():
1587+ for key, (value, node_refs) in viewitems(keys):
1588 result.append((key, value))
1589 self._add_callback(result)
1590 if missing_compression_parents:
1591@@ -3269,7 +3271,7 @@
1592 self._all_build_details.update(build_details)
1593 # new_nodes = self._vf._index._get_entries(this_iteration)
1594 pending = set()
1595- for key, details in build_details.iteritems():
1596+ for key, details in viewitems(build_details):
1597 (index_memo, compression_parent, parent_keys,
1598 record_details) = details
1599 self._parent_map[key] = parent_keys
1600@@ -3290,7 +3292,7 @@
1601 else:
1602 self._num_compression_children[compression_parent] = 1
1603
1604- missing_versions = this_iteration.difference(build_details.keys())
1605+ missing_versions = this_iteration.difference(build_details)
1606 if missing_versions:
1607 for key in missing_versions:
1608 if key in self._parent_map and key in self._text_cache:
1609
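The difference(build_details) change above relies on set methods taking
any iterable, and on a dict iterating over its keys. A quick check, with
data invented for the example:

    build_details = {('rev-1',): 'details', ('rev-2',): 'details'}
    this_iteration = {('rev-1',), ('rev-3',)}
    # set.difference() accepts any iterable, so materializing
    # build_details.keys() first is redundant:
    assert (this_iteration.difference(build_details)
            == this_iteration.difference(list(build_details.keys())))
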
1610=== modified file 'breezy/log.py'
1611--- breezy/log.py 2017-06-04 18:09:30 +0000
1612+++ breezy/log.py 2017-06-05 21:41:38 +0000
1613@@ -864,7 +864,7 @@
1614 if match is None:
1615 return log_rev_iterator
1616 searchRE = [(k, [re.compile(x, re.IGNORECASE) for x in v])
1617- for (k,v) in match.iteritems()]
1618+ for k, v in match.items()]
1619 return _filter_re(searchRE, log_rev_iterator)
1620
1621
1622@@ -881,7 +881,7 @@
1623 'author': (rev.get_apparent_authors()),
1624 'bugs': list(rev.iter_bugs())
1625 }
1626- strings[''] = [item for inner_list in strings.itervalues()
1627+ strings[''] = [item for inner_list in strings.values()
1628 for item in inner_list]
1629 for (k,v) in searchRE:
1630 if k in strings and not _match_any_filter(strings[k], v):
1631
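log.py deliberately keeps plain .items() rather than a view here: the
match dict holds a handful of entries at most, so the temporary list
Python 2 builds is negligible. A sketch with invented data:

    import re

    match = {'message': ['fix'], 'author': ['jane']}
    # Tiny dict, so the Python 2 list copy made by .items() is
    # harmless and no sixish import is needed:
    searchRE = [(k, [re.compile(x, re.IGNORECASE) for x in v])
                for k, v in match.items()]
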
1632=== modified file 'breezy/lru_cache.py'
1633--- breezy/lru_cache.py 2017-05-30 19:16:23 +0000
1634+++ breezy/lru_cache.py 2017-06-05 21:41:38 +0000
1635@@ -21,6 +21,11 @@
1636 from . import (
1637 trace,
1638 )
1639+from .sixish import (
1640+ viewitems,
1641+ viewkeys,
1642+ )
1643+
1644
1645 _null_key = object()
1646
1647@@ -129,11 +134,12 @@
1648
1649 :return: An unordered list of keys that are currently cached.
1650 """
1651- return self._cache.keys()
1652+ # GZ 2016-06-04: Maybe just make this return the view?
1653+ return list(viewkeys(self._cache))
1654
1655 def as_dict(self):
1656 """Get a new dict with the same key:value pairs as the cache"""
1657- return dict((k, n.value) for k, n in self._cache.iteritems())
1658+ return dict((k, n.value) for k, n in viewitems(self._cache))
1659
1660 def cleanup(self):
1661 """Clear the cache until it shrinks to the requested size.
1662
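The list(viewkeys(...)) form above answers the inline question: keys() is
documented to return a list, and a live view would observe later cache
mutations. A sketch of the difference, reusing the shim sketched earlier:

    try:
        viewkeys = dict.viewkeys      # Python 2
    except AttributeError:
        viewkeys = dict.keys          # Python 3

    cache = {'a': 1, 'b': 2}
    snapshot = list(viewkeys(cache))  # fixed list, safe for callers
    live = viewkeys(cache)            # reflects later mutations
    cache['c'] = 3
    assert 'c' not in snapshot and 'c' in live
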
1663=== modified file 'breezy/mail_client.py'
1664--- breezy/mail_client.py 2017-05-22 00:56:52 +0000
1665+++ breezy/mail_client.py 2017-06-05 21:41:38 +0000
1666@@ -252,7 +252,7 @@
1667 if body is not None:
1668 message_options['body'] = body
1669 options_list = ['%s=%s' % (k, urlutils.escape(v)) for (k, v) in
1670- sorted(message_options.iteritems())]
1671+ sorted(message_options.items())]
1672 return ['mailto:%s?%s' % (self._encode_safe(to or ''),
1673 '&'.join(options_list))]
1674 mail_client_registry.register('evolution', Evolution,
1675@@ -316,7 +316,7 @@
1676 else:
1677 options_list = []
1678 options_list.extend(["%s='%s'" % (k, v) for k, v in
1679- sorted(message_options.iteritems())])
1680+ sorted(message_options.items())])
1681 return ['-compose', ','.join(options_list)]
1682 mail_client_registry.register('thunderbird', Thunderbird,
1683 help=Thunderbird.__doc__)
1684
1685=== modified file 'breezy/merge.py'
1686--- breezy/merge.py 2017-05-30 19:16:23 +0000
1687+++ breezy/merge.py 2017-06-05 21:41:38 +0000
1688@@ -48,6 +48,9 @@
1689 hooks,
1690 registry,
1691 )
1692+from .sixish import (
1693+ viewitems,
1694+ )
1695 # TODO: Report back as changes are merged in
1696
1697
1698@@ -2289,7 +2292,7 @@
1699 filtered_parent_map = {}
1700 child_map = {}
1701 tails = []
1702- for key, parent_keys in parent_map.iteritems():
1703+ for key, parent_keys in viewitems(parent_map):
1704 culled_parent_keys = [p for p in parent_keys if p in parent_map]
1705 if not culled_parent_keys:
1706 tails.append(key)
1707
1708=== modified file 'breezy/mutabletree.py'
1709--- breezy/mutabletree.py 2017-05-30 19:32:13 +0000
1710+++ breezy/mutabletree.py 2017-06-05 21:41:38 +0000
1711@@ -42,6 +42,9 @@
1712 """)
1713
1714 from .decorators import needs_read_lock, needs_write_lock
1715+from .sixish import (
1716+ viewvalues,
1717+ )
1718
1719
1720 def needs_tree_write_lock(unbound):
1721@@ -551,7 +554,9 @@
1722 """Helper for MutableTree.smart_add."""
1723
1724 def get_inventory_delta(self):
1725- return self._invdelta.values()
1726+ # GZ 2016-06-05: Returning view would probably be fine but currently
1727+ # Inventory.apply_delta is documented as requiring a list of changes.
1728+ return list(viewvalues(self._invdelta))
1729
1730 def _get_ie(self, inv_path):
1731 """Retrieve the most up to date inventory entry for a path.
1732@@ -633,12 +638,12 @@
1733 prev_dir = None
1734
1735 is_inside = osutils.is_inside_or_parent_of_any
1736- for path, (inv_path, this_ie) in sorted(
1737- user_dirs.iteritems(), key=operator.itemgetter(0)):
1738+ for path in sorted(user_dirs):
1739 if (prev_dir is None or not is_inside([prev_dir], path)):
1740+ inv_path, this_ie = user_dirs[path]
1741 yield (path, inv_path, this_ie, None)
1742 prev_dir = path
1743-
1744+
1745 def __init__(self, tree, action, conflicts_related=None):
1746 self.tree = tree
1747 if action is None:
1748
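Two idioms from the mutabletree.py hunks in one sketch: copying a values
view into a real list because Inventory.apply_delta is documented to want
a list, and sorting a dict directly (iteration yields keys) instead of
sorting iteritems() by the first tuple element. Data names are invented:

    invdelta = {'a': ('old/a', 'new/a', 'id-a', None)}
    delta = list(invdelta.values())       # a real list, as the API expects

    user_dirs = {'b/dir': ('inv/b', 'ie-b'), 'a/dir': ('inv/a', 'ie-a')}
    for path in sorted(user_dirs):        # sorted() over the keys
        inv_path, this_ie = user_dirs[path]
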
1749=== modified file 'breezy/plugins/bash_completion/bashcomp.py'
1750--- breezy/plugins/bash_completion/bashcomp.py 2017-05-22 00:56:52 +0000
1751+++ breezy/plugins/bash_completion/bashcomp.py 2017-06-05 21:41:38 +0000
1752@@ -189,7 +189,7 @@
1753 brz_version += "."
1754 else:
1755 brz_version += " and the following plugins:"
1756- for name, plugin in sorted(self.data.plugins.iteritems()):
1757+ for name, plugin in sorted(self.data.plugins.items()):
1758 brz_version += "\n# %s" % plugin
1759 return brz_version
1760
1761@@ -314,7 +314,7 @@
1762 self.data.global_options.add(short)
1763
1764 def aliases(self):
1765- for alias, expansion in config.GlobalConfig().get_aliases().iteritems():
1766+ for alias, expansion in config.GlobalConfig().get_aliases().items():
1767 for token in cmdline.split(expansion):
1768 if not token.startswith("-"):
1769 self.user_aliases.setdefault(token, set()).add(alias)
1770@@ -352,7 +352,7 @@
1771 if useralias not in cmd_data.aliases]))
1772
1773 opts = cmd.options()
1774- for optname, opt in sorted(opts.iteritems()):
1775+ for optname, opt in sorted(opts.items()):
1776 cmd_data.options.extend(self.option(opt))
1777
1778 if 'help' == name or 'help' in cmd.aliases:
1779@@ -471,7 +471,7 @@
1780 if args:
1781 parser.error("script does not take positional arguments")
1782 kwargs = dict()
1783- for name, value in opts.__dict__.iteritems():
1784+ for name, value in opts.__dict__.items():
1785 if value is not None:
1786 kwargs[name] = value
1787
1788
1789=== modified file 'breezy/plugins/bisect/tests.py'
1790--- breezy/plugins/bisect/tests.py 2017-05-24 14:51:33 +0000
1791+++ breezy/plugins/bisect/tests.py 2017-06-05 21:41:38 +0000
1792@@ -44,10 +44,10 @@
1793 1.3: "one dot three", 2: "two", 3: "three",
1794 4: "four", 5: "five"}
1795
1796- test_file = open("test_file")
1797- content = test_file.read().strip()
1798+ with open("test_file") as f:
1799+ content = f.read().strip()
1800 if content != rev_contents[rev]:
1801- rev_ids = dict((rev_contents[k], k) for k in rev_contents.keys())
1802+ rev_ids = dict((rev_contents[k], k) for k in rev_contents)
1803 found_rev = rev_ids[content]
1804 raise AssertionError("expected rev %0.1f, found rev %0.1f"
1805 % (rev, found_rev))
1806
1807=== modified file 'breezy/plugins/fastimport/branch_updater.py'
1808--- breezy/plugins/fastimport/branch_updater.py 2017-05-23 23:21:16 +0000
1809+++ breezy/plugins/fastimport/branch_updater.py 2017-06-05 21:41:38 +0000
1810@@ -77,7 +77,7 @@
1811 """
1812 branch_tips = []
1813 lost_heads = []
1814- ref_names = self.heads_by_ref.keys()
1815+ ref_names = list(self.heads_by_ref)
1816 if self.branch is not None:
1817 trunk = self.select_trunk(ref_names)
1818 default_tip = self.heads_by_ref[trunk][0]
1819
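list(d) replaces d.keys() wherever a real list of keys is wanted, since
keys() only returns a list on Python 2. Invented data:

    heads_by_ref = {'refs/heads/trunk': ['rev-9'],
                    'refs/heads/work': ['rev-7']}
    ref_names = list(heads_by_ref)    # list of keys on both versions
    assert sorted(ref_names) == ['refs/heads/trunk', 'refs/heads/work']
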
1820=== modified file 'breezy/plugins/fastimport/cache_manager.py'
1821--- breezy/plugins/fastimport/cache_manager.py 2017-05-23 23:21:16 +0000
1822+++ breezy/plugins/fastimport/cache_manager.py 2017-06-05 21:41:38 +0000
1823@@ -51,7 +51,7 @@
1824
1825 def finalize(self):
1826 if self.disk_blobs is not None:
1827- for info in self.disk_blobs.itervalues():
1828+ for info in self.disk_blobs.values():
1829 if info[-1] is not None:
1830 os.unlink(info[-1])
1831 self.disk_blobs = None
1832@@ -145,17 +145,17 @@
1833 #self._show_stats_for(self._blobs, "other blobs", note=note)
1834 #self.reftracker.dump_stats(note=note)
1835
1836- def _show_stats_for(self, dict, label, note=trace.note, tuple_key=False):
1837+ def _show_stats_for(self, a_dict, label, note, tuple_key=False):
1838 """Dump statistics about a given dictionary.
1839
1840         Both the key and value need to support len().
1841 """
1842- count = len(dict)
1843+ count = len(a_dict)
1844 if tuple_key:
1845- size = sum(map(len, (''.join(k) for k in dict.keys())))
1846+ size = sum(map(len, (''.join(k) for k in a_dict)))
1847 else:
1848- size = sum(map(len, dict.keys()))
1849- size += sum(map(len, dict.values()))
1850+ size = sum(map(len, a_dict))
1851+ size += sum(map(len, a_dict.values()))
1852 size = size * 1.0 / 1024
1853 unit = 'K'
1854 if size > 1024:
1855@@ -176,7 +176,7 @@
1856 self.inventories.clear()
1857
1858 def _flush_blobs_to_disk(self):
1859- blobs = self._sticky_blobs.keys()
1860+ blobs = list(self._sticky_blobs)
1861 sticky_blobs = self._sticky_blobs
1862 total_blobs = len(sticky_blobs)
1863 blobs.sort(key=lambda k:len(sticky_blobs[k]))
1864@@ -275,16 +275,3 @@
1865 if self._decref(id, self._sticky_blobs, None):
1866 self._sticky_memory_bytes -= len(content)
1867 return content
1868-
1869-
1870-def invert_dictset(d):
1871- """Invert a dictionary with keys matching a set of values, turned into lists."""
1872- # Based on recipe from ASPN
1873- result = {}
1874- for k, c in d.iteritems():
1875- for v in c:
1876- keys = result.setdefault(v, [])
1877- keys.append(k)
1878- return result
1879-
1880-
1881
1882=== modified file 'breezy/plugins/fastimport/exporter.py'
1883--- breezy/plugins/fastimport/exporter.py 2017-05-23 23:37:18 +0000
1884+++ breezy/plugins/fastimport/exporter.py 2017-06-05 21:41:38 +0000
1885@@ -292,7 +292,7 @@
1886 if self.export_marks_file:
1887 revision_ids = dict((m, r) for r, m in self.revid_to_mark.items())
1888 marks_file.export_marks(self.export_marks_file, revision_ids)
1889-
1890+
1891 def is_empty_dir(self, tree, path):
1892 path_id = tree.path2id(path)
1893 if path_id is None:
1894
1895=== modified file 'breezy/plugins/fastimport/helpers.py'
1896--- breezy/plugins/fastimport/helpers.py 2017-05-23 23:21:16 +0000
1897+++ breezy/plugins/fastimport/helpers.py 2017-06-05 21:41:38 +0000
1898@@ -181,7 +181,7 @@
1899 """Invert a dictionary with keys matching a set of values, turned into lists."""
1900 # Based on recipe from ASPN
1901 result = {}
1902- for k, c in d.iteritems():
1903+ for k, c in d.items():
1904 for v in c:
1905 keys = result.setdefault(v, [])
1906 keys.append(k)
1907@@ -192,7 +192,7 @@
1908 """Invert a dictionary with keys matching each value turned into a list."""
1909 # Based on recipe from ASPN
1910 result = {}
1911- for k, v in d.iteritems():
1912+ for k, v in d.items():
1913 keys = result.setdefault(v, [])
1914 keys.append(k)
1915 return result
1916
1917=== modified file 'breezy/plugins/fastimport/idmapfile.py'
1918--- breezy/plugins/fastimport/idmapfile.py 2017-05-23 23:21:16 +0000
1919+++ breezy/plugins/fastimport/idmapfile.py 2017-06-05 21:41:38 +0000
1920@@ -29,13 +29,9 @@
1921 :param filename: name of the file to save the data to
1922 :param revision_ids: a dictionary of commit ids to revision ids.
1923 """
1924- f = open(filename, 'wb')
1925- try:
1926- for commit_id, rev_id in revision_ids.iteritems():
1927- f.write("%s %s\n" % (commit_id, rev_id))
1928- f.flush()
1929- finally:
1930- f.close()
1931+ with open(filename, 'wb') as f:
1932+ for commit_id in revision_ids:
1933+ f.write("%s %s\n" % (commit_id, revision_ids[commit_id]))
1934
1935
1936 def load_id_map(filename):
1937
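The rewrite above swaps an explicit try/finally for a with statement;
both close the file whether or not a write raises, and close() implies
the flush the old code did by hand. An equivalence sketch:

    f = open('id.map', 'w')
    try:
        f.write('c1 rev-1\n')
    finally:
        f.close()

    with open('id.map', 'w') as f:    # same guarantee, less ceremony
        f.write('c1 rev-1\n')
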
1938=== modified file 'breezy/plugins/fastimport/marks_file.py'
1939--- breezy/plugins/fastimport/marks_file.py 2017-05-23 23:21:16 +0000
1940+++ breezy/plugins/fastimport/marks_file.py 2017-06-05 21:41:38 +0000
1941@@ -74,6 +74,6 @@
1942 return
1943
1944 # Write the revision info
1945- for mark, revid in revision_ids.iteritems():
1946- f.write(':%s %s\n' % (str(mark).lstrip(':'), revid))
1947+ for mark in revision_ids:
1948+ f.write(':%s %s\n' % (str(mark).lstrip(':'), revision_ids[mark]))
1949 f.close()
1950
1951=== modified file 'breezy/plugins/fastimport/processors/info_processor.py'
1952--- breezy/plugins/fastimport/processors/info_processor.py 2017-06-04 18:09:30 +0000
1953+++ breezy/plugins/fastimport/processors/info_processor.py 2017-06-05 21:41:38 +0000
1954@@ -87,11 +87,11 @@
1955 # Commit stats
1956 if self.cmd_counts['commit']:
1957 p_items = []
1958- for _ in range(self.max_parent_count + 1):
1959+ for i in range(self.max_parent_count + 1):
1960 if i in self.parent_counts:
1961 count = self.parent_counts[i]
1962 p_items.append(("parents-%d" % i, count))
1963- merges_count = len(self.merges.keys())
1964+ merges_count = len(self.merges)
1965 p_items.append(('total revisions merged', merges_count))
1966 flags = {
1967 'separate authors found': self.separate_authors_found,
1968@@ -100,21 +100,21 @@
1969 'blobs referenced by SHA': self.sha_blob_references,
1970 }
1971 self._dump_stats_group("Parent counts", p_items, str)
1972- self._dump_stats_group("Commit analysis", flags.iteritems(), _found)
1973+ self._dump_stats_group("Commit analysis", flags.items(), _found)
1974 heads = invert_dictset(self.reftracker.heads)
1975- self._dump_stats_group("Head analysis", heads.iteritems(), None,
1976+ self._dump_stats_group("Head analysis", heads.items(), None,
1977 _iterable_as_config_list)
1978 # note("\t%d\t%s" % (len(self.committers), 'unique committers'))
1979- self._dump_stats_group("Merges", self.merges.iteritems(), None)
1980+ self._dump_stats_group("Merges", self.merges.items(), None)
1981 # We only show the rename old path and copy source paths when -vv
1982 # (verbose=2) is specified. The output here for mysql's data can't
1983 # be parsed currently so this bit of code needs more work anyhow ..
1984 if self.verbose >= 2:
1985 self._dump_stats_group("Rename old paths",
1986- self.rename_old_paths.iteritems(), len,
1987+ self.rename_old_paths.items(), len,
1988 _iterable_as_config_list)
1989 self._dump_stats_group("Copy source paths",
1990- self.copy_source_paths.iteritems(), len,
1991+ self.copy_source_paths.items(), len,
1992 _iterable_as_config_list)
1993
1994 # Blob stats
1995@@ -123,11 +123,10 @@
1996 if self.verbose:
1997 del self.blobs['used']
1998 self._dump_stats_group("Blob usage tracking",
1999- self.blobs.iteritems(), len, _iterable_as_config_list)
2000+ self.blobs.items(), len, _iterable_as_config_list)
2001 if self.blob_ref_counts:
2002 blobs_by_count = invert_dict(self.blob_ref_counts)
2003- blob_items = blobs_by_count.items()
2004- blob_items.sort()
2005+ blob_items = sorted(blobs_by_count.items())
2006 self._dump_stats_group("Blob reference counts",
2007 blob_items, len, _iterable_as_config_list)
2008
2009@@ -136,7 +135,7 @@
2010 reset_stats = {
2011 'lightweight tags': self.lightweight_tags,
2012 }
2013- self._dump_stats_group("Reset analysis", reset_stats.iteritems())
2014+ self._dump_stats_group("Reset analysis", reset_stats.items())
2015
2016 def _dump_stats_group(self, title, items, normal_formatter=None,
2017 verbose_formatter=None):
2018
2019=== modified file 'breezy/plugins/fastimport/revision_store.py'
2020--- breezy/plugins/fastimport/revision_store.py 2017-05-30 23:57:10 +0000
2021+++ breezy/plugins/fastimport/revision_store.py 2017-06-05 21:41:38 +0000
2022@@ -269,7 +269,7 @@
2023 # repository.CommitBuilder.record_entry_contents().
2024 parent_candidate_entries = ie.parent_candidates(self._rev_parent_invs)
2025 head_set = self._commit_builder._heads(ie.file_id,
2026- parent_candidate_entries.keys())
2027+ list(parent_candidate_entries))
2028 heads = []
2029 for inv in self._rev_parent_invs:
2030 if inv.has_id(ie.file_id):
2031
2032=== modified file 'breezy/plugins/launchpad/lp_api_lite.py'
2033--- breezy/plugins/launchpad/lp_api_lite.py 2017-05-22 00:56:52 +0000
2034+++ breezy/plugins/launchpad/lp_api_lite.py 2017-06-05 21:41:38 +0000
2035@@ -194,7 +194,7 @@
2036 # Note: this assumes that a given rev won't get tagged multiple times. But
2037 # it should be valid for the package importer branches that we care
2038 # about
2039- reverse_dict = dict((rev, tag) for tag, rev in tag_dict.iteritems())
2040+ reverse_dict = dict((rev, tag) for tag, rev in tag_dict.items())
2041 the_branch.lock_read()
2042 try:
2043 last_rev = the_branch.last_revision()
2044
2045=== modified file 'breezy/plugins/launchpad/lp_registration.py'
2046--- breezy/plugins/launchpad/lp_registration.py 2017-06-02 01:01:21 +0000
2047+++ breezy/plugins/launchpad/lp_registration.py 2017-06-05 21:41:38 +0000
2048@@ -94,7 +94,7 @@
2049 # NB: these should always end in a slash to avoid xmlrpclib appending
2050 # '/RPC2'
2051 LAUNCHPAD_INSTANCE = {}
2052- for instance, domain in LAUNCHPAD_DOMAINS.iteritems():
2053+ for instance, domain in LAUNCHPAD_DOMAINS.items():
2054 LAUNCHPAD_INSTANCE[instance] = 'https://xmlrpc.%s/bazaar/' % domain
2055
2056 # We use production as the default because edge has been deprecated circa
2057@@ -197,7 +197,7 @@
2058 else:
2059 domains = (
2060 'bazaar.%s' % domain
2061- for domain in self.LAUNCHPAD_DOMAINS.itervalues())
2062+ for domain in self.LAUNCHPAD_DOMAINS.values())
2063 if hostinfo not in domains:
2064 raise NotLaunchpadBranch(branch_url)
2065 return path.lstrip('/')
2066
2067=== modified file 'breezy/plugins/stats/cmds.py'
2068--- breezy/plugins/stats/cmds.py 2017-06-01 23:52:12 +0000
2069+++ breezy/plugins/stats/cmds.py 2017-06-05 21:41:38 +0000
2070@@ -60,7 +60,7 @@
2071 info[1][email] = info[1].setdefault(email, 0) + 1
2072 info[2][username] = info[2].setdefault(username, 0) + 1
2073 res = [(len(revs), revs, emails, fnames)
2074- for revs, emails, fnames in committer_to_info.itervalues()]
2075+ for revs, emails, fnames in committer_to_info.values()]
2076 res.sort(reverse=True)
2077 return res
2078
2079@@ -89,7 +89,7 @@
2080 old_email_id = email_to_id[old_email]
2081 assert old_email_id in (old_id, new_id)
2082 email_to_id[old_email] = cur_id
2083- for email, usernames in email_users.iteritems():
2084+ for email, usernames in email_users.items():
2085 assert email not in email_to_id
2086 if not email:
2087 # We use a different algorithm for usernames that have no email
2088@@ -131,7 +131,7 @@
2089 collapse_ids(user_id, cur_id, id_combos)
2090 username_to_id[low_user] = cur_id
2091 combo_to_best_combo = {}
2092- for cur_id, combos in id_to_combos.iteritems():
2093+ for cur_id, combos in id_to_combos.items():
2094 best_combo = sorted(combos,
2095 key=lambda x:combo_count[x],
2096 reverse=True)[0]
2097@@ -206,10 +206,10 @@
2098 for count, revs, emails, fullnames in info:
2099 # Get the most common email name
2100 sorted_emails = sorted(((count, email)
2101- for email,count in emails.iteritems()),
2102+ for email, count in emails.items()),
2103 reverse=True)
2104 sorted_fullnames = sorted(((count, fullname)
2105- for fullname,count in fullnames.iteritems()),
2106+ for fullname, count in fullnames.items()),
2107 reverse=True)
2108 if sorted_fullnames[0][1] == '' and sorted_emails[0][1] == '':
2109 to_file.write('%4d %s\n'
2110@@ -237,7 +237,7 @@
2111 if gather_class_stats is not None:
2112 to_file.write(' Contributions:\n')
2113 classes, total = gather_class_stats(revs)
2114- for name,count in sorted(classes.items(), lambda x,y: cmp((x[1], x[0]), (y[1], y[0]))):
2115+ for name, count in sorted(classes.items(), key=classify_key):
2116 if name is None:
2117 name = "Unknown"
2118 to_file.write(" %4.0f%% %s\n" % ((float(count) / total) * 100.0, name))
2119@@ -342,6 +342,11 @@
2120 return ret, total
2121
2122
2123+def classify_key(item):
2124+ """Sort key for item of (author, count) from classify_delta."""
2125+ return -item[1], item[0]
2126+
2127+
2128 def display_credits(credits, to_file):
2129 (coders, documenters, artists, translators) = credits
2130 def print_section(name, lst):
2131@@ -392,8 +397,8 @@
2132 finally:
2133 repository.unlock()
2134 def sort_class(name):
2135- return map(lambda (x,y): x,
2136- sorted(ret[name].items(), lambda x,y: cmp((x[1], x[0]), (y[1], y[0])), reverse=True))
2137+ return [author
2138+ for author, _ in sorted(ret[name].items(), key=classify_key)]
2139 return (sort_class("code"), sort_class("documentation"), sort_class("art"), sort_class("translation"))
2140
2141
2142
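classify_key above replaces the removed cmp-style comparators, which
Python 3 no longer supports; returning a (-count, name) tuple sorts
counts descending and breaks ties by name ascending. A worked example
with invented counts:

    def classify_key(item):
        """Sort key for item of (author, count) from classify_delta."""
        return -item[1], item[0]

    classes = {'code': 10, 'art': 10, 'documentation': 3}
    assert (sorted(classes.items(), key=classify_key)
            == [('art', 10), ('code', 10), ('documentation', 3)])
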
2143=== modified file 'breezy/plugins/weave_fmt/bzrdir.py'
2144--- breezy/plugins/weave_fmt/bzrdir.py 2017-05-30 19:32:13 +0000
2145+++ breezy/plugins/weave_fmt/bzrdir.py 2017-06-05 21:41:38 +0000
2146@@ -435,7 +435,7 @@
2147 self.text_weaves[file_id] = w
2148 text_changed = False
2149 parent_candiate_entries = ie.parent_candidates(parent_invs)
2150- heads = graph.Graph(self).heads(parent_candiate_entries.keys())
2151+ heads = graph.Graph(self).heads(parent_candiate_entries)
2152 # XXX: Note that this is unordered - and this is tolerable because
2153 # the previous code was also unordered.
2154 previous_entries = dict((head, parent_candiate_entries[head]) for head
2155@@ -457,7 +457,7 @@
2156 # save against.
2157 #ie.snapshot(rev, PATH, previous_revisions, REVISION_TREE, InMemoryWeaveStore(self.text_weaves))
2158 if len(previous_revisions) == 1:
2159- previous_ie = previous_revisions.values()[0]
2160+ previous_ie = next(iter(previous_revisions.values()))
2161 if ie._unchanged(previous_ie):
2162 ie.revision = previous_ie.revision
2163 return
2164@@ -479,7 +479,7 @@
2165         The order must be such that a revision is imported after all
2166 its (present) parents.
2167 """
2168- todo = set(self.revisions.keys())
2169+ todo = set(self.revisions)
2170 done = self.absent_revisions.copy()
2171 order = []
2172 while todo:
2173
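values()[0] only works while values() returns a list, so the patch takes
the single entry with next(iter(...)); the same spelling works on a
Python 2 list and a Python 3 view. Sketch with an invented entry:

    previous_revisions = {'rev-1': 'entry-1'}
    if len(previous_revisions) == 1:
        # Views are not indexable; next(iter(...)) pulls one element
        # from list and view alike.
        previous_ie = next(iter(previous_revisions.values()))
    assert previous_ie == 'entry-1'
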
2174=== modified file 'breezy/reconcile.py'
2175--- breezy/reconcile.py 2017-05-22 00:56:52 +0000
2176+++ breezy/reconcile.py 2017-06-05 21:41:38 +0000
2177@@ -430,7 +430,7 @@
2178 # NB: This is really not needed, reconcile != pack.
2179 per_id_bad_parents[key[0]] = {}
2180 # Generate per-knit/weave data.
2181- for key, details in bad_parents.iteritems():
2182+ for key, details in bad_parents.items():
2183 file_id = key[0]
2184 rev_id = key[1]
2185 knit_parents = tuple([parent[-1] for parent in details[0]])
2186
2187=== modified file 'breezy/registry.py'
2188--- breezy/registry.py 2017-05-22 00:56:52 +0000
2189+++ breezy/registry.py 2017-06-05 21:41:38 +0000
2190@@ -227,17 +227,16 @@
2191
2192 def keys(self):
2193 """Get a list of registered entries"""
2194- return sorted(self._dict.keys())
2195+ return sorted(self._dict)
2196
2197 def iteritems(self):
2198- for key, getter in self._dict.iteritems():
2199- yield key, getter.get_obj()
2200+ for key in self._dict:
2201+ yield key, self._dict[key].get_obj()
2202
2203 def items(self):
2204 # We should not use the iteritems() implementation below (see bug
2205 # #430510)
2206- return sorted([(key, getter.get_obj())
2207- for key, getter in self._dict.items()])
2208+ return [(key, self._dict[key].get_obj()) for key in self.keys()]
2209
2210 def _set_default_key(self, key):
2211 if key not in self._dict:
2212
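The Registry rewrite keeps iteritems() lazy by looping over keys and
resolving each getter on demand, while items() stays eager and sorted via
keys(). A toy model, with upper() standing in for the real get_obj():

    _dict = {'b': 'getter-b', 'a': 'getter-a'}

    def iteritems():
        for key in _dict:                 # lazy: resolve on demand
            yield key, _dict[key].upper()

    def items():
        return [(key, _dict[key].upper()) for key in sorted(_dict)]

    assert items() == [('a', 'GETTER-A'), ('b', 'GETTER-B')]
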
2213=== modified file 'breezy/remote.py'
2214--- breezy/remote.py 2017-05-30 19:32:13 +0000
2215+++ breezy/remote.py 2017-06-05 21:41:38 +0000
2216@@ -51,6 +51,10 @@
2217 from .i18n import gettext
2218 from .inventory import Inventory
2219 from .lockable_files import LockableFiles
2220+from .sixish import (
2221+ viewitems,
2222+ viewvalues,
2223+ )
2224 from .smart import client, vfs, repository as smart_repo
2225 from .smart.client import _SmartClient
2226 from .revision import NULL_REVISION
2227@@ -698,7 +702,7 @@
2228 raise errors.UnexpectedSmartServerResponse(response)
2229 body = bencode.bdecode(handler.read_body_bytes())
2230 ret = {}
2231- for (name, value) in body.iteritems():
2232+ for name, value in viewitems(body):
2233 ret[name] = self._open_branch(name, value[0], value[1],
2234 possible_transports=possible_transports,
2235 ignore_fallbacks=ignore_fallbacks)
2236@@ -2088,8 +2092,7 @@
2237 def revision_ids_to_search_result(self, result_set):
2238 """Convert a set of revision ids to a graph SearchResult."""
2239 result_parents = set()
2240- for parents in self.get_graph().get_parent_map(
2241- result_set).itervalues():
2242+ for parents in viewvalues(self.get_graph().get_parent_map(result_set)):
2243 result_parents.update(parents)
2244 included_keys = result_set.intersection(result_parents)
2245 start_keys = result_set.difference(included_keys)
2246@@ -2214,15 +2217,15 @@
2247 for fallback in self._fallback_repositories:
2248 if not absent:
2249 break
2250- desired_files = [(key[0], key[1], identifier) for
2251- (identifier, key) in absent.iteritems()]
2252+ desired_files = [(key[0], key[1], identifier)
2253+ for identifier, key in viewitems(absent)]
2254 for (identifier, bytes_iterator) in fallback.iter_files_bytes(desired_files):
2255 del absent[identifier]
2256 yield identifier, bytes_iterator
2257 if absent:
2258 # There may be more missing items, but raise an exception
2259 # for just one.
2260- missing_identifier = absent.keys()[0]
2261+ missing_identifier = next(iter(absent))
2262 missing_key = absent[missing_identifier]
2263 raise errors.RevisionNotPresent(revision_id=missing_key[1],
2264 file_id=missing_key[0])
2265@@ -2262,7 +2265,7 @@
2266 # There is one other "bug" which is that ghosts in
2267 # get_revision_graph() are not returned at all. But we won't worry
2268 # about that for now.
2269- for node_id, parent_ids in rg.iteritems():
2270+ for node_id, parent_ids in viewitems(rg):
2271 if parent_ids == ():
2272 rg[node_id] = (NULL_REVISION,)
2273 rg[NULL_REVISION] = ()
2274
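absent.keys()[0] breaks on Python 3, where keys() is an unindexable view;
next(iter(absent)) grabs one key without building a list on either
version. Sketch with invented data:

    absent = {'identifier-1': ('file-id', 'rev-id')}
    missing_identifier = next(iter(absent))     # any one key
    missing_key = absent[missing_identifier]
    assert missing_key == ('file-id', 'rev-id')
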
2275=== modified file 'breezy/rename_map.py'
2276--- breezy/rename_map.py 2017-05-22 00:56:52 +0000
2277+++ breezy/rename_map.py 2017-06-05 21:41:38 +0000
2278@@ -25,6 +25,7 @@
2279 from .i18n import gettext
2280 from .sixish import (
2281 BytesIO,
2282+ viewitems,
2283 )
2284 from .ui import ui_factory
2285
2286@@ -109,7 +110,7 @@
2287 task.update(gettext('Determining hash hits'), num, len(paths))
2288 hits = self.hitcounts(self.tree.get_file_lines(None,
2289 path=path))
2290- all_hits.extend((v, path, k) for k, v in hits.items())
2291+ all_hits.extend((v, path, k) for k, v in viewitems(hits))
2292 finally:
2293 task.finished()
2294 return all_hits
2295@@ -150,7 +151,7 @@
2296 break
2297 required_parents.setdefault(path, []).append(child)
2298 require_ids = {}
2299- for parent, children in required_parents.iteritems():
2300+ for parent, children in viewitems(required_parents):
2301 child_file_ids = set()
2302 for child in children:
2303 file_id = matches.get(child)
2304@@ -167,8 +168,8 @@
2305 parent directories.
2306 """
2307 all_hits = []
2308- for file_id, file_id_children in missing_parents.iteritems():
2309- for path, path_children in required_parents.iteritems():
2310+ for file_id, file_id_children in viewitems(missing_parents):
2311+ for path, path_children in viewitems(required_parents):
2312 hits = len(path_children.intersection(file_id_children))
2313 if hits > 0:
2314 all_hits.append((hits, path, file_id))
2315@@ -250,8 +251,8 @@
2316
2317 def _make_inventory_delta(self, matches):
2318 delta = []
2319- file_id_matches = dict((f, p) for p, f in matches.items())
2320- for old_path, entry in self.tree.iter_entries_by_dir(matches.values()):
2321+ file_id_matches = dict((f, p) for p, f in viewitems(matches))
2322+ for old_path, entry in self.tree.iter_entries_by_dir(file_id_matches):
2323 new_path = file_id_matches[entry.file_id]
2324 parent_path, new_name = osutils.split(new_path)
2325 parent_id = matches.get(parent_path)
2326
2327=== modified file 'breezy/repofmt/groupcompress_repo.py'
2328--- breezy/repofmt/groupcompress_repo.py 2017-05-22 00:56:52 +0000
2329+++ breezy/repofmt/groupcompress_repo.py 2017-06-05 21:41:38 +0000
2330@@ -58,6 +58,10 @@
2331 from ..vf_repository import (
2332 StreamSource,
2333 )
2334+from ..sixish import (
2335+ viewitems,
2336+ viewvalues,
2337+ )
2338 from ..static_tuple import StaticTuple
2339
2340
2341@@ -276,7 +280,7 @@
2342 remaining_keys.difference_update(cur_keys)
2343 next_keys = set()
2344 def handle_internal_node(node):
2345- for prefix, value in node._items.iteritems():
2346+ for prefix, value in viewitems(node._items):
2347 # We don't want to request the same key twice, and we
2348 # want to order it by the first time it is seen.
2349 # Even further, we don't want to request a key which is
2350@@ -543,7 +547,7 @@
2351 ancestor_keys = revision_vf.get_parent_map(revision_vf.keys())
2352 # Strip keys back into revision_ids.
2353 ancestors = dict((k[0], tuple([p[0] for p in parents]))
2354- for k, parents in ancestor_keys.iteritems())
2355+ for k, parents in viewitems(ancestor_keys))
2356 del ancestor_keys
2357 # TODO: _generate_text_key_index should be much cheaper to generate from
2358 # a chk repository, rather than the current implementation
2359@@ -665,7 +669,7 @@
2360 if search_key_name is None:
2361 # Find the name corresponding to the search_key_func
2362 search_key_reg = chk_map.search_key_registry
2363- for search_key_name, func in search_key_reg.iteritems():
2364+ for search_key_name, func in viewitems(search_key_reg):
2365 if func == chk_inv.id_to_entry._search_key_func:
2366 break
2367 canonical_inv = inventory.CHKInventory.from_inventory(
2368@@ -741,7 +745,7 @@
2369 # any present parent inventories, which may be used when calculating
2370 # deltas for streaming.
2371 all_inv_keys = set(corresponding_invs)
2372- for parent_inv_keys in inv_parent_map.itervalues():
2373+ for parent_inv_keys in viewvalues(inv_parent_map):
2374 all_inv_keys.update(parent_inv_keys)
2375 # Filter out ghost parents.
2376 all_inv_keys.intersection_update(
2377
2378=== modified file 'breezy/repofmt/knitpack_repo.py'
2379--- breezy/repofmt/knitpack_repo.py 2017-05-25 21:59:11 +0000
2380+++ breezy/repofmt/knitpack_repo.py 2017-06-05 21:41:38 +0000
2381@@ -68,6 +68,7 @@
2382 RepositoryPackCollection,
2383 )
2384 from ..sixish import (
2385+ viewitems,
2386 zip
2387 )
2388 from ..vf_repository import (
2389@@ -642,7 +643,7 @@
2390 request_groups[index].append((key, value))
2391 record_index = 0
2392 pb.update("Copied record", record_index, len(nodes))
2393- for index, items in request_groups.iteritems():
2394+ for index, items in viewitems(request_groups):
2395 pack_readv_requests = []
2396 for key, value in items:
2397 # ---- KnitGraphIndex.get_position
2398@@ -740,7 +741,7 @@
2399 fileid_revisions = repo._find_file_ids_from_xml_inventory_lines(
2400 inv_lines, self.revision_keys)
2401 text_filter = []
2402- for fileid, file_revids in fileid_revisions.iteritems():
2403+ for fileid, file_revids in viewitems(fileid_revisions):
2404 text_filter.extend([(fileid, file_revid) for file_revid in file_revids])
2405 self._text_filter = text_filter
2406
2407@@ -934,7 +935,7 @@
2408 request_groups[index] = []
2409 request_groups[index].append((key, value, references))
2410 result = []
2411- for index, items in request_groups.iteritems():
2412+ for index, items in viewitems(request_groups):
2413 pack_readv_requests = []
2414 for key, value, references in items:
2415 # ---- KnitGraphIndex.get_position
2416
2417=== modified file 'breezy/repofmt/pack_repo.py'
2418--- breezy/repofmt/pack_repo.py 2017-05-22 00:56:52 +0000
2419+++ breezy/repofmt/pack_repo.py 2017-06-05 21:41:38 +0000
2420@@ -1288,10 +1288,10 @@
2421
2422 def _remove_pack_indices(self, pack, ignore_missing=False):
2423 """Remove the indices for pack from the aggregated indices.
2424-
2425+
2426 :param ignore_missing: Suppress KeyErrors from calling remove_index.
2427 """
2428- for index_type in Pack.index_definitions.keys():
2429+ for index_type in Pack.index_definitions:
2430 attr_name = index_type + '_index'
2431 aggregate_index = getattr(self, attr_name)
2432 if aggregate_index is not None:
2433@@ -1344,7 +1344,7 @@
2434
2435 # do a two-way diff against our original content
2436 current_nodes = set()
2437- for name, sizes in self._names.iteritems():
2438+ for name, sizes in self._names.items():
2439 current_nodes.add(
2440 ((name, ), ' '.join(str(size) for size in sizes)))
2441
2442
2443=== modified file 'breezy/repository.py'
2444--- breezy/repository.py 2017-05-30 19:32:13 +0000
2445+++ breezy/repository.py 2017-06-05 21:41:38 +0000
2446@@ -48,6 +48,10 @@
2447 from .decorators import needs_read_lock, needs_write_lock, only_raises
2448 from .inter import InterObject
2449 from .lock import _RelockDebugMixin, LogicalLockResult
2450+from .sixish import (
2451+ viewitems,
2452+ viewvalues,
2453+ )
2454 from .trace import (
2455 log_exception_quietly, note, mutter, mutter_callsite, warning)
2456
2457@@ -141,7 +145,7 @@
2458 raise ValueError('Invalid value for %s: %r' % (context, text))
2459
2460 def _validate_revprops(self, revprops):
2461- for key, value in revprops.iteritems():
2462+ for key, value in viewitems(revprops):
2463 # We know that the XML serializers do not round trip '\r'
2464 # correctly, so refuse to accept them
2465 if not isinstance(value, basestring):
2466@@ -911,9 +915,8 @@
2467 :return: set of revisions that are parents of revision_ids which are
2468 not part of revision_ids themselves
2469 """
2470- parent_map = self.get_parent_map(revision_ids)
2471- parent_ids = set(itertools.chain.from_iterable(
2472- parent_map.itervalues()))
2473+ parent_ids = set(itertools.chain.from_iterable(viewvalues(
2474+ self.get_parent_map(revision_ids))))
2475 parent_ids.difference_update(revision_ids)
2476 parent_ids.discard(_mod_revision.NULL_REVISION)
2477 return parent_ids
2478@@ -1053,8 +1056,8 @@
2479 else:
2480 query_keys.append((revision_id ,))
2481 vf = self.revisions.without_fallbacks()
2482- for ((revision_id,), parent_keys) in \
2483- vf.get_parent_map(query_keys).iteritems():
2484+ for (revision_id,), parent_keys in viewitems(
2485+ vf.get_parent_map(query_keys)):
2486 if parent_keys:
2487 result[revision_id] = tuple([parent_revid
2488 for (parent_revid,) in parent_keys])
2489@@ -1747,7 +1750,7 @@
2490 # Filter ghosts, and null:
2491 if _mod_revision.NULL_REVISION in revision_graph:
2492 del revision_graph[_mod_revision.NULL_REVISION]
2493- for key, parents in revision_graph.items():
2494+ for key, parents in viewitems(revision_graph):
2495 revision_graph[key] = tuple(parent for parent in parents if parent
2496 in revision_graph)
2497 return revision_graph
2498
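The parent-ids rewrite above chains all parent tuples straight out of the
values view, and set methods then accept the dict itself where only keys
matter. Sketch with an invented graph:

    import itertools

    parent_map = {'rev-3': ('rev-2', 'rev-1'), 'rev-2': ('rev-1',)}
    parent_ids = set(itertools.chain.from_iterable(parent_map.values()))
    parent_ids.difference_update(parent_map)    # iterates the keys
    assert parent_ids == {'rev-1'}
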
2499=== modified file 'breezy/revision.py'
2500--- breezy/revision.py 2017-05-30 19:16:23 +0000
2501+++ breezy/revision.py 2017-06-05 21:41:38 +0000
2502@@ -85,7 +85,7 @@
2503
2504 def _check_properties(self):
2505 """Verify that all revision properties are OK."""
2506- for name, value in self.properties.iteritems():
2507+ for name, value in self.properties.items():
2508 if not isinstance(name, basestring) or contains_whitespace(name):
2509 raise ValueError("invalid property name %r" % name)
2510 if not isinstance(value, basestring):
2511
2512=== modified file 'breezy/smart/bzrdir.py'
2513--- breezy/smart/bzrdir.py 2017-05-22 00:56:52 +0000
2514+++ breezy/smart/bzrdir.py 2017-06-05 21:41:38 +0000
2515@@ -441,7 +441,7 @@
2516 """
2517 branches = self._bzrdir.get_branches()
2518 ret = {}
2519- for name, b in branches.iteritems():
2520+ for name, b in branches.items():
2521 if name is None:
2522 name = ""
2523 ret[name] = ("branch", b._format.network_name())
2524
2525=== modified file 'breezy/smart/client.py'
2526--- breezy/smart/client.py 2017-05-22 00:56:52 +0000
2527+++ breezy/smart/client.py 2017-06-05 21:41:38 +0000
2528@@ -339,7 +339,7 @@
2529 self.medium = medium
2530
2531 def __repr__(self):
2532- attrs = dict((k, v) for (k, v) in self.__dict__.iteritems()
2533+ attrs = dict((k, v) for k, v in self.__dict__.items()
2534 if v is not None)
2535 return '<%s %r>' % (self.__class__.__name__, attrs)
2536
2537
2538=== modified file 'breezy/smtp_connection.py'
2539--- breezy/smtp_connection.py 2017-05-22 00:56:52 +0000
2540+++ breezy/smtp_connection.py 2017-06-05 21:41:38 +0000
2541@@ -183,7 +183,7 @@
2542 message.as_string())
2543 except smtplib.SMTPRecipientsRefused as e:
2544 raise SMTPError('server refused recipient: %d %s' %
2545- e.recipients.values()[0])
2546+ next(iter(e.recipients.values())))
2547 except smtplib.SMTPResponseException as e:
2548 raise SMTPError('%d %s' % (e.smtp_code, e.smtp_error))
2549 except smtplib.SMTPException as e:
2550
2551=== modified file 'breezy/tag.py'
2552--- breezy/tag.py 2017-05-30 19:16:23 +0000
2553+++ breezy/tag.py 2017-06-05 21:41:38 +0000
2554@@ -226,7 +226,7 @@
2555
2556 def _serialize_tag_dict(self, tag_dict):
2557 td = dict((k.encode('utf-8'), v)
2558- for k,v in tag_dict.items())
2559+ for k, v in tag_dict.items())
2560 return bencode.bencode(td)
2561
2562 def _deserialize_tag_dict(self, tag_content):
2563@@ -324,7 +324,7 @@
2564 :param rename_map: Dictionary mapping old revids to new revids
2565 """
2566 reverse_tags = self.get_reverse_tag_dict()
2567- for revid, names in reverse_tags.iteritems():
2568+ for revid, names in reverse_tags.items():
2569 if revid in rename_map:
2570 for name in names:
2571 self.set_tag(name, rename_map[revid])
2572
2573=== modified file 'breezy/tests/per_branch/test_check.py'
2574--- breezy/tests/per_branch/test_check.py 2017-05-30 19:16:23 +0000
2575+++ breezy/tests/per_branch/test_check.py 2017-06-05 21:41:38 +0000
2576@@ -96,7 +96,7 @@
2577 'unknown ref kind for ref %s' % ref)
2578 node_distances = branch.repository.get_graph().find_lefthand_distances(
2579 distances)
2580- for key, distance in node_distances.iteritems():
2581+ for key, distance in node_distances.items():
2582 refs[('lefthand-distance', key)] = distance
2583 if key in existences and distance > 0:
2584 refs[('revision-existence', key)] = True
2585
2586=== modified file 'breezy/tests/per_bzrdir/test_bzrdir.py'
2587--- breezy/tests/per_bzrdir/test_bzrdir.py 2017-05-22 00:56:52 +0000
2588+++ breezy/tests/per_bzrdir/test_bzrdir.py 2017-06-05 21:41:38 +0000
2589@@ -160,7 +160,7 @@
2590 self.assertEqual(text_index,
2591 right_repo._generate_text_key_index())
2592 desired_files = []
2593- for file_id, revision_id in text_index.iterkeys():
2594+ for file_id, revision_id in text_index:
2595 desired_files.append(
2596 (file_id, revision_id, (file_id, revision_id)))
2597 left_texts = [(identifier, "".join(bytes_iterator)) for
2598
2599=== modified file 'breezy/tests/per_controldir/test_controldir.py'
2600--- breezy/tests/per_controldir/test_controldir.py 2017-05-24 16:33:08 +0000
2601+++ breezy/tests/per_controldir/test_controldir.py 2017-06-05 21:41:38 +0000
2602@@ -1250,7 +1250,7 @@
2603 def test_get_branches(self):
2604 repo = self.make_repository('branch-1')
2605 target_branch = repo.bzrdir.create_branch()
2606- self.assertEqual([""], repo.bzrdir.get_branches().keys())
2607+ self.assertEqual([""], list(repo.bzrdir.get_branches()))
2608
2609 def test_create_repository(self):
2610 # a bzrdir can construct a repository for itself.
2611
2612=== modified file 'breezy/tests/per_controldir_colo/test_supported.py'
2613--- breezy/tests/per_controldir_colo/test_supported.py 2017-05-21 18:10:28 +0000
2614+++ breezy/tests/per_controldir_colo/test_supported.py 2017-06-05 21:41:38 +0000
2615@@ -131,7 +131,7 @@
2616 def test_get_branches(self):
2617 repo = self.make_repository('branch-1')
2618 target_branch = repo.bzrdir.create_branch(name='foo')
2619- self.assertEqual(['foo'], repo.bzrdir.get_branches().keys())
2620+ self.assertEqual(['foo'], list(repo.bzrdir.get_branches()))
2621 self.assertEqual(target_branch.base,
2622 repo.bzrdir.get_branches()['foo'].base)
2623
2624@@ -142,7 +142,7 @@
2625 except errors.InvalidBranchName:
2626 raise tests.TestNotApplicable(
2627 "format does not support branches with / in their name")
2628- self.assertEqual(['foo/bar'], repo.bzrdir.get_branches().keys())
2629+ self.assertEqual(['foo/bar'], list(repo.bzrdir.get_branches()))
2630 self.assertEqual(
2631 target_branch.base, repo.bzrdir.open_branch(name='foo/bar').base)
2632
2633
2634=== modified file 'breezy/tests/per_controldir_colo/test_unsupported.py'
2635--- breezy/tests/per_controldir_colo/test_unsupported.py 2017-05-21 18:10:28 +0000
2636+++ breezy/tests/per_controldir_colo/test_unsupported.py 2017-06-05 21:41:38 +0000
2637@@ -79,5 +79,4 @@
2638 def test_get_branches(self):
2639 made_control = self.make_bzrdir_with_repo()
2640 made_control.create_branch()
2641- self.assertEqual(made_control.get_branches().keys(),
2642- [""])
2643+ self.assertEqual(list(made_control.get_branches()), [""])
2644
2645=== modified file 'breezy/tests/per_foreign_vcs/__init__.py'
2646--- breezy/tests/per_foreign_vcs/__init__.py 2017-05-23 14:08:03 +0000
2647+++ breezy/tests/per_foreign_vcs/__init__.py 2017-06-05 21:41:38 +0000
2648@@ -27,7 +27,7 @@
2649
2650 def vcs_scenarios():
2651 scenarios = []
2652- for name, vcs in foreign.foreign_vcs_registry.iteritems():
2653+ for name, vcs in foreign.foreign_vcs_registry.items():
2654 scenarios.append((vcs.__class__.__name__, {
2655 "branch_factory": vcs.branch_format.get_foreign_tests_branch_factory(),
2656 "repository_factory": vcs.repository_format.get_foreign_tests_repository_factory(),
2657
2658=== modified file 'breezy/tests/per_pack_repository.py'
2659--- breezy/tests/per_pack_repository.py 2017-06-04 18:09:30 +0000
2660+++ breezy/tests/per_pack_repository.py 2017-06-05 21:41:38 +0000
2661@@ -253,10 +253,11 @@
2662 repo.abort_write_group()
2663 raise
2664 else:
2665- old_names = repo._pack_collection._names.keys()
2666+ old_names = set(repo._pack_collection._names)
2667 result = repo.commit_write_group()
2668- cur_names = repo._pack_collection._names.keys()
2669- new_names = list(set(cur_names) - set(old_names))
2670+ cur_names = set(repo._pack_collection._names)
2671+ # In this test, len(result) is always 1, so unordered is ok
2672+ new_names = list(cur_names - old_names)
2673 self.assertEqual(new_names, result)
2674 finally:
2675 repo.unlock()
2676
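The test now snapshots pack names with set(d) before and after the
commit: set(d) takes a point-in-time copy of the keys, whereas a view
taken before commit_write_group would mutate along with the collection.
Sketch, where the added key stands in for the commit:

    names = {'pack-1': (10,), 'pack-2': (20,)}
    old_names = set(names)            # snapshot, not a live view
    names['pack-3'] = (5,)            # stands in for commit_write_group
    new_names = list(set(names) - old_names)
    assert new_names == ['pack-3']
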
2677=== modified file 'breezy/tests/per_workingtree/test_parents.py'
2678--- breezy/tests/per_workingtree/test_parents.py 2017-05-22 00:56:52 +0000
2679+++ breezy/tests/per_workingtree/test_parents.py 2017-06-05 21:41:38 +0000
2680@@ -371,8 +371,8 @@
2681
2682 def make_inv_delta(self, old, new):
2683 """Make an inventory delta from two inventories."""
2684- old_ids = set(old._byid.iterkeys())
2685- new_ids = set(new._byid.iterkeys())
2686+ old_ids = set(old._byid)
2687+ new_ids = set(new._byid)
2688 adds = new_ids - old_ids
2689 deletes = old_ids - new_ids
2690 common = old_ids.intersection(new_ids)
2691
2692=== modified file 'breezy/tests/test_btree_index.py'
2693--- breezy/tests/test_btree_index.py 2017-06-04 18:09:30 +0000
2694+++ breezy/tests/test_btree_index.py 2017-06-05 21:41:38 +0000
2695@@ -643,7 +643,7 @@
2696 self.assertEqual(1, len(list(index.iter_entries([nodes[30][0]]))))
2697 self.assertEqual([1, 4], index._row_lengths)
2698 self.assertIsNot(None, index._root_node)
2699- internal_node_pre_clear = index._internal_node_cache.keys()
2700+ internal_node_pre_clear = set(index._internal_node_cache)
2701 self.assertTrue(len(index._leaf_node_cache) > 0)
2702 index.clear_cache()
2703 # We don't touch _root_node or _internal_node_cache, both should be
2704@@ -655,7 +655,7 @@
2705         # because without a 3-level index, we don't have any internal
2706 # nodes cached.
2707 self.assertEqual(internal_node_pre_clear,
2708- index._internal_node_cache.keys())
2709+ set(index._internal_node_cache))
2710 self.assertEqual(0, len(index._leaf_node_cache))
2711
2712 def test_trivial_constructor(self):
2713@@ -737,7 +737,7 @@
2714 index = btree_index.BTreeGraphIndex(trans, 'index', None)
2715 del trans._activity[:]
2716 nodes = dict(index._read_nodes([0]))
2717- self.assertEqual([0], nodes.keys())
2718+ self.assertEqual({0}, set(nodes))
2719 node = nodes[0]
2720 self.assertEqual([('key',)], node.all_keys())
2721 self.assertEqual([('get', 'index')], trans._activity)
2722
2723=== modified file 'breezy/tests/test_bundle.py'
2724--- breezy/tests/test_bundle.py 2017-05-25 01:35:55 +0000
2725+++ breezy/tests/test_bundle.py 2017-06-05 21:41:38 +0000
2726@@ -105,7 +105,7 @@
2727 return self.ids[parent_dir]
2728
2729 def iter_entries(self):
2730- for path, file_id in self.ids.iteritems():
2731+ for path, file_id in self.ids.items():
2732 yield path, self[file_id]
2733
2734 def kind(self, file_id):
2735
2736=== modified file 'breezy/tests/test_fifo_cache.py'
2737--- breezy/tests/test_fifo_cache.py 2017-05-22 00:56:52 +0000
2738+++ breezy/tests/test_fifo_cache.py 2017-06-05 21:41:38 +0000
2739@@ -20,6 +20,11 @@
2740 fifo_cache,
2741 tests,
2742 )
2743+from ..sixish import (
2744+ viewitems,
2745+ viewkeys,
2746+ viewvalues,
2747+ )
2748
2749
2750 class TestFIFOCache(tests.TestCase):
2751@@ -33,12 +38,10 @@
2752 self.assertEqual(2, c[1])
2753 self.assertEqual(2, c.get(1))
2754 self.assertEqual(2, c.get(1, None))
2755- self.assertEqual([1], c.keys())
2756- self.assertEqual([1], list(c.iterkeys()))
2757- self.assertEqual([(1, 2)], c.items())
2758- self.assertEqual([(1, 2)], list(c.iteritems()))
2759- self.assertEqual([2], c.values())
2760- self.assertEqual([2], list(c.itervalues()))
2761+ self.assertEqual([1], list(c))
2762+ self.assertEqual({1}, viewkeys(c))
2763+ self.assertEqual([(1, 2)], sorted(viewitems(c)))
2764+ self.assertEqual([2], sorted(viewvalues(c)))
2765 self.assertEqual({1: 2}, c)
2766
2767 def test_cache_size(self):
2768@@ -54,12 +57,10 @@
2769 self.assertEqual(0, len(c))
2770 self.assertEqual(None, c.get(1))
2771 self.assertEqual(None, c.get(1, None))
2772- self.assertEqual([], c.keys())
2773- self.assertEqual([], list(c.iterkeys()))
2774- self.assertEqual([], c.items())
2775- self.assertEqual([], list(c.iteritems()))
2776- self.assertEqual([], c.values())
2777- self.assertEqual([], list(c.itervalues()))
2778+ self.assertEqual([], list(c))
2779+ self.assertEqual(set(), viewkeys(c))
2780+ self.assertEqual([], list(viewitems(c)))
2781+ self.assertEqual([], list(viewvalues(c)))
2782 self.assertEqual({}, c)
2783
2784 def test_add_maintains_fifo(self):
2785@@ -68,16 +69,16 @@
2786 c[2] = 3
2787 c[3] = 4
2788 c[4] = 5
2789- self.assertEqual([1, 2, 3, 4], sorted(c.keys()))
2790+ self.assertEqual({1, 2, 3, 4}, viewkeys(c))
2791 c[5] = 6
2792 # This should pop out the oldest entry
2793- self.assertEqual([2, 3, 4, 5], sorted(c.keys()))
2794+ self.assertEqual({2, 3, 4, 5}, viewkeys(c))
2795 # Replacing an item doesn't change the stored keys
2796 c[2] = 7
2797- self.assertEqual([2, 3, 4, 5], sorted(c.keys()))
2798+ self.assertEqual({2, 3, 4, 5}, viewkeys(c))
2799 # But it does change the position in the FIFO
2800 c[6] = 7
2801- self.assertEqual([2, 4, 5, 6], sorted(c.keys()))
2802+ self.assertEqual({2, 4, 5, 6}, viewkeys(c))
2803 self.assertEqual([4, 5, 2, 6], list(c._queue))
2804
2805 def test_default_after_cleanup_count(self):
2806@@ -89,10 +90,10 @@
2807 c[4] = 5
2808 c[5] = 6
2809 # So far, everything fits
2810- self.assertEqual([1, 2, 3, 4, 5], sorted(c.keys()))
2811+ self.assertEqual({1, 2, 3, 4, 5}, viewkeys(c))
2812 c[6] = 7
2813 # But adding one more should shrink down to after_cleanup_count
2814- self.assertEqual([3, 4, 5, 6], sorted(c.keys()))
2815+ self.assertEqual({3, 4, 5, 6}, viewkeys(c))
2816
2817 def test_clear(self):
2818 c = fifo_cache.FIFOCache(5)
2819@@ -102,9 +103,9 @@
2820 c[4] = 5
2821 c[5] = 6
2822 c.cleanup()
2823- self.assertEqual([2, 3, 4, 5], sorted(c.keys()))
2824+ self.assertEqual({2, 3, 4, 5}, viewkeys(c))
2825 c.clear()
2826- self.assertEqual([], c.keys())
2827+ self.assertEqual(set(), viewkeys(c))
2828 self.assertEqual([], list(c._queue))
2829 self.assertEqual({}, c)
2830
2831@@ -246,12 +247,10 @@
2832 self.assertEqual('2', c[1])
2833 self.assertEqual('2', c.get(1))
2834 self.assertEqual('2', c.get(1, None))
2835- self.assertEqual([1], c.keys())
2836- self.assertEqual([1], list(c.iterkeys()))
2837- self.assertEqual([(1, '2')], c.items())
2838- self.assertEqual([(1, '2')], list(c.iteritems()))
2839- self.assertEqual(['2'], c.values())
2840- self.assertEqual(['2'], list(c.itervalues()))
2841+ self.assertEqual([1], list(c))
2842+ self.assertEqual({1}, viewkeys(c))
2843+ self.assertEqual([(1, '2')], sorted(viewitems(c)))
2844+ self.assertEqual(['2'], sorted(viewvalues(c)))
2845 self.assertEqual({1: '2'}, c)
2846 self.assertEqual(1024*1024, c.cache_size())
2847
2848@@ -262,12 +261,10 @@
2849 self.assertEqual(0, len(c))
2850 self.assertEqual(None, c.get(1))
2851 self.assertEqual(None, c.get(1, None))
2852- self.assertEqual([], c.keys())
2853- self.assertEqual([], list(c.iterkeys()))
2854- self.assertEqual([], c.items())
2855- self.assertEqual([], list(c.iteritems()))
2856- self.assertEqual([], c.values())
2857- self.assertEqual([], list(c.itervalues()))
2858+ self.assertEqual([], list(c))
2859+ self.assertEqual(set(), viewkeys(c))
2860+ self.assertEqual([], list(viewitems(c)))
2861+ self.assertEqual([], list(viewvalues(c)))
2862 self.assertEqual({}, c)
2863
2864 def test_add_maintains_fifo(self):
2865
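The reworked assertions rely on dict views implementing the set protocol:
viewkeys(c) compares equal to a set literal regardless of ordering, which
is why the sorted(...keys()) dance above could be dropped; values views
make no such guarantee, hence the remaining sorted(viewvalues(c)) calls.
Sketch, reusing the shim from earlier notes:

    try:
        viewkeys = dict.viewkeys       # Python 2
    except AttributeError:
        viewkeys = dict.keys           # Python 3

    c = {1: 2, 3: 4}
    assert viewkeys(c) == {1, 3}       # set-like equality, order-free
    assert (viewkeys(c) & {1}) == {1}  # set operators also work
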
2866=== modified file 'breezy/tests/test_graph.py'
2867--- breezy/tests/test_graph.py 2017-05-25 01:35:55 +0000
2868+++ breezy/tests/test_graph.py 2017-06-05 21:41:38 +0000
2869@@ -497,7 +497,7 @@
2870 """
2871 pending = [NULL_REVISION]
2872 descendants = {}
2873- for descendant, parents in ancestors.iteritems():
2874+ for descendant, parents in ancestors.items():
2875 for parent in parents:
2876 descendants.setdefault(parent, []).append(descendant)
2877 while len(pending) > 0:
2878
2879=== modified file 'breezy/tests/test_groupcompress.py'
2880--- breezy/tests/test_groupcompress.py 2017-06-04 18:09:30 +0000
2881+++ breezy/tests/test_groupcompress.py 2017-06-05 21:41:38 +0000
2882@@ -304,7 +304,7 @@
2883 for key in sorted(key_to_text):
2884 compressor.compress(key, key_to_text[key], None)
2885 locs = dict((key, (start, end)) for key, (start, _, end, _)
2886- in compressor.labels_deltas.iteritems())
2887+ in compressor.labels_deltas.items())
2888 block = compressor.flush()
2889 raw_bytes = block.to_bytes()
2890 # Go through from_bytes(to_bytes()) so that we start with a compressed
2891@@ -961,7 +961,7 @@
2892 for key in sorted(key_to_text):
2893 compressor.compress(key, key_to_text[key], None)
2894 locs = dict((key, (start, end)) for key, (start, _, end, _)
2895- in compressor.labels_deltas.iteritems())
2896+ in compressor.labels_deltas.items())
2897 block = compressor.flush()
2898 raw_bytes = block.to_bytes()
2899 return locs, groupcompress.GroupCompressBlock.from_bytes(raw_bytes)
2900
2901=== modified file 'breezy/tests/test_inv.py'
2902--- breezy/tests/test_inv.py 2017-05-22 00:56:52 +0000
2903+++ breezy/tests/test_inv.py 2017-06-05 21:41:38 +0000
2904@@ -939,7 +939,7 @@
2905 new_inv = CHKInventory.deserialise(chk_bytes, bytes, ("revid",))
2906 root_entry = new_inv[inv.root.file_id]
2907 self.assertEqual(None, root_entry._children)
2908- self.assertEqual(['file'], root_entry.children.keys())
2909+ self.assertEqual({'file'}, set(root_entry.children))
2910 file_direct = new_inv["fileid"]
2911 file_found = root_entry.children['file']
2912 self.assertEqual(file_direct.kind, file_found.kind)
2913@@ -1472,9 +1472,9 @@
2914 s = expected_children.setdefault(entry.parent_id, [])
2915 s.append(entry.file_id)
2916 val_children = dict((k, sorted(v)) for k, v
2917- in val_children.iteritems())
2918+ in val_children.items())
2919 expected_children = dict((k, sorted(v)) for k, v
2920- in expected_children.iteritems())
2921+ in expected_children.items())
2922 self.assertEqual(expected_children, val_children)
2923
2924 def test_make_simple_inventory(self):
2925
2926=== modified file 'breezy/tests/test_knit.py'
2927--- breezy/tests/test_knit.py 2017-05-25 01:35:55 +0000
2928+++ breezy/tests/test_knit.py 2017-06-05 21:41:38 +0000
2929@@ -626,7 +626,7 @@
2930 self.fail('Annotation was not identical with reloading.')
2931 # Now delete the packs-in-use, which should trigger another reload, but
2932 # this time we just raise an exception because we can't recover
2933- for trans, name in vf._access._indices.itervalues():
2934+ for trans, name in vf._access._indices.values():
2935 trans.delete(name)
2936 self.assertRaises(errors.NoSuchFile, vf.annotate, key)
2937 self.assertEqual([2, 1, 1], reload_counter)
2938@@ -639,7 +639,7 @@
2939 self.assertEqual([1, 1, 0], reload_counter)
2940 # Now delete the packs-in-use, which should trigger another reload, but
2941 # this time we just raise an exception because we can't recover
2942- for trans, name in vf._access._indices.itervalues():
2943+ for trans, name in vf._access._indices.values():
2944 trans.delete(name)
2945 self.assertRaises(errors.NoSuchFile, vf._get_record_map, keys)
2946 self.assertEqual([2, 1, 1], reload_counter)
2947@@ -658,7 +658,7 @@
2948 self.assertEqual(('rev-3',), record.key)
2949 self.assertEqual([1, 1, 0], reload_counter)
2950 # Now delete all pack files, and see that we raise the right error
2951- for trans, name in vf._access._indices.itervalues():
2952+ for trans, name in vf._access._indices.values():
2953 trans.delete(name)
2954 self.assertListRaises(errors.NoSuchFile,
2955 vf.get_record_stream, keys, 'topological', False)
2956@@ -682,7 +682,7 @@
2957 self.assertEqual(plain_lines, reload_lines)
2958 self.assertEqual(21, len(plain_lines))
2959 # Now delete all pack files, and see that we raise the right error
2960- for trans, name in vf._access._indices.itervalues():
2961+ for trans, name in vf._access._indices.values():
2962 trans.delete(name)
2963 self.assertListRaises(errors.NoSuchFile,
2964 vf.iter_lines_added_or_present_in_keys, keys)
2965
2966=== modified file 'breezy/tests/test_merge.py'
2967--- breezy/tests/test_merge.py 2017-05-22 00:56:52 +0000
2968+++ breezy/tests/test_merge.py 2017-06-05 21:41:38 +0000
2969@@ -1043,7 +1043,7 @@
2970
2971 def assertPruneTails(self, pruned_map, tails, parent_map):
2972 child_map = {}
2973- for key, parent_keys in parent_map.iteritems():
2974+ for key, parent_keys in parent_map.items():
2975 child_map.setdefault(key, [])
2976 for pkey in parent_keys:
2977 child_map.setdefault(pkey, []).append(key)
2978
2979=== modified file 'breezy/tests/test_merge_core.py'
2980--- breezy/tests/test_merge_core.py 2017-05-22 00:56:52 +0000
2981+++ breezy/tests/test_merge_core.py 2017-06-05 21:41:38 +0000
2982@@ -179,7 +179,7 @@
2983
2984 def apply_inv_change(self, inventory_change, orig_inventory):
2985 orig_inventory_by_path = {}
2986- for file_id, path in orig_inventory.iteritems():
2987+ for file_id, path in orig_inventory.items():
2988 orig_inventory_by_path[path] = file_id
2989
2990 def parent_id(file_id):
2991@@ -203,13 +203,13 @@
2992 return pathjoin(dirname, os.path.basename(orig_inventory[file_id]))
2993
2994 new_inventory = {}
2995- for file_id in orig_inventory.iterkeys():
2996+ for file_id in orig_inventory:
2997 path = new_path(file_id)
2998 if path is None:
2999 continue
3000 new_inventory[file_id] = path
3001
3002- for file_id, path in inventory_change.iteritems():
3003+ for file_id, path in inventory_change.items():
3004 if file_id in orig_inventory:
3005 continue
3006 new_inventory[file_id] = path
3007
3008=== modified file 'breezy/tests/test_remote.py'
3009--- breezy/tests/test_remote.py 2017-05-30 19:16:23 +0000
3010+++ breezy/tests/test_remote.py 2017-06-05 21:41:38 +0000
3011@@ -3928,13 +3928,13 @@
3012 # the public implementation of get_parent_map obeys stacking
3013 _, branch = self.prepare_stacked_remote_branch()
3014 repo = branch.repository
3015- self.assertEqual(['rev1'], repo.get_parent_map(['rev1']).keys())
3016+ self.assertEqual({'rev1'}, set(repo.get_parent_map(['rev1'])))
3017
3018 def test_unstacked_get_parent_map(self):
3019 # _unstacked_provider.get_parent_map ignores stacking
3020 _, branch = self.prepare_stacked_remote_branch()
3021 provider = branch.repository._unstacked_provider
3022- self.assertEqual([], provider.get_parent_map(['rev1']).keys())
3023+ self.assertEqual(set(), set(provider.get_parent_map(['rev1'])))
3024
3025 def fetch_stream_to_rev_order(self, stream):
3026 result = []
3027
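The two assertions above show the recurring reason for these test changes: on Python 3, dict.keys() returns a view rather than a list, so equality checks against a list fail. Comparing as sets is order-independent and portable across both majors; a hypothetical illustration with an invented parent map:

    # Invented data; illustrates only why the assertions were rewritten.
    parent_map = {'rev1': ()}

    # Python 2 only: keys() returned a list, so this used to pass.
    # self.assertEqual(['rev1'], parent_map.keys())

    # Portable: iterating a dict yields its keys; compare as sets.
    assert {'rev1'} == set(parent_map)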
3028=== modified file 'breezy/tests/test_versionedfile.py'
3029--- breezy/tests/test_versionedfile.py 2017-05-25 01:35:55 +0000
3030+++ breezy/tests/test_versionedfile.py 2017-06-05 21:41:38 +0000
3031@@ -94,9 +94,9 @@
3032 # such, it should end up in the various caches
3033 gen._process_one_record(record.key, record.get_bytes_as('chunked'))
3034 # The chunks should be cached, the refcount untouched
3035- self.assertEqual([('one',)], gen.chunks.keys())
3036+ self.assertEqual({('one',)}, set(gen.chunks))
3037 self.assertEqual({('one',): 2, ('two',): 1}, gen.refcounts)
3038- self.assertEqual([], gen.diffs.keys())
3039+ self.assertEqual(set(), set(gen.diffs))
3040 # Next we get 'two', which is something we output, but also needed for
3041 # three
3042 record = next(stream)
3043@@ -105,10 +105,9 @@
3044 # Both are now cached, and the diff for two has been extracted, and
3045 # one's refcount has been updated. two has been removed from the
3046 # parent_map
3047- self.assertEqual(sorted([('one',), ('two',)]),
3048- sorted(gen.chunks.keys()))
3049+ self.assertEqual({('one',), ('two',)}, set(gen.chunks))
3050 self.assertEqual({('one',): 1, ('two',): 1}, gen.refcounts)
3051- self.assertEqual([('two',)], gen.diffs.keys())
3052+ self.assertEqual({('two',)}, set(gen.diffs))
3053 self.assertEqual({('three',): (('one',), ('two',))},
3054 gen.parent_map)
3055 # Finally 'three', which allows us to remove all parents from the
3056@@ -118,10 +117,9 @@
3057 gen._process_one_record(record.key, record.get_bytes_as('chunked'))
3058 # Both are now cached, and the diff for two has been extracted, and
3059 # one's refcount has been updated
3060- self.assertEqual([], gen.chunks.keys())
3061+ self.assertEqual(set(), set(gen.chunks))
3062 self.assertEqual({}, gen.refcounts)
3063- self.assertEqual(sorted([('two',), ('three',)]),
3064- sorted(gen.diffs.keys()))
3065+ self.assertEqual({('two',), ('three',)}, set(gen.diffs))
3066
3067 def test_compute_diffs(self):
3068 vf = self.make_three_vf()
3069
3070=== modified file 'breezy/transform.py'
3071--- breezy/transform.py 2017-05-30 19:32:13 +0000
3072+++ breezy/transform.py 2017-06-05 21:41:38 +0000
3073@@ -61,6 +61,10 @@
3074 splitpath,
3075 )
3076 from .progress import ProgressPhase
3077+from .sixish import (
3078+ viewitems,
3079+ viewvalues,
3080+ )
3081
3082
3083 ROOT_PARENT = "root-parent"
3084@@ -228,8 +232,8 @@
3085 irrelevant.
3086
3087 """
3088- new_roots = [k for k, v in self._new_parent.iteritems() if v ==
3089- ROOT_PARENT]
3090+ new_roots = [k for k, v in viewitems(self._new_parent)
3091+ if v == ROOT_PARENT]
3092 if len(new_roots) < 1:
3093 return
3094 if len(new_roots) != 1:
3095@@ -479,7 +483,7 @@
3096 file_id = self.tree_file_id(trans_id)
3097 if file_id is not None:
3098 return file_id
3099- for key, value in self._non_present_ids.iteritems():
3100+ for key, value in viewitems(self._non_present_ids):
3101 if value == trans_id:
3102 return key
3103
3104@@ -509,9 +513,9 @@
3105 Only new paths and parents of tree files with assigned ids are used.
3106 """
3107 by_parent = {}
3108- items = list(self._new_parent.iteritems())
3109- items.extend((t, self.final_parent(t)) for t in
3110- self._tree_id_paths.keys())
3111+ items = list(viewitems(self._new_parent))
3112+ items.extend((t, self.final_parent(t))
3113+ for t in list(self._tree_id_paths))
3114 for trans_id, parent_id in items:
3115 if parent_id not in by_parent:
3116 by_parent[parent_id] = set()
3117@@ -555,7 +559,7 @@
3118 Active parents are those which gain children, and those which are
3119 removed. This is a necessary first step in detecting conflicts.
3120 """
3121- parents = self.by_parent().keys()
3122+ parents = list(self.by_parent())
3123 parents.extend([t for t in self._removed_contents if
3124 self.tree_kind(t) == 'directory'])
3125 for trans_id in self._removed_id:
3126@@ -634,7 +638,7 @@
3127 def _unversioned_parents(self, by_parent):
3128 """If parent directories are versioned, children must be versioned."""
3129 conflicts = []
3130- for parent_id, children in by_parent.iteritems():
3131+ for parent_id, children in viewitems(by_parent):
3132 if parent_id == ROOT_PARENT:
3133 continue
3134 if self.final_file_id(parent_id) is not None:
3135@@ -651,7 +655,7 @@
3136 However, existing entries with no contents are okay.
3137 """
3138 conflicts = []
3139- for trans_id in self._new_id.iterkeys():
3140+ for trans_id in self._new_id:
3141 kind = self.final_kind(trans_id)
3142 if kind is None:
3143 conflicts.append(('versioning no contents', trans_id))
3144@@ -693,7 +697,7 @@
3145 conflicts = []
3146 if (self._new_name, self._new_parent) == ({}, {}):
3147 return conflicts
3148- for children in by_parent.itervalues():
3149+ for children in viewvalues(by_parent):
3150 name_ids = []
3151 for child_tid in children:
3152 name = self.final_name(child_tid)
3153@@ -724,7 +728,7 @@
3154 self._removed_id))
3155 all_ids = self._tree.all_file_ids()
3156 active_tree_ids = all_ids.difference(removed_tree_ids)
3157- for trans_id, file_id in self._new_id.iteritems():
3158+ for trans_id, file_id in viewitems(self._new_id):
3159 if file_id in active_tree_ids:
3160 old_trans_id = self.trans_id_tree_file_id(file_id)
3161 conflicts.append(('duplicate id', old_trans_id, trans_id))
3162@@ -733,7 +737,7 @@
3163 def _parent_type_conflicts(self, by_parent):
3164 """Children must have a directory parent"""
3165 conflicts = []
3166- for parent_id, children in by_parent.iteritems():
3167+ for parent_id, children in viewitems(by_parent):
3168 if parent_id == ROOT_PARENT:
3169 continue
3170 no_children = True
3171@@ -868,12 +872,12 @@
3172 def _affected_ids(self):
3173 """Return the set of transform ids affected by the transform"""
3174 trans_ids = set(self._removed_id)
3175- trans_ids.update(self._new_id.keys())
3176+ trans_ids.update(self._new_id)
3177 trans_ids.update(self._removed_contents)
3178- trans_ids.update(self._new_contents.keys())
3179- trans_ids.update(self._new_executability.keys())
3180- trans_ids.update(self._new_name.keys())
3181- trans_ids.update(self._new_parent.keys())
3182+ trans_ids.update(self._new_contents)
3183+ trans_ids.update(self._new_executability)
3184+ trans_ids.update(self._new_name)
3185+ trans_ids.update(self._new_parent)
3186 return trans_ids
3187
3188 def _get_file_id_maps(self):
3189@@ -953,7 +957,7 @@
3190 from_trans_ids, to_trans_ids = self._get_file_id_maps()
3191 results = []
3192 # Now iterate through all active file_ids
3193- for file_id in set(from_trans_ids.keys() + to_trans_ids.keys()):
3194+ for file_id in set(from_trans_ids).union(to_trans_ids):
3195 modified = False
3196 from_trans_id = from_trans_ids.get(file_id)
3197 # find file ids, and determine versioning state
3198@@ -1096,11 +1100,11 @@
3199 :param serializer: A Serialiser like pack.ContainerSerializer.
3200 """
3201 new_name = dict((k, v.encode('utf-8')) for k, v in
3202- self._new_name.items())
3203+ viewitems(self._new_name))
3204 new_executability = dict((k, int(v)) for k, v in
3205- self._new_executability.items())
3206+ viewitems(self._new_executability))
3207 tree_path_ids = dict((k.encode('utf-8'), v)
3208- for k, v in self._tree_path_ids.items())
3209+ for k, v in viewitems(self._tree_path_ids))
3210 attribs = {
3211 '_id_number': self._id_number,
3212 '_new_name': new_name,
3213@@ -1114,7 +1118,7 @@
3214 }
3215 yield serializer.bytes_record(bencode.bencode(attribs),
3216 (('attribs',),))
3217- for trans_id, kind in self._new_contents.items():
3218+ for trans_id, kind in viewitems(self._new_contents):
3219 if kind == 'file':
3220 lines = osutils.chunks_to_lines(
3221 self._read_file_chunks(trans_id))
3222@@ -1137,15 +1141,15 @@
3223 attribs = bencode.bdecode(content)
3224 self._id_number = attribs['_id_number']
3225 self._new_name = dict((k, v.decode('utf-8'))
3226- for k, v in attribs['_new_name'].items())
3227+ for k, v in viewitems(attribs['_new_name']))
3228 self._new_parent = attribs['_new_parent']
3229- self._new_executability = dict((k, bool(v)) for k, v in
3230- attribs['_new_executability'].items())
3231+ self._new_executability = dict((k, bool(v))
3232+ for k, v in viewitems(attribs['_new_executability']))
3233 self._new_id = attribs['_new_id']
3234- self._r_new_id = dict((v, k) for k, v in self._new_id.items())
3235+ self._r_new_id = dict((v, k) for k, v in viewitems(self._new_id))
3236 self._tree_path_ids = {}
3237 self._tree_id_paths = {}
3238- for bytepath, trans_id in attribs['_tree_path_ids'].items():
3239+ for bytepath, trans_id in viewitems(attribs['_tree_path_ids']):
3240 path = bytepath.decode('utf-8')
3241 self._tree_path_ids[path] = trans_id
3242 self._tree_id_paths[trans_id] = path
3243@@ -1201,9 +1205,9 @@
3244 if self._tree is None:
3245 return
3246 try:
3247- limbo_paths = self._limbo_files.values() + list(
3248- self._possibly_stale_limbo_files)
3249- limbo_paths = sorted(limbo_paths, reverse=True)
3250+ limbo_paths = list(viewvalues(self._limbo_files))
3251+ limbo_paths.extend(self._possibly_stale_limbo_files)
3252+ limbo_paths.sort(reverse=True)
3253 for path in limbo_paths:
3254 try:
3255 delete_any(path)
3256@@ -1676,8 +1680,8 @@
3257 in (trans_id, None)):
3258 use_direct_path = True
3259 else:
3260- for l_filename, l_trans_id in\
3261- self._limbo_children_names[parent].iteritems():
3262+ for l_filename, l_trans_id in viewitems(
3263+ self._limbo_children_names[parent]):
3264 if l_trans_id == trans_id:
3265 continue
3266 if l_filename.lower() == filename.lower():
3267@@ -1767,7 +1771,7 @@
3268 new_path_file_ids = dict((t, self.final_file_id(t)) for p, t in
3269 new_paths)
3270 entries = self._tree.iter_entries_by_dir(
3271- new_path_file_ids.values())
3272+ viewvalues(new_path_file_ids))
3273 old_paths = dict((e.file_id, p) for p, e in entries)
3274 final_kinds = {}
3275 for num, (path, trans_id) in enumerate(new_paths):
3276@@ -1814,8 +1818,7 @@
3277
3278 If inventory_delta is None, no inventory delta generation is performed.
3279 """
3280- tree_paths = list(self._tree_path_ids.iteritems())
3281- tree_paths.sort(reverse=True)
3282+ tree_paths = sorted(viewitems(self._tree_path_ids), reverse=True)
3283 child_pb = ui.ui_factory.nested_progress_bar()
3284 try:
3285 for num, (path, trans_id) in enumerate(tree_paths):
3286@@ -1907,7 +1910,7 @@
3287 # problems. (we could observe start time, and finish time, and if
3288 # it is less than eg 10% overhead, add a sleep call.)
3289 paths = FinalPaths(self)
3290- for trans_id, observed in self._observed_sha1s.iteritems():
3291+ for trans_id, observed in viewitems(self._observed_sha1s):
3292 path = paths.get_path(trans_id)
3293 # We could get the file_id, but dirstate prefers to use the path
3294 # anyway, and it is 'cheaper' to determine.
3295@@ -2057,7 +2060,7 @@
3296 tree_ids = set(self._transform._tree.all_file_ids())
3297 tree_ids.difference_update(self._transform.tree_file_id(t)
3298 for t in self._transform._removed_id)
3299- tree_ids.update(self._transform._new_id.values())
3300+ tree_ids.update(viewvalues(self._transform._new_id))
3301 return tree_ids
3302
3303 def __iter__(self):
3304@@ -2120,7 +2123,7 @@
3305 return children
3306 children = set(self._transform.iter_tree_children(trans_id))
3307 # children in the _new_parent set are provided by _by_parent.
3308- children.difference_update(self._transform._new_parent.keys())
3309+ children.difference_update(self._transform._new_parent)
3310 children.update(self._by_parent.get(trans_id, []))
3311 self._all_children_cache[trans_id] = children
3312 return children
3313
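One transform.py change above deserves a callout: set(from_trans_ids.keys() + to_trans_ids.keys()) relied on keys() returning concatenable lists and raises TypeError on Python 3, where views do not support +. The replacement iterates the dicts directly. A sketch with invented dicts:

    # Invented dicts; mirrors the rewritten transform.py line.
    from_trans_ids = {'file-a': 'trans-1', 'file-b': 'trans-2'}
    to_trans_ids = {'file-b': 'trans-3', 'file-c': 'trans-4'}

    # Python 2 only: list concatenation of keys().
    # all_ids = set(from_trans_ids.keys() + to_trans_ids.keys())

    # Portable: set() and union() accept any iterable of keys.
    all_ids = set(from_trans_ids).union(to_trans_ids)
    assert all_ids == {'file-a', 'file-b', 'file-c'}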
3314=== modified file 'breezy/transport/http/_urllib2_wrappers.py'
3315--- breezy/transport/http/_urllib2_wrappers.py 2017-06-01 23:13:43 +0000
3316+++ breezy/transport/http/_urllib2_wrappers.py 2017-06-05 21:41:38 +0000
3317@@ -190,7 +190,7 @@
3318 def getheaders(self):
3319 if self.headers is None:
3320 raise httplib.ResponseNotReady()
3321- return self.headers.items()
3322+ return list(self.headers.items())
3323
3324
3325 class _ReportingFileSocket(object):
3326@@ -761,7 +761,7 @@
3327 # before sending the request. And not all versions of python 2.5 do
3328 # that. Since we replace urllib2.AbstractHTTPHandler.do_open we do it
3329 # ourself below.
3330- headers = dict((name.title(), val) for name, val in headers.iteritems())
3331+ headers = dict((name.title(), val) for name, val in headers.items())
3332
3333 try:
3334 method = request.get_method()
3335@@ -774,7 +774,7 @@
3336 if 'http' in debug.debug_flags:
3337 trace.mutter('> %s %s' % (method, url))
3338 hdrs = []
3339- for k,v in headers.iteritems():
3340+ for k,v in headers.items():
3341 # People are often told to paste -Dhttp output to help
3342 # debug. Don't compromise credentials.
3343 if k in ('Authorization', 'Proxy-Authorization'):
3344
3345=== modified file 'breezy/transport/ssh.py'
3346--- breezy/transport/ssh.py 2017-05-22 00:56:52 +0000
3347+++ breezy/transport/ssh.py 2017-06-05 21:41:38 +0000
3348@@ -629,8 +629,8 @@
3349 try:
3350 f = open(bzr_hostkey_path, 'w')
3351 f.write('# SSH host keys collected by bzr\n')
3352- for hostname, keys in BRZ_HOSTKEYS.iteritems():
3353- for keytype, key in keys.iteritems():
3354+ for hostname, keys in BRZ_HOSTKEYS.items():
3355+ for keytype, key in keys.items():
3356 f.write('%s %s %s\n' % (hostname, keytype, key.get_base64()))
3357 f.close()
3358 except IOError as e:
3359
3360=== modified file 'breezy/tree.py'
3361--- breezy/tree.py 2017-06-04 18:09:30 +0000
3362+++ breezy/tree.py 2017-06-05 21:41:38 +0000
3363@@ -42,6 +42,9 @@
3364
3365 from .decorators import needs_read_lock
3366 from .inter import InterObject
3367+from .sixish import (
3368+ viewvalues,
3369+ )
3370
3371
3372 class Tree(object):
3373@@ -864,12 +867,12 @@
3374 @needs_read_lock
3375 def iter_child_entries(self, file_id, path=None):
3376 inv, inv_file_id = self._unpack_file_id(file_id)
3377- return inv[inv_file_id].children.itervalues()
3378+ return iter(viewvalues(inv[inv_file_id].children))
3379
3380 def iter_children(self, file_id, path=None):
3381 """See Tree.iter_children."""
3382 entry = self.iter_entries_by_dir([file_id]).next()[1]
3383- for child in getattr(entry, 'children', {}).itervalues():
3384+ for child in viewvalues(getattr(entry, 'children', {})):
3385 yield child.file_id
3386
3387
3388@@ -1592,7 +1595,7 @@
3389 # might ensure better ordering, in case a caller strictly
3390 # requires parents before children.
3391 for idx, other_extra in enumerate(self._others_extra):
3392- others = sorted(other_extra.itervalues(),
3393+ others = sorted(viewvalues(other_extra),
3394 key=lambda x: self._path_to_key(x[0]))
3395 for other_path, other_ie in others:
3396 file_id = other_ie.file_id
3397
3398=== modified file 'breezy/tsort.py'
3399--- breezy/tsort.py 2017-05-22 00:56:52 +0000
3400+++ breezy/tsort.py 2017-06-05 21:41:38 +0000
3401@@ -363,7 +363,7 @@
3402 # we need to do a check late in the process to detect end-of-merges
3403 # which requires the parents to be accessible: its easier for now
3404 # to just keep the original graph around.
3405- self._original_graph = dict(self._graph.items())
3406+ self._original_graph = self._graph.copy()
3407 # we need to know the revision numbers of revisions to determine
3408 # the revision numbers of their descendants
3409 # this is a graph from node to [revno_tuple, first_child]
3410
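Minor but worth noting: dict(self._graph.items()) built an intermediate items list on Python 2 just to duplicate the dict, while dict.copy() produces an equal shallow copy on both majors:

    # Invented toy graph; both expressions yield an equal shallow copy.
    graph = {'rev-b': ['rev-a'], 'rev-a': []}
    assert dict(graph.items()) == graph.copy()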
3411=== modified file 'breezy/uncommit.py'
3412--- breezy/uncommit.py 2017-05-22 00:56:52 +0000
3413+++ breezy/uncommit.py 2017-06-05 21:41:38 +0000
3414@@ -40,7 +40,7 @@
3415 reverse_tags = branch.tags.get_reverse_tag_dict()
3416 ancestors = graph.find_unique_ancestors(old_tip, parents)
3417 removed_tags = []
3418- for revid, tags in reverse_tags.iteritems():
3419+ for revid, tags in reverse_tags.items():
3420 if not revid in ancestors:
3421 continue
3422 for tag in tags:
3423
3424=== modified file 'breezy/urlutils.py'
3425--- breezy/urlutils.py 2017-06-04 18:09:30 +0000
3426+++ breezy/urlutils.py 2017-06-05 21:41:38 +0000
3427@@ -578,7 +578,7 @@
3428 (base, existing_parameters) = split_segment_parameters(url)
3429 new_parameters = {}
3430 new_parameters.update(existing_parameters)
3431- for key, value in parameters.iteritems():
3432+ for key, value in parameters.items():
3433 if not isinstance(key, str):
3434 raise TypeError("parameter key %r is not a bytestring" % key)
3435 if not isinstance(value, str):
3436
3437=== modified file 'breezy/versionedfile.py'
3438--- breezy/versionedfile.py 2017-05-25 21:59:11 +0000
3439+++ breezy/versionedfile.py 2017-06-05 21:41:38 +0000
3440@@ -44,6 +44,8 @@
3441 from .registry import Registry
3442 from .sixish import (
3443 BytesIO,
3444+ viewitems,
3445+ viewvalues,
3446 zip,
3447 )
3448 from .textmerge import TextMerge
3449@@ -250,7 +252,7 @@
3450 refcounts = {}
3451 setdefault = refcounts.setdefault
3452 just_parents = set()
3453- for child_key, parent_keys in parent_map.iteritems():
3454+ for child_key, parent_keys in viewitems(parent_map):
3455 if not parent_keys:
3456 # parent_keys may be None if a given VersionedFile claims to
3457 # not support graph operations.
3458@@ -491,7 +493,7 @@
3459 except KeyError:
3460 raise errors.RevisionNotPresent(version_id, self)
3461 # We need to filter out ghosts, because we can't diff against them.
3462- knit_versions = set(self.get_parent_map(knit_versions).keys())
3463+ knit_versions = set(self.get_parent_map(knit_versions))
3464 lines = dict(zip(knit_versions,
3465 self._get_lf_split_line_list(knit_versions)))
3466 diffs = []
3467@@ -535,7 +537,7 @@
3468 for version, parent_ids, expected_sha1, mpdiff in records:
3469 needed_parents.update(p for p in parent_ids
3470 if not mpvf.has_version(p))
3471- present_parents = set(self.get_parent_map(needed_parents).keys())
3472+ present_parents = set(self.get_parent_map(needed_parents))
3473 for parent_id, lines in zip(present_parents,
3474 self._get_lf_split_line_list(present_parents)):
3475 mpvf.add_version(lines, parent_id, [])
3476@@ -1095,7 +1097,7 @@
3477 this_parent_map = self.get_parent_map(pending)
3478 parent_map.update(this_parent_map)
3479 pending = set(itertools.chain.from_iterable(
3480- this_parent_map.itervalues()))
3481+ viewvalues(this_parent_map)))
3482 pending.difference_update(parent_map)
3483 kg = _mod_graph.KnownGraph(parent_map)
3484 return kg
3485@@ -1297,11 +1299,11 @@
3486 """
3487 prefixes = self._partition_keys(keys)
3488 result = {}
3489- for prefix, suffixes in prefixes.items():
3490+ for prefix, suffixes in viewitems(prefixes):
3491 path = self._mapper.map(prefix)
3492 vf = self._get_vf(path)
3493 parent_map = vf.get_parent_map(suffixes)
3494- for key, parents in parent_map.items():
3495+ for key, parents in viewitems(parent_map):
3496 result[prefix + (key,)] = tuple(
3497 prefix + (parent,) for parent in parents)
3498 return result
3499@@ -1353,7 +1355,7 @@
3500 def _iter_keys_vf(self, keys):
3501 prefixes = self._partition_keys(keys)
3502 sha1s = {}
3503- for prefix, suffixes in prefixes.items():
3504+ for prefix, suffixes in viewitems(prefixes):
3505 path = self._mapper.map(prefix)
3506 vf = self._get_vf(path)
3507 yield prefix, suffixes, vf
3508@@ -1363,7 +1365,7 @@
3509 sha1s = {}
3510 for prefix,suffixes, vf in self._iter_keys_vf(keys):
3511 vf_sha1s = vf.get_sha1s(suffixes)
3512- for suffix, sha1 in vf_sha1s.iteritems():
3513+ for suffix, sha1 in viewitems(vf_sha1s):
3514 sha1s[prefix + (suffix,)] = sha1
3515 return sha1s
3516
3517@@ -1553,7 +1555,7 @@
3518 result.update(
3519 _mod_graph.StackedParentsProvider(
3520 self._providers).get_parent_map(keys))
3521- for key, parents in result.iteritems():
3522+ for key, parents in viewitems(result):
3523 if parents == ():
3524 result[key] = (revision.NULL_REVISION,)
3525 return result
3526@@ -1732,8 +1734,8 @@
3527
3528 def get_parent_map(self, keys):
3529 """See VersionedFiles.get_parent_map."""
3530- return dict([((k,), tuple([(p,) for p in v]))
3531- for k,v in self._get_parent_map([k for (k,) in keys]).iteritems()])
3532+ parent_view = viewitems(self._get_parent_map(k for (k,) in keys))
3533+ return dict(((k,), tuple((p,) for p in v)) for k, v in parent_view)
3534
3535 def get_sha1s(self, keys):
3536 """See VersionedFiles.get_sha1s."""
3537@@ -1889,7 +1891,7 @@
3538 # gc-optimal ordering is approximately reverse topological,
3539 # properly grouped by file-id.
3540 per_prefix_map = {}
3541- for item in parent_map.iteritems():
3542+ for item in viewitems(parent_map):
3543 key = item[0]
3544 if isinstance(key, str) or len(key) == 1:
3545 prefix = ''
3546@@ -1936,9 +1938,9 @@
3547
3548 def get_new_keys(self):
3549 return self.new_keys
3550-
3551+
3552 def get_unsatisfied_refs(self):
3553- return self.refs.iterkeys()
3554+ return self.refs.keys()
3555
3556 def _satisfy_refs_for_key(self, key):
3557 try:
3558@@ -1958,10 +1960,7 @@
3559 self._satisfy_refs_for_key(key)
3560
3561 def get_referrers(self):
3562- result = set()
3563- for referrers in self.refs.itervalues():
3564- result.update(referrers)
3565- return result
3566+ return set(itertools.chain.from_iterable(viewvalues(self.refs)))
3567
3568
3569
3570
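get_referrers above collapses an accumulate-into-a-set loop into a single expression; the same itertools.chain.from_iterable(viewvalues(...)) idiom recurs in vf_repository.py and vf_search.py below. An equivalence sketch with invented data:

    import itertools

    # Invented mapping of key -> set of referrer keys.
    refs = {'key-a': {'r1', 'r2'}, 'key-b': {'r2', 'r3'}}

    # The old loop:
    result = set()
    for referrers in refs.values():
        result.update(referrers)

    # The replacement: flatten the value sets in a single pass.
    flattened = set(itertools.chain.from_iterable(refs.values()))
    assert result == flattened == {'r1', 'r2', 'r3'}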
3571=== modified file 'breezy/vf_repository.py'
3572--- breezy/vf_repository.py 2017-06-04 18:09:30 +0000
3573+++ breezy/vf_repository.py 2017-06-05 21:41:38 +0000
3574@@ -75,6 +75,8 @@
3575
3576 from .sixish import (
3577 range,
3578+ viewitems,
3579+ viewvalues,
3580 )
3581
3582 from .trace import (
3583@@ -434,7 +436,7 @@
3584 # XXX: Friction: parent_candidates should return a list not a dict
3585 # so that we don't have to walk the inventories again.
3586 parent_candidate_entries = ie.parent_candidates(parent_invs)
3587- head_set = self._heads(ie.file_id, parent_candidate_entries.keys())
3588+ head_set = self._heads(ie.file_id, parent_candidate_entries)
3589 heads = []
3590 for inv in parent_invs:
3591 if inv.has_id(ie.file_id):
3592@@ -692,12 +694,12 @@
3593 seen_root = False # Is the root in the basis delta?
3594 inv_delta = self._basis_delta
3595 modified_rev = self._new_revision_id
3596- for change, head_candidates in changes.values():
3597+ for change, head_candidates in viewvalues(changes):
3598 if change[3][1]: # versioned in target.
3599 # Several things may be happening here:
3600 # We may have a fork in the per-file graph
3601 # - record a change with the content from tree
3602- # We may have a change against < all trees
3603+ # We may have a change against < all trees
3604 # - carry over the tree that hasn't changed
3605 # We may have a change against all trees
3606 # - record the change with the content from tree
3607@@ -1196,7 +1198,7 @@
3608 graph = self.get_graph()
3609 parent_map = graph.get_parent_map(revision_ids)
3610 # The old API returned a list, should this actually be a set?
3611- return parent_map.keys()
3612+ return list(parent_map)
3613
3614 def __init__(self, _format, a_bzrdir, control_files):
3615 """Instantiate a VersionedFileRepository.
3616@@ -1351,7 +1353,7 @@
3617 referrers = frozenset(r[0] for r in key_deps.get_referrers())
3618 file_ids = self.fileids_altered_by_revision_ids(referrers)
3619 missing_texts = set()
3620- for file_id, version_ids in file_ids.iteritems():
3621+ for file_id, version_ids in viewitems(file_ids):
3622 missing_texts.update(
3623 (file_id, version_id) for version_id in version_ids)
3624 present_texts = self.texts.get_parent_map(missing_texts)
3625@@ -1499,8 +1501,7 @@
3626 revision_ids. Each altered file-ids has the exact revision_ids that
3627 altered it listed explicitly.
3628 """
3629- seen = set(self._serializer._find_text_key_references(
3630- line_iterator).iterkeys())
3631+ seen = set(self._serializer._find_text_key_references(line_iterator))
3632 parent_keys = self._find_parent_keys_of_revisions(revision_keys)
3633 parent_seen = set(self._serializer._find_text_key_references(
3634 self._inventory_xml_lines_for_keys(parent_keys)))
3635@@ -1520,7 +1521,7 @@
3636 """
3637 parent_map = self.revisions.get_parent_map(revision_keys)
3638 parent_keys = set(itertools.chain.from_iterable(
3639- parent_map.itervalues()))
3640+ viewvalues(parent_map)))
3641 parent_keys.difference_update(revision_keys)
3642 parent_keys.discard(_mod_revision.NULL_REVISION)
3643 return parent_keys
3644@@ -1603,7 +1604,7 @@
3645 # a cache of the text keys to allow reuse; costs a dict of all the
3646 # keys, but saves a 2-tuple for every child of a given key.
3647 text_key_cache = {}
3648- for text_key, valid in text_key_references.iteritems():
3649+ for text_key, valid in viewitems(text_key_references):
3650 if not valid:
3651 invalid_keys.add(text_key)
3652 else:
3653@@ -1705,7 +1706,7 @@
3654 file_ids = self.fileids_altered_by_revision_ids(revision_ids, inv_w)
3655 count = 0
3656 num_file_ids = len(file_ids)
3657- for file_id, altered_versions in file_ids.iteritems():
3658+ for file_id, altered_versions in viewitems(file_ids):
3659 if pb is not None:
3660 pb.update(gettext("Fetch texts"), count, num_file_ids)
3661 count += 1
3662@@ -1880,8 +1881,8 @@
3663 raise ValueError('get_parent_map(None) is not valid')
3664 else:
3665 query_keys.append((revision_id ,))
3666- for ((revision_id,), parent_keys) in \
3667- self.revisions.get_parent_map(query_keys).iteritems():
3668+ for (revision_id,), parent_keys in viewitems(
3669+ self.revisions.get_parent_map(query_keys)):
3670 if parent_keys:
3671 result[revision_id] = tuple([parent_revid
3672 for (parent_revid,) in parent_keys])
3673@@ -1905,10 +1906,8 @@
3674
3675 def revision_ids_to_search_result(self, result_set):
3676 """Convert a set of revision ids to a graph SearchResult."""
3677- result_parents = set()
3678- for parents in self.get_graph().get_parent_map(
3679- result_set).itervalues():
3680- result_parents.update(parents)
3681+ result_parents = set(itertools.chain.from_iterable(viewvalues(
3682+ self.get_graph().get_parent_map(result_set))))
3683 included_keys = result_set.intersection(result_parents)
3684 start_keys = result_set.difference(included_keys)
3685 exclude_keys = result_parents.difference(result_set)
3686@@ -2334,7 +2333,7 @@
3687 raise AssertionError(
3688 'cannot copy revisions to fill in missing deltas %s' % (
3689 keys['revisions'],))
3690- for substream_kind, keys in keys.iteritems():
3691+ for substream_kind, keys in viewitems(keys):
3692 vf = getattr(self.from_repository, substream_kind)
3693 if vf is None and keys:
3694 raise AssertionError(
3695@@ -2531,8 +2530,7 @@
3696 def _check_file_version_parents(self, texts, progress_bar):
3697 """See check_file_version_parents."""
3698 wrong_parents = {}
3699- self.file_ids = {file_id for file_id, _ in
3700- self.text_index.iterkeys()}
3701+ self.file_ids = {file_id for file_id, _ in self.text_index}
3702 # text keys is now grouped by file_id
3703 n_versions = len(self.text_index)
3704 progress_bar.update(gettext('loading text store'), 0, n_versions)
3705@@ -2540,7 +2538,7 @@
3706 # On unlistable transports this could well be empty/error...
3707 text_keys = self.repository.texts.keys()
3708 unused_keys = frozenset(text_keys) - set(self.text_index)
3709- for num, key in enumerate(self.text_index.iterkeys()):
3710+ for num, key in enumerate(self.text_index):
3711 progress_bar.update(gettext('checking text graph'), num, n_versions)
3712 correct_parents = self.calculate_file_version_parents(key)
3713 try:
3714@@ -2814,11 +2812,10 @@
3715 source may be not have _fallback_repositories even though it is
3716 stacked.)
3717 """
3718- parent_revs = set()
3719- for parents in parent_map.values():
3720- parent_revs.update(parents)
3721+ parent_revs = set(itertools.chain.from_iterable(viewvalues(
3722+ parent_map)))
3723 present_parents = self.source.get_parent_map(parent_revs)
3724- absent_parents = set(parent_revs).difference(present_parents)
3725+ absent_parents = parent_revs.difference(present_parents)
3726 parent_invs_keys_for_stacking = self.source.inventories.get_parent_map(
3727 (rev_id,) for rev_id in absent_parents)
3728 parent_inv_ids = [key[-1] for key in parent_invs_keys_for_stacking]
3729@@ -3154,7 +3151,7 @@
3730 # commit to determine parents. There is a latent/real bug here where
3731 # the parents inserted are not those commit would do - in particular
3732 # they are not filtered by heads(). RBC, AB
3733- for revision, tree in parent_trees.iteritems():
3734+ for revision, tree in viewitems(parent_trees):
3735 if not tree.has_id(ie.file_id):
3736 continue
3737 parent_id = tree.get_file_revision(ie.file_id)
3738
3739=== modified file 'breezy/vf_search.py'
3740--- breezy/vf_search.py 2017-05-25 01:35:55 +0000
3741+++ breezy/vf_search.py 2017-06-05 21:41:38 +0000
3742@@ -18,6 +18,8 @@
3743
3744 from __future__ import absolute_import
3745
3746+import itertools
3747+
3748 from . import (
3749 debug,
3750 revision,
3751@@ -29,11 +31,14 @@
3752 Graph,
3753 invert_parent_map,
3754 )
3755+from .sixish import (
3756+ viewvalues,
3757+ )
3758
3759
3760 class AbstractSearchResult(object):
3761 """The result of a search, describing a set of keys.
3762-
3763+
3764 Search results are typically used as the 'fetch_spec' parameter when
3765 fetching revisions.
3766
3767@@ -380,9 +385,7 @@
3768 # start_set is all the keys in the cache
3769 start_set = set(parent_map)
3770 # result set is all the references to keys in the cache
3771- result_parents = set()
3772- for parents in parent_map.itervalues():
3773- result_parents.update(parents)
3774+ result_parents = set(itertools.chain.from_iterable(viewvalues(parent_map)))
3775 stop_keys = result_parents.difference(start_set)
3776 # We don't need to send ghosts back to the server as a position to
3777 # stop either.
3778@@ -420,14 +423,14 @@
3779 next_revs = next(s)
3780 except StopIteration:
3781 break
3782- for parents in s._current_parents.itervalues():
3783+ for parents in viewvalues(s._current_parents):
3784 f_heads = heads.intersection(parents)
3785 if f_heads:
3786 found_heads.update(f_heads)
3787 stop_keys = exclude_keys.intersection(next_revs)
3788 if stop_keys:
3789 s.stop_searching_any(stop_keys)
3790- for parents in s._current_parents.itervalues():
3791+ for parents in viewvalues(s._current_parents):
3792 f_heads = heads.intersection(parents)
3793 if f_heads:
3794 found_heads.update(f_heads)
3795
3796=== modified file 'breezy/weave.py'
3797--- breezy/weave.py 2017-06-04 18:09:30 +0000
3798+++ breezy/weave.py 2017-06-05 21:41:38 +0000
3799@@ -990,7 +990,7 @@
3800 # map from version name -> all parent names
3801 combined_parents = _reweave_parent_graphs(wa, wb)
3802 mutter("combined parents: %r", combined_parents)
3803- order = tsort.topo_sort(combined_parents.iteritems())
3804+ order = tsort.topo_sort(combined_parents.items())
3805 mutter("order to reweave: %r", order)
3806
3807 if pb and not msg:
3808
3809=== modified file 'breezy/workingtree.py'
3810--- breezy/workingtree.py 2017-05-30 19:32:13 +0000
3811+++ breezy/workingtree.py 2017-06-05 21:41:38 +0000
3812@@ -2343,9 +2343,9 @@
3813 @needs_tree_write_lock
3814 def set_merge_modified(self, modified_hashes):
3815 def iter_stanzas():
3816- for file_id, hash in modified_hashes.iteritems():
3817+ for file_id in modified_hashes:
3818 yield _mod_rio.Stanza(file_id=file_id.decode('utf8'),
3819- hash=hash)
3820+ hash=modified_hashes[file_id])
3821 self._put_rio('merge-hashes', iter_stanzas(), MERGE_MODIFIED_HEADER_1)
3822
3823 @needs_read_lock
3824
3825=== modified file 'breezy/workingtree_4.py'
3826--- breezy/workingtree_4.py 2017-05-30 19:32:13 +0000
3827+++ breezy/workingtree_4.py 2017-06-05 21:41:38 +0000
3828@@ -71,6 +71,7 @@
3829 )
3830 from .sixish import (
3831 BytesIO,
3832+ viewitems,
3833 )
3834 from .transport.local import LocalTransport
3835 from .tree import (
3836@@ -1018,7 +1019,7 @@
3837 raise errors.PathsNotVersionedError(
3838 [p.decode('utf-8') for p in paths])
3839
3840- for dir_name_id, trees_info in found.iteritems():
3841+ for dir_name_id, trees_info in viewitems(found):
3842 for index in search_indexes:
3843 if trees_info[index][0] not in ('r', 'a'):
3844 found_ids.add(dir_name_id[2])
