Merge lp:~gz/brz/i_unzipping into lp:brz

Proposed by Martin Packman
Status: Merged
Approved by: Martin Packman
Approved revision: no longer in the source branch.
Merge reported by: The Breezy Bot
Merged at revision: not available
Proposed branch: lp:~gz/brz/i_unzipping
Merge into: lp:brz
Diff against target: 451 lines (+52/-56)
15 files modified
breezy/config.py (+1/-2)
breezy/index.py (+1/-0)
breezy/knit.py (+4/-7)
breezy/log.py (+5/-11)
breezy/repofmt/knitpack_repo.py (+7/-5)
breezy/sixish.py (+2/-1)
breezy/tests/__init__.py (+1/-1)
breezy/tests/blackbox/test_log.py (+2/-3)
breezy/tests/per_controldir/test_controldir.py (+1/-3)
breezy/tests/per_repository/test_repository.py (+1/-1)
breezy/tests/per_transport.py (+6/-6)
breezy/tests/per_versionedfile.py (+7/-6)
breezy/transport/sftp.py (+4/-1)
breezy/versionedfile.py (+8/-7)
breezy/workingtree_4.py (+2/-2)
To merge this branch: bzr merge lp:~gz/brz/i_unzipping
Reviewer Review Type Date Requested Status
Jelmer Vernooij Approve
Review via email: mp+324552@code.launchpad.net

Commit message

Make use of zip Python 3 compatible

Description of the change

Fix uses of zip and itertools.izip to work across Python 2 and 3.

Using future_builtins in preference to a conditional import of the izip name.

Did some small refactoring of ancient code while touching things; code like knit wants to go away anyway.
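
For reference, the compatibility shim this branch adds to breezy/sixish.py boils down to the following (a minimal sketch of the pattern, mirroring the sixish.py hunk in the diff below):

    from six import PY3

    if PY3:
        # Python 3: the built-in zip and map already return lazy iterators.
        from builtins import zip, map
    else:
        # Python 2: future_builtins provides the iterator-returning versions,
        # so call sites avoid a conditional import of itertools.izip.
        from future_builtins import zip, map

Call sites then just do 'from breezy.sixish import zip' and use it as before; where a fully realized sequence is needed (for len() or indexing), the result is wrapped in list(zip(...)), as several hunks below do.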

Jelmer Vernooij (jelmer) :
review: Approve

The Breezy Bot (the-breezy-bot) wrote :

Running landing tests failed
http://10.242.247.184:8080/job/brz-dev/3/

Martin Packman (gz) wrote :

Fixed the test failures from the per_versionedfile tests, which also really need an iterator. Also reshuffled comments and moved zip and map to a commonly importable location.
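
For context: Python 2's built-in zip() consumes both inputs eagerly, while itertools.izip() and Python 3's zip() pull items one pair at a time, which is what the transport and record-stream tests rely on. A standalone sketch of the difference (not code from this branch):

    import itertools
    import sys

    def records():
        # Stand-in for a transport or record stream: each item is only
        # produced when the consumer asks for it.
        for i in range(3):
            print('producing %d' % i)
            yield i

    expected = ['a', 'b', 'c']

    # Pick the lazy pairing function for the running interpreter; this is
    # the job breezy.sixish now does for the rest of the code base.
    lazy_zip = zip if sys.version_info[0] >= 3 else itertools.izip

    for record, ref in lazy_zip(records(), expected):
        # With the lazy form each 'producing N' interleaves with the loop
        # body; Python 2's eager zip() would drain records() before the
        # first comparison ran.
        print('checking %r against %r' % (record, ref))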

Preview Diff

=== modified file 'breezy/config.py'
--- breezy/config.py 2017-05-22 00:56:52 +0000
+++ breezy/config.py 2017-05-24 23:37:37 +0000
@@ -1191,8 +1191,7 @@
         else:
             # Rely on zip truncating in length to the length of the shortest
             # argument sequence.
-            names = zip(location_parts, section_parts)
-            for name in names:
+            for name in zip(location_parts, section_parts):
                 if not fnmatch.fnmatch(name[0], name[1]):
                     matched = False
                     break

=== modified file 'breezy/index.py'
--- breezy/index.py 2017-05-22 00:56:52 +0000
+++ breezy/index.py 2017-05-24 23:37:37 +0000
@@ -1449,6 +1449,7 @@
         """
         indices_info = zip(self._index_names, self._indices)
         if 'index' in debug.debug_flags:
+            indices_info = list(indices_info)
             trace.mutter('CombinedGraphIndex reordering: currently %r, '
                          'promoting %r', indices_info, hit_indices)
         hit_names = []

=== modified file 'breezy/knit.py'
--- breezy/knit.py 2017-05-22 00:56:52 +0000
+++ breezy/knit.py 2017-05-24 23:37:37 +0000
@@ -53,8 +53,6 @@
 
 from __future__ import absolute_import
 
-
-from itertools import izip
 import operator
 import os
 
@@ -471,7 +469,7 @@
 
     def __init__(self, lines):
         KnitContent.__init__(self)
-        self._lines = lines
+        self._lines = list(lines)
 
     def annotate(self):
         """Return a list of (origin, text) for each content line."""
@@ -504,7 +502,7 @@
         return lines
 
     def copy(self):
-        return AnnotatedKnitContent(self._lines[:])
+        return AnnotatedKnitContent(self._lines)
 
 
 class PlainKnitContent(KnitContent):
@@ -599,7 +597,7 @@
         #       but the code itself doesn't really depend on that.
         #       Figure out a way to not require the overhead of turning the
         #       list back into tuples.
-        lines = [tuple(line.split(' ', 1)) for line in content]
+        lines = (tuple(line.split(' ', 1)) for line in content)
         return AnnotatedKnitContent(lines)
 
     def parse_line_delta_iter(self, lines):
@@ -1933,8 +1931,7 @@
         raw_data = self._access.get_raw_records(
             [index_memo for key, index_memo in needed_records])
 
-        for (key, index_memo), data in \
-                izip(iter(needed_records), raw_data):
+        for (key, index_memo), data in zip(needed_records, raw_data):
             content, digest = self._parse_record(key[-1], data)
             yield key, content, digest
 

=== modified file 'breezy/log.py'
--- breezy/log.py 2017-05-22 00:56:52 +0000
+++ breezy/log.py 2017-05-24 23:37:37 +0000
@@ -87,16 +87,10 @@
     )
 from breezy.sixish import (
     BytesIO,
-    PY3,
+    zip,
     )
 
 
-if PY3:
-    izip = zip
-else:
-    izip = itertools.izip
-
-
 def find_touching_revisions(branch, file_id):
     """Yield a description of revisions which affect the file_id.
 
@@ -834,7 +828,7 @@
         # A single batch conversion is faster than many incremental ones.
         # As we have all the data, do a batch conversion.
         nones = [None] * len(view_revisions)
-        log_rev_iterator = iter([zip(view_revisions, nones, nones)])
+        log_rev_iterator = iter([list(zip(view_revisions, nones, nones))])
     else:
         def _convert():
             for view in view_revisions:
@@ -945,11 +939,11 @@
         new_revs = []
         if delta_type == 'full' and not check_fileids:
             deltas = repository.get_deltas_for_revisions(revisions)
-            for rev, delta in izip(revs, deltas):
+            for rev, delta in zip(revs, deltas):
                 new_revs.append((rev[0], rev[1], delta))
         else:
             deltas = repository.get_deltas_for_revisions(revisions, fileid_set)
-            for rev, delta in izip(revs, deltas):
+            for rev, delta in zip(revs, deltas):
                 if check_fileids:
                     if delta is None or not delta.has_changed():
                         continue
@@ -1005,7 +999,7 @@
         revision_ids = [view[0] for view, _, _ in revs]
         revisions = repository.get_revisions(revision_ids)
         revs = [(rev[0], revision, rev[2]) for rev, revision in
-            izip(revs, revisions)]
+            zip(revs, revisions)]
         yield revs
 
 

=== modified file 'breezy/repofmt/knitpack_repo.py'
--- breezy/repofmt/knitpack_repo.py 2017-05-22 00:56:52 +0000
+++ breezy/repofmt/knitpack_repo.py 2017-05-24 23:37:37 +0000
@@ -20,7 +20,6 @@
 
 from ..lazy_import import lazy_import
 lazy_import(globals(), """
-from itertools import izip
 import time
 
 from breezy import (
@@ -68,6 +67,9 @@
     PackRootCommitBuilder,
     RepositoryPackCollection,
     )
+from ..sixish import (
+    zip
+    )
 from ..vf_repository import (
     StreamSource,
     )
@@ -659,8 +661,8 @@
                 if self._reload_func is not None:
                     self._reload_func()
                 raise
-            for (names, read_func), (_1, _2, (key, eol_flag)) in \
-                izip(reader.iter_records(), pack_readv_requests):
+            for (names, read_func), (_1, _2, (key, eol_flag)) in zip(
+                    reader.iter_records(), pack_readv_requests):
                 raw_data = read_func(None)
                 # check the header only
                 if output_lines is not None:
@@ -711,8 +713,8 @@
                 if self._reload_func is not None:
                     self._reload_func()
                 raise
-            for (names, read_func), (key, eol_flag, references) in \
-                izip(reader.iter_records(), node_vector):
+            for (names, read_func), (key, eol_flag, references) in zip(
+                    reader.iter_records(), node_vector):
                 raw_data = read_func(None)
                 if output_lines:
                     # read the entire thing

=== modified file 'breezy/sixish.py'
--- breezy/sixish.py 2017-05-21 18:16:32 +0000
+++ breezy/sixish.py 2017-05-24 23:37:37 +0000
@@ -24,7 +24,6 @@
 
 from six import (
     binary_type,
-    PY2,
     PY3,
     reraise,
     string_types,
@@ -40,6 +39,8 @@
     import io as _io
     BytesIO = _io.BytesIO
     StringIO = _io.StringIO
+    from builtins import zip, map
 else:
     from cStringIO import StringIO as BytesIO
     from StringIO import StringIO
+    from future_builtins import zip, map

=== modified file 'breezy/tests/__init__.py'
--- breezy/tests/__init__.py 2017-05-22 00:56:52 +0000
+++ breezy/tests/__init__.py 2017-05-24 23:37:37 +0000
@@ -3456,7 +3456,7 @@
     # than the fastest.
     partitions = [list() for i in range(count)]
     tests = iter_suite_tests(suite)
-    for partition, test in itertools.izip(itertools.cycle(partitions), tests):
+    for partition, test in zip(itertools.cycle(partitions), tests):
         partition.append(test)
     return partitions
 

=== modified file 'breezy/tests/blackbox/test_log.py'
--- breezy/tests/blackbox/test_log.py 2017-05-21 18:10:28 +0000
+++ breezy/tests/blackbox/test_log.py 2017-05-24 23:37:37 +0000
@@ -17,7 +17,6 @@
 
 """Black-box tests for brz log."""
 
-from itertools import izip
 import os
 
 from breezy import (
@@ -693,8 +692,8 @@
                           for r in self.get_captured_revisions()])
         # Now check the diffs, adding the revno in case of failure
         fmt = 'In revno %s\n%s'
-        for expected_rev, actual_rev in izip(expected,
-                                             self.get_captured_revisions()):
+        for expected_rev, actual_rev in zip(expected,
+                                            self.get_captured_revisions()):
             revno, depth, expected_diff = expected_rev
             actual_diff = actual_rev.diff
             self.assertEqualDiff(fmt % (revno, expected_diff),

=== modified file 'breezy/tests/per_controldir/test_controldir.py'
--- breezy/tests/per_controldir/test_controldir.py 2017-05-22 00:56:52 +0000
+++ breezy/tests/per_controldir/test_controldir.py 2017-05-24 23:37:37 +0000
@@ -16,8 +16,6 @@
 
 """Tests for control directory implementations - tests a controldir format."""
 
-from itertools import izip
-
 import breezy.branch
 from breezy import (
     bzrdir as _mod_bzrdir,
@@ -1560,7 +1558,7 @@
         self.assertPathExists(old_path)
         self.assertPathExists(new_path)
         for (((dir_relpath1, _), entries1),
-             ((dir_relpath2, _), entries2)) in izip(
+             ((dir_relpath2, _), entries2)) in zip(
             osutils.walkdirs(old_path),
             osutils.walkdirs(new_path)):
             self.assertEqual(dir_relpath1, dir_relpath2)

=== modified file 'breezy/tests/per_repository/test_repository.py'
--- breezy/tests/per_repository/test_repository.py 2017-05-22 00:56:52 +0000
+++ breezy/tests/per_repository/test_repository.py 2017-05-24 23:37:37 +0000
@@ -421,7 +421,7 @@
         revision_ids = ['a-rev', 'b-rev', 'c-rev']
         revisions = repo.get_revisions(revision_ids)
         self.assertEqual(len(revisions), 3)
-        zipped = zip(revisions, revision_ids)
+        zipped = list(zip(revisions, revision_ids))
         self.assertEqual(len(zipped), 3)
         for revision, revision_id in zipped:
             self.assertEqual(revision.revision_id, revision_id)

=== modified file 'breezy/tests/per_transport.py'
--- breezy/tests/per_transport.py 2017-05-23 14:08:03 +0000
+++ breezy/tests/per_transport.py 2017-05-24 23:37:37 +0000
@@ -20,7 +20,6 @@
 TransportTestProviderAdapter.
 """
 
-import itertools
 import os
 import stat
 import sys
@@ -43,6 +42,7 @@
 from ..osutils import getcwd
 from ..sixish import (
     BytesIO,
+    zip,
     )
 from ..smart import medium
 from . import (
@@ -191,15 +191,15 @@
         self.build_tree(files, transport=t, line_endings='binary')
         self.check_transport_contents('contents of a\n', t, 'a')
         content_f = t.get_multi(files)
-        # Use itertools.izip() instead of use zip() or map(), since they fully
-        # evaluate their inputs, the transport requests should be issued and
+        # Must use iter zip() from future not old version which will fully
+        # evaluate its inputs, the transport requests should be issued and
         # handled sequentially (we don't want to force transport to buffer).
-        for content, f in itertools.izip(contents, content_f):
+        for content, f in zip(contents, content_f):
             self.assertEqual(content, f.read())
 
         content_f = t.get_multi(iter(files))
-        # Use itertools.izip() for the same reason
-        for content, f in itertools.izip(contents, content_f):
+        # Again this zip() must come from the future
+        for content, f in zip(contents, content_f):
             self.assertEqual(content, f.read())
 
     def test_get_unknown_file(self):

=== modified file 'breezy/tests/per_versionedfile.py'
--- breezy/tests/per_versionedfile.py 2017-05-22 00:56:52 +0000
+++ breezy/tests/per_versionedfile.py 2017-05-24 23:37:37 +0000
@@ -22,7 +22,7 @@
 # considered typical and check that it can be detected/corrected.
 
 from gzip import GzipFile
-from itertools import chain, izip
+import itertools
 
 from .. import (
     errors,
@@ -45,6 +45,7 @@
     )
 from ..sixish import (
     BytesIO,
+    zip,
     )
 from . import (
     TestCase,
@@ -2028,9 +2029,9 @@
         :param records: A list to collect the seen records.
         :return: A generator of the records in stream.
         """
-        # We make assertions during copying to catch things early for
-        # easier debugging.
-        for record, ref_record in izip(stream, expected):
+        # We make assertions during copying to catch things early for easier
+        # debugging. This must use the iterating zip() from the future.
+        for record, ref_record in zip(stream, expected):
             records.append(record)
             self.assertEqual(ref_record.key, record.key)
             self.assertEqual(ref_record.storage_kind, record.storage_kind)
@@ -2444,7 +2445,7 @@
         origin_entries = source.get_record_stream(origin_keys, 'unordered', False)
         end_entries = source.get_record_stream(end_keys, 'topological', False)
         start_entries = source.get_record_stream(start_keys, 'topological', False)
-        entries = chain(origin_entries, end_entries, start_entries)
+        entries = itertools.chain(origin_entries, end_entries, start_entries)
         try:
             files.insert_record_stream(entries)
         except RevisionNotPresent:
@@ -2476,7 +2477,7 @@
         streams = []
         for key in reversed(keys):
             streams.append(source.get_record_stream([key], 'unordered', False))
-        deltas = chain(*streams[:-1])
+        deltas = itertools.chain.from_iterable(streams[:-1])
         files = self.get_versionedfiles()
         try:
             files.insert_record_stream(deltas)

=== modified file 'breezy/transport/sftp.py'
--- breezy/transport/sftp.py 2017-05-22 00:56:52 +0000
+++ breezy/transport/sftp.py 2017-05-24 23:37:37 +0000
@@ -49,6 +49,9 @@
                       ParamikoNotPresent,
                       )
 from ..osutils import fancy_rename
+from ..sixish import (
+    zip,
+    )
 from ..trace import mutter, warning
 from ..transport import (
     FileFileStream,
@@ -202,7 +205,7 @@
         # short readv.
         data_stream = itertools.chain(fp.readv(requests),
                                       itertools.repeat(None))
-        for (start, length), data in itertools.izip(requests, data_stream):
+        for (start, length), data in zip(requests, data_stream):
             if data is None:
                 if cur_coalesced is not None:
                     raise errors.ShortReadvError(self.relpath,

=== modified file 'breezy/versionedfile.py'
--- breezy/versionedfile.py 2017-05-22 00:56:52 +0000
+++ breezy/versionedfile.py 2017-05-24 23:37:37 +0000
@@ -43,6 +43,7 @@
 from .registry import Registry
 from .sixish import (
     BytesIO,
+    zip,
     )
 from .textmerge import TextMerge
 
@@ -535,10 +536,10 @@
                 if not mpvf.has_version(p))
         present_parents = set(self.get_parent_map(needed_parents).keys())
         for parent_id, lines in zip(present_parents,
-            self._get_lf_split_line_list(present_parents)):
+                self._get_lf_split_line_list(present_parents)):
             mpvf.add_version(lines, parent_id, [])
-        for (version, parent_ids, expected_sha1, mpdiff), lines in\
-            zip(records, mpvf.get_line_list(versions)):
+        for (version, parent_ids, expected_sha1, mpdiff), lines in zip(
+                records, mpvf.get_line_list(versions)):
             if len(parent_ids) == 1:
                 left_matching_blocks = list(mpdiff.get_matching_blocks(0,
                     mpvf.get_diff(parent_ids[0]).num_lines()))
@@ -1027,8 +1028,8 @@
                 continue
             mpvf.add_version(chunks_to_lines(record.get_bytes_as('chunked')),
                 record.key, [])
-        for (key, parent_keys, expected_sha1, mpdiff), lines in\
-            zip(records, mpvf.get_line_list(versions)):
+        for (key, parent_keys, expected_sha1, mpdiff), lines in zip(
+                records, mpvf.get_line_list(versions)):
             if len(parent_keys) == 1:
                 left_matching_blocks = list(mpdiff.get_matching_blocks(0,
                     mpvf.get_diff(parent_keys[0]).num_lines()))
@@ -1318,7 +1319,7 @@
             prefix_keys.append(key[-1])
         return result
 
-    def _get_all_prefixes(self):
+    def _iter_all_prefixes(self):
         # Identify all key prefixes.
         # XXX: A bit hacky, needs polish.
         if isinstance(self._mapper, ConstantMapper):
@@ -1413,7 +1414,7 @@
                 yield line, prefix + (version,)
 
     def _iter_all_components(self):
-        for path, prefix in self._get_all_prefixes():
+        for path, prefix in self._iter_all_prefixes():
             yield prefix, self._get_vf(path)
 
     def keys(self):

=== modified file 'breezy/workingtree_4.py'
--- breezy/workingtree_4.py 2017-05-22 00:56:52 +0000
+++ breezy/workingtree_4.py 2017-05-24 23:37:37 +0000
@@ -1353,8 +1353,8 @@
                 _mod_revision.NULL_REVISION)
             trees = []
         else:
-            trees = zip(revision_ids,
-                self.branch.repository.revision_trees(revision_ids))
+            trees = list(zip(revision_ids,
+                self.branch.repository.revision_trees(revision_ids)))
             base_tree = trees[0][1]
         state = self.current_dirstate()
         # We don't support ghosts yet

Subscribers

People subscribed via source and target branches