Merge lp:~jelmer/brz/progressbar-context into lp:brz
- progressbar-context
- Merge into trunk
Proposed by
Jelmer Vernooij
Status: | Merged |
---|---|
Approved by: | Jelmer Vernooij |
Approved revision: | no longer in the source branch. |
Merge reported by: | The Breezy Bot |
Merged at revision: | not available |
Proposed branch: | lp:~jelmer/brz/progressbar-context |
Merge into: | lp:brz |
Diff against target: |
476 lines (+125/-160) 9 files modified
breezy/bzr/bzrdir.py (+65/-66) breezy/bzr/groupcompress_repo.py (+28/-29) breezy/git/commands.py (+1/-4) breezy/git/interrepo.py (+6/-21) breezy/git/object_store.py (+2/-8) breezy/git/remote.py (+2/-6) breezy/git/repository.py (+1/-4) breezy/plugins/repodebug/check_chk.py (+20/-21) breezy/reconcile.py (+0/-1) |
To merge this branch: | bzr merge lp:~jelmer/brz/progressbar-context |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Jelmer Vernooij | Approve | ||
Review via email: mp+358460@code.launchpad.net |
Commit message
Use contexts for progress bars where possible.
Description of the change
Use contexts for progress bars where possible.
To post a comment you must log in.
Revision history for this message
Jelmer Vernooij (jelmer) : | # |
review:
Approve
Revision history for this message
The Breezy Bot (the-breezy-bot) wrote : | # |
Revision history for this message
The Breezy Bot (the-breezy-bot) wrote : | # |
Running landing tests failed
https://ci.breezy-vcs.org/job/brz/job/brz-land/ (link truncated in extraction; exact build URL not recoverable)
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'breezy/bzr/bzrdir.py' |
2 | --- breezy/bzr/bzrdir.py 2019-06-02 21:07:08 +0000 |
3 | +++ breezy/bzr/bzrdir.py 2019-06-06 21:52:30 +0000 |
4 | @@ -1734,73 +1734,72 @@ |
5 | def convert(self, to_convert, pb): |
6 | """See Converter.convert().""" |
7 | self.controldir = to_convert |
8 | - self.pb = ui.ui_factory.nested_progress_bar() |
9 | - self.count = 0 |
10 | - self.total = 1 |
11 | - self.step('checking repository format') |
12 | - try: |
13 | - repo = self.controldir.open_repository() |
14 | - except errors.NoRepositoryPresent: |
15 | - pass |
16 | - else: |
17 | - repo_fmt = self.target_format.repository_format |
18 | - if not isinstance(repo._format, repo_fmt.__class__): |
19 | - from ..repository import CopyConverter |
20 | - ui.ui_factory.note(gettext('starting repository conversion')) |
21 | - if not repo_fmt.supports_overriding_transport: |
22 | - raise AssertionError( |
23 | - "Repository in metadir does not support " |
24 | - "overriding transport") |
25 | - converter = CopyConverter(self.target_format.repository_format) |
26 | - converter.convert(repo, pb) |
27 | - for branch in self.controldir.list_branches(): |
28 | - # TODO: conversions of Branch and Tree should be done by |
29 | - # InterXFormat lookups/some sort of registry. |
30 | - # Avoid circular imports |
31 | - old = branch._format.__class__ |
32 | - new = self.target_format.get_branch_format().__class__ |
33 | - while old != new: |
34 | - if (old == fullhistorybranch.BzrBranchFormat5 |
35 | - and new in (_mod_bzrbranch.BzrBranchFormat6, |
36 | - _mod_bzrbranch.BzrBranchFormat7, |
37 | - _mod_bzrbranch.BzrBranchFormat8)): |
38 | - branch_converter = _mod_bzrbranch.Converter5to6() |
39 | - elif (old == _mod_bzrbranch.BzrBranchFormat6 |
40 | - and new in (_mod_bzrbranch.BzrBranchFormat7, |
41 | - _mod_bzrbranch.BzrBranchFormat8)): |
42 | - branch_converter = _mod_bzrbranch.Converter6to7() |
43 | - elif (old == _mod_bzrbranch.BzrBranchFormat7 |
44 | - and new is _mod_bzrbranch.BzrBranchFormat8): |
45 | - branch_converter = _mod_bzrbranch.Converter7to8() |
46 | - else: |
47 | - raise errors.BadConversionTarget("No converter", new, |
48 | - branch._format) |
49 | - branch_converter.convert(branch) |
50 | - branch = self.controldir.open_branch() |
51 | + with ui.ui_factory.nested_progress_bar() as self.pb: |
52 | + self.count = 0 |
53 | + self.total = 1 |
54 | + self.step('checking repository format') |
55 | + try: |
56 | + repo = self.controldir.open_repository() |
57 | + except errors.NoRepositoryPresent: |
58 | + pass |
59 | + else: |
60 | + repo_fmt = self.target_format.repository_format |
61 | + if not isinstance(repo._format, repo_fmt.__class__): |
62 | + from ..repository import CopyConverter |
63 | + ui.ui_factory.note(gettext('starting repository conversion')) |
64 | + if not repo_fmt.supports_overriding_transport: |
65 | + raise AssertionError( |
66 | + "Repository in metadir does not support " |
67 | + "overriding transport") |
68 | + converter = CopyConverter(self.target_format.repository_format) |
69 | + converter.convert(repo, pb) |
70 | + for branch in self.controldir.list_branches(): |
71 | + # TODO: conversions of Branch and Tree should be done by |
72 | + # InterXFormat lookups/some sort of registry. |
73 | + # Avoid circular imports |
74 | old = branch._format.__class__ |
75 | - try: |
76 | - tree = self.controldir.open_workingtree(recommend_upgrade=False) |
77 | - except (errors.NoWorkingTree, errors.NotLocalUrl): |
78 | - pass |
79 | - else: |
80 | - # TODO: conversions of Branch and Tree should be done by |
81 | - # InterXFormat lookups |
82 | - if (isinstance(tree, workingtree_3.WorkingTree3) |
83 | - and not isinstance(tree, workingtree_4.DirStateWorkingTree) |
84 | - and isinstance(self.target_format.workingtree_format, |
85 | - workingtree_4.DirStateWorkingTreeFormat)): |
86 | - workingtree_4.Converter3to4().convert(tree) |
87 | - if (isinstance(tree, workingtree_4.DirStateWorkingTree) |
88 | - and not isinstance(tree, workingtree_4.WorkingTree5) |
89 | - and isinstance(self.target_format.workingtree_format, |
90 | - workingtree_4.WorkingTreeFormat5)): |
91 | - workingtree_4.Converter4to5().convert(tree) |
92 | - if (isinstance(tree, workingtree_4.DirStateWorkingTree) |
93 | - and not isinstance(tree, workingtree_4.WorkingTree6) |
94 | - and isinstance(self.target_format.workingtree_format, |
95 | - workingtree_4.WorkingTreeFormat6)): |
96 | - workingtree_4.Converter4or5to6().convert(tree) |
97 | - self.pb.finished() |
98 | + new = self.target_format.get_branch_format().__class__ |
99 | + while old != new: |
100 | + if (old == fullhistorybranch.BzrBranchFormat5 |
101 | + and new in (_mod_bzrbranch.BzrBranchFormat6, |
102 | + _mod_bzrbranch.BzrBranchFormat7, |
103 | + _mod_bzrbranch.BzrBranchFormat8)): |
104 | + branch_converter = _mod_bzrbranch.Converter5to6() |
105 | + elif (old == _mod_bzrbranch.BzrBranchFormat6 |
106 | + and new in (_mod_bzrbranch.BzrBranchFormat7, |
107 | + _mod_bzrbranch.BzrBranchFormat8)): |
108 | + branch_converter = _mod_bzrbranch.Converter6to7() |
109 | + elif (old == _mod_bzrbranch.BzrBranchFormat7 |
110 | + and new is _mod_bzrbranch.BzrBranchFormat8): |
111 | + branch_converter = _mod_bzrbranch.Converter7to8() |
112 | + else: |
113 | + raise errors.BadConversionTarget("No converter", new, |
114 | + branch._format) |
115 | + branch_converter.convert(branch) |
116 | + branch = self.controldir.open_branch() |
117 | + old = branch._format.__class__ |
118 | + try: |
119 | + tree = self.controldir.open_workingtree(recommend_upgrade=False) |
120 | + except (errors.NoWorkingTree, errors.NotLocalUrl): |
121 | + pass |
122 | + else: |
123 | + # TODO: conversions of Branch and Tree should be done by |
124 | + # InterXFormat lookups |
125 | + if (isinstance(tree, workingtree_3.WorkingTree3) |
126 | + and not isinstance(tree, workingtree_4.DirStateWorkingTree) |
127 | + and isinstance(self.target_format.workingtree_format, |
128 | + workingtree_4.DirStateWorkingTreeFormat)): |
129 | + workingtree_4.Converter3to4().convert(tree) |
130 | + if (isinstance(tree, workingtree_4.DirStateWorkingTree) |
131 | + and not isinstance(tree, workingtree_4.WorkingTree5) |
132 | + and isinstance(self.target_format.workingtree_format, |
133 | + workingtree_4.WorkingTreeFormat5)): |
134 | + workingtree_4.Converter4to5().convert(tree) |
135 | + if (isinstance(tree, workingtree_4.DirStateWorkingTree) |
136 | + and not isinstance(tree, workingtree_4.WorkingTree6) |
137 | + and isinstance(self.target_format.workingtree_format, |
138 | + workingtree_4.WorkingTreeFormat6)): |
139 | + workingtree_4.Converter4or5to6().convert(tree) |
140 | return to_convert |
141 | |
142 | |
143 | |
144 | === modified file 'breezy/bzr/groupcompress_repo.py' |
145 | --- breezy/bzr/groupcompress_repo.py 2018-11-30 12:39:04 +0000 |
146 | +++ breezy/bzr/groupcompress_repo.py 2019-06-06 21:52:30 +0000 |
147 | @@ -1270,35 +1270,34 @@ |
148 | yield record |
149 | |
150 | revision_ids = search.get_keys() |
151 | - pb = ui.ui_factory.nested_progress_bar() |
152 | - rc = self._record_counter |
153 | - self._record_counter.setup(len(revision_ids)) |
154 | - for stream_info in self._fetch_revision_texts(revision_ids): |
155 | - yield (stream_info[0], |
156 | - wrap_and_count(pb, rc, stream_info[1])) |
157 | - self._revision_keys = [(rev_id,) for rev_id in revision_ids] |
158 | - # TODO: The keys to exclude might be part of the search recipe |
159 | - # For now, exclude all parents that are at the edge of ancestry, for |
160 | - # which we have inventories |
161 | - from_repo = self.from_repository |
162 | - parent_keys = from_repo._find_parent_keys_of_revisions( |
163 | - self._revision_keys) |
164 | - self.from_repository.revisions.clear_cache() |
165 | - self.from_repository.signatures.clear_cache() |
166 | - # Clear the repo's get_parent_map cache too. |
167 | - self.from_repository._unstacked_provider.disable_cache() |
168 | - self.from_repository._unstacked_provider.enable_cache() |
169 | - s = self._get_inventory_stream(self._revision_keys) |
170 | - yield (s[0], wrap_and_count(pb, rc, s[1])) |
171 | - self.from_repository.inventories.clear_cache() |
172 | - for stream_info in self._get_filtered_chk_streams(parent_keys): |
173 | - yield (stream_info[0], wrap_and_count(pb, rc, stream_info[1])) |
174 | - self.from_repository.chk_bytes.clear_cache() |
175 | - s = self._get_text_stream() |
176 | - yield (s[0], wrap_and_count(pb, rc, s[1])) |
177 | - self.from_repository.texts.clear_cache() |
178 | - pb.update('Done', rc.max, rc.max) |
179 | - pb.finished() |
180 | + with ui.ui_factory.nested_progress_bar() as pb: |
181 | + rc = self._record_counter |
182 | + self._record_counter.setup(len(revision_ids)) |
183 | + for stream_info in self._fetch_revision_texts(revision_ids): |
184 | + yield (stream_info[0], |
185 | + wrap_and_count(pb, rc, stream_info[1])) |
186 | + self._revision_keys = [(rev_id,) for rev_id in revision_ids] |
187 | + # TODO: The keys to exclude might be part of the search recipe |
188 | + # For now, exclude all parents that are at the edge of ancestry, for |
189 | + # which we have inventories |
190 | + from_repo = self.from_repository |
191 | + parent_keys = from_repo._find_parent_keys_of_revisions( |
192 | + self._revision_keys) |
193 | + self.from_repository.revisions.clear_cache() |
194 | + self.from_repository.signatures.clear_cache() |
195 | + # Clear the repo's get_parent_map cache too. |
196 | + self.from_repository._unstacked_provider.disable_cache() |
197 | + self.from_repository._unstacked_provider.enable_cache() |
198 | + s = self._get_inventory_stream(self._revision_keys) |
199 | + yield (s[0], wrap_and_count(pb, rc, s[1])) |
200 | + self.from_repository.inventories.clear_cache() |
201 | + for stream_info in self._get_filtered_chk_streams(parent_keys): |
202 | + yield (stream_info[0], wrap_and_count(pb, rc, stream_info[1])) |
203 | + self.from_repository.chk_bytes.clear_cache() |
204 | + s = self._get_text_stream() |
205 | + yield (s[0], wrap_and_count(pb, rc, s[1])) |
206 | + self.from_repository.texts.clear_cache() |
207 | + pb.update('Done', rc.max, rc.max) |
208 | |
209 | def get_stream_for_missing_keys(self, missing_keys): |
210 | # missing keys can only occur when we are byte copying and not |
211 | |
212 | === modified file 'breezy/git/commands.py' |
213 | --- breezy/git/commands.py 2019-04-06 12:51:43 +0000 |
214 | +++ breezy/git/commands.py 2019-06-06 21:52:30 +0000 |
215 | @@ -130,8 +130,7 @@ |
216 | interrepo = InterRepository.get(source_repo, target_repo) |
217 | mapping = source_repo.get_mapping() |
218 | refs = interrepo.fetch() |
219 | - pb = ui.ui_factory.nested_progress_bar() |
220 | - try: |
221 | + with ui.ui_factory.nested_progress_bar() as pb: |
222 | for i, (name, sha) in enumerate(viewitems(refs)): |
223 | try: |
224 | branch_name = ref_to_branch_name(name) |
225 | @@ -159,8 +158,6 @@ |
226 | source_branch.base, |
227 | {"branch": urlutils.escape(branch_name)}) |
228 | head_branch.set_parent(url) |
229 | - finally: |
230 | - pb.finished() |
231 | trace.note(gettext( |
232 | "Use 'bzr checkout' to create a working tree in " |
233 | "the newly created branches.")) |
234 | |
235 | === modified file 'breezy/git/interrepo.py' |
236 | --- breezy/git/interrepo.py 2018-11-16 23:15:15 +0000 |
237 | +++ breezy/git/interrepo.py 2019-06-06 21:52:30 +0000 |
238 | @@ -216,8 +216,7 @@ |
239 | stop_revids.append(revid) |
240 | missing = set() |
241 | graph = self.source.get_graph() |
242 | - pb = ui.ui_factory.nested_progress_bar() |
243 | - try: |
244 | + with ui.ui_factory.nested_progress_bar() as pb: |
245 | while stop_revids: |
246 | new_stop_revids = [] |
247 | for revid in stop_revids: |
248 | @@ -231,8 +230,6 @@ |
249 | for parent_revids in viewvalues(parent_map): |
250 | stop_revids.update(parent_revids) |
251 | pb.update("determining revisions to fetch", len(missing)) |
252 | - finally: |
253 | - pb.finished() |
254 | return graph.iter_topo_order(missing) |
255 | |
256 | def _get_target_bzr_refs(self): |
257 | @@ -302,8 +299,7 @@ |
258 | with self.source_store.lock_read(): |
259 | todo = list(self.missing_revisions(revs))[:limit] |
260 | revidmap = {} |
261 | - pb = ui.ui_factory.nested_progress_bar() |
262 | - try: |
263 | + with ui.ui_factory.nested_progress_bar() as pb: |
264 | object_generator = MissingObjectsIterator( |
265 | self.source_store, self.source, pb) |
266 | for (old_revid, git_sha) in object_generator.import_revisions( |
267 | @@ -321,8 +317,6 @@ |
268 | revidmap[old_revid] = (git_sha, new_revid) |
269 | self.target_store.add_objects(object_generator) |
270 | return revidmap |
271 | - finally: |
272 | - pb.finished() |
273 | |
274 | def fetch(self, revision_id=None, pb=None, find_ghosts=False, |
275 | fetch_spec=None, mapped_refs=None): |
276 | @@ -574,8 +568,7 @@ |
277 | lambda sha: store[sha].parents) |
278 | wants_recorder = DetermineWantsRecorder(determine_wants) |
279 | |
280 | - pb = ui.ui_factory.nested_progress_bar() |
281 | - try: |
282 | + with ui.ui_factory.nested_progress_bar() as pb: |
283 | objects_iter = self.source.fetch_objects( |
284 | wants_recorder, graph_walker, store.get_raw) |
285 | trace.mutter("Importing %d new revisions", |
286 | @@ -584,8 +577,6 @@ |
287 | self.target, mapping, objects_iter, store, |
288 | wants_recorder.wants, pb, limit) |
289 | return (pack_hint, last_rev, wants_recorder.remote_refs) |
290 | - finally: |
291 | - pb.finished() |
292 | |
293 | @staticmethod |
294 | def is_compatible(source, target): |
295 | @@ -610,9 +601,8 @@ |
296 | self._warn_slow() |
297 | remote_refs = self.source.controldir.get_refs_container().as_dict() |
298 | wants = determine_wants(remote_refs) |
299 | - pb = ui.ui_factory.nested_progress_bar() |
300 | target_git_object_retriever = get_object_store(self.target, mapping) |
301 | - try: |
302 | + with ui.ui_factory.nested_progress_bar() as pb: |
303 | target_git_object_retriever.lock_write() |
304 | try: |
305 | (pack_hint, last_rev) = import_git_objects( |
306 | @@ -621,8 +611,6 @@ |
307 | return (pack_hint, last_rev, remote_refs) |
308 | finally: |
309 | target_git_object_retriever.unlock() |
310 | - finally: |
311 | - pb.finished() |
312 | |
313 | @staticmethod |
314 | def is_compatible(source, target): |
315 | @@ -726,14 +714,11 @@ |
316 | if limit is not None: |
317 | raise FetchLimitUnsupported(self) |
318 | from .remote import DefaultProgressReporter |
319 | - pb = ui.ui_factory.nested_progress_bar() |
320 | - progress = DefaultProgressReporter(pb).progress |
321 | - try: |
322 | + with ui.ui_factory.nested_progress_bar() as pb: |
323 | + progress = DefaultProgressReporter(pb).progress |
324 | refs = self.source._git.fetch( |
325 | self.target._git, determine_wants, |
326 | progress=progress) |
327 | - finally: |
328 | - pb.finished() |
329 | return (None, None, refs) |
330 | |
331 | @staticmethod |
332 | |
333 | === modified file 'breezy/git/object_store.py' |
334 | --- breezy/git/object_store.py 2019-04-14 03:19:31 +0000 |
335 | +++ breezy/git/object_store.py 2019-06-06 21:52:30 +0000 |
336 | @@ -457,15 +457,12 @@ |
337 | return |
338 | self.start_write_group() |
339 | try: |
340 | - pb = ui.ui_factory.nested_progress_bar() |
341 | - try: |
342 | + with ui.ui_factory.nested_progress_bar() as pb: |
343 | for i, revid in enumerate(graph.iter_topo_order( |
344 | missing_revids)): |
345 | trace.mutter('processing %r', revid) |
346 | pb.update("updating git map", i, len(missing_revids)) |
347 | self._update_sha_map_revision(revid) |
348 | - finally: |
349 | - pb.finished() |
350 | if stop_revision is None: |
351 | self._map_updated = True |
352 | except BaseException: |
353 | @@ -824,8 +821,7 @@ |
354 | graph = self.repository.get_graph() |
355 | todo = _find_missing_bzr_revids(graph, pending, processed) |
356 | ret = PackTupleIterable(self) |
357 | - pb = ui.ui_factory.nested_progress_bar() |
358 | - try: |
359 | + with ui.ui_factory.nested_progress_bar() as pb: |
360 | for i, revid in enumerate(graph.iter_topo_order(todo)): |
361 | pb.update("generating git objects", i, len(todo)) |
362 | try: |
363 | @@ -837,8 +833,6 @@ |
364 | rev, tree, lossy=lossy): |
365 | ret.add(obj.id, path) |
366 | return ret |
367 | - finally: |
368 | - pb.finished() |
369 | |
370 | def add_thin_pack(self): |
371 | import tempfile |
372 | |
373 | === modified file 'breezy/git/remote.py' |
374 | --- breezy/git/remote.py 2019-06-02 05:13:10 +0000 |
375 | +++ breezy/git/remote.py 2019-06-06 21:52:30 +0000 |
376 | @@ -652,14 +652,10 @@ |
377 | |
378 | def _idx_load_or_generate(self, path): |
379 | if not os.path.exists(path): |
380 | - pb = ui.ui_factory.nested_progress_bar() |
381 | - try: |
382 | + with ui.ui_factory.nested_progress_bar() as pb: |
383 | def report_progress(cur, total): |
384 | pb.update("generating index", cur, total) |
385 | - self.data.create_index(path, |
386 | - progress=report_progress) |
387 | - finally: |
388 | - pb.finished() |
389 | + self.data.create_index(path, progress=report_progress) |
390 | return load_pack_index(path) |
391 | |
392 | def __del__(self): |
393 | |
394 | === modified file 'breezy/git/repository.py' |
395 | --- breezy/git/repository.py 2019-02-11 19:40:51 +0000 |
396 | +++ breezy/git/repository.py 2019-06-06 21:52:30 +0000 |
397 | @@ -516,8 +516,7 @@ |
398 | except KeyError: |
399 | # Update refs from Git commit objects |
400 | # FIXME: Hitting this a lot will be very inefficient... |
401 | - pb = ui.ui_factory.nested_progress_bar() |
402 | - try: |
403 | + with ui.ui_factory.nested_progress_bar() as pb: |
404 | for i, (git_sha, revid, roundtrip_revid) in enumerate( |
405 | self._iter_revision_ids()): |
406 | if not roundtrip_revid: |
407 | @@ -527,8 +526,6 @@ |
408 | self._git.refs[refname] = git_sha |
409 | if roundtrip_revid == bzr_revid: |
410 | return git_sha, mapping |
411 | - finally: |
412 | - pb.finished() |
413 | raise errors.NoSuchRevision(self, bzr_revid) |
414 | else: |
415 | return (git_sha, mapping) |
416 | |
417 | === modified file 'breezy/plugins/repodebug/check_chk.py' |
418 | --- breezy/plugins/repodebug/check_chk.py 2017-09-07 08:21:29 +0000 |
419 | +++ breezy/plugins/repodebug/check_chk.py 2019-06-06 21:52:30 +0000 |
420 | @@ -59,24 +59,23 @@ |
421 | r1 = revision[0].as_revision_id(branch) |
422 | r2 = revision[1].as_revision_id(branch) |
423 | inv_ids = g.find_unique_ancestors(r2, [r1]) |
424 | - pb = ui.ui_factory.nested_progress_bar() |
425 | - self.add_cleanup(pb.finished) |
426 | - for idx, inv in enumerate(repo.iter_inventories(inv_ids)): |
427 | - pb.update('checking', idx, len(inv_ids)) |
428 | - d = dict(inv.id_to_entry.iteritems()) |
429 | - test_key = chk_map.CHKMap.from_dict( |
430 | - vf, d, maximum_size=inv.id_to_entry._root_node._maximum_size, |
431 | - key_width=inv.id_to_entry._root_node._key_width, |
432 | - search_key_func=inv.id_to_entry._search_key_func) |
433 | - if inv.id_to_entry.key() != test_key: |
434 | - trace.warning('Failed for id_to_entry inv: %s' |
435 | - % (inv.revision_id,)) |
436 | - pid = inv.parent_id_basename_to_file_id |
437 | - d = dict(pid.iteritems()) |
438 | - test_key = chk_map.CHKMap.from_dict( |
439 | - vf, d, maximum_size=pid._root_node._maximum_size, |
440 | - key_width=pid._root_node._key_width, |
441 | - search_key_func=pid._search_key_func) |
442 | - if pid.key() != test_key: |
443 | - trace.warning('Failed for parent_id_to_basename inv: %s' |
444 | - % (inv.revision_id,)) |
445 | + with ui.ui_factory.nested_progress_bar() as pb: |
446 | + for idx, inv in enumerate(repo.iter_inventories(inv_ids)): |
447 | + pb.update('checking', idx, len(inv_ids)) |
448 | + d = dict(inv.id_to_entry.iteritems()) |
449 | + test_key = chk_map.CHKMap.from_dict( |
450 | + vf, d, maximum_size=inv.id_to_entry._root_node._maximum_size, |
451 | + key_width=inv.id_to_entry._root_node._key_width, |
452 | + search_key_func=inv.id_to_entry._search_key_func) |
453 | + if inv.id_to_entry.key() != test_key: |
454 | + trace.warning('Failed for id_to_entry inv: %s' |
455 | + % (inv.revision_id,)) |
456 | + pid = inv.parent_id_basename_to_file_id |
457 | + d = dict(pid.iteritems()) |
458 | + test_key = chk_map.CHKMap.from_dict( |
459 | + vf, d, maximum_size=pid._root_node._maximum_size, |
460 | + key_width=pid._root_node._key_width, |
461 | + search_key_func=pid._search_key_func) |
462 | + if pid.key() != test_key: |
463 | + trace.warning('Failed for parent_id_to_basename inv: %s' |
464 | + % (inv.revision_id,)) |
465 | |
466 | === modified file 'breezy/reconcile.py' |
467 | --- breezy/reconcile.py 2019-01-01 21:56:21 +0000 |
468 | +++ breezy/reconcile.py 2019-06-06 21:52:30 +0000 |
469 | @@ -25,7 +25,6 @@ |
470 | |
471 | |
472 | from . import ( |
473 | - cleanup, |
474 | errors, |
475 | ui, |
476 | ) |
Merging failed: https://ci.breezy-vcs.org/job/brz/job/brz-land/312/