Merge lp:~jelmer/brz/move-reconcile-1 into lp:brz
- move-reconcile-1
- Merge into trunk
Proposed by
Jelmer Vernooij
Status: | Merged |
---|---|
Approved by: | Jelmer Vernooij |
Approved revision: | no longer in the source branch. |
Merge reported by: | The Breezy Bot |
Merged at revision: | not available |
Proposed branch: | lp:~jelmer/brz/move-reconcile-1 |
Merge into: | lp:brz |
Prerequisite: | lp:~jelmer/brz/python3.7 |
Diff against target: |
2182 lines (+682/-686) 31 files modified
breezy/annotate.py (+1/-1) breezy/branch.py (+5/-6) breezy/builtins.py (+4/-6) breezy/bzr/branch.py (+7/-0) breezy/bzr/groupcompress_repo.py (+2/-3) breezy/bzr/knitrepo.py (+2/-3) breezy/bzr/pack_repo.py (+2/-3) breezy/bzr/reconcile.py (+471/-0) breezy/bzr/remote.py (+12/-2) breezy/bzr/vf_repository.py (+9/-2) breezy/bzr/workingtree_4.py (+1/-1) breezy/commit.py (+1/-1) breezy/diff.py (+41/-54) breezy/git/branch.py (+6/-0) breezy/git/commit.py (+1/-1) breezy/git/repository.py (+4/-28) breezy/git/tests/test_blackbox.py (+15/-0) breezy/git/tree.py (+1/-1) breezy/plugins/fastimport/revision_store.py (+1/-1) breezy/reconcile.py (+23/-473) breezy/repository.py (+2/-6) breezy/shelf_ui.py (+2/-3) breezy/tests/per_branch/test_reconcile.py (+2/-2) breezy/tests/per_intertree/test_compare.py (+4/-2) breezy/tests/per_repository/test_commit_builder.py (+22/-45) breezy/tests/per_repository_vf/test_reconcile.py (+11/-12) breezy/tests/per_repository_vf/test_repository.py (+2/-1) breezy/tests/per_workingtree/test_parents.py (+2/-2) breezy/tests/test_diff.py (+11/-12) breezy/tests/test_foreign.py (+4/-4) breezy/tests/test_reconcile.py (+11/-11) |
To merge this branch: | bzr merge lp:~jelmer/brz/move-reconcile-1 |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Martin Packman | Approve | ||
Review via email: mp+359941@code.launchpad.net |
Commit message
Move bzr-specific reconcile bits to breezy.
Description of the change
Move bzr-specific reconcile bits to breezy.
To post a comment you must log in.
Revision history for this message
The Breezy Bot (the-breezy-bot) wrote : | # |
Running landing tests failed
https:/
Revision history for this message
The Breezy Bot (the-breezy-bot) wrote : | # |
Running landing tests failed
https:/
Revision history for this message
The Breezy Bot (the-breezy-bot) wrote : | # |
Running landing tests failed
https:/
Revision history for this message
The Breezy Bot (the-breezy-bot) wrote : | # |
Running landing tests failed
https:/
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'breezy/annotate.py' |
2 | --- breezy/annotate.py 2018-11-16 18:33:17 +0000 |
3 | +++ breezy/annotate.py 2019-01-01 23:11:42 +0000 |
4 | @@ -54,7 +54,7 @@ |
5 | |
6 | def annotate_file_tree(tree, path, to_file, verbose=False, full=False, |
7 | show_ids=False, branch=None): |
8 | - """Annotate file_id in a tree. |
9 | + """Annotate path in a tree. |
10 | |
11 | The tree should already be read_locked() when annotate_file_tree is called. |
12 | |
13 | |
14 | === modified file 'breezy/branch.py' |
15 | --- breezy/branch.py 2018-11-16 18:40:46 +0000 |
16 | +++ breezy/branch.py 2019-01-01 23:11:42 +0000 |
17 | @@ -1414,12 +1414,11 @@ |
18 | return tree |
19 | |
20 | def reconcile(self, thorough=True): |
21 | - """Make sure the data stored in this branch is consistent.""" |
22 | - from breezy.reconcile import BranchReconciler |
23 | - with self.lock_write(): |
24 | - reconciler = BranchReconciler(self, thorough=thorough) |
25 | - reconciler.reconcile() |
26 | - return reconciler |
27 | + """Make sure the data stored in this branch is consistent. |
28 | + |
29 | + :return: A `ReconcileResult` object. |
30 | + """ |
31 | + raise NotImplementedError(self.reconcile) |
32 | |
33 | def reference_parent(self, path, file_id=None, possible_transports=None): |
34 | """Return the parent branch for a tree-reference file_id |
35 | |
36 | === modified file 'breezy/builtins.py' |
37 | --- breezy/builtins.py 2019-01-01 21:53:56 +0000 |
38 | +++ breezy/builtins.py 2019-01-01 23:11:42 +0000 |
39 | @@ -5224,17 +5224,15 @@ |
40 | tree = _get_one_revision_tree('annotate', revision, branch=branch) |
41 | self.add_cleanup(tree.lock_read().unlock) |
42 | if wt is not None and revision is None: |
43 | - file_id = wt.path2id(relpath) |
44 | - else: |
45 | - file_id = tree.path2id(relpath) |
46 | - if file_id is None: |
47 | - raise errors.NotVersionedError(filename) |
48 | - if wt is not None and revision is None: |
49 | + if not wt.is_versioned(relpath): |
50 | + raise errors.NotVersionedError(relpath) |
51 | # If there is a tree and we're not annotating historical |
52 | # versions, annotate the working tree's content. |
53 | annotate_file_tree(wt, relpath, self.outf, long, all, |
54 | show_ids=show_ids) |
55 | else: |
56 | + if not tree.is_versioned(relpath): |
57 | + raise errors.NotVersionedError(relpath) |
58 | annotate_file_tree(tree, relpath, self.outf, long, all, |
59 | show_ids=show_ids, branch=branch) |
60 | |
61 | |
62 | === modified file 'breezy/bzr/branch.py' |
63 | --- breezy/bzr/branch.py 2018-11-11 15:40:12 +0000 |
64 | +++ breezy/bzr/branch.py 2019-01-01 23:11:42 +0000 |
65 | @@ -443,6 +443,13 @@ |
66 | super(BzrBranch, self)._clear_cached_state() |
67 | self._tags_bytes = None |
68 | |
69 | + def reconcile(self, thorough=True): |
70 | + """Make sure the data stored in this branch is consistent.""" |
71 | + from .reconcile import BranchReconciler |
72 | + with self.lock_write(): |
73 | + reconciler = BranchReconciler(self, thorough=thorough) |
74 | + return reconciler.reconcile() |
75 | + |
76 | |
77 | class BzrBranch8(BzrBranch): |
78 | """A branch that stores tree-reference locations.""" |
79 | |
80 | === modified file 'breezy/bzr/groupcompress_repo.py' |
81 | --- breezy/bzr/groupcompress_repo.py 2018-11-11 04:08:32 +0000 |
82 | +++ breezy/bzr/groupcompress_repo.py 2019-01-01 23:11:42 +0000 |
83 | @@ -1098,12 +1098,11 @@ |
84 | """Reconcile this repository to make sure all CHKs are in canonical |
85 | form. |
86 | """ |
87 | - from breezy.reconcile import PackReconciler |
88 | + from .reconcile import PackReconciler |
89 | with self.lock_write(): |
90 | reconciler = PackReconciler( |
91 | self, thorough=True, canonicalize_chks=True) |
92 | - reconciler.reconcile() |
93 | - return reconciler |
94 | + return reconciler.reconcile() |
95 | |
96 | def _reconcile_pack(self, collection, packs, extension, revs, pb): |
97 | packer = GCCHKReconcilePacker(collection, packs, extension) |
98 | |
99 | === modified file 'breezy/bzr/knitrepo.py' |
100 | --- breezy/bzr/knitrepo.py 2018-11-11 04:08:32 +0000 |
101 | +++ breezy/bzr/knitrepo.py 2019-01-01 23:11:42 +0000 |
102 | @@ -206,11 +206,10 @@ |
103 | |
104 | def reconcile(self, other=None, thorough=False): |
105 | """Reconcile this repository.""" |
106 | - from breezy.reconcile import KnitReconciler |
107 | + from .reconcile import KnitReconciler |
108 | with self.lock_write(): |
109 | reconciler = KnitReconciler(self, thorough=thorough) |
110 | - reconciler.reconcile() |
111 | - return reconciler |
112 | + return reconciler.reconcile() |
113 | |
114 | def _make_parents_provider(self): |
115 | return _KnitsParentsProvider(self.revisions) |
116 | |
117 | === modified file 'breezy/bzr/pack_repo.py' |
118 | --- breezy/bzr/pack_repo.py 2018-11-12 01:41:38 +0000 |
119 | +++ breezy/bzr/pack_repo.py 2019-01-01 23:11:42 +0000 |
120 | @@ -1819,11 +1819,10 @@ |
121 | |
122 | def reconcile(self, other=None, thorough=False): |
123 | """Reconcile this repository.""" |
124 | - from breezy.reconcile import PackReconciler |
125 | + from .reconcile import PackReconciler |
126 | with self.lock_write(): |
127 | reconciler = PackReconciler(self, thorough=thorough) |
128 | - reconciler.reconcile() |
129 | - return reconciler |
130 | + return reconciler.reconcile() |
131 | |
132 | def _reconcile_pack(self, collection, packs, extension, revs, pb): |
133 | raise NotImplementedError(self._reconcile_pack) |
134 | |
135 | === added file 'breezy/bzr/reconcile.py' |
136 | --- breezy/bzr/reconcile.py 1970-01-01 00:00:00 +0000 |
137 | +++ breezy/bzr/reconcile.py 2019-01-01 23:11:42 +0000 |
138 | @@ -0,0 +1,471 @@ |
139 | +# Copyright (C) 2006-2010 Canonical Ltd |
140 | +# |
141 | +# This program is free software; you can redistribute it and/or modify |
142 | +# it under the terms of the GNU General Public License as published by |
143 | +# the Free Software Foundation; either version 2 of the License, or |
144 | +# (at your option) any later version. |
145 | +# |
146 | +# This program is distributed in the hope that it will be useful, |
147 | +# but WITHOUT ANY WARRANTY; without even the implied warranty of |
148 | +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
149 | +# GNU General Public License for more details. |
150 | +# |
151 | +# You should have received a copy of the GNU General Public License |
152 | +# along with this program; if not, write to the Free Software |
153 | +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA |
154 | + |
155 | +"""Reconcilers are able to fix some potential data errors in a branch.""" |
156 | + |
157 | +from __future__ import absolute_import |
158 | + |
159 | +__all__ = [ |
160 | + 'BranchReconciler', |
161 | + 'KnitReconciler', |
162 | + 'PackReconciler', |
163 | + 'VersionedFileRepoReconciler', |
164 | + ] |
165 | + |
166 | +from .. import ( |
167 | + cleanup, |
168 | + errors, |
169 | + revision as _mod_revision, |
170 | + ui, |
171 | + ) |
172 | +from ..reconcile import ReconcileResult |
173 | +from ..i18n import gettext |
174 | +from ..trace import mutter |
175 | +from ..tsort import topo_sort |
176 | +from .versionedfile import AdapterFactory, FulltextContentFactory |
177 | + |
178 | + |
179 | +class VersionedFileRepoReconciler(object): |
180 | + """Reconciler that reconciles a repository. |
181 | + |
182 | + The goal of repository reconciliation is to make any derived data |
183 | + consistent with the core data committed by a user. This can involve |
184 | + reindexing, or removing unreferenced data if that can interfere with |
185 | + queries in a given repository. |
186 | + |
187 | + Currently this consists of an inventory reweave with revision cross-checks. |
188 | + """ |
189 | + |
190 | + def __init__(self, repo, other=None, thorough=False): |
191 | + """Construct a RepoReconciler. |
192 | + |
193 | + :param thorough: perform a thorough check which may take longer but |
194 | + will correct non-data loss issues such as incorrect |
195 | + cached data. |
196 | + """ |
197 | + self.garbage_inventories = 0 |
198 | + self.inconsistent_parents = 0 |
199 | + self.aborted = False |
200 | + self.repo = repo |
201 | + self.thorough = thorough |
202 | + |
203 | + def reconcile(self): |
204 | + """Perform reconciliation. |
205 | + |
206 | + After reconciliation the following attributes document found issues: |
207 | + |
208 | + * `inconsistent_parents`: The number of revisions in the repository |
209 | + whose ancestry was being reported incorrectly. |
210 | + * `garbage_inventories`: The number of inventory objects without |
211 | + revisions that were garbage collected. |
212 | + """ |
213 | + with self.repo.lock_write(), \ |
214 | + ui.ui_factory.nested_progress_bar() as self.pb: |
215 | + self._reconcile_steps() |
216 | + ret = ReconcileResult() |
217 | + ret.aborted = self.aborted |
218 | + ret.garbage_inventories = self.garbage_inventories |
219 | + ret.inconsistent_parents = self.inconsistent_parents |
220 | + return ret |
221 | + |
222 | + def _reconcile_steps(self): |
223 | + """Perform the steps to reconcile this repository.""" |
224 | + self._reweave_inventory() |
225 | + |
226 | + def _reweave_inventory(self): |
227 | + """Regenerate the inventory weave for the repository from scratch. |
228 | + |
229 | + This is a smart function: it will only do the reweave if doing it |
230 | + will correct data issues. The self.thorough flag controls whether |
231 | + only data-loss causing issues (!self.thorough) or all issues |
232 | + (self.thorough) are treated as requiring the reweave. |
233 | + """ |
234 | + transaction = self.repo.get_transaction() |
235 | + self.pb.update(gettext('Reading inventory data')) |
236 | + self.inventory = self.repo.inventories |
237 | + self.revisions = self.repo.revisions |
238 | + # the total set of revisions to process |
239 | + self.pending = {key[-1] for key in self.revisions.keys()} |
240 | + |
241 | + # mapping from revision_id to parents |
242 | + self._rev_graph = {} |
243 | + # errors that we detect |
244 | + self.inconsistent_parents = 0 |
245 | + # we need the revision id of each revision and its available parents list |
246 | + self._setup_steps(len(self.pending)) |
247 | + for rev_id in self.pending: |
248 | + # put a revision into the graph. |
249 | + self._graph_revision(rev_id) |
250 | + self._check_garbage_inventories() |
251 | + # if there are no inconsistent_parents and |
252 | + # (no garbage inventories or we are not doing a thorough check) |
253 | + if (not self.inconsistent_parents |
254 | + and (not self.garbage_inventories or not self.thorough)): |
255 | + ui.ui_factory.note(gettext('Inventory ok.')) |
256 | + return |
257 | + self.pb.update(gettext('Backing up inventory'), 0, 0) |
258 | + self.repo._backup_inventory() |
259 | + ui.ui_factory.note(gettext('Backup inventory created.')) |
260 | + new_inventories = self.repo._temp_inventories() |
261 | + |
262 | + # we have topological order of revisions and non ghost parents ready. |
263 | + self._setup_steps(len(self._rev_graph)) |
264 | + revision_keys = [(rev_id,) for rev_id in topo_sort(self._rev_graph)] |
265 | + stream = self._change_inv_parents( |
266 | + self.inventory.get_record_stream(revision_keys, 'unordered', True), |
267 | + self._new_inv_parents, |
268 | + set(revision_keys)) |
269 | + new_inventories.insert_record_stream(stream) |
270 | + # if this worked, the set of new_inventories.keys should equal |
271 | + # self.pending |
272 | + if not (set(new_inventories.keys()) |
273 | + == {(revid,) for revid in self.pending}): |
274 | + raise AssertionError() |
275 | + self.pb.update(gettext('Writing weave')) |
276 | + self.repo._activate_new_inventory() |
277 | + self.inventory = None |
278 | + ui.ui_factory.note(gettext('Inventory regenerated.')) |
279 | + |
280 | + def _new_inv_parents(self, revision_key): |
281 | + """Lookup ghost-filtered parents for revision_key.""" |
282 | + # Use the filtered ghostless parents list: |
283 | + return tuple([(revid,) for revid in self._rev_graph[revision_key[-1]]]) |
284 | + |
285 | + def _change_inv_parents(self, stream, get_parents, all_revision_keys): |
286 | + """Adapt a record stream to reconcile the parents.""" |
287 | + for record in stream: |
288 | + wanted_parents = get_parents(record.key) |
289 | + if wanted_parents and wanted_parents[0] not in all_revision_keys: |
290 | + # The check for the left most parent only handles knit |
291 | + # compressors, but this code only applies to knit and weave |
292 | + # repositories anyway. |
293 | + bytes = record.get_bytes_as('fulltext') |
294 | + yield FulltextContentFactory(record.key, wanted_parents, record.sha1, bytes) |
295 | + else: |
296 | + adapted_record = AdapterFactory( |
297 | + record.key, wanted_parents, record) |
298 | + yield adapted_record |
299 | + self._reweave_step('adding inventories') |
300 | + |
301 | + def _setup_steps(self, new_total): |
302 | + """Setup the markers we need to control the progress bar.""" |
303 | + self.total = new_total |
304 | + self.count = 0 |
305 | + |
306 | + def _graph_revision(self, rev_id): |
307 | + """Load a revision into the revision graph.""" |
308 | + # pick a random revision |
309 | + # analyse revision id rev_id and put it in the stack. |
310 | + self._reweave_step('loading revisions') |
311 | + rev = self.repo.get_revision_reconcile(rev_id) |
312 | + parents = [] |
313 | + for parent in rev.parent_ids: |
314 | + if self._parent_is_available(parent): |
315 | + parents.append(parent) |
316 | + else: |
317 | + mutter('found ghost %s', parent) |
318 | + self._rev_graph[rev_id] = parents |
319 | + |
320 | + def _check_garbage_inventories(self): |
321 | + """Check for garbage inventories which we cannot trust |
322 | + |
323 | + We cant trust them because their pre-requisite file data may not |
324 | + be present - all we know is that their revision was not installed. |
325 | + """ |
326 | + if not self.thorough: |
327 | + return |
328 | + inventories = set(self.inventory.keys()) |
329 | + revisions = set(self.revisions.keys()) |
330 | + garbage = inventories.difference(revisions) |
331 | + self.garbage_inventories = len(garbage) |
332 | + for revision_key in garbage: |
333 | + mutter('Garbage inventory {%s} found.', revision_key[-1]) |
334 | + |
335 | + def _parent_is_available(self, parent): |
336 | + """True if parent is a fully available revision |
337 | + |
338 | + A fully available revision has a inventory and a revision object in the |
339 | + repository. |
340 | + """ |
341 | + if parent in self._rev_graph: |
342 | + return True |
343 | + inv_present = (1 == len(self.inventory.get_parent_map([(parent,)]))) |
344 | + return (inv_present and self.repo.has_revision(parent)) |
345 | + |
346 | + def _reweave_step(self, message): |
347 | + """Mark a single step of regeneration complete.""" |
348 | + self.pb.update(message, self.count, self.total) |
349 | + self.count += 1 |
350 | + |
351 | + |
352 | +class KnitReconciler(VersionedFileRepoReconciler): |
353 | + """Reconciler that reconciles a knit format repository. |
354 | + |
355 | + This will detect garbage inventories and remove them in thorough mode. |
356 | + """ |
357 | + |
358 | + def _reconcile_steps(self): |
359 | + """Perform the steps to reconcile this repository.""" |
360 | + if self.thorough: |
361 | + try: |
362 | + self._load_indexes() |
363 | + except errors.BzrCheckError: |
364 | + self.aborted = True |
365 | + return |
366 | + # knits never suffer this |
367 | + self._gc_inventory() |
368 | + self._fix_text_parents() |
369 | + |
370 | + def _load_indexes(self): |
371 | + """Load indexes for the reconciliation.""" |
372 | + self.transaction = self.repo.get_transaction() |
373 | + self.pb.update(gettext('Reading indexes'), 0, 2) |
374 | + self.inventory = self.repo.inventories |
375 | + self.pb.update(gettext('Reading indexes'), 1, 2) |
376 | + self.repo._check_for_inconsistent_revision_parents() |
377 | + self.revisions = self.repo.revisions |
378 | + self.pb.update(gettext('Reading indexes'), 2, 2) |
379 | + |
380 | + def _gc_inventory(self): |
381 | + """Remove inventories that are not referenced from the revision store.""" |
382 | + self.pb.update(gettext('Checking unused inventories'), 0, 1) |
383 | + self._check_garbage_inventories() |
384 | + self.pb.update(gettext('Checking unused inventories'), 1, 3) |
385 | + if not self.garbage_inventories: |
386 | + ui.ui_factory.note(gettext('Inventory ok.')) |
387 | + return |
388 | + self.pb.update(gettext('Backing up inventory'), 0, 0) |
389 | + self.repo._backup_inventory() |
390 | + ui.ui_factory.note(gettext('Backup Inventory created')) |
391 | + # asking for '' should never return a non-empty weave |
392 | + new_inventories = self.repo._temp_inventories() |
393 | + # we have topological order of revisions and non ghost parents ready. |
394 | + graph = self.revisions.get_parent_map(self.revisions.keys()) |
395 | + revision_keys = topo_sort(graph) |
396 | + revision_ids = [key[-1] for key in revision_keys] |
397 | + self._setup_steps(len(revision_keys)) |
398 | + stream = self._change_inv_parents( |
399 | + self.inventory.get_record_stream(revision_keys, 'unordered', True), |
400 | + graph.__getitem__, |
401 | + set(revision_keys)) |
402 | + new_inventories.insert_record_stream(stream) |
403 | + # if this worked, the set of new_inventory_vf.names should equal |
404 | + # the revisionds list |
405 | + if not(set(new_inventories.keys()) == set(revision_keys)): |
406 | + raise AssertionError() |
407 | + self.pb.update(gettext('Writing weave')) |
408 | + self.repo._activate_new_inventory() |
409 | + self.inventory = None |
410 | + ui.ui_factory.note(gettext('Inventory regenerated.')) |
411 | + |
412 | + def _fix_text_parents(self): |
413 | + """Fix bad versionedfile parent entries. |
414 | + |
415 | + It is possible for the parents entry in a versionedfile entry to be |
416 | + inconsistent with the values in the revision and inventory. |
417 | + |
418 | + This method finds entries with such inconsistencies, corrects their |
419 | + parent lists, and replaces the versionedfile with a corrected version. |
420 | + """ |
421 | + transaction = self.repo.get_transaction() |
422 | + versions = [key[-1] for key in self.revisions.keys()] |
423 | + mutter('Prepopulating revision text cache with %d revisions', |
424 | + len(versions)) |
425 | + vf_checker = self.repo._get_versioned_file_checker() |
426 | + bad_parents, unused_versions = vf_checker.check_file_version_parents( |
427 | + self.repo.texts, self.pb) |
428 | + text_index = vf_checker.text_index |
429 | + per_id_bad_parents = {} |
430 | + for key in unused_versions: |
431 | + # Ensure that every file with unused versions gets rewritten. |
432 | + # NB: This is really not needed, reconcile != pack. |
433 | + per_id_bad_parents[key[0]] = {} |
434 | + # Generate per-knit/weave data. |
435 | + for key, details in bad_parents.items(): |
436 | + file_id = key[0] |
437 | + rev_id = key[1] |
438 | + knit_parents = tuple([parent[-1] for parent in details[0]]) |
439 | + correct_parents = tuple([parent[-1] for parent in details[1]]) |
440 | + file_details = per_id_bad_parents.setdefault(file_id, {}) |
441 | + file_details[rev_id] = (knit_parents, correct_parents) |
442 | + file_id_versions = {} |
443 | + for text_key in text_index: |
444 | + versions_list = file_id_versions.setdefault(text_key[0], []) |
445 | + versions_list.append(text_key[1]) |
446 | + # Do the reconcile of individual weaves. |
447 | + for num, file_id in enumerate(per_id_bad_parents): |
448 | + self.pb.update(gettext('Fixing text parents'), num, |
449 | + len(per_id_bad_parents)) |
450 | + versions_with_bad_parents = per_id_bad_parents[file_id] |
451 | + id_unused_versions = set(key[-1] for key in unused_versions |
452 | + if key[0] == file_id) |
453 | + if file_id in file_id_versions: |
454 | + file_versions = file_id_versions[file_id] |
455 | + else: |
456 | + # This id was present in the disk store but is not referenced |
457 | + # by any revision at all. |
458 | + file_versions = [] |
459 | + self._fix_text_parent(file_id, versions_with_bad_parents, |
460 | + id_unused_versions, file_versions) |
461 | + |
462 | + def _fix_text_parent(self, file_id, versions_with_bad_parents, |
463 | + unused_versions, all_versions): |
464 | + """Fix bad versionedfile entries in a single versioned file.""" |
465 | + mutter('fixing text parent: %r (%d versions)', file_id, |
466 | + len(versions_with_bad_parents)) |
467 | + mutter('(%d are unused)', len(unused_versions)) |
468 | + new_file_id = b'temp:%s' % file_id |
469 | + new_parents = {} |
470 | + needed_keys = set() |
471 | + for version in all_versions: |
472 | + if version in unused_versions: |
473 | + continue |
474 | + elif version in versions_with_bad_parents: |
475 | + parents = versions_with_bad_parents[version][1] |
476 | + else: |
477 | + pmap = self.repo.texts.get_parent_map([(file_id, version)]) |
478 | + parents = [key[-1] for key in pmap[(file_id, version)]] |
479 | + new_parents[(new_file_id, version)] = [ |
480 | + (new_file_id, parent) for parent in parents] |
481 | + needed_keys.add((file_id, version)) |
482 | + |
483 | + def fix_parents(stream): |
484 | + for record in stream: |
485 | + bytes = record.get_bytes_as('fulltext') |
486 | + new_key = (new_file_id, record.key[-1]) |
487 | + parents = new_parents[new_key] |
488 | + yield FulltextContentFactory(new_key, parents, record.sha1, bytes) |
489 | + stream = self.repo.texts.get_record_stream( |
490 | + needed_keys, 'topological', True) |
491 | + self.repo._remove_file_id(new_file_id) |
492 | + self.repo.texts.insert_record_stream(fix_parents(stream)) |
493 | + self.repo._remove_file_id(file_id) |
494 | + if len(new_parents): |
495 | + self.repo._move_file_id(new_file_id, file_id) |
496 | + |
497 | + |
498 | +class PackReconciler(VersionedFileRepoReconciler): |
499 | + """Reconciler that reconciles a pack based repository. |
500 | + |
501 | + Garbage inventories do not affect ancestry queries, and removal is |
502 | + considerably more expensive as there is no separate versioned file for |
503 | + them, so they are not cleaned. In short it is currently a no-op. |
504 | + |
505 | + In future this may be a good place to hook in annotation cache checking, |
506 | + index recreation etc. |
507 | + """ |
508 | + |
509 | + # XXX: The index corruption that _fix_text_parents performs is needed for |
510 | + # packs, but not yet implemented. The basic approach is to: |
511 | + # - lock the names list |
512 | + # - perform a customised pack() that regenerates data as needed |
513 | + # - unlock the names list |
514 | + # https://bugs.launchpad.net/bzr/+bug/154173 |
515 | + |
516 | + def __init__(self, repo, other=None, thorough=False, |
517 | + canonicalize_chks=False): |
518 | + super(PackReconciler, self).__init__(repo, other=other, |
519 | + thorough=thorough) |
520 | + self.canonicalize_chks = canonicalize_chks |
521 | + |
522 | + def _reconcile_steps(self): |
523 | + """Perform the steps to reconcile this repository.""" |
524 | + if not self.thorough: |
525 | + return |
526 | + collection = self.repo._pack_collection |
527 | + collection.ensure_loaded() |
528 | + collection.lock_names() |
529 | + try: |
530 | + packs = collection.all_packs() |
531 | + all_revisions = self.repo.all_revision_ids() |
532 | + total_inventories = len(list( |
533 | + collection.inventory_index.combined_index.iter_all_entries())) |
534 | + if len(all_revisions): |
535 | + if self.canonicalize_chks: |
536 | + reconcile_meth = self.repo._canonicalize_chks_pack |
537 | + else: |
538 | + reconcile_meth = self.repo._reconcile_pack |
539 | + new_pack = reconcile_meth(collection, packs, ".reconcile", |
540 | + all_revisions, self.pb) |
541 | + if new_pack is not None: |
542 | + self._discard_and_save(packs) |
543 | + else: |
544 | + # only make a new pack when there is data to copy. |
545 | + self._discard_and_save(packs) |
546 | + self.garbage_inventories = total_inventories - len(list( |
547 | + collection.inventory_index.combined_index.iter_all_entries())) |
548 | + finally: |
549 | + collection._unlock_names() |
550 | + |
551 | + def _discard_and_save(self, packs): |
552 | + """Discard some packs from the repository. |
553 | + |
554 | + This removes them from the memory index, saves the in-memory index |
555 | + which makes the newly reconciled pack visible and hides the packs to be |
556 | + discarded, and finally renames the packs being discarded into the |
557 | + obsolete packs directory. |
558 | + |
559 | + :param packs: The packs to discard. |
560 | + """ |
561 | + for pack in packs: |
562 | + self.repo._pack_collection._remove_pack_from_memory(pack) |
563 | + self.repo._pack_collection._save_pack_names() |
564 | + self.repo._pack_collection._obsolete_packs(packs) |
565 | + |
566 | + |
567 | +class BranchReconciler(object): |
568 | + """Reconciler that works on a branch.""" |
569 | + |
570 | + def __init__(self, a_branch, thorough=False): |
571 | + self.fixed_history = None |
572 | + self.thorough = thorough |
573 | + self.branch = a_branch |
574 | + |
575 | + def reconcile(self): |
576 | + with self.branch.lock_write(), \ |
577 | + ui.ui_factory.nested_progress_bar() as self.pb: |
578 | + ret = ReconcileResult() |
579 | + ret.fixed_history = self._reconcile_steps() |
580 | + return ret |
581 | + |
582 | + def _reconcile_steps(self): |
583 | + return self._reconcile_revision_history() |
584 | + |
585 | + def _reconcile_revision_history(self): |
586 | + last_revno, last_revision_id = self.branch.last_revision_info() |
587 | + real_history = [] |
588 | + graph = self.branch.repository.get_graph() |
589 | + try: |
590 | + for revid in graph.iter_lefthand_ancestry( |
591 | + last_revision_id, (_mod_revision.NULL_REVISION,)): |
592 | + real_history.append(revid) |
593 | + except errors.RevisionNotPresent: |
594 | + pass # Hit a ghost left hand parent |
595 | + real_history.reverse() |
596 | + if last_revno != len(real_history): |
597 | + # Technically for Branch5 formats, it is more efficient to use |
598 | + # set_revision_history, as this will regenerate it again. |
599 | + # Not really worth a whole BranchReconciler class just for this, |
600 | + # though. |
601 | + ui.ui_factory.note(gettext('Fixing last revision info {0} ' |
602 | + ' => {1}').format( |
603 | + last_revno, len(real_history))) |
604 | + self.branch.set_last_revision_info(len(real_history), |
605 | + last_revision_id) |
606 | + return True |
607 | + else: |
608 | + ui.ui_factory.note(gettext('revision_history ok.')) |
609 | + return False |
610 | |
611 | === modified file 'breezy/bzr/remote.py' |
612 | --- breezy/bzr/remote.py 2019-01-01 21:23:40 +0000 |
613 | +++ breezy/bzr/remote.py 2019-01-01 23:11:42 +0000 |
614 | @@ -2474,7 +2474,7 @@ |
615 | return self._real_repository._get_inventory_xml(revision_id) |
616 | |
617 | def reconcile(self, other=None, thorough=False): |
618 | - from ..reconcile import RepoReconciler |
619 | + from ..reconcile import ReconcileResult |
620 | with self.lock_write(): |
621 | path = self.controldir._path_for_remote_call(self._client) |
622 | try: |
623 | @@ -2486,7 +2486,10 @@ |
624 | if response != (b'ok', ): |
625 | raise errors.UnexpectedSmartServerResponse(response) |
626 | body = handler.read_body_bytes() |
627 | - result = RepoReconciler(self) |
628 | + result = ReconcileResult() |
629 | + result.garbage_inventories = None |
630 | + result.inconsistent_parents = None |
631 | + result.aborted = None |
632 | for line in body.split(b'\n'): |
633 | if not line: |
634 | continue |
635 | @@ -4139,6 +4142,13 @@ |
636 | self._ensure_real() |
637 | return self._real_branch.heads_to_fetch() |
638 | |
639 | + def reconcile(self, thorough=True): |
640 | + """Make sure the data stored in this branch is consistent.""" |
641 | + from .reconcile import BranchReconciler |
642 | + with self.lock_write(): |
643 | + reconciler = BranchReconciler(self, thorough=thorough) |
644 | + return reconciler.reconcile() |
645 | + |
646 | |
647 | class RemoteConfig(object): |
648 | """A Config that reads and writes from smart verbs. |
649 | |
650 | === modified file 'breezy/bzr/vf_repository.py' |
651 | --- breezy/bzr/vf_repository.py 2018-11-29 23:42:41 +0000 |
652 | +++ breezy/bzr/vf_repository.py 2019-01-01 23:11:42 +0000 |
653 | @@ -294,7 +294,7 @@ |
654 | or errored-on before record_iter_changes sees the item. |
655 | :param _entry_factory: Private method to bind entry_factory locally for |
656 | performance. |
657 | - :return: A generator of (file_id, relpath, fs_hash) tuples for use with |
658 | + :return: A generator of (relpath, fs_hash) tuples for use with |
659 | tree._observed_sha1. |
660 | """ |
661 | # Create an inventory delta based on deltas between all the parents and |
662 | @@ -487,7 +487,7 @@ |
663 | try: |
664 | entry.text_sha1, entry.text_size = self._add_file_to_weave( |
665 | file_id, file_obj, heads, nostore_sha) |
666 | - yield file_id, change[1][1], (entry.text_sha1, stat_value) |
667 | + yield change[1][1], (entry.text_sha1, stat_value) |
668 | except errors.ExistingContent: |
669 | # No content change against a carry_over parent |
670 | # Perhaps this should also yield a fs hash update? |
671 | @@ -1737,6 +1737,13 @@ |
672 | """Return a source for streaming from this repository.""" |
673 | return StreamSource(self, to_format) |
674 | |
675 | + def reconcile(self, other=None, thorough=False): |
676 | + """Reconcile this repository.""" |
677 | + from .reconcile import VersionedFileRepoReconciler |
678 | + with self.lock_write(): |
679 | + reconciler = VersionedFileRepoReconciler(self, thorough=thorough) |
680 | + return reconciler.reconcile() |
681 | + |
682 | |
683 | class MetaDirVersionedFileRepository(MetaDirRepository, |
684 | VersionedFileRepository): |
685 | |
686 | === modified file 'breezy/bzr/workingtree_4.py' |
687 | --- breezy/bzr/workingtree_4.py 2018-11-21 03:20:30 +0000 |
688 | +++ breezy/bzr/workingtree_4.py 2019-01-01 23:11:42 +0000 |
689 | @@ -1935,7 +1935,7 @@ |
690 | for path, identifier in desired_files: |
691 | entry = self._get_entry(path=path) |
692 | if entry == (None, None): |
693 | - raise errors.NoSuchFile(self, path) |
694 | + raise errors.NoSuchFile(path) |
695 | repo_desired_files.append((entry[0][2], entry[1][parent_index][4], |
696 | identifier)) |
697 | return self._repository.iter_files_bytes(repo_desired_files) |
698 | |
699 | === modified file 'breezy/commit.py' |
700 | --- breezy/commit.py 2018-11-16 23:15:15 +0000 |
701 | +++ breezy/commit.py 2019-01-01 23:11:42 +0000 |
702 | @@ -681,7 +681,7 @@ |
703 | if self.exclude: |
704 | iter_changes = filter_excluded(iter_changes, self.exclude) |
705 | iter_changes = self._filter_iter_changes(iter_changes) |
706 | - for file_id, path, fs_hash in self.builder.record_iter_changes( |
707 | + for path, fs_hash in self.builder.record_iter_changes( |
708 | self.work_tree, self.basis_revid, iter_changes): |
709 | self.work_tree._observed_sha1(path, fs_hash) |
710 | |
711 | |
712 | === modified file 'breezy/diff.py' |
713 | --- breezy/diff.py 2018-11-18 19:48:57 +0000 |
714 | +++ breezy/diff.py 2019-01-01 23:11:42 +0000 |
715 | @@ -477,7 +477,7 @@ |
716 | tree.unlock() |
717 | |
718 | |
719 | -def _patch_header_date(tree, file_id, path): |
720 | +def _patch_header_date(tree, path): |
721 | """Returns a timestamp suitable for use in a patch header.""" |
722 | try: |
723 | mtime = tree.get_file_mtime(path) |
724 | @@ -526,10 +526,9 @@ |
725 | diff_tree.to_file, diff_tree.path_encoding) |
726 | |
727 | @staticmethod |
728 | - def _diff_many(differs, file_id, old_path, new_path, old_kind, new_kind): |
729 | + def _diff_many(differs, old_path, new_path, old_kind, new_kind): |
730 | for file_differ in differs: |
731 | - result = file_differ.diff(file_id, old_path, new_path, old_kind, |
732 | - new_kind) |
733 | + result = file_differ.diff(old_path, new_path, old_kind, new_kind) |
734 | if result is not DiffPath.CANNOT_DIFF: |
735 | return result |
736 | else: |
737 | @@ -553,10 +552,9 @@ |
738 | def from_diff_tree(klass, diff_tree): |
739 | return klass(diff_tree.differs) |
740 | |
741 | - def diff(self, file_id, old_path, new_path, old_kind, new_kind): |
742 | + def diff(self, old_path, new_path, old_kind, new_kind): |
743 | """Perform comparison |
744 | |
745 | - :param file_id: The file_id of the file to compare |
746 | :param old_path: Path of the file in the old tree |
747 | :param new_path: Path of the file in the new tree |
748 | :param old_kind: Old file-kind of the file |
749 | @@ -564,17 +562,17 @@ |
750 | """ |
751 | if None in (old_kind, new_kind): |
752 | return DiffPath.CANNOT_DIFF |
753 | - result = DiffPath._diff_many(self.differs, file_id, old_path, |
754 | - new_path, old_kind, None) |
755 | + result = DiffPath._diff_many( |
756 | + self.differs, old_path, new_path, old_kind, None) |
757 | if result is DiffPath.CANNOT_DIFF: |
758 | return result |
759 | - return DiffPath._diff_many(self.differs, file_id, old_path, new_path, |
760 | - None, new_kind) |
761 | + return DiffPath._diff_many( |
762 | + self.differs, old_path, new_path, None, new_kind) |
763 | |
764 | |
765 | class DiffDirectory(DiffPath): |
766 | |
767 | - def diff(self, file_id, old_path, new_path, old_kind, new_kind): |
768 | + def diff(self, old_path, new_path, old_kind, new_kind): |
769 | """Perform comparison between two directories. (dummy) |
770 | |
771 | """ |
772 | @@ -589,10 +587,9 @@ |
773 | |
774 | class DiffSymlink(DiffPath): |
775 | |
776 | - def diff(self, file_id, old_path, new_path, old_kind, new_kind): |
777 | + def diff(self, old_path, new_path, old_kind, new_kind): |
778 | """Perform comparison between two symlinks |
779 | |
780 | - :param file_id: The file_id of the file to compare |
781 | :param old_path: Path of the file in the old tree |
782 | :param new_path: Path of the file in the new tree |
783 | :param old_kind: Old file-kind of the file |
784 | @@ -644,10 +641,9 @@ |
785 | self.path_encoding = path_encoding |
786 | self.context_lines = context_lines |
787 | |
788 | - def diff(self, file_id, old_path, new_path, old_kind, new_kind): |
789 | + def diff(self, old_path, new_path, old_kind, new_kind): |
790 | """Compare two files in unified diff format |
791 | |
792 | - :param file_id: The file_id of the file to compare |
793 | :param old_path: Path of the file in the old tree |
794 | :param new_path: Path of the file in the new tree |
795 | :param old_kind: Old file-kind of the file |
796 | @@ -655,30 +651,25 @@ |
797 | """ |
798 | if 'file' not in (old_kind, new_kind): |
799 | return self.CANNOT_DIFF |
800 | - from_file_id = to_file_id = file_id |
801 | if old_kind == 'file': |
802 | - old_date = _patch_header_date(self.old_tree, file_id, old_path) |
803 | + old_date = _patch_header_date(self.old_tree, old_path) |
804 | elif old_kind is None: |
805 | old_date = self.EPOCH_DATE |
806 | - from_file_id = None |
807 | else: |
808 | return self.CANNOT_DIFF |
809 | if new_kind == 'file': |
810 | - new_date = _patch_header_date(self.new_tree, file_id, new_path) |
811 | + new_date = _patch_header_date(self.new_tree, new_path) |
812 | elif new_kind is None: |
813 | new_date = self.EPOCH_DATE |
814 | - to_file_id = None |
815 | else: |
816 | return self.CANNOT_DIFF |
817 | from_label = '%s%s\t%s' % (self.old_label, old_path, |
818 | old_date) |
819 | to_label = '%s%s\t%s' % (self.new_label, new_path, |
820 | new_date) |
821 | - return self.diff_text(old_path, new_path, from_label, to_label, |
822 | - from_file_id, to_file_id) |
823 | + return self.diff_text(old_path, new_path, from_label, to_label) |
824 | |
825 | - def diff_text(self, from_path, to_path, from_label, to_label, |
826 | - from_file_id=None, to_file_id=None): |
827 | + def diff_text(self, from_path, to_path, from_label, to_label): |
828 | """Diff the content of given files in two trees |
829 | |
830 | :param from_path: The path in the from tree. If None, |
831 | @@ -686,18 +677,17 @@ |
832 | :param to_path: The path in the to tree. This may refer |
833 | to a different file from from_path. If None, |
834 | the file is not present in the to tree. |
835 | - :param from_file_id: The id of the file in the from tree or None if |
836 | - unknown. |
837 | - :param to_file_id: The id of the file in the to tree or None if |
838 | - unknown. |
839 | """ |
840 | - def _get_text(tree, file_id, path): |
841 | - if file_id is None: |
842 | - return [] |
843 | - return tree.get_file_lines(path) |
844 | + def _get_text(tree, path): |
845 | + if path is None: |
846 | + return [] |
847 | + try: |
848 | + return tree.get_file_lines(path) |
849 | + except errors.NoSuchFile: |
850 | + return [] |
851 | try: |
852 | - from_text = _get_text(self.old_tree, from_file_id, from_path) |
853 | - to_text = _get_text(self.new_tree, to_file_id, to_path) |
854 | + from_text = _get_text(self.old_tree, from_path) |
855 | + to_text = _get_text(self.new_tree, to_path) |
856 | self.text_differ(from_label, from_text, to_label, to_text, |
857 | self.to_file, path_encoding=self.path_encoding, |
858 | context_lines=self.context_lines) |
859 | @@ -804,7 +794,7 @@ |
860 | return osutils.pathjoin(self._root, prefix, relpath_tmp) |
861 | |
862 | def _write_file(self, relpath, tree, prefix, force_temp=False, |
863 | - allow_write=False, file_id=None): |
864 | + allow_write=False): |
865 | if not force_temp and isinstance(tree, WorkingTree): |
866 | full_path = tree.abspath(relpath) |
867 | if self._is_safepath(full_path): |
868 | @@ -836,12 +826,12 @@ |
869 | return full_path |
870 | |
871 | def _prepare_files(self, old_path, new_path, force_temp=False, |
872 | - allow_write_new=False, file_id=None): |
873 | - old_disk_path = self._write_file(old_path, self.old_tree, 'old', |
874 | - force_temp, file_id=file_id) |
875 | - new_disk_path = self._write_file(new_path, self.new_tree, 'new', |
876 | - force_temp, file_id=file_id, |
877 | - allow_write=allow_write_new) |
878 | + allow_write_new=False): |
879 | + old_disk_path = self._write_file( |
880 | + old_path, self.old_tree, 'old', force_temp) |
881 | + new_disk_path = self._write_file( |
882 | + new_path, self.new_tree, 'new', force_temp, |
883 | + allow_write=allow_write_new) |
884 | return old_disk_path, new_disk_path |
885 | |
886 | def finish(self): |
887 | @@ -852,25 +842,23 @@ |
888 | mutter("The temporary directory \"%s\" was not " |
889 | "cleanly removed: %s." % (self._root, e)) |
890 | |
891 | - def diff(self, file_id, old_path, new_path, old_kind, new_kind): |
892 | + def diff(self, old_path, new_path, old_kind, new_kind): |
893 | if (old_kind, new_kind) != ('file', 'file'): |
894 | return DiffPath.CANNOT_DIFF |
895 | (old_disk_path, new_disk_path) = self._prepare_files( |
896 | - old_path, new_path, file_id=file_id) |
897 | + old_path, new_path) |
898 | self._execute(old_disk_path, new_disk_path) |
899 | |
900 | - def edit_file(self, old_path, new_path, file_id=None): |
901 | + def edit_file(self, old_path, new_path): |
902 | """Use this tool to edit a file. |
903 | |
904 | A temporary copy will be edited, and the new contents will be |
905 | returned. |
906 | |
907 | - :param file_id: The id of the file to edit. |
908 | :return: The new contents of the file. |
909 | """ |
910 | old_abs_path, new_abs_path = self._prepare_files( |
911 | - old_path, new_path, allow_write_new=True, force_temp=True, |
912 | - file_id=file_id) |
913 | + old_path, new_path, allow_write_new=True, force_temp=True) |
914 | command = self._get_command(old_abs_path, new_abs_path) |
915 | subprocess.call(command, cwd=self._root) |
916 | with open(new_abs_path, 'rb') as new_file: |
917 | @@ -1028,16 +1016,15 @@ |
918 | self.to_file.write(b"=== modified %s '%s'%s\n" % (kind[0].encode('ascii'), |
919 | newpath_encoded, prop_str)) |
920 | if changed_content: |
921 | - self._diff(oldpath, newpath, kind[0], kind[1], file_id=file_id) |
922 | + self._diff(oldpath, newpath, kind[0], kind[1]) |
923 | has_changes = 1 |
924 | if renamed: |
925 | has_changes = 1 |
926 | return has_changes |
927 | |
928 | - def diff(self, file_id, old_path, new_path): |
929 | + def diff(self, old_path, new_path): |
930 | """Perform a diff of a single file |
931 | |
932 | - :param file_id: file-id of the file |
933 | :param old_path: The path of the file in the old tree |
934 | :param new_path: The path of the file in the new tree |
935 | """ |
936 | @@ -1049,11 +1036,11 @@ |
937 | new_kind = None |
938 | else: |
939 | new_kind = self.new_tree.kind(new_path) |
940 | - self._diff(old_path, new_path, old_kind, new_kind, file_id=file_id) |
941 | + self._diff(old_path, new_path, old_kind, new_kind) |
942 | |
943 | - def _diff(self, old_path, new_path, old_kind, new_kind, file_id): |
944 | - result = DiffPath._diff_many(self.differs, file_id, old_path, |
945 | - new_path, old_kind, new_kind) |
946 | + def _diff(self, old_path, new_path, old_kind, new_kind): |
947 | + result = DiffPath._diff_many( |
948 | + self.differs, old_path, new_path, old_kind, new_kind) |
949 | if result is DiffPath.CANNOT_DIFF: |
950 | error_path = new_path |
951 | if error_path is None: |
952 | |
953 | === modified file 'breezy/git/branch.py' |
954 | --- breezy/git/branch.py 2018-11-16 23:15:15 +0000 |
955 | +++ breezy/git/branch.py 2019-01-01 23:11:42 +0000 |
956 | @@ -615,6 +615,12 @@ |
957 | self, stop_revision=revid, lossy=lossy, _stop_revno=revno) |
958 | return (push_result.new_revno, push_result.new_revid) |
959 | |
960 | + def reconcile(self, thorough=True): |
961 | + """Make sure the data stored in this branch is consistent.""" |
962 | + from ..reconcile import ReconcileResult |
963 | + # Nothing to do here |
964 | + return ReconcileResult() |
965 | + |
966 | |
967 | class LocalGitBranch(GitBranch): |
968 | """A local Git branch.""" |
969 | |
970 | === modified file 'breezy/git/commit.py' |
971 | --- breezy/git/commit.py 2018-11-22 03:04:59 +0000 |
972 | +++ breezy/git/commit.py 2019-01-01 23:11:42 +0000 |
973 | @@ -142,7 +142,7 @@ |
974 | encoded_new_path = path[1].encode("utf-8") |
975 | self._blobs[encoded_new_path] = (mode, sha) |
976 | if st is not None: |
977 | - yield file_id, path[1], (entry.text_sha1, st) |
978 | + yield path[1], (entry.text_sha1, st) |
979 | if self._mapping.generate_file_id(encoded_new_path) != file_id: |
980 | self._override_fileids[encoded_new_path] = file_id |
981 | else: |
982 | |
983 | === modified file 'breezy/git/repository.py' |
984 | --- breezy/git/repository.py 2018-11-11 14:23:06 +0000 |
985 | +++ breezy/git/repository.py 2019-01-01 23:11:42 +0000 |
986 | @@ -68,31 +68,6 @@ |
987 | ) |
988 | |
989 | |
990 | -class RepoReconciler(object): |
991 | - """Reconciler that reconciles a repository. |
992 | - |
993 | - """ |
994 | - |
995 | - def __init__(self, repo, other=None, thorough=False): |
996 | - """Construct a RepoReconciler. |
997 | - |
998 | - :param thorough: perform a thorough check which may take longer but |
999 | - will correct non-data loss issues such as incorrect |
1000 | - cached data. |
1001 | - """ |
1002 | - self.repo = repo |
1003 | - |
1004 | - def reconcile(self): |
1005 | - """Perform reconciliation. |
1006 | - |
1007 | - After reconciliation the following attributes document found issues: |
1008 | - inconsistent_parents: The number of revisions in the repository whose |
1009 | - ancestry was being reported incorrectly. |
1010 | - garbage_inventories: The number of inventory objects without revisions |
1011 | - that were garbage collected. |
1012 | - """ |
1013 | - |
1014 | - |
1015 | class GitCheck(check.Check): |
1016 | |
1017 | def __init__(self, repository, check_repo=True): |
1018 | @@ -241,9 +216,10 @@ |
1019 | |
1020 | def reconcile(self, other=None, thorough=False): |
1021 | """Reconcile this repository.""" |
1022 | - reconciler = RepoReconciler(self, thorough=thorough) |
1023 | - reconciler.reconcile() |
1024 | - return reconciler |
1025 | + from ..reconcile import ReconcileResult |
1026 | + ret = ReconcileResult() |
1027 | + ret.aborted = False |
1028 | + return ret |
1029 | |
1030 | def supports_rich_root(self): |
1031 | return True |
1032 | |
1033 | === modified file 'breezy/git/tests/test_blackbox.py' |
1034 | --- breezy/git/tests/test_blackbox.py 2018-12-18 19:51:52 +0000 |
1035 | +++ breezy/git/tests/test_blackbox.py 2019-01-01 23:11:42 +0000 |
1036 | @@ -395,6 +395,21 @@ |
1037 | self.assertEqual(error, '') |
1038 | |
1039 | |
1040 | +class ReconcileTests(ExternalBase): |
1041 | + |
1042 | + def test_simple_reconcile(self): |
1043 | + tree = self.make_branch_and_tree('.', format='git') |
1044 | + self.build_tree_contents([('a', 'text for a\n')]) |
1045 | + tree.add(['a']) |
1046 | + output, error = self.run_bzr('reconcile') |
1047 | + self.assertContainsRe( |
1048 | + output, |
1049 | + 'Reconciling branch file://.*\n' |
1050 | + 'Reconciling repository file://.*\n' |
1051 | + 'Reconciliation complete.\n') |
1052 | + self.assertEqual(error, '') |
1053 | + |
1054 | + |
1055 | class StatusTests(ExternalBase): |
1056 | |
1057 | def test_empty_dir(self): |
1058 | |
1059 | === modified file 'breezy/git/tree.py' |
1060 | --- breezy/git/tree.py 2018-12-11 17:29:18 +0000 |
1061 | +++ breezy/git/tree.py 2019-01-01 23:11:42 +0000 |
1062 | @@ -995,7 +995,7 @@ |
1063 | raise errors.NoSuchId(self, file_id) |
1064 | |
1065 | def _set_root_id(self, file_id): |
1066 | - self._fileid_map.set_file_id("", file_id) |
1067 | + raise errors.UnsupportedOperation(self._set_root_id, self) |
1068 | |
1069 | def get_root_id(self): |
1070 | return self.path2id(u"") |
1071 | |
1072 | === modified file 'breezy/plugins/fastimport/revision_store.py' |
1073 | --- breezy/plugins/fastimport/revision_store.py 2018-11-16 18:33:17 +0000 |
1074 | +++ breezy/plugins/fastimport/revision_store.py 2019-01-01 23:11:42 +0000 |
1075 | @@ -367,7 +367,7 @@ |
1076 | basis_rev_id = _mod_revision.NULL_REVISION |
1077 | tree = _TreeShim(self.repo, basis_inv, inv_delta, text_provider) |
1078 | changes = tree._delta_to_iter_changes() |
1079 | - for (file_id, path, fs_hash) in builder.record_iter_changes( |
1080 | + for (path, fs_hash) in builder.record_iter_changes( |
1081 | tree, basis_rev_id, changes): |
1082 | # So far, we don't *do* anything with the result |
1083 | pass |
1084 | |
1085 | === modified file 'breezy/reconcile.py' |
1086 | --- breezy/reconcile.py 2018-11-11 04:08:32 +0000 |
1087 | +++ breezy/reconcile.py 2019-01-01 23:11:42 +0000 |
1088 | @@ -19,23 +19,17 @@ |
1089 | from __future__ import absolute_import |
1090 | |
1091 | __all__ = [ |
1092 | - 'KnitReconciler', |
1093 | - 'PackReconciler', |
1094 | 'reconcile', |
1095 | 'Reconciler', |
1096 | - 'RepoReconciler', |
1097 | ] |
1098 | |
1099 | |
1100 | from . import ( |
1101 | cleanup, |
1102 | errors, |
1103 | - revision as _mod_revision, |
1104 | ui, |
1105 | ) |
1106 | from .trace import mutter |
1107 | -from .tsort import topo_sort |
1108 | -from .bzr.versionedfile import AdapterFactory, FulltextContentFactory |
1109 | from .i18n import gettext |
1110 | |
1111 | |
1112 | @@ -52,7 +46,11 @@ |
1113 | :param canonicalize_chks: Make sure CHKs are in canonical form. |
1114 | """ |
1115 | reconciler = Reconciler(dir, canonicalize_chks=canonicalize_chks) |
1116 | - reconciler.reconcile() |
1117 | + return reconciler.reconcile() |
1118 | + |
1119 | + |
1120 | +class ReconcileResult(object): |
1121 | + """Class describing the result of a reconcile operation.""" |
1122 | |
1123 | |
1124 | class Reconciler(object): |
1125 | @@ -65,38 +63,29 @@ |
1126 | |
1127 | def reconcile(self): |
1128 | """Perform reconciliation. |
1129 | - |
1130 | - After reconciliation the following attributes document found issues: |
1131 | - |
1132 | - * `inconsistent_parents`: The number of revisions in the repository |
1133 | - whose ancestry was being reported incorrectly. |
1134 | - * `garbage_inventories`: The number of inventory objects without |
1135 | - revisions that were garbage collected. |
1136 | - * `fixed_branch_history`: None if there was no branch, False if the |
1137 | - branch history was correct, True if the branch history needed to be |
1138 | - re-normalized. |
1139 | """ |
1140 | - operation = cleanup.OperationWithCleanups(self._reconcile) |
1141 | - self.add_cleanup = operation.add_cleanup |
1142 | - operation.run_simple() |
1143 | - |
1144 | - def _reconcile(self): |
1145 | - """Helper function for performing reconciliation.""" |
1146 | - self.pb = ui.ui_factory.nested_progress_bar() |
1147 | - self.add_cleanup(self.pb.finished) |
1148 | - self._reconcile_branch() |
1149 | - self._reconcile_repository() |
1150 | + with ui.ui_factory.nested_progress_bar() as self.pb: |
1151 | + result = ReconcileResult() |
1152 | + branch_result = self._reconcile_branch() |
1153 | + repo_result = self._reconcile_repository() |
1154 | + # TODO(jelmer): Don't hardcode supported attributes here |
1155 | + result.inconsistent_parents = getattr( |
1156 | + repo_result, 'inconsistent_parents', None) |
1157 | + result.aborted = getattr(repo_result, 'aborted', None) |
1158 | + result.garbage_inventories = getattr( |
1159 | + repo_result, 'garbage_inventories', None) |
1160 | + result.fixed_branch_history = getattr( |
1161 | + branch_result, 'fixed_history', None) |
1162 | + return result |
1163 | |
1164 | def _reconcile_branch(self): |
1165 | try: |
1166 | self.branch = self.controldir.open_branch() |
1167 | except errors.NotBranchError: |
1168 | # Nothing to check here |
1169 | - self.fixed_branch_history = None |
1170 | return |
1171 | ui.ui_factory.note(gettext('Reconciling branch %s') % self.branch.base) |
1172 | - branch_reconciler = self.branch.reconcile(thorough=True) |
1173 | - self.fixed_branch_history = branch_reconciler.fixed_history |
1174 | + return self.branch.reconcile(thorough=True) |
1175 | |
1176 | def _reconcile_repository(self): |
1177 | self.repo = self.controldir.find_repository() |
1178 | @@ -109,453 +98,14 @@ |
1179 | except AttributeError: |
1180 | raise errors.BzrError( |
1181 | gettext("%s cannot canonicalize CHKs.") % (self.repo,)) |
1182 | - repo_reconciler = self.repo.reconcile_canonicalize_chks() |
1183 | + reconcile_result = self.repo.reconcile_canonicalize_chks() |
1184 | else: |
1185 | - repo_reconciler = self.repo.reconcile(thorough=True) |
1186 | - self.inconsistent_parents = repo_reconciler.inconsistent_parents |
1187 | - self.garbage_inventories = repo_reconciler.garbage_inventories |
1188 | - if repo_reconciler.aborted: |
1189 | + reconcile_result = self.repo.reconcile(thorough=True) |
1190 | + if reconcile_result.aborted: |
1191 | ui.ui_factory.note(gettext( |
1192 | 'Reconcile aborted: revision index has inconsistent parents.')) |
1193 | ui.ui_factory.note(gettext( |
1194 | 'Run "brz check" for more details.')) |
1195 | else: |
1196 | ui.ui_factory.note(gettext('Reconciliation complete.')) |
1197 | - |
1198 | - |
1199 | -class BranchReconciler(object): |
1200 | - """Reconciler that works on a branch.""" |
1201 | - |
1202 | - def __init__(self, a_branch, thorough=False): |
1203 | - self.fixed_history = None |
1204 | - self.thorough = thorough |
1205 | - self.branch = a_branch |
1206 | - |
1207 | - def reconcile(self): |
1208 | - operation = cleanup.OperationWithCleanups(self._reconcile) |
1209 | - self.add_cleanup = operation.add_cleanup |
1210 | - operation.run_simple() |
1211 | - |
1212 | - def _reconcile(self): |
1213 | - self.branch.lock_write() |
1214 | - self.add_cleanup(self.branch.unlock) |
1215 | - self.pb = ui.ui_factory.nested_progress_bar() |
1216 | - self.add_cleanup(self.pb.finished) |
1217 | - self._reconcile_steps() |
1218 | - |
1219 | - def _reconcile_steps(self): |
1220 | - self._reconcile_revision_history() |
1221 | - |
1222 | - def _reconcile_revision_history(self): |
1223 | - last_revno, last_revision_id = self.branch.last_revision_info() |
1224 | - real_history = [] |
1225 | - graph = self.branch.repository.get_graph() |
1226 | - try: |
1227 | - for revid in graph.iter_lefthand_ancestry( |
1228 | - last_revision_id, (_mod_revision.NULL_REVISION,)): |
1229 | - real_history.append(revid) |
1230 | - except errors.RevisionNotPresent: |
1231 | - pass # Hit a ghost left hand parent |
1232 | - real_history.reverse() |
1233 | - if last_revno != len(real_history): |
1234 | - self.fixed_history = True |
1235 | - # Technically for Branch5 formats, it is more efficient to use |
1236 | - # set_revision_history, as this will regenerate it again. |
1237 | - # Not really worth a whole BranchReconciler class just for this, |
1238 | - # though. |
1239 | - ui.ui_factory.note(gettext('Fixing last revision info {0} ' |
1240 | - ' => {1}').format( |
1241 | - last_revno, len(real_history))) |
1242 | - self.branch.set_last_revision_info(len(real_history), |
1243 | - last_revision_id) |
1244 | - else: |
1245 | - self.fixed_history = False |
1246 | - ui.ui_factory.note(gettext('revision_history ok.')) |
1247 | - |
1248 | - |
1249 | -class RepoReconciler(object): |
1250 | - """Reconciler that reconciles a repository. |
1251 | - |
1252 | - The goal of repository reconciliation is to make any derived data |
1253 | - consistent with the core data committed by a user. This can involve |
1254 | - reindexing, or removing unreferenced data if that can interfere with |
1255 | - queries in a given repository. |
1256 | - |
1257 | - Currently this consists of an inventory reweave with revision cross-checks. |
1258 | - """ |
1259 | - |
1260 | - def __init__(self, repo, other=None, thorough=False): |
1261 | - """Construct a RepoReconciler. |
1262 | - |
1263 | - :param thorough: perform a thorough check which may take longer but |
1264 | - will correct non-data loss issues such as incorrect |
1265 | - cached data. |
1266 | - """ |
1267 | - self.garbage_inventories = 0 |
1268 | - self.inconsistent_parents = 0 |
1269 | - self.aborted = False |
1270 | - self.repo = repo |
1271 | - self.thorough = thorough |
1272 | - |
1273 | - def reconcile(self): |
1274 | - """Perform reconciliation. |
1275 | - |
1276 | - After reconciliation the following attributes document found issues: |
1277 | - |
1278 | - * `inconsistent_parents`: The number of revisions in the repository |
1279 | - whose ancestry was being reported incorrectly. |
1280 | - * `garbage_inventories`: The number of inventory objects without |
1281 | - revisions that were garbage collected. |
1282 | - """ |
1283 | - operation = cleanup.OperationWithCleanups(self._reconcile) |
1284 | - self.add_cleanup = operation.add_cleanup |
1285 | - operation.run_simple() |
1286 | - |
1287 | - def _reconcile(self): |
1288 | - self.repo.lock_write() |
1289 | - self.add_cleanup(self.repo.unlock) |
1290 | - self.pb = ui.ui_factory.nested_progress_bar() |
1291 | - self.add_cleanup(self.pb.finished) |
1292 | - self._reconcile_steps() |
1293 | - |
1294 | - def _reconcile_steps(self): |
1295 | - """Perform the steps to reconcile this repository.""" |
1296 | - self._reweave_inventory() |
1297 | - |
1298 | - def _reweave_inventory(self): |
1299 | - """Regenerate the inventory weave for the repository from scratch. |
1300 | - |
1301 | - This is a smart function: it will only do the reweave if doing it |
1302 | - will correct data issues. The self.thorough flag controls whether |
1303 | - only data-loss causing issues (!self.thorough) or all issues |
1304 | - (self.thorough) are treated as requiring the reweave. |
1305 | - """ |
1306 | - transaction = self.repo.get_transaction() |
1307 | - self.pb.update(gettext('Reading inventory data')) |
1308 | - self.inventory = self.repo.inventories |
1309 | - self.revisions = self.repo.revisions |
1310 | - # the total set of revisions to process |
1311 | - self.pending = {key[-1] for key in self.revisions.keys()} |
1312 | - |
1313 | - # mapping from revision_id to parents |
1314 | - self._rev_graph = {} |
1315 | - # errors that we detect |
1316 | - self.inconsistent_parents = 0 |
1317 | - # we need the revision id of each revision and its available parents list |
1318 | - self._setup_steps(len(self.pending)) |
1319 | - for rev_id in self.pending: |
1320 | - # put a revision into the graph. |
1321 | - self._graph_revision(rev_id) |
1322 | - self._check_garbage_inventories() |
1323 | - # if there are no inconsistent_parents and |
1324 | - # (no garbage inventories or we are not doing a thorough check) |
1325 | - if (not self.inconsistent_parents |
1326 | - and (not self.garbage_inventories or not self.thorough)): |
1327 | - ui.ui_factory.note(gettext('Inventory ok.')) |
1328 | - return |
1329 | - self.pb.update(gettext('Backing up inventory'), 0, 0) |
1330 | - self.repo._backup_inventory() |
1331 | - ui.ui_factory.note(gettext('Backup inventory created.')) |
1332 | - new_inventories = self.repo._temp_inventories() |
1333 | - |
1334 | - # we have topological order of revisions and non ghost parents ready. |
1335 | - self._setup_steps(len(self._rev_graph)) |
1336 | - revision_keys = [(rev_id,) for rev_id in topo_sort(self._rev_graph)] |
1337 | - stream = self._change_inv_parents( |
1338 | - self.inventory.get_record_stream(revision_keys, 'unordered', True), |
1339 | - self._new_inv_parents, |
1340 | - set(revision_keys)) |
1341 | - new_inventories.insert_record_stream(stream) |
1342 | - # if this worked, the set of new_inventories.keys should equal |
1343 | - # self.pending |
1344 | - if not (set(new_inventories.keys()) |
1345 | - == {(revid,) for revid in self.pending}): |
1346 | - raise AssertionError() |
1347 | - self.pb.update(gettext('Writing weave')) |
1348 | - self.repo._activate_new_inventory() |
1349 | - self.inventory = None |
1350 | - ui.ui_factory.note(gettext('Inventory regenerated.')) |
1351 | - |
1352 | - def _new_inv_parents(self, revision_key): |
1353 | - """Lookup ghost-filtered parents for revision_key.""" |
1354 | - # Use the filtered ghostless parents list: |
1355 | - return tuple([(revid,) for revid in self._rev_graph[revision_key[-1]]]) |
1356 | - |
1357 | - def _change_inv_parents(self, stream, get_parents, all_revision_keys): |
1358 | - """Adapt a record stream to reconcile the parents.""" |
1359 | - for record in stream: |
1360 | - wanted_parents = get_parents(record.key) |
1361 | - if wanted_parents and wanted_parents[0] not in all_revision_keys: |
1362 | - # The check for the left most parent only handles knit |
1363 | - # compressors, but this code only applies to knit and weave |
1364 | - # repositories anyway. |
1365 | - bytes = record.get_bytes_as('fulltext') |
1366 | - yield FulltextContentFactory(record.key, wanted_parents, record.sha1, bytes) |
1367 | - else: |
1368 | - adapted_record = AdapterFactory( |
1369 | - record.key, wanted_parents, record) |
1370 | - yield adapted_record |
1371 | - self._reweave_step('adding inventories') |
1372 | - |
1373 | - def _setup_steps(self, new_total): |
1374 | - """Setup the markers we need to control the progress bar.""" |
1375 | - self.total = new_total |
1376 | - self.count = 0 |
1377 | - |
1378 | - def _graph_revision(self, rev_id): |
1379 | - """Load a revision into the revision graph.""" |
1380 | - # pick a random revision |
1381 | - # analyse revision id rev_id and put it in the stack. |
1382 | - self._reweave_step('loading revisions') |
1383 | - rev = self.repo.get_revision_reconcile(rev_id) |
1384 | - parents = [] |
1385 | - for parent in rev.parent_ids: |
1386 | - if self._parent_is_available(parent): |
1387 | - parents.append(parent) |
1388 | - else: |
1389 | - mutter('found ghost %s', parent) |
1390 | - self._rev_graph[rev_id] = parents |
1391 | - |
1392 | - def _check_garbage_inventories(self): |
1393 | - """Check for garbage inventories which we cannot trust |
1394 | - |
1395 | - We cant trust them because their pre-requisite file data may not |
1396 | - be present - all we know is that their revision was not installed. |
1397 | - """ |
1398 | - if not self.thorough: |
1399 | - return |
1400 | - inventories = set(self.inventory.keys()) |
1401 | - revisions = set(self.revisions.keys()) |
1402 | - garbage = inventories.difference(revisions) |
1403 | - self.garbage_inventories = len(garbage) |
1404 | - for revision_key in garbage: |
1405 | - mutter('Garbage inventory {%s} found.', revision_key[-1]) |
1406 | - |
1407 | - def _parent_is_available(self, parent): |
1408 | - """True if parent is a fully available revision |
1409 | - |
1410 | - A fully available revision has a inventory and a revision object in the |
1411 | - repository. |
1412 | - """ |
1413 | - if parent in self._rev_graph: |
1414 | - return True |
1415 | - inv_present = (1 == len(self.inventory.get_parent_map([(parent,)]))) |
1416 | - return (inv_present and self.repo.has_revision(parent)) |
1417 | - |
1418 | - def _reweave_step(self, message): |
1419 | - """Mark a single step of regeneration complete.""" |
1420 | - self.pb.update(message, self.count, self.total) |
1421 | - self.count += 1 |
1422 | - |
1423 | - |
1424 | -class KnitReconciler(RepoReconciler): |
1425 | - """Reconciler that reconciles a knit format repository. |
1426 | - |
1427 | - This will detect garbage inventories and remove them in thorough mode. |
1428 | - """ |
1429 | - |
1430 | - def _reconcile_steps(self): |
1431 | - """Perform the steps to reconcile this repository.""" |
1432 | - if self.thorough: |
1433 | - try: |
1434 | - self._load_indexes() |
1435 | - except errors.BzrCheckError: |
1436 | - self.aborted = True |
1437 | - return |
1438 | - # knits never suffer this |
1439 | - self._gc_inventory() |
1440 | - self._fix_text_parents() |
1441 | - |
1442 | - def _load_indexes(self): |
1443 | - """Load indexes for the reconciliation.""" |
1444 | - self.transaction = self.repo.get_transaction() |
1445 | - self.pb.update(gettext('Reading indexes'), 0, 2) |
1446 | - self.inventory = self.repo.inventories |
1447 | - self.pb.update(gettext('Reading indexes'), 1, 2) |
1448 | - self.repo._check_for_inconsistent_revision_parents() |
1449 | - self.revisions = self.repo.revisions |
1450 | - self.pb.update(gettext('Reading indexes'), 2, 2) |
1451 | - |
1452 | - def _gc_inventory(self): |
1453 | - """Remove inventories that are not referenced from the revision store.""" |
1454 | - self.pb.update(gettext('Checking unused inventories'), 0, 1) |
1455 | - self._check_garbage_inventories() |
1456 | - self.pb.update(gettext('Checking unused inventories'), 1, 3) |
1457 | - if not self.garbage_inventories: |
1458 | - ui.ui_factory.note(gettext('Inventory ok.')) |
1459 | - return |
1460 | - self.pb.update(gettext('Backing up inventory'), 0, 0) |
1461 | - self.repo._backup_inventory() |
1462 | - ui.ui_factory.note(gettext('Backup Inventory created')) |
1463 | - # asking for '' should never return a non-empty weave |
1464 | - new_inventories = self.repo._temp_inventories() |
1465 | - # we have topological order of revisions and non ghost parents ready. |
1466 | - graph = self.revisions.get_parent_map(self.revisions.keys()) |
1467 | - revision_keys = topo_sort(graph) |
1468 | - revision_ids = [key[-1] for key in revision_keys] |
1469 | - self._setup_steps(len(revision_keys)) |
1470 | - stream = self._change_inv_parents( |
1471 | - self.inventory.get_record_stream(revision_keys, 'unordered', True), |
1472 | - graph.__getitem__, |
1473 | - set(revision_keys)) |
1474 | - new_inventories.insert_record_stream(stream) |
1475 | - # if this worked, the set of new_inventory_vf.names should equal |
1476 | - # the revision ids list |
1477 | - if not(set(new_inventories.keys()) == set(revision_keys)): |
1478 | - raise AssertionError() |
1479 | - self.pb.update(gettext('Writing weave')) |
1480 | - self.repo._activate_new_inventory() |
1481 | - self.inventory = None |
1482 | - ui.ui_factory.note(gettext('Inventory regenerated.')) |
1483 | - |
1484 | - def _fix_text_parents(self): |
1485 | - """Fix bad versionedfile parent entries. |
1486 | - |
1487 | - It is possible for the parents entry in a versionedfile entry to be |
1488 | - inconsistent with the values in the revision and inventory. |
1489 | - |
1490 | - This method finds entries with such inconsistencies, corrects their |
1491 | - parent lists, and replaces the versionedfile with a corrected version. |
1492 | - """ |
1493 | - transaction = self.repo.get_transaction() |
1494 | - versions = [key[-1] for key in self.revisions.keys()] |
1495 | - mutter('Prepopulating revision text cache with %d revisions', |
1496 | - len(versions)) |
1497 | - vf_checker = self.repo._get_versioned_file_checker() |
1498 | - bad_parents, unused_versions = vf_checker.check_file_version_parents( |
1499 | - self.repo.texts, self.pb) |
1500 | - text_index = vf_checker.text_index |
1501 | - per_id_bad_parents = {} |
1502 | - for key in unused_versions: |
1503 | - # Ensure that every file with unused versions gets rewritten. |
1504 | - # NB: This is really not needed, reconcile != pack. |
1505 | - per_id_bad_parents[key[0]] = {} |
1506 | - # Generate per-knit/weave data. |
1507 | - for key, details in bad_parents.items(): |
1508 | - file_id = key[0] |
1509 | - rev_id = key[1] |
1510 | - knit_parents = tuple([parent[-1] for parent in details[0]]) |
1511 | - correct_parents = tuple([parent[-1] for parent in details[1]]) |
1512 | - file_details = per_id_bad_parents.setdefault(file_id, {}) |
1513 | - file_details[rev_id] = (knit_parents, correct_parents) |
1514 | - file_id_versions = {} |
1515 | - for text_key in text_index: |
1516 | - versions_list = file_id_versions.setdefault(text_key[0], []) |
1517 | - versions_list.append(text_key[1]) |
1518 | - # Do the reconcile of individual weaves. |
1519 | - for num, file_id in enumerate(per_id_bad_parents): |
1520 | - self.pb.update(gettext('Fixing text parents'), num, |
1521 | - len(per_id_bad_parents)) |
1522 | - versions_with_bad_parents = per_id_bad_parents[file_id] |
1523 | - id_unused_versions = set(key[-1] for key in unused_versions |
1524 | - if key[0] == file_id) |
1525 | - if file_id in file_id_versions: |
1526 | - file_versions = file_id_versions[file_id] |
1527 | - else: |
1528 | - # This id was present in the disk store but is not referenced |
1529 | - # by any revision at all. |
1530 | - file_versions = [] |
1531 | - self._fix_text_parent(file_id, versions_with_bad_parents, |
1532 | - id_unused_versions, file_versions) |
1533 | - |
1534 | - def _fix_text_parent(self, file_id, versions_with_bad_parents, |
1535 | - unused_versions, all_versions): |
1536 | - """Fix bad versionedfile entries in a single versioned file.""" |
1537 | - mutter('fixing text parent: %r (%d versions)', file_id, |
1538 | - len(versions_with_bad_parents)) |
1539 | - mutter('(%d are unused)', len(unused_versions)) |
1540 | - new_file_id = b'temp:%s' % file_id |
1541 | - new_parents = {} |
1542 | - needed_keys = set() |
1543 | - for version in all_versions: |
1544 | - if version in unused_versions: |
1545 | - continue |
1546 | - elif version in versions_with_bad_parents: |
1547 | - parents = versions_with_bad_parents[version][1] |
1548 | - else: |
1549 | - pmap = self.repo.texts.get_parent_map([(file_id, version)]) |
1550 | - parents = [key[-1] for key in pmap[(file_id, version)]] |
1551 | - new_parents[(new_file_id, version)] = [ |
1552 | - (new_file_id, parent) for parent in parents] |
1553 | - needed_keys.add((file_id, version)) |
1554 | - |
1555 | - def fix_parents(stream): |
1556 | - for record in stream: |
1557 | - bytes = record.get_bytes_as('fulltext') |
1558 | - new_key = (new_file_id, record.key[-1]) |
1559 | - parents = new_parents[new_key] |
1560 | - yield FulltextContentFactory(new_key, parents, record.sha1, bytes) |
1561 | - stream = self.repo.texts.get_record_stream( |
1562 | - needed_keys, 'topological', True) |
1563 | - self.repo._remove_file_id(new_file_id) |
1564 | - self.repo.texts.insert_record_stream(fix_parents(stream)) |
1565 | - self.repo._remove_file_id(file_id) |
1566 | - if len(new_parents): |
1567 | - self.repo._move_file_id(new_file_id, file_id) |
1568 | - |
1569 | - |
1570 | -class PackReconciler(RepoReconciler): |
1571 | - """Reconciler that reconciles a pack based repository. |
1572 | - |
1573 | - Garbage inventories do not affect ancestry queries, and removal is |
1574 | - considerably more expensive as there is no separate versioned file for |
1575 | - them, so they are not cleaned. In short it is currently a no-op. |
1576 | - |
1577 | - In future this may be a good place to hook in annotation cache checking, |
1578 | - index recreation etc. |
1579 | - """ |
1580 | - |
1581 | - # XXX: The index corruption that _fix_text_parents performs is needed for |
1582 | - # packs, but not yet implemented. The basic approach is to: |
1583 | - # - lock the names list |
1584 | - # - perform a customised pack() that regenerates data as needed |
1585 | - # - unlock the names list |
1586 | - # https://bugs.launchpad.net/bzr/+bug/154173 |
1587 | - |
1588 | - def __init__(self, repo, other=None, thorough=False, |
1589 | - canonicalize_chks=False): |
1590 | - super(PackReconciler, self).__init__(repo, other=other, |
1591 | - thorough=thorough) |
1592 | - self.canonicalize_chks = canonicalize_chks |
1593 | - |
1594 | - def _reconcile_steps(self): |
1595 | - """Perform the steps to reconcile this repository.""" |
1596 | - if not self.thorough: |
1597 | - return |
1598 | - collection = self.repo._pack_collection |
1599 | - collection.ensure_loaded() |
1600 | - collection.lock_names() |
1601 | - self.add_cleanup(collection._unlock_names) |
1602 | - packs = collection.all_packs() |
1603 | - all_revisions = self.repo.all_revision_ids() |
1604 | - total_inventories = len(list( |
1605 | - collection.inventory_index.combined_index.iter_all_entries())) |
1606 | - if len(all_revisions): |
1607 | - if self.canonicalize_chks: |
1608 | - reconcile_meth = self.repo._canonicalize_chks_pack |
1609 | - else: |
1610 | - reconcile_meth = self.repo._reconcile_pack |
1611 | - new_pack = reconcile_meth(collection, packs, ".reconcile", |
1612 | - all_revisions, self.pb) |
1613 | - if new_pack is not None: |
1614 | - self._discard_and_save(packs) |
1615 | - else: |
1616 | - # only make a new pack when there is data to copy. |
1617 | - self._discard_and_save(packs) |
1618 | - self.garbage_inventories = total_inventories - len(list( |
1619 | - collection.inventory_index.combined_index.iter_all_entries())) |
1620 | - |
1621 | - def _discard_and_save(self, packs): |
1622 | - """Discard some packs from the repository. |
1623 | - |
1624 | - This removes them from the memory index, saves the in-memory index |
1625 | - which makes the newly reconciled pack visible and hides the packs to be |
1626 | - discarded, and finally renames the packs being discarded into the |
1627 | - obsolete packs directory. |
1628 | - |
1629 | - :param packs: The packs to discard. |
1630 | - """ |
1631 | - for pack in packs: |
1632 | - self.repo._pack_collection._remove_pack_from_memory(pack) |
1633 | - self.repo._pack_collection._save_pack_names() |
1634 | - self.repo._pack_collection._obsolete_packs(packs) |
1635 | + return reconcile_result |
1636 | |
1637 | === modified file 'breezy/repository.py' |
1638 | --- breezy/repository.py 2018-11-29 23:42:41 +0000 |
1639 | +++ breezy/repository.py 2019-01-01 23:11:42 +0000 |
1640 | @@ -218,7 +218,7 @@ |
1641 | to basis_revision_id. The iterator must not include any items with |
1642 | a current kind of None - missing items must be either filtered out |
1643 | or errored-on before record_iter_changes sees the item. |
1644 | - :return: A generator of (file_id, relpath, fs_hash) tuples for use with |
1645 | + :return: A generator of (relpath, fs_hash) tuples for use with |
1646 | tree._observed_sha1. |
1647 | """ |
1648 | raise NotImplementedError(self.record_iter_changes) |
1649 | @@ -953,11 +953,7 @@ |
1650 | |
1651 | def reconcile(self, other=None, thorough=False): |
1652 | """Reconcile this repository.""" |
1653 | - from .reconcile import RepoReconciler |
1654 | - with self.lock_write(): |
1655 | - reconciler = RepoReconciler(self, thorough=thorough) |
1656 | - reconciler.reconcile() |
1657 | - return reconciler |
1658 | + raise NotImplementedError(self.reconcile) |
1659 | |
1660 | def _refresh_data(self): |
1661 | """Helper called from lock_* to ensure coherency with disk. |
1662 | |
1663 | === modified file 'breezy/shelf_ui.py' |
1664 | --- breezy/shelf_ui.py 2018-11-16 18:33:17 +0000 |
1665 | +++ breezy/shelf_ui.py 2019-01-01 23:11:42 +0000 |
1666 | @@ -250,7 +250,7 @@ |
1667 | path_encoding = osutils.get_terminal_encoding() |
1668 | text_differ = diff.DiffText(old_tree, new_tree, diff_file, |
1669 | path_encoding=path_encoding) |
1670 | - patch = text_differ.diff(file_id, old_path, new_path, 'file', 'file') |
1671 | + patch = text_differ.diff(old_path, new_path, 'file', 'file') |
1672 | diff_file.seek(0) |
1673 | return patches.parse_patch(diff_file) |
1674 | |
1675 | @@ -365,8 +365,7 @@ |
1676 | """ |
1677 | lines = osutils.split_lines(self.change_editor.edit_file( |
1678 | self.change_editor.old_tree.id2path(file_id), |
1679 | - self.change_editor.new_tree.id2path(file_id), |
1680 | - file_id=file_id)) |
1681 | + self.change_editor.new_tree.id2path(file_id))) |
1682 | return lines, self._count_changed_regions(work_tree_lines, lines) |
1683 | |
1684 | @staticmethod |
1685 | |
1686 | === modified file 'breezy/tests/per_branch/test_reconcile.py' |
1687 | --- breezy/tests/per_branch/test_reconcile.py 2018-11-11 04:08:32 +0000 |
1688 | +++ breezy/tests/per_branch/test_reconcile.py 2019-01-01 23:11:42 +0000 |
1689 | @@ -68,9 +68,9 @@ |
1690 | def test_reconcile_returns_reconciler(self): |
1691 | a_branch = self.make_branch('a_branch') |
1692 | result = a_branch.reconcile() |
1693 | - self.assertIsInstance(result, reconcile.BranchReconciler) |
1694 | + self.assertIsInstance(result, reconcile.ReconcileResult) |
1695 | # No history to fix |
1696 | - self.assertIs(False, result.fixed_history) |
1697 | + self.assertIs(False, getattr(result, 'fixed_history', False)) |
1698 | |
1699 | def test_reconcile_supports_thorough(self): |
1700 | a_branch = self.make_branch('a_branch') |
1701 | |
1702 | === modified file 'breezy/tests/per_intertree/test_compare.py' |
1703 | --- breezy/tests/per_intertree/test_compare.py 2018-11-22 03:51:03 +0000 |
1704 | +++ breezy/tests/per_intertree/test_compare.py 2019-01-01 23:11:42 +0000 |
1705 | @@ -798,13 +798,15 @@ |
1706 | tree1.mkdir('changing', b'parent-id') |
1707 | tree1.mkdir('changing/unchanging', b'mid-id') |
1708 | tree1.add(['changing/unchanging/file'], [b'file-id'], ['file']) |
1709 | - tree1.put_file_bytes_non_atomic('changing/unchanging/file', b'a file') |
1710 | + tree1.put_file_bytes_non_atomic( |
1711 | + 'changing/unchanging/file', b'a file') |
1712 | tree2 = self.make_to_branch_and_tree('2') |
1713 | tree2.set_root_id(tree1.get_root_id()) |
1714 | tree2.mkdir('changed', b'parent-id') |
1715 | tree2.mkdir('changed/unchanging', b'mid-id') |
1716 | tree2.add(['changed/unchanging/file'], [b'file-id'], ['file']) |
1717 | - tree2.put_file_bytes_non_atomic('changed/unchanging/file', b'changed content') |
1718 | + tree2.put_file_bytes_non_atomic( |
1719 | + 'changed/unchanging/file', b'changed content') |
1720 | tree1, tree2 = self.mutable_trees_to_test_trees(self, tree1, tree2) |
1721 | # parent-id has changed, as has file-id |
1722 | root_id = tree1.path2id('') |
1723 | |
1724 | === modified file 'breezy/tests/per_repository/test_commit_builder.py' |
1725 | --- breezy/tests/per_repository/test_commit_builder.py 2018-11-16 18:33:17 +0000 |
1726 | +++ breezy/tests/per_repository/test_commit_builder.py 2019-01-01 23:11:42 +0000 |
1727 | @@ -49,8 +49,7 @@ |
1728 | |
1729 | def test_finish_inventory_record_iter_changes(self): |
1730 | tree = self.make_branch_and_tree(".") |
1731 | - tree.lock_write() |
1732 | - try: |
1733 | + with tree.lock_write(): |
1734 | builder = tree.branch.get_commit_builder([]) |
1735 | try: |
1736 | list(builder.record_iter_changes(tree, tree.last_revision(), |
1737 | @@ -61,13 +60,10 @@ |
1738 | raise |
1739 | repo = tree.branch.repository |
1740 | repo.commit_write_group() |
1741 | - finally: |
1742 | - tree.unlock() |
1743 | |
1744 | def test_abort_record_iter_changes(self): |
1745 | tree = self.make_branch_and_tree(".") |
1746 | - tree.lock_write() |
1747 | - try: |
1748 | + with tree.lock_write(): |
1749 | builder = tree.branch.get_commit_builder([]) |
1750 | try: |
1751 | basis = tree.basis_tree() |
1752 | @@ -77,56 +73,44 @@ |
1753 | builder.finish_inventory() |
1754 | finally: |
1755 | builder.abort() |
1756 | - finally: |
1757 | - tree.unlock() |
1758 | |
1759 | def test_commit_lossy(self): |
1760 | tree = self.make_branch_and_tree(".") |
1761 | - tree.lock_write() |
1762 | - try: |
1763 | + with tree.lock_write(): |
1764 | builder = tree.branch.get_commit_builder([], lossy=True) |
1765 | list(builder.record_iter_changes(tree, tree.last_revision(), |
1766 | tree.iter_changes(tree.basis_tree()))) |
1767 | builder.finish_inventory() |
1768 | rev_id = builder.commit('foo bar blah') |
1769 | - finally: |
1770 | - tree.unlock() |
1771 | rev = tree.branch.repository.get_revision(rev_id) |
1772 | self.assertEqual('foo bar blah', rev.message) |
1773 | |
1774 | def test_commit_message(self): |
1775 | tree = self.make_branch_and_tree(".") |
1776 | - tree.lock_write() |
1777 | - try: |
1778 | + with tree.lock_write(): |
1779 | builder = tree.branch.get_commit_builder([]) |
1780 | list(builder.record_iter_changes(tree, tree.last_revision(), |
1781 | tree.iter_changes(tree.basis_tree()))) |
1782 | builder.finish_inventory() |
1783 | rev_id = builder.commit('foo bar blah') |
1784 | - finally: |
1785 | - tree.unlock() |
1786 | rev = tree.branch.repository.get_revision(rev_id) |
1787 | self.assertEqual('foo bar blah', rev.message) |
1788 | |
1789 | def test_updates_branch(self): |
1790 | tree = self.make_branch_and_tree(".") |
1791 | - tree.lock_write() |
1792 | - try: |
1793 | + with tree.lock_write(): |
1794 | builder = tree.branch.get_commit_builder([]) |
1795 | list(builder.record_iter_changes(tree, tree.last_revision(), |
1796 | tree.iter_changes(tree.basis_tree()))) |
1797 | builder.finish_inventory() |
1798 | will_update_branch = builder.updates_branch |
1799 | rev_id = builder.commit('might update the branch') |
1800 | - finally: |
1801 | - tree.unlock() |
1802 | actually_updated_branch = (tree.branch.last_revision() == rev_id) |
1803 | self.assertEqual(actually_updated_branch, will_update_branch) |
1804 | |
1805 | def test_commit_with_revision_id_record_iter_changes(self): |
1806 | tree = self.make_branch_and_tree(".") |
1807 | - tree.lock_write() |
1808 | - try: |
1809 | + with tree.lock_write(): |
1810 | # use a unicode revision id to test more corner cases. |
1811 | # The repository layer is meant to handle this. |
1812 | revision_id = u'\xc8abc'.encode('utf8') |
1813 | @@ -150,20 +134,18 @@ |
1814 | builder.abort() |
1815 | raise |
1816 | self.assertEqual(revision_id, builder.commit('foo bar')) |
1817 | - finally: |
1818 | - tree.unlock() |
1819 | self.assertTrue(tree.branch.repository.has_revision(revision_id)) |
1820 | # the revision id must be set on the inventory when saving it. This |
1821 | # does not precisely test that - a repository that wants to can add it |
1822 | # on deserialisation, but thats all the current contract guarantees |
1823 | # anyway. |
1824 | - self.assertEqual(revision_id, |
1825 | - tree.branch.repository.revision_tree(revision_id).get_revision_id()) |
1826 | + self.assertEqual( |
1827 | + revision_id, |
1828 | + tree.branch.repository.revision_tree(revision_id).get_revision_id()) |
1829 | |
1830 | def test_commit_without_root_errors(self): |
1831 | tree = self.make_branch_and_tree(".") |
1832 | - tree.lock_write() |
1833 | - try: |
1834 | + with tree.lock_write(): |
1835 | builder = tree.branch.get_commit_builder([]) |
1836 | |
1837 | def do_commit(): |
1838 | @@ -177,8 +159,6 @@ |
1839 | else: |
1840 | builder.commit("msg") |
1841 | self.assertRaises(errors.RootMissing, do_commit) |
1842 | - finally: |
1843 | - tree.unlock() |
1844 | |
1845 | def test_commit_unchanged_root_record_iter_changes(self): |
1846 | tree = self.make_branch_and_tree(".") |
1847 | @@ -210,8 +190,7 @@ |
1848 | tree.add(["foo"]) |
1849 | foo_id = tree.path2id('foo') |
1850 | rev_id = tree.commit("added foo") |
1851 | - tree.lock_write() |
1852 | - try: |
1853 | + with tree.lock_write(): |
1854 | builder = tree.branch.get_commit_builder([rev_id]) |
1855 | try: |
1856 | delete_change = (foo_id, ('foo', None), True, (True, False), |
1857 | @@ -228,8 +207,6 @@ |
1858 | except: |
1859 | builder.abort() |
1860 | raise |
1861 | - finally: |
1862 | - tree.unlock() |
1863 | rev_tree = builder.revision_tree() |
1864 | rev_tree.lock_read() |
1865 | self.addCleanup(rev_tree.unlock) |
1866 | @@ -462,7 +439,9 @@ |
1867 | self.assertFileGraph(expected_graph, tree, (file_id, rev2)) |
1868 | |
1869 | def mini_commit_record_iter_changes(self, tree, name, new_name, |
1870 | - records_version=True, delta_against_basis=True, expect_fs_hash=False): |
1871 | + records_version=True, |
1872 | + delta_against_basis=True, |
1873 | + expect_fs_hash=False): |
1874 | """Perform a miniature commit looking for record entry results. |
1875 | |
1876 | This version uses the record_iter_changes interface. |
1877 | @@ -498,10 +477,10 @@ |
1878 | tree_file_stat[0].close() |
1879 | self.assertLength(1, result) |
1880 | result = result[0] |
1881 | - self.assertEqual(result[:2], (file_id, new_name)) |
1882 | + self.assertEqual(result[0], new_name) |
1883 | self.assertEqual( |
1884 | - result[2][0], tree.get_file_sha1(new_name)) |
1885 | - self.assertEqualStat(result[2][1], tree_file_stat[1]) |
1886 | + result[1][0], tree.get_file_sha1(new_name)) |
1887 | + self.assertEqualStat(result[1][1], tree_file_stat[1]) |
1888 | else: |
1889 | self.assertEqual([], result) |
1890 | builder.finish_inventory() |
1891 | @@ -600,9 +579,10 @@ |
1892 | rev2 = self._rename_in_tree(tree1, name, 'rev2') |
1893 | rev3 = self._rename_in_tree(tree2, name, 'rev3') |
1894 | tree1.merge_from_branch(tree2.branch) |
1895 | - rev4 = self.mini_commit_record_iter_changes(tree1, 'new_' + name, 'new_' + name, |
1896 | - expect_fs_hash=expect_fs_hash, |
1897 | - delta_against_basis=tree1.supports_rename_tracking()) |
1898 | + rev4 = self.mini_commit_record_iter_changes( |
1899 | + tree1, 'new_' + name, 'new_' + name, |
1900 | + expect_fs_hash=expect_fs_hash, |
1901 | + delta_against_basis=tree1.supports_rename_tracking()) |
1902 | tree3, = self._get_revtrees(tree1, [rev4]) |
1903 | expected_graph = {} |
1904 | if tree1.supports_rename_tracking(): |
1905 | @@ -873,8 +853,7 @@ |
1906 | self.overrideAttr(config, '_auto_user_id', |
1907 | lambda: (None, None)) |
1908 | tree = self.make_branch_and_tree(".") |
1909 | - tree.lock_write() |
1910 | - try: |
1911 | + with tree.lock_write(): |
1912 | # Make sure no username is available. |
1913 | self.assertRaises(config.NoWhoami, tree.branch.get_commit_builder, |
1914 | []) |
1915 | @@ -889,5 +868,3 @@ |
1916 | raise |
1917 | repo = tree.branch.repository |
1918 | repo.commit_write_group() |
1919 | - finally: |
1920 | - tree.unlock() |
1921 | |
1922 | === modified file 'breezy/tests/per_repository_vf/test_reconcile.py' |
1923 | --- breezy/tests/per_repository_vf/test_reconcile.py 2018-11-11 04:08:32 +0000 |
1924 | +++ breezy/tests/per_repository_vf/test_reconcile.py 2019-01-01 23:11:42 +0000 |
1925 | @@ -73,14 +73,14 @@ |
1926 | make sure we safely detect this problem. |
1927 | """ |
1928 | repo = self.make_repo_with_extra_ghost_index() |
1929 | - reconciler = repo.reconcile(thorough=True) |
1930 | - self.assertTrue(reconciler.aborted, |
1931 | + result = repo.reconcile(thorough=True) |
1932 | + self.assertTrue(result.aborted, |
1933 | "reconcile should have aborted due to bad parents.") |
1934 | |
1935 | def test_does_not_abort_on_clean_repo(self): |
1936 | repo = self.make_repository('.') |
1937 | - reconciler = repo.reconcile(thorough=True) |
1938 | - self.assertFalse(reconciler.aborted, |
1939 | + result = repo.reconcile(thorough=True) |
1940 | + self.assertFalse(result.aborted, |
1941 | "reconcile should not have aborted on an unbroken repository.") |
1942 | |
1943 | |
1944 | @@ -147,11 +147,11 @@ |
1945 | self.make_repository('empty') |
1946 | d = BzrDir.open(self.get_url('empty')) |
1947 | # calling on a empty repository should do nothing |
1948 | - reconciler = d.find_repository().reconcile(**kwargs) |
1949 | + result = d.find_repository().reconcile(**kwargs) |
1950 | # no inconsistent parents should have been found |
1951 | - self.assertEqual(0, reconciler.inconsistent_parents) |
1952 | + self.assertEqual(0, result.inconsistent_parents) |
1953 | # and no garbage inventories |
1954 | - self.assertEqual(0, reconciler.garbage_inventories) |
1955 | + self.assertEqual(0, result.garbage_inventories) |
1956 | # and no backup weave should have been needed/made. |
1957 | self.checkNoBackupInventory(d) |
1958 | |
1959 | @@ -187,11 +187,11 @@ |
1960 | if not repo._reconcile_does_inventory_gc: |
1961 | raise TestSkipped('Irrelevant test') |
1962 | self.checkUnreconciled(d, repo.reconcile()) |
1963 | - reconciler = repo.reconcile(thorough=True) |
1964 | + result = repo.reconcile(thorough=True) |
1965 | # no bad parents |
1966 | - self.assertEqual(0, reconciler.inconsistent_parents) |
1967 | + self.assertEqual(0, result.inconsistent_parents) |
1968 | # and one garbage inventory |
1969 | - self.assertEqual(1, reconciler.garbage_inventories) |
1970 | + self.assertEqual(1, result.garbage_inventories) |
1971 | self.check_missing_was_removed(repo) |
1972 | |
1973 | def check_thorough_reweave_missing_revision(self, aBzrDir, reconcile, |
1974 | @@ -241,8 +241,7 @@ |
1975 | |
1976 | def reconcile(): |
1977 | reconciler = Reconciler(d) |
1978 | - reconciler.reconcile() |
1979 | - return reconciler |
1980 | + return reconciler.reconcile() |
1981 | self.check_thorough_reweave_missing_revision(d, reconcile) |
1982 | |
1983 | def test_reweave_inventory_without_revision_and_ghost(self): |
1984 | |
1985 | === modified file 'breezy/tests/per_repository_vf/test_repository.py' |
1986 | --- breezy/tests/per_repository_vf/test_repository.py 2018-11-18 01:02:16 +0000 |
1987 | +++ breezy/tests/per_repository_vf/test_repository.py 2019-01-01 23:11:42 +0000 |
1988 | @@ -271,7 +271,8 @@ |
1989 | with tree.lock_write(): |
1990 | self.assertEqual(set(), set(repo.texts.keys())) |
1991 | tree.add(['foo'], [file_id], ['file']) |
1992 | - tree.put_file_bytes_non_atomic('foo', b'content\n') |
1993 | + tree.put_file_bytes_non_atomic( |
1994 | + 'foo', b'content\n') |
1995 | try: |
1996 | rev_key = (tree.commit("foo"),) |
1997 | except errors.IllegalPath: |
1998 | |
1999 | === modified file 'breezy/tests/per_workingtree/test_parents.py' |
2000 | --- breezy/tests/per_workingtree/test_parents.py 2018-11-18 00:25:19 +0000 |
2001 | +++ breezy/tests/per_workingtree/test_parents.py 2019-01-01 23:11:42 +0000 |
2002 | @@ -466,8 +466,8 @@ |
2003 | _mod_revision.NULL_REVISION) |
2004 | changes = shape_tree.iter_changes( |
2005 | base_tree) |
2006 | - list(builder.record_iter_changes(shape_tree, |
2007 | - base_tree.get_revision_id(), changes)) |
2008 | + list(builder.record_iter_changes( |
2009 | + shape_tree, base_tree.get_revision_id(), changes)) |
2010 | builder.finish_inventory() |
2011 | builder.commit("Message") |
2012 | |
2013 | |
2014 | === modified file 'breezy/tests/test_diff.py' |
2015 | --- breezy/tests/test_diff.py 2018-11-11 04:08:32 +0000 |
2016 | +++ breezy/tests/test_diff.py 2019-01-01 23:11:42 +0000 |
2017 | @@ -699,7 +699,7 @@ |
2018 | |
2019 | class DiffWasIs(diff.DiffPath): |
2020 | |
2021 | - def diff(self, file_id, old_path, new_path, old_kind, new_kind): |
2022 | + def diff(self, old_path, new_path, old_kind, new_kind): |
2023 | self.to_file.write(b'was: ') |
2024 | self.to_file.write(self.old_tree.get_file(old_path).read()) |
2025 | self.to_file.write(b'is: ') |
2026 | @@ -728,20 +728,19 @@ |
2027 | self.new_tree.add('newdir') |
2028 | self.new_tree.add('newdir/newfile', b'file-id') |
2029 | differ = diff.DiffText(self.old_tree, self.new_tree, BytesIO()) |
2030 | - differ.diff_text('olddir/oldfile', None, 'old label', |
2031 | - 'new label', b'file-id', None) |
2032 | + differ.diff_text('olddir/oldfile', None, 'old label', 'new label') |
2033 | self.assertEqual( |
2034 | b'--- old label\n+++ new label\n@@ -1,1 +0,0 @@\n-old\n\n', |
2035 | differ.to_file.getvalue()) |
2036 | differ.to_file.seek(0) |
2037 | differ.diff_text(None, 'newdir/newfile', |
2038 | - 'old label', 'new label', None, b'file-id') |
2039 | + 'old label', 'new label') |
2040 | self.assertEqual( |
2041 | b'--- old label\n+++ new label\n@@ -0,0 +1,1 @@\n+new\n\n', |
2042 | differ.to_file.getvalue()) |
2043 | differ.to_file.seek(0) |
2044 | differ.diff_text('olddir/oldfile', 'newdir/newfile', |
2045 | - 'old label', 'new label', b'file-id', b'file-id') |
2046 | + 'old label', 'new label') |
2047 | self.assertEqual( |
2048 | b'--- old label\n+++ new label\n@@ -1,1 +1,1 @@\n-old\n+new\n\n', |
2049 | differ.to_file.getvalue()) |
2050 | @@ -789,7 +788,7 @@ |
2051 | ('new-tree/newdir/newfile', b'new\n')]) |
2052 | self.new_tree.add('newdir') |
2053 | self.new_tree.add('newdir/newfile', b'file-id') |
2054 | - self.differ.diff(b'file-id', 'olddir/oldfile', 'newdir/newfile') |
2055 | + self.differ.diff('olddir/oldfile', 'newdir/newfile') |
2056 | self.assertContainsRe( |
2057 | self.differ.to_file.getvalue(), |
2058 | br'--- olddir/oldfile.*\n\+\+\+ newdir/newfile.*\n\@\@ -1,1 \+1,1' |
2059 | @@ -805,7 +804,7 @@ |
2060 | os.symlink('new', 'new-tree/newdir/newfile') |
2061 | self.new_tree.add('newdir') |
2062 | self.new_tree.add('newdir/newfile', b'file-id') |
2063 | - self.differ.diff(b'file-id', 'olddir/oldfile', 'newdir/newfile') |
2064 | + self.differ.diff('olddir/oldfile', 'newdir/newfile') |
2065 | self.assertContainsRe( |
2066 | self.differ.to_file.getvalue(), |
2067 | br'--- olddir/oldfile.*\n\+\+\+ newdir/newfile.*\n\@\@ -1,1 \+0,0' |
2068 | @@ -816,7 +815,7 @@ |
2069 | def test_diff_directory(self): |
2070 | self.build_tree(['new-tree/new-dir/']) |
2071 | self.new_tree.add('new-dir', b'new-dir-id') |
2072 | - self.differ.diff(b'new-dir-id', None, 'new-dir') |
2073 | + self.differ.diff(None, 'new-dir') |
2074 | self.assertEqual(self.differ.to_file.getvalue(), b'') |
2075 | |
2076 | def create_old_new(self): |
2077 | @@ -838,7 +837,7 @@ |
2078 | differ = diff.DiffTree(self.old_tree, self.new_tree, BytesIO()) |
2079 | finally: |
2080 | diff.DiffTree.diff_factories = old_diff_factories |
2081 | - differ.diff(b'file-id', 'olddir/oldfile', 'newdir/newfile') |
2082 | + differ.diff('olddir/oldfile', 'newdir/newfile') |
2083 | self.assertNotContainsRe( |
2084 | differ.to_file.getvalue(), |
2085 | br'--- olddir/oldfile.*\n\+\+\+ newdir/newfile.*\n\@\@ -1,1 \+1,1' |
2086 | @@ -850,7 +849,7 @@ |
2087 | self.create_old_new() |
2088 | differ = diff.DiffTree(self.old_tree, self.new_tree, BytesIO(), |
2089 | extra_factories=[DiffWasIs.from_diff_tree]) |
2090 | - differ.diff(b'file-id', 'olddir/oldfile', 'newdir/newfile') |
2091 | + differ.diff('olddir/oldfile', 'newdir/newfile') |
2092 | self.assertNotContainsRe( |
2093 | differ.to_file.getvalue(), |
2094 | br'--- olddir/oldfile.*\n\+\+\+ newdir/newfile.*\n\@\@ -1,1 \+1,1' |
2095 | @@ -1492,7 +1491,7 @@ |
2096 | self.addCleanup(diff_obj.finish) |
2097 | self.assertContainsRe(diff_obj._root, 'brz-diff-[^/]*') |
2098 | old_path, new_path = diff_obj._prepare_files( |
2099 | - 'oldname', 'newname', file_id=b'file-id') |
2100 | + 'oldname', 'newname') |
2101 | self.assertContainsRe(old_path, 'old/oldname$') |
2102 | self.assertEqual(315532800, os.stat(old_path).st_mtime) |
2103 | self.assertContainsRe(new_path, 'tree/newname$') |
2104 | @@ -1501,7 +1500,7 @@ |
2105 | if osutils.host_os_dereferences_symlinks(): |
2106 | self.assertTrue(os.path.samefile('tree/newname', new_path)) |
2107 | # make sure we can create files with the same parent directories |
2108 | - diff_obj._prepare_files('oldname2', 'newname2', file_id=b'file2-id') |
2109 | + diff_obj._prepare_files('oldname2', 'newname2') |
2110 | |
2111 | |
2112 | class TestDiffFromToolEncodedFilename(tests.TestCaseWithTransport): |
2113 | |
2114 | === modified file 'breezy/tests/test_foreign.py' |
2115 | --- breezy/tests/test_foreign.py 2018-11-16 18:33:17 +0000 |
2116 | +++ breezy/tests/test_foreign.py 2019-01-01 23:11:42 +0000 |
2117 | @@ -205,10 +205,10 @@ |
2118 | parent_revids = [] |
2119 | else: |
2120 | parent_revids = [parent_revid] |
2121 | - builder = self.target.get_commit_builder(parent_revids, |
2122 | - self.target.get_config_stack(), rev.timestamp, |
2123 | - rev.timezone, rev.committer, rev.properties, |
2124 | - new_revid) |
2125 | + builder = self.target.get_commit_builder( |
2126 | + parent_revids, self.target.get_config_stack(), rev.timestamp, |
2127 | + rev.timezone, rev.committer, rev.properties, |
2128 | + new_revid) |
2129 | try: |
2130 | parent_tree = self.target.repository.revision_tree( |
2131 | parent_revid) |
2132 | |
2133 | === modified file 'breezy/tests/test_reconcile.py' |
2134 | --- breezy/tests/test_reconcile.py 2017-06-10 16:40:42 +0000 |
2135 | +++ breezy/tests/test_reconcile.py 2019-01-01 23:11:42 +0000 |
2136 | @@ -38,14 +38,14 @@ |
2137 | child = bzrdir.BzrDirMetaFormat1().initialize('child') |
2138 | self.assertRaises(errors.NoRepositoryPresent, child.open_repository) |
2139 | reconciler = Reconciler(child) |
2140 | - reconciler.reconcile() |
2141 | + result = reconciler.reconcile() |
2142 | # smoke test for reconcile appears to work too. |
2143 | reconcile(child) |
2144 | # no inconsistent parents should have been found |
2145 | # but the values should have been set. |
2146 | - self.assertEqual(0, reconciler.inconsistent_parents) |
2147 | + self.assertEqual(0, result.inconsistent_parents) |
2148 | # and no garbage inventories |
2149 | - self.assertEqual(0, reconciler.garbage_inventories) |
2150 | + self.assertEqual(0, result.garbage_inventories) |
2151 | |
2152 | |
2153 | class TestReconciler(tests.TestCaseWithTransport): |
2154 | @@ -53,20 +53,20 @@ |
2155 | def test_reconciler_with_no_branch(self): |
2156 | repo = self.make_repository('repo') |
2157 | reconciler = Reconciler(repo.controldir) |
2158 | - reconciler.reconcile() |
2159 | + result = reconciler.reconcile() |
2160 | # no inconsistent parents should have been found |
2161 | # but the values should have been set. |
2162 | - self.assertEqual(0, reconciler.inconsistent_parents) |
2163 | + self.assertEqual(0, result.inconsistent_parents) |
2164 | # and no garbage inventories |
2165 | - self.assertEqual(0, reconciler.garbage_inventories) |
2166 | - self.assertIs(None, reconciler.fixed_branch_history) |
2167 | + self.assertEqual(0, result.garbage_inventories) |
2168 | + self.assertIs(None, result.fixed_branch_history) |
2169 | |
2170 | def test_reconciler_finds_branch(self): |
2171 | a_branch = self.make_branch('a_branch') |
2172 | reconciler = Reconciler(a_branch.controldir) |
2173 | - reconciler.reconcile() |
2174 | + result = reconciler.reconcile() |
2175 | |
2176 | # It should have checked the repository, and the branch |
2177 | - self.assertEqual(0, reconciler.inconsistent_parents) |
2178 | - self.assertEqual(0, reconciler.garbage_inventories) |
2179 | - self.assertIs(False, reconciler.fixed_branch_history) |
2180 | + self.assertEqual(0, result.inconsistent_parents) |
2181 | + self.assertEqual(0, result.garbage_inventories) |
2182 | + self.assertIs(False, result.fixed_branch_history) |
Somewhat rubber-stamping code move, motive seems reasonable.