Merge lp:~jelmer/brz/move-reconcile-1 into lp:brz

Proposed by Jelmer Vernooij
Status: Merged
Approved by: Jelmer Vernooij
Approved revision: no longer in the source branch.
Merge reported by: The Breezy Bot
Merged at revision: not available
Proposed branch: lp:~jelmer/brz/move-reconcile-1
Merge into: lp:brz
Prerequisite: lp:~jelmer/brz/python3.7
Diff against target: 2182 lines (+682/-686)
31 files modified
breezy/annotate.py (+1/-1)
breezy/branch.py (+5/-6)
breezy/builtins.py (+4/-6)
breezy/bzr/branch.py (+7/-0)
breezy/bzr/groupcompress_repo.py (+2/-3)
breezy/bzr/knitrepo.py (+2/-3)
breezy/bzr/pack_repo.py (+2/-3)
breezy/bzr/reconcile.py (+471/-0)
breezy/bzr/remote.py (+12/-2)
breezy/bzr/vf_repository.py (+9/-2)
breezy/bzr/workingtree_4.py (+1/-1)
breezy/commit.py (+1/-1)
breezy/diff.py (+41/-54)
breezy/git/branch.py (+6/-0)
breezy/git/commit.py (+1/-1)
breezy/git/repository.py (+4/-28)
breezy/git/tests/test_blackbox.py (+15/-0)
breezy/git/tree.py (+1/-1)
breezy/plugins/fastimport/revision_store.py (+1/-1)
breezy/reconcile.py (+23/-473)
breezy/repository.py (+2/-6)
breezy/shelf_ui.py (+2/-3)
breezy/tests/per_branch/test_reconcile.py (+2/-2)
breezy/tests/per_intertree/test_compare.py (+4/-2)
breezy/tests/per_repository/test_commit_builder.py (+22/-45)
breezy/tests/per_repository_vf/test_reconcile.py (+11/-12)
breezy/tests/per_repository_vf/test_repository.py (+2/-1)
breezy/tests/per_workingtree/test_parents.py (+2/-2)
breezy/tests/test_diff.py (+11/-12)
breezy/tests/test_foreign.py (+4/-4)
breezy/tests/test_reconcile.py (+11/-11)
To merge this branch: bzr merge lp:~jelmer/brz/move-reconcile-1
Reviewer: Martin Packman
Status: Approve
Review via email: mp+359941@code.launchpad.net

Commit message

Move bzr-specific reconcile bits to breezy.bzr.reconcile.

Description of the change

Move the bzr-specific reconcile machinery (RepoReconciler, now VersionedFileRepoReconciler, plus KnitReconciler, PackReconciler and BranchReconciler) from breezy.reconcile into a new breezy.bzr.reconcile module. breezy.reconcile keeps the format-neutral reconcile() entry point and the Reconciler class, and gains a ReconcileResult class; Branch.reconcile() and Repository.reconcile() now return a ReconcileResult rather than the reconciler object itself.
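
A minimal usage sketch of the new calling convention, assuming a bzr branch already exists at './example' (the path is hypothetical; the attribute names come from the ReconcileResult handling in the diff below, and a standalone script may also need breezy.initialize() and breezy.plugin.load_plugins() first):

    from breezy.branch import Branch

    branch = Branch.open('example')

    # Branch.reconcile() and Repository.reconcile() now return a
    # ReconcileResult rather than the reconciler object itself.
    branch_result = branch.reconcile(thorough=True)
    print(branch_result.fixed_history)  # True if last_revision_info needed fixing

    repo_result = branch.repository.reconcile(thorough=True)
    # Not every format populates every attribute (the Git backend in this
    # proposal only sets 'aborted'), so read them the way Reconciler itself
    # does, with getattr and a default.
    for attr in ('inconsistent_parents', 'garbage_inventories', 'aborted'):
        print(attr, getattr(repo_result, attr, None))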

Revision history for this message
Martin Packman (gz) wrote:

Somewhat rubber-stamping this code move; the motive seems reasonable.

review: Approve

Preview Diff

=== modified file 'breezy/annotate.py'
--- breezy/annotate.py 2018-11-16 18:33:17 +0000
+++ breezy/annotate.py 2019-01-01 23:11:42 +0000
@@ -54,7 +54,7 @@
5454
55def annotate_file_tree(tree, path, to_file, verbose=False, full=False,55def annotate_file_tree(tree, path, to_file, verbose=False, full=False,
56 show_ids=False, branch=None):56 show_ids=False, branch=None):
57 """Annotate file_id in a tree.57 """Annotate path in a tree.
5858
59 The tree should already be read_locked() when annotate_file_tree is called.59 The tree should already be read_locked() when annotate_file_tree is called.
6060
6161
=== modified file 'breezy/branch.py'
--- breezy/branch.py 2018-11-16 18:40:46 +0000
+++ breezy/branch.py 2019-01-01 23:11:42 +0000
@@ -1414,12 +1414,11 @@
1414 return tree1414 return tree
14151415
1416 def reconcile(self, thorough=True):1416 def reconcile(self, thorough=True):
1417 """Make sure the data stored in this branch is consistent."""1417 """Make sure the data stored in this branch is consistent.
1418 from breezy.reconcile import BranchReconciler1418
1419 with self.lock_write():1419 :return: A `ReconcileResult` object.
1420 reconciler = BranchReconciler(self, thorough=thorough)1420 """
1421 reconciler.reconcile()1421 raise NotImplementedError(self.reconcile)
1422 return reconciler
14231422
1424 def reference_parent(self, path, file_id=None, possible_transports=None):1423 def reference_parent(self, path, file_id=None, possible_transports=None):
1425 """Return the parent branch for a tree-reference file_id1424 """Return the parent branch for a tree-reference file_id
14261425
=== modified file 'breezy/builtins.py'
--- breezy/builtins.py 2019-01-01 21:53:56 +0000
+++ breezy/builtins.py 2019-01-01 23:11:42 +0000
@@ -5224,17 +5224,15 @@
5224 tree = _get_one_revision_tree('annotate', revision, branch=branch)5224 tree = _get_one_revision_tree('annotate', revision, branch=branch)
5225 self.add_cleanup(tree.lock_read().unlock)5225 self.add_cleanup(tree.lock_read().unlock)
5226 if wt is not None and revision is None:5226 if wt is not None and revision is None:
5227 file_id = wt.path2id(relpath)5227 if not wt.is_versioned(relpath):
5228 else:5228 raise errors.NotVersionedError(relpath)
5229 file_id = tree.path2id(relpath)
5230 if file_id is None:
5231 raise errors.NotVersionedError(filename)
5232 if wt is not None and revision is None:
5233 # If there is a tree and we're not annotating historical5229 # If there is a tree and we're not annotating historical
5234 # versions, annotate the working tree's content.5230 # versions, annotate the working tree's content.
5235 annotate_file_tree(wt, relpath, self.outf, long, all,5231 annotate_file_tree(wt, relpath, self.outf, long, all,
5236 show_ids=show_ids)5232 show_ids=show_ids)
5237 else:5233 else:
5234 if not tree.is_versioned(relpath):
5235 raise errors.NotVersionedError(relpath)
5238 annotate_file_tree(tree, relpath, self.outf, long, all,5236 annotate_file_tree(tree, relpath, self.outf, long, all,
5239 show_ids=show_ids, branch=branch)5237 show_ids=show_ids, branch=branch)
52405238
52415239
=== modified file 'breezy/bzr/branch.py'
--- breezy/bzr/branch.py 2018-11-11 15:40:12 +0000
+++ breezy/bzr/branch.py 2019-01-01 23:11:42 +0000
@@ -443,6 +443,13 @@
443 super(BzrBranch, self)._clear_cached_state()443 super(BzrBranch, self)._clear_cached_state()
444 self._tags_bytes = None444 self._tags_bytes = None
445445
446 def reconcile(self, thorough=True):
447 """Make sure the data stored in this branch is consistent."""
448 from .reconcile import BranchReconciler
449 with self.lock_write():
450 reconciler = BranchReconciler(self, thorough=thorough)
451 return reconciler.reconcile()
452
446453
447class BzrBranch8(BzrBranch):454class BzrBranch8(BzrBranch):
448 """A branch that stores tree-reference locations."""455 """A branch that stores tree-reference locations."""
449456
=== modified file 'breezy/bzr/groupcompress_repo.py'
--- breezy/bzr/groupcompress_repo.py 2018-11-11 04:08:32 +0000
+++ breezy/bzr/groupcompress_repo.py 2019-01-01 23:11:42 +0000
@@ -1098,12 +1098,11 @@
1098 """Reconcile this repository to make sure all CHKs are in canonical1098 """Reconcile this repository to make sure all CHKs are in canonical
1099 form.1099 form.
1100 """1100 """
1101 from breezy.reconcile import PackReconciler1101 from .reconcile import PackReconciler
1102 with self.lock_write():1102 with self.lock_write():
1103 reconciler = PackReconciler(1103 reconciler = PackReconciler(
1104 self, thorough=True, canonicalize_chks=True)1104 self, thorough=True, canonicalize_chks=True)
1105 reconciler.reconcile()1105 return reconciler.reconcile()
1106 return reconciler
11071106
1108 def _reconcile_pack(self, collection, packs, extension, revs, pb):1107 def _reconcile_pack(self, collection, packs, extension, revs, pb):
1109 packer = GCCHKReconcilePacker(collection, packs, extension)1108 packer = GCCHKReconcilePacker(collection, packs, extension)
11101109
=== modified file 'breezy/bzr/knitrepo.py'
--- breezy/bzr/knitrepo.py 2018-11-11 04:08:32 +0000
+++ breezy/bzr/knitrepo.py 2019-01-01 23:11:42 +0000
@@ -206,11 +206,10 @@
206206
207 def reconcile(self, other=None, thorough=False):207 def reconcile(self, other=None, thorough=False):
208 """Reconcile this repository."""208 """Reconcile this repository."""
209 from breezy.reconcile import KnitReconciler209 from .reconcile import KnitReconciler
210 with self.lock_write():210 with self.lock_write():
211 reconciler = KnitReconciler(self, thorough=thorough)211 reconciler = KnitReconciler(self, thorough=thorough)
212 reconciler.reconcile()212 return reconciler.reconcile()
213 return reconciler
214213
215 def _make_parents_provider(self):214 def _make_parents_provider(self):
216 return _KnitsParentsProvider(self.revisions)215 return _KnitsParentsProvider(self.revisions)
217216
=== modified file 'breezy/bzr/pack_repo.py'
--- breezy/bzr/pack_repo.py 2018-11-12 01:41:38 +0000
+++ breezy/bzr/pack_repo.py 2019-01-01 23:11:42 +0000
@@ -1819,11 +1819,10 @@
18191819
1820 def reconcile(self, other=None, thorough=False):1820 def reconcile(self, other=None, thorough=False):
1821 """Reconcile this repository."""1821 """Reconcile this repository."""
1822 from breezy.reconcile import PackReconciler1822 from .reconcile import PackReconciler
1823 with self.lock_write():1823 with self.lock_write():
1824 reconciler = PackReconciler(self, thorough=thorough)1824 reconciler = PackReconciler(self, thorough=thorough)
1825 reconciler.reconcile()1825 return reconciler.reconcile()
1826 return reconciler
18271826
1828 def _reconcile_pack(self, collection, packs, extension, revs, pb):1827 def _reconcile_pack(self, collection, packs, extension, revs, pb):
1829 raise NotImplementedError(self._reconcile_pack)1828 raise NotImplementedError(self._reconcile_pack)
18301829
=== added file 'breezy/bzr/reconcile.py'
--- breezy/bzr/reconcile.py 1970-01-01 00:00:00 +0000
+++ breezy/bzr/reconcile.py 2019-01-01 23:11:42 +0000
@@ -0,0 +1,471 @@
1# Copyright (C) 2006-2010 Canonical Ltd
2#
3# This program is free software; you can redistribute it and/or modify
4# it under the terms of the GNU General Public License as published by
5# the Free Software Foundation; either version 2 of the License, or
6# (at your option) any later version.
7#
8# This program is distributed in the hope that it will be useful,
9# but WITHOUT ANY WARRANTY; without even the implied warranty of
10# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11# GNU General Public License for more details.
12#
13# You should have received a copy of the GNU General Public License
14# along with this program; if not, write to the Free Software
15# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16
17"""Reconcilers are able to fix some potential data errors in a branch."""
18
19from __future__ import absolute_import
20
21__all__ = [
22 'BranchReconciler',
23 'KnitReconciler',
24 'PackReconciler',
25 'VersionedFileRepoReconciler',
26 ]
27
28from .. import (
29 cleanup,
30 errors,
31 revision as _mod_revision,
32 ui,
33 )
34from ..reconcile import ReconcileResult
35from ..i18n import gettext
36from ..trace import mutter
37from ..tsort import topo_sort
38from .versionedfile import AdapterFactory, FulltextContentFactory
39
40
41class VersionedFileRepoReconciler(object):
42 """Reconciler that reconciles a repository.
43
44 The goal of repository reconciliation is to make any derived data
45 consistent with the core data committed by a user. This can involve
46 reindexing, or removing unreferenced data if that can interfere with
47 queries in a given repository.
48
49 Currently this consists of an inventory reweave with revision cross-checks.
50 """
51
52 def __init__(self, repo, other=None, thorough=False):
53 """Construct a RepoReconciler.
54
55 :param thorough: perform a thorough check which may take longer but
56 will correct non-data loss issues such as incorrect
57 cached data.
58 """
59 self.garbage_inventories = 0
60 self.inconsistent_parents = 0
61 self.aborted = False
62 self.repo = repo
63 self.thorough = thorough
64
65 def reconcile(self):
66 """Perform reconciliation.
67
68 After reconciliation the following attributes document found issues:
69
70 * `inconsistent_parents`: The number of revisions in the repository
71 whose ancestry was being reported incorrectly.
72 * `garbage_inventories`: The number of inventory objects without
73 revisions that were garbage collected.
74 """
75 with self.repo.lock_write(), \
76 ui.ui_factory.nested_progress_bar() as self.pb:
77 self._reconcile_steps()
78 ret = ReconcileResult()
79 ret.aborted = self.aborted
80 ret.garbage_inventories = self.garbage_inventories
81 ret.inconsistent_parents = self.inconsistent_parents
82 return ret
83
84 def _reconcile_steps(self):
85 """Perform the steps to reconcile this repository."""
86 self._reweave_inventory()
87
88 def _reweave_inventory(self):
89 """Regenerate the inventory weave for the repository from scratch.
90
91 This is a smart function: it will only do the reweave if doing it
92 will correct data issues. The self.thorough flag controls whether
93 only data-loss causing issues (!self.thorough) or all issues
94 (self.thorough) are treated as requiring the reweave.
95 """
96 transaction = self.repo.get_transaction()
97 self.pb.update(gettext('Reading inventory data'))
98 self.inventory = self.repo.inventories
99 self.revisions = self.repo.revisions
100 # the total set of revisions to process
101 self.pending = {key[-1] for key in self.revisions.keys()}
102
103 # mapping from revision_id to parents
104 self._rev_graph = {}
105 # errors that we detect
106 self.inconsistent_parents = 0
107 # we need the revision id of each revision and its available parents list
108 self._setup_steps(len(self.pending))
109 for rev_id in self.pending:
110 # put a revision into the graph.
111 self._graph_revision(rev_id)
112 self._check_garbage_inventories()
113 # if there are no inconsistent_parents and
114 # (no garbage inventories or we are not doing a thorough check)
115 if (not self.inconsistent_parents
116 and (not self.garbage_inventories or not self.thorough)):
117 ui.ui_factory.note(gettext('Inventory ok.'))
118 return
119 self.pb.update(gettext('Backing up inventory'), 0, 0)
120 self.repo._backup_inventory()
121 ui.ui_factory.note(gettext('Backup inventory created.'))
122 new_inventories = self.repo._temp_inventories()
123
124 # we have topological order of revisions and non ghost parents ready.
125 self._setup_steps(len(self._rev_graph))
126 revision_keys = [(rev_id,) for rev_id in topo_sort(self._rev_graph)]
127 stream = self._change_inv_parents(
128 self.inventory.get_record_stream(revision_keys, 'unordered', True),
129 self._new_inv_parents,
130 set(revision_keys))
131 new_inventories.insert_record_stream(stream)
132 # if this worked, the set of new_inventories.keys should equal
133 # self.pending
134 if not (set(new_inventories.keys())
135 == {(revid,) for revid in self.pending}):
136 raise AssertionError()
137 self.pb.update(gettext('Writing weave'))
138 self.repo._activate_new_inventory()
139 self.inventory = None
140 ui.ui_factory.note(gettext('Inventory regenerated.'))
141
142 def _new_inv_parents(self, revision_key):
143 """Lookup ghost-filtered parents for revision_key."""
144 # Use the filtered ghostless parents list:
145 return tuple([(revid,) for revid in self._rev_graph[revision_key[-1]]])
146
147 def _change_inv_parents(self, stream, get_parents, all_revision_keys):
148 """Adapt a record stream to reconcile the parents."""
149 for record in stream:
150 wanted_parents = get_parents(record.key)
151 if wanted_parents and wanted_parents[0] not in all_revision_keys:
152 # The check for the left most parent only handles knit
153 # compressors, but this code only applies to knit and weave
154 # repositories anyway.
155 bytes = record.get_bytes_as('fulltext')
156 yield FulltextContentFactory(record.key, wanted_parents, record.sha1, bytes)
157 else:
158 adapted_record = AdapterFactory(
159 record.key, wanted_parents, record)
160 yield adapted_record
161 self._reweave_step('adding inventories')
162
163 def _setup_steps(self, new_total):
164 """Setup the markers we need to control the progress bar."""
165 self.total = new_total
166 self.count = 0
167
168 def _graph_revision(self, rev_id):
169 """Load a revision into the revision graph."""
170 # pick a random revision
171 # analyse revision id rev_id and put it in the stack.
172 self._reweave_step('loading revisions')
173 rev = self.repo.get_revision_reconcile(rev_id)
174 parents = []
175 for parent in rev.parent_ids:
176 if self._parent_is_available(parent):
177 parents.append(parent)
178 else:
179 mutter('found ghost %s', parent)
180 self._rev_graph[rev_id] = parents
181
182 def _check_garbage_inventories(self):
183 """Check for garbage inventories which we cannot trust
184
185 We cant trust them because their pre-requisite file data may not
186 be present - all we know is that their revision was not installed.
187 """
188 if not self.thorough:
189 return
190 inventories = set(self.inventory.keys())
191 revisions = set(self.revisions.keys())
192 garbage = inventories.difference(revisions)
193 self.garbage_inventories = len(garbage)
194 for revision_key in garbage:
195 mutter('Garbage inventory {%s} found.', revision_key[-1])
196
197 def _parent_is_available(self, parent):
198 """True if parent is a fully available revision
199
200 A fully available revision has a inventory and a revision object in the
201 repository.
202 """
203 if parent in self._rev_graph:
204 return True
205 inv_present = (1 == len(self.inventory.get_parent_map([(parent,)])))
206 return (inv_present and self.repo.has_revision(parent))
207
208 def _reweave_step(self, message):
209 """Mark a single step of regeneration complete."""
210 self.pb.update(message, self.count, self.total)
211 self.count += 1
212
213
214class KnitReconciler(VersionedFileRepoReconciler):
215 """Reconciler that reconciles a knit format repository.
216
217 This will detect garbage inventories and remove them in thorough mode.
218 """
219
220 def _reconcile_steps(self):
221 """Perform the steps to reconcile this repository."""
222 if self.thorough:
223 try:
224 self._load_indexes()
225 except errors.BzrCheckError:
226 self.aborted = True
227 return
228 # knits never suffer this
229 self._gc_inventory()
230 self._fix_text_parents()
231
232 def _load_indexes(self):
233 """Load indexes for the reconciliation."""
234 self.transaction = self.repo.get_transaction()
235 self.pb.update(gettext('Reading indexes'), 0, 2)
236 self.inventory = self.repo.inventories
237 self.pb.update(gettext('Reading indexes'), 1, 2)
238 self.repo._check_for_inconsistent_revision_parents()
239 self.revisions = self.repo.revisions
240 self.pb.update(gettext('Reading indexes'), 2, 2)
241
242 def _gc_inventory(self):
243 """Remove inventories that are not referenced from the revision store."""
244 self.pb.update(gettext('Checking unused inventories'), 0, 1)
245 self._check_garbage_inventories()
246 self.pb.update(gettext('Checking unused inventories'), 1, 3)
247 if not self.garbage_inventories:
248 ui.ui_factory.note(gettext('Inventory ok.'))
249 return
250 self.pb.update(gettext('Backing up inventory'), 0, 0)
251 self.repo._backup_inventory()
252 ui.ui_factory.note(gettext('Backup Inventory created'))
253 # asking for '' should never return a non-empty weave
254 new_inventories = self.repo._temp_inventories()
255 # we have topological order of revisions and non ghost parents ready.
256 graph = self.revisions.get_parent_map(self.revisions.keys())
257 revision_keys = topo_sort(graph)
258 revision_ids = [key[-1] for key in revision_keys]
259 self._setup_steps(len(revision_keys))
260 stream = self._change_inv_parents(
261 self.inventory.get_record_stream(revision_keys, 'unordered', True),
262 graph.__getitem__,
263 set(revision_keys))
264 new_inventories.insert_record_stream(stream)
265 # if this worked, the set of new_inventory_vf.names should equal
266 # the revisionds list
267 if not(set(new_inventories.keys()) == set(revision_keys)):
268 raise AssertionError()
269 self.pb.update(gettext('Writing weave'))
270 self.repo._activate_new_inventory()
271 self.inventory = None
272 ui.ui_factory.note(gettext('Inventory regenerated.'))
273
274 def _fix_text_parents(self):
275 """Fix bad versionedfile parent entries.
276
277 It is possible for the parents entry in a versionedfile entry to be
278 inconsistent with the values in the revision and inventory.
279
280 This method finds entries with such inconsistencies, corrects their
281 parent lists, and replaces the versionedfile with a corrected version.
282 """
283 transaction = self.repo.get_transaction()
284 versions = [key[-1] for key in self.revisions.keys()]
285 mutter('Prepopulating revision text cache with %d revisions',
286 len(versions))
287 vf_checker = self.repo._get_versioned_file_checker()
288 bad_parents, unused_versions = vf_checker.check_file_version_parents(
289 self.repo.texts, self.pb)
290 text_index = vf_checker.text_index
291 per_id_bad_parents = {}
292 for key in unused_versions:
293 # Ensure that every file with unused versions gets rewritten.
294 # NB: This is really not needed, reconcile != pack.
295 per_id_bad_parents[key[0]] = {}
296 # Generate per-knit/weave data.
297 for key, details in bad_parents.items():
298 file_id = key[0]
299 rev_id = key[1]
300 knit_parents = tuple([parent[-1] for parent in details[0]])
301 correct_parents = tuple([parent[-1] for parent in details[1]])
302 file_details = per_id_bad_parents.setdefault(file_id, {})
303 file_details[rev_id] = (knit_parents, correct_parents)
304 file_id_versions = {}
305 for text_key in text_index:
306 versions_list = file_id_versions.setdefault(text_key[0], [])
307 versions_list.append(text_key[1])
308 # Do the reconcile of individual weaves.
309 for num, file_id in enumerate(per_id_bad_parents):
310 self.pb.update(gettext('Fixing text parents'), num,
311 len(per_id_bad_parents))
312 versions_with_bad_parents = per_id_bad_parents[file_id]
313 id_unused_versions = set(key[-1] for key in unused_versions
314 if key[0] == file_id)
315 if file_id in file_id_versions:
316 file_versions = file_id_versions[file_id]
317 else:
318 # This id was present in the disk store but is not referenced
319 # by any revision at all.
320 file_versions = []
321 self._fix_text_parent(file_id, versions_with_bad_parents,
322 id_unused_versions, file_versions)
323
324 def _fix_text_parent(self, file_id, versions_with_bad_parents,
325 unused_versions, all_versions):
326 """Fix bad versionedfile entries in a single versioned file."""
327 mutter('fixing text parent: %r (%d versions)', file_id,
328 len(versions_with_bad_parents))
329 mutter('(%d are unused)', len(unused_versions))
330 new_file_id = b'temp:%s' % file_id
331 new_parents = {}
332 needed_keys = set()
333 for version in all_versions:
334 if version in unused_versions:
335 continue
336 elif version in versions_with_bad_parents:
337 parents = versions_with_bad_parents[version][1]
338 else:
339 pmap = self.repo.texts.get_parent_map([(file_id, version)])
340 parents = [key[-1] for key in pmap[(file_id, version)]]
341 new_parents[(new_file_id, version)] = [
342 (new_file_id, parent) for parent in parents]
343 needed_keys.add((file_id, version))
344
345 def fix_parents(stream):
346 for record in stream:
347 bytes = record.get_bytes_as('fulltext')
348 new_key = (new_file_id, record.key[-1])
349 parents = new_parents[new_key]
350 yield FulltextContentFactory(new_key, parents, record.sha1, bytes)
351 stream = self.repo.texts.get_record_stream(
352 needed_keys, 'topological', True)
353 self.repo._remove_file_id(new_file_id)
354 self.repo.texts.insert_record_stream(fix_parents(stream))
355 self.repo._remove_file_id(file_id)
356 if len(new_parents):
357 self.repo._move_file_id(new_file_id, file_id)
358
359
360class PackReconciler(VersionedFileRepoReconciler):
361 """Reconciler that reconciles a pack based repository.
362
363 Garbage inventories do not affect ancestry queries, and removal is
364 considerably more expensive as there is no separate versioned file for
365 them, so they are not cleaned. In short it is currently a no-op.
366
367 In future this may be a good place to hook in annotation cache checking,
368 index recreation etc.
369 """
370
371 # XXX: The index corruption that _fix_text_parents performs is needed for
372 # packs, but not yet implemented. The basic approach is to:
373 # - lock the names list
374 # - perform a customised pack() that regenerates data as needed
375 # - unlock the names list
376 # https://bugs.launchpad.net/bzr/+bug/154173
377
378 def __init__(self, repo, other=None, thorough=False,
379 canonicalize_chks=False):
380 super(PackReconciler, self).__init__(repo, other=other,
381 thorough=thorough)
382 self.canonicalize_chks = canonicalize_chks
383
384 def _reconcile_steps(self):
385 """Perform the steps to reconcile this repository."""
386 if not self.thorough:
387 return
388 collection = self.repo._pack_collection
389 collection.ensure_loaded()
390 collection.lock_names()
391 try:
392 packs = collection.all_packs()
393 all_revisions = self.repo.all_revision_ids()
394 total_inventories = len(list(
395 collection.inventory_index.combined_index.iter_all_entries()))
396 if len(all_revisions):
397 if self.canonicalize_chks:
398 reconcile_meth = self.repo._canonicalize_chks_pack
399 else:
400 reconcile_meth = self.repo._reconcile_pack
401 new_pack = reconcile_meth(collection, packs, ".reconcile",
402 all_revisions, self.pb)
403 if new_pack is not None:
404 self._discard_and_save(packs)
405 else:
406 # only make a new pack when there is data to copy.
407 self._discard_and_save(packs)
408 self.garbage_inventories = total_inventories - len(list(
409 collection.inventory_index.combined_index.iter_all_entries()))
410 finally:
411 collection._unlock_names()
412
413 def _discard_and_save(self, packs):
414 """Discard some packs from the repository.
415
416 This removes them from the memory index, saves the in-memory index
417 which makes the newly reconciled pack visible and hides the packs to be
418 discarded, and finally renames the packs being discarded into the
419 obsolete packs directory.
420
421 :param packs: The packs to discard.
422 """
423 for pack in packs:
424 self.repo._pack_collection._remove_pack_from_memory(pack)
425 self.repo._pack_collection._save_pack_names()
426 self.repo._pack_collection._obsolete_packs(packs)
427
428
429class BranchReconciler(object):
430 """Reconciler that works on a branch."""
431
432 def __init__(self, a_branch, thorough=False):
433 self.fixed_history = None
434 self.thorough = thorough
435 self.branch = a_branch
436
437 def reconcile(self):
438 with self.branch.lock_write(), \
439 ui.ui_factory.nested_progress_bar() as self.pb:
440 ret = ReconcileResult()
441 ret.fixed_history = self._reconcile_steps()
442 return ret
443
444 def _reconcile_steps(self):
445 return self._reconcile_revision_history()
446
447 def _reconcile_revision_history(self):
448 last_revno, last_revision_id = self.branch.last_revision_info()
449 real_history = []
450 graph = self.branch.repository.get_graph()
451 try:
452 for revid in graph.iter_lefthand_ancestry(
453 last_revision_id, (_mod_revision.NULL_REVISION,)):
454 real_history.append(revid)
455 except errors.RevisionNotPresent:
456 pass # Hit a ghost left hand parent
457 real_history.reverse()
458 if last_revno != len(real_history):
459 # Technically for Branch5 formats, it is more efficient to use
460 # set_revision_history, as this will regenerate it again.
461 # Not really worth a whole BranchReconciler class just for this,
462 # though.
463 ui.ui_factory.note(gettext('Fixing last revision info {0} '
464 ' => {1}').format(
465 last_revno, len(real_history)))
466 self.branch.set_last_revision_info(len(real_history),
467 last_revision_id)
468 return True
469 else:
470 ui.ui_factory.note(gettext('revision_history ok.'))
471 return False
0472
=== modified file 'breezy/bzr/remote.py'
--- breezy/bzr/remote.py 2019-01-01 21:23:40 +0000
+++ breezy/bzr/remote.py 2019-01-01 23:11:42 +0000
@@ -2474,7 +2474,7 @@
2474 return self._real_repository._get_inventory_xml(revision_id)2474 return self._real_repository._get_inventory_xml(revision_id)
24752475
2476 def reconcile(self, other=None, thorough=False):2476 def reconcile(self, other=None, thorough=False):
2477 from ..reconcile import RepoReconciler2477 from ..reconcile import ReconcileResult
2478 with self.lock_write():2478 with self.lock_write():
2479 path = self.controldir._path_for_remote_call(self._client)2479 path = self.controldir._path_for_remote_call(self._client)
2480 try:2480 try:
@@ -2486,7 +2486,10 @@
2486 if response != (b'ok', ):2486 if response != (b'ok', ):
2487 raise errors.UnexpectedSmartServerResponse(response)2487 raise errors.UnexpectedSmartServerResponse(response)
2488 body = handler.read_body_bytes()2488 body = handler.read_body_bytes()
2489 result = RepoReconciler(self)2489 result = ReconcileResult()
2490 result.garbage_inventories = None
2491 result.inconsistent_parents = None
2492 result.aborted = None
2490 for line in body.split(b'\n'):2493 for line in body.split(b'\n'):
2491 if not line:2494 if not line:
2492 continue2495 continue
@@ -4139,6 +4142,13 @@
4139 self._ensure_real()4142 self._ensure_real()
4140 return self._real_branch.heads_to_fetch()4143 return self._real_branch.heads_to_fetch()
41414144
4145 def reconcile(self, thorough=True):
4146 """Make sure the data stored in this branch is consistent."""
4147 from .reconcile import BranchReconciler
4148 with self.lock_write():
4149 reconciler = BranchReconciler(self, thorough=thorough)
4150 return reconciler.reconcile()
4151
41424152
4143class RemoteConfig(object):4153class RemoteConfig(object):
4144 """A Config that reads and writes from smart verbs.4154 """A Config that reads and writes from smart verbs.
41454155
=== modified file 'breezy/bzr/vf_repository.py'
--- breezy/bzr/vf_repository.py 2018-11-29 23:42:41 +0000
+++ breezy/bzr/vf_repository.py 2019-01-01 23:11:42 +0000
@@ -294,7 +294,7 @@
294 or errored-on before record_iter_changes sees the item.294 or errored-on before record_iter_changes sees the item.
295 :param _entry_factory: Private method to bind entry_factory locally for295 :param _entry_factory: Private method to bind entry_factory locally for
296 performance.296 performance.
297 :return: A generator of (file_id, relpath, fs_hash) tuples for use with297 :return: A generator of (relpath, fs_hash) tuples for use with
298 tree._observed_sha1.298 tree._observed_sha1.
299 """299 """
300 # Create an inventory delta based on deltas between all the parents and300 # Create an inventory delta based on deltas between all the parents and
@@ -487,7 +487,7 @@
487 try:487 try:
488 entry.text_sha1, entry.text_size = self._add_file_to_weave(488 entry.text_sha1, entry.text_size = self._add_file_to_weave(
489 file_id, file_obj, heads, nostore_sha)489 file_id, file_obj, heads, nostore_sha)
490 yield file_id, change[1][1], (entry.text_sha1, stat_value)490 yield change[1][1], (entry.text_sha1, stat_value)
491 except errors.ExistingContent:491 except errors.ExistingContent:
492 # No content change against a carry_over parent492 # No content change against a carry_over parent
493 # Perhaps this should also yield a fs hash update?493 # Perhaps this should also yield a fs hash update?
@@ -1737,6 +1737,13 @@
1737 """Return a source for streaming from this repository."""1737 """Return a source for streaming from this repository."""
1738 return StreamSource(self, to_format)1738 return StreamSource(self, to_format)
17391739
1740 def reconcile(self, other=None, thorough=False):
1741 """Reconcile this repository."""
1742 from .reconcile import VersionedFileRepoReconciler
1743 with self.lock_write():
1744 reconciler = VersionedFileRepoReconciler(self, thorough=thorough)
1745 return reconciler.reconcile()
1746
17401747
1741class MetaDirVersionedFileRepository(MetaDirRepository,1748class MetaDirVersionedFileRepository(MetaDirRepository,
1742 VersionedFileRepository):1749 VersionedFileRepository):
17431750
=== modified file 'breezy/bzr/workingtree_4.py'
--- breezy/bzr/workingtree_4.py 2018-11-21 03:20:30 +0000
+++ breezy/bzr/workingtree_4.py 2019-01-01 23:11:42 +0000
@@ -1935,7 +1935,7 @@
1935 for path, identifier in desired_files:1935 for path, identifier in desired_files:
1936 entry = self._get_entry(path=path)1936 entry = self._get_entry(path=path)
1937 if entry == (None, None):1937 if entry == (None, None):
1938 raise errors.NoSuchFile(self, path)1938 raise errors.NoSuchFile(path)
1939 repo_desired_files.append((entry[0][2], entry[1][parent_index][4],1939 repo_desired_files.append((entry[0][2], entry[1][parent_index][4],
1940 identifier))1940 identifier))
1941 return self._repository.iter_files_bytes(repo_desired_files)1941 return self._repository.iter_files_bytes(repo_desired_files)
19421942
=== modified file 'breezy/commit.py'
--- breezy/commit.py 2018-11-16 23:15:15 +0000
+++ breezy/commit.py 2019-01-01 23:11:42 +0000
@@ -681,7 +681,7 @@
681 if self.exclude:681 if self.exclude:
682 iter_changes = filter_excluded(iter_changes, self.exclude)682 iter_changes = filter_excluded(iter_changes, self.exclude)
683 iter_changes = self._filter_iter_changes(iter_changes)683 iter_changes = self._filter_iter_changes(iter_changes)
684 for file_id, path, fs_hash in self.builder.record_iter_changes(684 for path, fs_hash in self.builder.record_iter_changes(
685 self.work_tree, self.basis_revid, iter_changes):685 self.work_tree, self.basis_revid, iter_changes):
686 self.work_tree._observed_sha1(path, fs_hash)686 self.work_tree._observed_sha1(path, fs_hash)
687687
688688
=== modified file 'breezy/diff.py'
--- breezy/diff.py 2018-11-18 19:48:57 +0000
+++ breezy/diff.py 2019-01-01 23:11:42 +0000
@@ -477,7 +477,7 @@
477 tree.unlock()477 tree.unlock()
478478
479479
480def _patch_header_date(tree, file_id, path):480def _patch_header_date(tree, path):
481 """Returns a timestamp suitable for use in a patch header."""481 """Returns a timestamp suitable for use in a patch header."""
482 try:482 try:
483 mtime = tree.get_file_mtime(path)483 mtime = tree.get_file_mtime(path)
@@ -526,10 +526,9 @@
526 diff_tree.to_file, diff_tree.path_encoding)526 diff_tree.to_file, diff_tree.path_encoding)
527527
528 @staticmethod528 @staticmethod
529 def _diff_many(differs, file_id, old_path, new_path, old_kind, new_kind):529 def _diff_many(differs, old_path, new_path, old_kind, new_kind):
530 for file_differ in differs:530 for file_differ in differs:
531 result = file_differ.diff(file_id, old_path, new_path, old_kind,531 result = file_differ.diff(old_path, new_path, old_kind, new_kind)
532 new_kind)
533 if result is not DiffPath.CANNOT_DIFF:532 if result is not DiffPath.CANNOT_DIFF:
534 return result533 return result
535 else:534 else:
@@ -553,10 +552,9 @@
553 def from_diff_tree(klass, diff_tree):552 def from_diff_tree(klass, diff_tree):
554 return klass(diff_tree.differs)553 return klass(diff_tree.differs)
555554
556 def diff(self, file_id, old_path, new_path, old_kind, new_kind):555 def diff(self, old_path, new_path, old_kind, new_kind):
557 """Perform comparison556 """Perform comparison
558557
559 :param file_id: The file_id of the file to compare
560 :param old_path: Path of the file in the old tree558 :param old_path: Path of the file in the old tree
561 :param new_path: Path of the file in the new tree559 :param new_path: Path of the file in the new tree
562 :param old_kind: Old file-kind of the file560 :param old_kind: Old file-kind of the file
@@ -564,17 +562,17 @@
564 """562 """
565 if None in (old_kind, new_kind):563 if None in (old_kind, new_kind):
566 return DiffPath.CANNOT_DIFF564 return DiffPath.CANNOT_DIFF
567 result = DiffPath._diff_many(self.differs, file_id, old_path,565 result = DiffPath._diff_many(
568 new_path, old_kind, None)566 self.differs, old_path, new_path, old_kind, None)
569 if result is DiffPath.CANNOT_DIFF:567 if result is DiffPath.CANNOT_DIFF:
570 return result568 return result
571 return DiffPath._diff_many(self.differs, file_id, old_path, new_path,569 return DiffPath._diff_many(
572 None, new_kind)570 self.differs, old_path, new_path, None, new_kind)
573571
574572
575class DiffDirectory(DiffPath):573class DiffDirectory(DiffPath):
576574
577 def diff(self, file_id, old_path, new_path, old_kind, new_kind):575 def diff(self, old_path, new_path, old_kind, new_kind):
578 """Perform comparison between two directories. (dummy)576 """Perform comparison between two directories. (dummy)
579577
580 """578 """
@@ -589,10 +587,9 @@
589587
590class DiffSymlink(DiffPath):588class DiffSymlink(DiffPath):
591589
592 def diff(self, file_id, old_path, new_path, old_kind, new_kind):590 def diff(self, old_path, new_path, old_kind, new_kind):
593 """Perform comparison between two symlinks591 """Perform comparison between two symlinks
594592
595 :param file_id: The file_id of the file to compare
596 :param old_path: Path of the file in the old tree593 :param old_path: Path of the file in the old tree
597 :param new_path: Path of the file in the new tree594 :param new_path: Path of the file in the new tree
598 :param old_kind: Old file-kind of the file595 :param old_kind: Old file-kind of the file
@@ -644,10 +641,9 @@
644 self.path_encoding = path_encoding641 self.path_encoding = path_encoding
645 self.context_lines = context_lines642 self.context_lines = context_lines
646643
647 def diff(self, file_id, old_path, new_path, old_kind, new_kind):644 def diff(self, old_path, new_path, old_kind, new_kind):
648 """Compare two files in unified diff format645 """Compare two files in unified diff format
649646
650 :param file_id: The file_id of the file to compare
651 :param old_path: Path of the file in the old tree647 :param old_path: Path of the file in the old tree
652 :param new_path: Path of the file in the new tree648 :param new_path: Path of the file in the new tree
653 :param old_kind: Old file-kind of the file649 :param old_kind: Old file-kind of the file
@@ -655,30 +651,25 @@
655 """651 """
656 if 'file' not in (old_kind, new_kind):652 if 'file' not in (old_kind, new_kind):
657 return self.CANNOT_DIFF653 return self.CANNOT_DIFF
658 from_file_id = to_file_id = file_id
659 if old_kind == 'file':654 if old_kind == 'file':
660 old_date = _patch_header_date(self.old_tree, file_id, old_path)655 old_date = _patch_header_date(self.old_tree, old_path)
661 elif old_kind is None:656 elif old_kind is None:
662 old_date = self.EPOCH_DATE657 old_date = self.EPOCH_DATE
663 from_file_id = None
664 else:658 else:
665 return self.CANNOT_DIFF659 return self.CANNOT_DIFF
666 if new_kind == 'file':660 if new_kind == 'file':
667 new_date = _patch_header_date(self.new_tree, file_id, new_path)661 new_date = _patch_header_date(self.new_tree, new_path)
668 elif new_kind is None:662 elif new_kind is None:
669 new_date = self.EPOCH_DATE663 new_date = self.EPOCH_DATE
670 to_file_id = None
671 else:664 else:
672 return self.CANNOT_DIFF665 return self.CANNOT_DIFF
673 from_label = '%s%s\t%s' % (self.old_label, old_path,666 from_label = '%s%s\t%s' % (self.old_label, old_path,
674 old_date)667 old_date)
675 to_label = '%s%s\t%s' % (self.new_label, new_path,668 to_label = '%s%s\t%s' % (self.new_label, new_path,
676 new_date)669 new_date)
677 return self.diff_text(old_path, new_path, from_label, to_label,670 return self.diff_text(old_path, new_path, from_label, to_label)
678 from_file_id, to_file_id)
679671
680 def diff_text(self, from_path, to_path, from_label, to_label,672 def diff_text(self, from_path, to_path, from_label, to_label):
681 from_file_id=None, to_file_id=None):
682 """Diff the content of given files in two trees673 """Diff the content of given files in two trees
683674
684 :param from_path: The path in the from tree. If None,675 :param from_path: The path in the from tree. If None,
@@ -686,18 +677,17 @@
686 :param to_path: The path in the to tree. This may refer677 :param to_path: The path in the to tree. This may refer
687 to a different file from from_path. If None,678 to a different file from from_path. If None,
688 the file is not present in the to tree.679 the file is not present in the to tree.
689 :param from_file_id: The id of the file in the from tree or None if
690 unknown.
691 :param to_file_id: The id of the file in the to tree or None if
692 unknown.
693 """680 """
694 def _get_text(tree, file_id, path):681 def _get_text(tree, path):
695 if file_id is None:682 if path is None:
696 return []683 return []
697 return tree.get_file_lines(path)684 try:
685 return tree.get_file_lines(path)
686 except errors.NoSuchFile:
687 return []
698 try:688 try:
699 from_text = _get_text(self.old_tree, from_file_id, from_path)689 from_text = _get_text(self.old_tree, from_path)
700 to_text = _get_text(self.new_tree, to_file_id, to_path)690 to_text = _get_text(self.new_tree, to_path)
701 self.text_differ(from_label, from_text, to_label, to_text,691 self.text_differ(from_label, from_text, to_label, to_text,
702 self.to_file, path_encoding=self.path_encoding,692 self.to_file, path_encoding=self.path_encoding,
703 context_lines=self.context_lines)693 context_lines=self.context_lines)
@@ -804,7 +794,7 @@
804 return osutils.pathjoin(self._root, prefix, relpath_tmp)794 return osutils.pathjoin(self._root, prefix, relpath_tmp)
805795
806 def _write_file(self, relpath, tree, prefix, force_temp=False,796 def _write_file(self, relpath, tree, prefix, force_temp=False,
807 allow_write=False, file_id=None):797 allow_write=False):
808 if not force_temp and isinstance(tree, WorkingTree):798 if not force_temp and isinstance(tree, WorkingTree):
809 full_path = tree.abspath(relpath)799 full_path = tree.abspath(relpath)
810 if self._is_safepath(full_path):800 if self._is_safepath(full_path):
@@ -836,12 +826,12 @@
836 return full_path826 return full_path
837827
838 def _prepare_files(self, old_path, new_path, force_temp=False,828 def _prepare_files(self, old_path, new_path, force_temp=False,
839 allow_write_new=False, file_id=None):829 allow_write_new=False):
840 old_disk_path = self._write_file(old_path, self.old_tree, 'old',830 old_disk_path = self._write_file(
841 force_temp, file_id=file_id)831 old_path, self.old_tree, 'old', force_temp)
842 new_disk_path = self._write_file(new_path, self.new_tree, 'new',832 new_disk_path = self._write_file(
843 force_temp, file_id=file_id,833 new_path, self.new_tree, 'new', force_temp,
844 allow_write=allow_write_new)834 allow_write=allow_write_new)
845 return old_disk_path, new_disk_path835 return old_disk_path, new_disk_path
846836
847 def finish(self):837 def finish(self):
@@ -852,25 +842,23 @@
852 mutter("The temporary directory \"%s\" was not "842 mutter("The temporary directory \"%s\" was not "
853 "cleanly removed: %s." % (self._root, e))843 "cleanly removed: %s." % (self._root, e))
854844
855 def diff(self, file_id, old_path, new_path, old_kind, new_kind):845 def diff(self, old_path, new_path, old_kind, new_kind):
856 if (old_kind, new_kind) != ('file', 'file'):846 if (old_kind, new_kind) != ('file', 'file'):
857 return DiffPath.CANNOT_DIFF847 return DiffPath.CANNOT_DIFF
858 (old_disk_path, new_disk_path) = self._prepare_files(848 (old_disk_path, new_disk_path) = self._prepare_files(
859 old_path, new_path, file_id=file_id)849 old_path, new_path)
860 self._execute(old_disk_path, new_disk_path)850 self._execute(old_disk_path, new_disk_path)
861851
862 def edit_file(self, old_path, new_path, file_id=None):852 def edit_file(self, old_path, new_path):
863 """Use this tool to edit a file.853 """Use this tool to edit a file.
864854
865 A temporary copy will be edited, and the new contents will be855 A temporary copy will be edited, and the new contents will be
866 returned.856 returned.
867857
868 :param file_id: The id of the file to edit.
869 :return: The new contents of the file.858 :return: The new contents of the file.
870 """859 """
871 old_abs_path, new_abs_path = self._prepare_files(860 old_abs_path, new_abs_path = self._prepare_files(
872 old_path, new_path, allow_write_new=True, force_temp=True,861 old_path, new_path, allow_write_new=True, force_temp=True)
873 file_id=file_id)
874 command = self._get_command(old_abs_path, new_abs_path)862 command = self._get_command(old_abs_path, new_abs_path)
875 subprocess.call(command, cwd=self._root)863 subprocess.call(command, cwd=self._root)
876 with open(new_abs_path, 'rb') as new_file:864 with open(new_abs_path, 'rb') as new_file:
@@ -1028,16 +1016,15 @@
1028 self.to_file.write(b"=== modified %s '%s'%s\n" % (kind[0].encode('ascii'),1016 self.to_file.write(b"=== modified %s '%s'%s\n" % (kind[0].encode('ascii'),
1029 newpath_encoded, prop_str))1017 newpath_encoded, prop_str))
1030 if changed_content:1018 if changed_content:
1031 self._diff(oldpath, newpath, kind[0], kind[1], file_id=file_id)1019 self._diff(oldpath, newpath, kind[0], kind[1])
1032 has_changes = 11020 has_changes = 1
1033 if renamed:1021 if renamed:
1034 has_changes = 11022 has_changes = 1
1035 return has_changes1023 return has_changes
10361024
1037 def diff(self, file_id, old_path, new_path):1025 def diff(self, old_path, new_path):
1038 """Perform a diff of a single file1026 """Perform a diff of a single file
10391027
1040 :param file_id: file-id of the file
1041 :param old_path: The path of the file in the old tree1028 :param old_path: The path of the file in the old tree
1042 :param new_path: The path of the file in the new tree1029 :param new_path: The path of the file in the new tree
1043 """1030 """
@@ -1049,11 +1036,11 @@
1049 new_kind = None1036 new_kind = None
1050 else:1037 else:
1051 new_kind = self.new_tree.kind(new_path)1038 new_kind = self.new_tree.kind(new_path)
1052 self._diff(old_path, new_path, old_kind, new_kind, file_id=file_id)1039 self._diff(old_path, new_path, old_kind, new_kind)
10531040
1054 def _diff(self, old_path, new_path, old_kind, new_kind, file_id):1041 def _diff(self, old_path, new_path, old_kind, new_kind):
1055 result = DiffPath._diff_many(self.differs, file_id, old_path,1042 result = DiffPath._diff_many(
1056 new_path, old_kind, new_kind)1043 self.differs, old_path, new_path, old_kind, new_kind)
1057 if result is DiffPath.CANNOT_DIFF:1044 if result is DiffPath.CANNOT_DIFF:
1058 error_path = new_path1045 error_path = new_path
1059 if error_path is None:1046 if error_path is None:
10601047
=== modified file 'breezy/git/branch.py'
--- breezy/git/branch.py 2018-11-16 23:15:15 +0000
+++ breezy/git/branch.py 2019-01-01 23:11:42 +0000
@@ -615,6 +615,12 @@
615 self, stop_revision=revid, lossy=lossy, _stop_revno=revno)615 self, stop_revision=revid, lossy=lossy, _stop_revno=revno)
616 return (push_result.new_revno, push_result.new_revid)616 return (push_result.new_revno, push_result.new_revid)
617617
618 def reconcile(self, thorough=True):
619 """Make sure the data stored in this branch is consistent."""
620 from ..reconcile import ReconcileResult
621 # Nothing to do here
622 return ReconcileResult()
623
618624
619class LocalGitBranch(GitBranch):625class LocalGitBranch(GitBranch):
620 """A local Git branch."""626 """A local Git branch."""
621627
=== modified file 'breezy/git/commit.py'
--- breezy/git/commit.py 2018-11-22 03:04:59 +0000
+++ breezy/git/commit.py 2019-01-01 23:11:42 +0000
@@ -142,7 +142,7 @@
142 encoded_new_path = path[1].encode("utf-8")142 encoded_new_path = path[1].encode("utf-8")
143 self._blobs[encoded_new_path] = (mode, sha)143 self._blobs[encoded_new_path] = (mode, sha)
144 if st is not None:144 if st is not None:
145 yield file_id, path[1], (entry.text_sha1, st)145 yield path[1], (entry.text_sha1, st)
146 if self._mapping.generate_file_id(encoded_new_path) != file_id:146 if self._mapping.generate_file_id(encoded_new_path) != file_id:
147 self._override_fileids[encoded_new_path] = file_id147 self._override_fileids[encoded_new_path] = file_id
148 else:148 else:
149149
=== modified file 'breezy/git/repository.py'
--- breezy/git/repository.py 2018-11-11 14:23:06 +0000
+++ breezy/git/repository.py 2019-01-01 23:11:42 +0000
@@ -68,31 +68,6 @@
68 )68 )
6969
7070
71class RepoReconciler(object):
72 """Reconciler that reconciles a repository.
73
74 """
75
76 def __init__(self, repo, other=None, thorough=False):
77 """Construct a RepoReconciler.
78
79 :param thorough: perform a thorough check which may take longer but
80 will correct non-data loss issues such as incorrect
81 cached data.
82 """
83 self.repo = repo
84
85 def reconcile(self):
86 """Perform reconciliation.
87
88 After reconciliation the following attributes document found issues:
89 inconsistent_parents: The number of revisions in the repository whose
90 ancestry was being reported incorrectly.
91 garbage_inventories: The number of inventory objects without revisions
92 that were garbage collected.
93 """
94
95
96class GitCheck(check.Check):71class GitCheck(check.Check):
9772
98 def __init__(self, repository, check_repo=True):73 def __init__(self, repository, check_repo=True):
@@ -241,9 +216,10 @@
241216
242 def reconcile(self, other=None, thorough=False):217 def reconcile(self, other=None, thorough=False):
243 """Reconcile this repository."""218 """Reconcile this repository."""
244 reconciler = RepoReconciler(self, thorough=thorough)219 from ..reconcile import ReconcileResult
245 reconciler.reconcile()220 ret = ReconcileResult()
246 return reconciler221 ret.aborted = False
222 return ret
247223
248 def supports_rich_root(self):224 def supports_rich_root(self):
249 return True225 return True
250226
=== modified file 'breezy/git/tests/test_blackbox.py'
--- breezy/git/tests/test_blackbox.py 2018-12-18 19:51:52 +0000
+++ breezy/git/tests/test_blackbox.py 2019-01-01 23:11:42 +0000
@@ -395,6 +395,21 @@
395 self.assertEqual(error, '')395 self.assertEqual(error, '')
396396
397397
398class ReconcileTests(ExternalBase):
399
400 def test_simple_reconcile(self):
401 tree = self.make_branch_and_tree('.', format='git')
402 self.build_tree_contents([('a', 'text for a\n')])
403 tree.add(['a'])
404 output, error = self.run_bzr('reconcile')
405 self.assertContainsRe(
406 output,
407 'Reconciling branch file://.*\n'
408 'Reconciling repository file://.*\n'
409 'Reconciliation complete.\n')
410 self.assertEqual(error, '')
411
412
398class StatusTests(ExternalBase):413class StatusTests(ExternalBase):
399414
400 def test_empty_dir(self):415 def test_empty_dir(self):
401416
=== modified file 'breezy/git/tree.py'
--- breezy/git/tree.py 2018-12-11 17:29:18 +0000
+++ breezy/git/tree.py 2019-01-01 23:11:42 +0000
@@ -995,7 +995,7 @@
995 raise errors.NoSuchId(self, file_id)995 raise errors.NoSuchId(self, file_id)
996996
997 def _set_root_id(self, file_id):997 def _set_root_id(self, file_id):
998 self._fileid_map.set_file_id("", file_id)998 raise errors.UnsupportedOperation(self._set_root_id, self)
999999
1000 def get_root_id(self):1000 def get_root_id(self):
1001 return self.path2id(u"")1001 return self.path2id(u"")
10021002
=== modified file 'breezy/plugins/fastimport/revision_store.py'
--- breezy/plugins/fastimport/revision_store.py 2018-11-16 18:33:17 +0000
+++ breezy/plugins/fastimport/revision_store.py 2019-01-01 23:11:42 +0000
@@ -367,7 +367,7 @@
367 basis_rev_id = _mod_revision.NULL_REVISION367 basis_rev_id = _mod_revision.NULL_REVISION
368 tree = _TreeShim(self.repo, basis_inv, inv_delta, text_provider)368 tree = _TreeShim(self.repo, basis_inv, inv_delta, text_provider)
369 changes = tree._delta_to_iter_changes()369 changes = tree._delta_to_iter_changes()
370 for (file_id, path, fs_hash) in builder.record_iter_changes(370 for (path, fs_hash) in builder.record_iter_changes(
371 tree, basis_rev_id, changes):371 tree, basis_rev_id, changes):
372 # So far, we don't *do* anything with the result372 # So far, we don't *do* anything with the result
373 pass373 pass
374374
=== modified file 'breezy/reconcile.py'
--- breezy/reconcile.py 2018-11-11 04:08:32 +0000
+++ breezy/reconcile.py 2019-01-01 23:11:42 +0000
@@ -19,23 +19,17 @@
19from __future__ import absolute_import19from __future__ import absolute_import
2020
21__all__ = [21__all__ = [
22 'KnitReconciler',
23 'PackReconciler',
24 'reconcile',22 'reconcile',
25 'Reconciler',23 'Reconciler',
26 'RepoReconciler',
27 ]24 ]
2825
2926
30from . import (27from . import (
31 cleanup,28 cleanup,
32 errors,29 errors,
33 revision as _mod_revision,
34 ui,30 ui,
35 )31 )
36from .trace import mutter32from .trace import mutter
37from .tsort import topo_sort
38from .bzr.versionedfile import AdapterFactory, FulltextContentFactory
39from .i18n import gettext33from .i18n import gettext
4034
4135
@@ -52,7 +46,11 @@
52 :param canonicalize_chks: Make sure CHKs are in canonical form.46 :param canonicalize_chks: Make sure CHKs are in canonical form.
53 """47 """
54 reconciler = Reconciler(dir, canonicalize_chks=canonicalize_chks)48 reconciler = Reconciler(dir, canonicalize_chks=canonicalize_chks)
55 reconciler.reconcile()49 return reconciler.reconcile()
50
51
52class ReconcileResult(object):
53 """Class describing the result of a reconcile operation."""
5654
5755
58class Reconciler(object):56class Reconciler(object):
@@ -65,38 +63,29 @@
6563
66 def reconcile(self):64 def reconcile(self):
67 """Perform reconciliation.65 """Perform reconciliation.
68
69 After reconciliation the following attributes document found issues:
70
71 * `inconsistent_parents`: The number of revisions in the repository
72 whose ancestry was being reported incorrectly.
73 * `garbage_inventories`: The number of inventory objects without
74 revisions that were garbage collected.
75 * `fixed_branch_history`: None if there was no branch, False if the
76 branch history was correct, True if the branch history needed to be
77 re-normalized.
78 """66 """
79 operation = cleanup.OperationWithCleanups(self._reconcile)67 with ui.ui_factory.nested_progress_bar() as self.pb:
80 self.add_cleanup = operation.add_cleanup68 result = ReconcileResult()
81 operation.run_simple()69 branch_result = self._reconcile_branch()
8270 repo_result = self._reconcile_repository()
83 def _reconcile(self):71 # TODO(jelmer): Don't hardcode supported attributes here
84 """Helper function for performing reconciliation."""72 result.inconsistent_parents = getattr(
85 self.pb = ui.ui_factory.nested_progress_bar()73 repo_result, 'inconsistent_parents', None)
86 self.add_cleanup(self.pb.finished)74 result.aborted = getattr(repo_result, 'aborted', None)
87 self._reconcile_branch()75 result.garbage_inventories = getattr(
88 self._reconcile_repository()76 repo_result, 'garbage_inventories', None)
77 result.fixed_branch_history = getattr(
78 branch_result, 'fixed_history', None)
79 return result
8980
90 def _reconcile_branch(self):81 def _reconcile_branch(self):
91 try:82 try:
92 self.branch = self.controldir.open_branch()83 self.branch = self.controldir.open_branch()
93 except errors.NotBranchError:84 except errors.NotBranchError:
94 # Nothing to check here85 # Nothing to check here
95 self.fixed_branch_history = None
96 return86 return
97 ui.ui_factory.note(gettext('Reconciling branch %s') % self.branch.base)87 ui.ui_factory.note(gettext('Reconciling branch %s') % self.branch.base)
98 branch_reconciler = self.branch.reconcile(thorough=True)88 return self.branch.reconcile(thorough=True)
99 self.fixed_branch_history = branch_reconciler.fixed_history
10089
101 def _reconcile_repository(self):90 def _reconcile_repository(self):
102 self.repo = self.controldir.find_repository()91 self.repo = self.controldir.find_repository()
@@ -109,453 +98,14 @@
109 except AttributeError:98 except AttributeError:
110 raise errors.BzrError(99 raise errors.BzrError(
111 gettext("%s cannot canonicalize CHKs.") % (self.repo,))100 gettext("%s cannot canonicalize CHKs.") % (self.repo,))
112 repo_reconciler = self.repo.reconcile_canonicalize_chks()101 reconcile_result = self.repo.reconcile_canonicalize_chks()
113 else:102 else:
114 repo_reconciler = self.repo.reconcile(thorough=True)103 reconcile_result = self.repo.reconcile(thorough=True)
115 self.inconsistent_parents = repo_reconciler.inconsistent_parents104 if reconcile_result.aborted:
116 self.garbage_inventories = repo_reconciler.garbage_inventories
117 if repo_reconciler.aborted:
118 ui.ui_factory.note(gettext(105 ui.ui_factory.note(gettext(
119 'Reconcile aborted: revision index has inconsistent parents.'))106 'Reconcile aborted: revision index has inconsistent parents.'))
120 ui.ui_factory.note(gettext(107 ui.ui_factory.note(gettext(
121 'Run "brz check" for more details.'))108 'Run "brz check" for more details.'))
122 else:109 else:
123 ui.ui_factory.note(gettext('Reconciliation complete.'))110 ui.ui_factory.note(gettext('Reconciliation complete.'))
124111 return reconcile_result
125
126class BranchReconciler(object):
127 """Reconciler that works on a branch."""
128
129 def __init__(self, a_branch, thorough=False):
130 self.fixed_history = None
131 self.thorough = thorough
132 self.branch = a_branch
133
134 def reconcile(self):
135 operation = cleanup.OperationWithCleanups(self._reconcile)
136 self.add_cleanup = operation.add_cleanup
137 operation.run_simple()
138
139 def _reconcile(self):
140 self.branch.lock_write()
141 self.add_cleanup(self.branch.unlock)
142 self.pb = ui.ui_factory.nested_progress_bar()
143 self.add_cleanup(self.pb.finished)
144 self._reconcile_steps()
145
146 def _reconcile_steps(self):
147 self._reconcile_revision_history()
148
149 def _reconcile_revision_history(self):
150 last_revno, last_revision_id = self.branch.last_revision_info()
151 real_history = []
152 graph = self.branch.repository.get_graph()
153 try:
154 for revid in graph.iter_lefthand_ancestry(
155 last_revision_id, (_mod_revision.NULL_REVISION,)):
156 real_history.append(revid)
157 except errors.RevisionNotPresent:
158 pass # Hit a ghost left hand parent
159 real_history.reverse()
160 if last_revno != len(real_history):
161 self.fixed_history = True
162 # Technically for Branch5 formats, it is more efficient to use
163 # set_revision_history, as this will regenerate it again.
164 # Not really worth a whole BranchReconciler class just for this,
165 # though.
166 ui.ui_factory.note(gettext('Fixing last revision info {0} '
167 ' => {1}').format(
168 last_revno, len(real_history)))
169 self.branch.set_last_revision_info(len(real_history),
170 last_revision_id)
171 else:
172 self.fixed_history = False
173 ui.ui_factory.note(gettext('revision_history ok.'))
174
175
176class RepoReconciler(object):
177 """Reconciler that reconciles a repository.
178
179 The goal of repository reconciliation is to make any derived data
180 consistent with the core data committed by a user. This can involve
181 reindexing, or removing unreferenced data if that can interfere with
182 queries in a given repository.
183
184 Currently this consists of an inventory reweave with revision cross-checks.
185 """
186
187 def __init__(self, repo, other=None, thorough=False):
188 """Construct a RepoReconciler.
189
190 :param thorough: perform a thorough check which may take longer but
191 will correct non-data loss issues such as incorrect
192 cached data.
193 """
194 self.garbage_inventories = 0
195 self.inconsistent_parents = 0
196 self.aborted = False
197 self.repo = repo
198 self.thorough = thorough
199
200 def reconcile(self):
201 """Perform reconciliation.
202
203 After reconciliation the following attributes document found issues:
204
205 * `inconsistent_parents`: The number of revisions in the repository
206 whose ancestry was being reported incorrectly.
207 * `garbage_inventories`: The number of inventory objects without
208 revisions that were garbage collected.
209 """
210 operation = cleanup.OperationWithCleanups(self._reconcile)
211 self.add_cleanup = operation.add_cleanup
212 operation.run_simple()
213
214 def _reconcile(self):
215 self.repo.lock_write()
216 self.add_cleanup(self.repo.unlock)
217 self.pb = ui.ui_factory.nested_progress_bar()
218 self.add_cleanup(self.pb.finished)
219 self._reconcile_steps()
220
221 def _reconcile_steps(self):
222 """Perform the steps to reconcile this repository."""
223 self._reweave_inventory()
224
225 def _reweave_inventory(self):
226 """Regenerate the inventory weave for the repository from scratch.
227
228 This is a smart function: it will only do the reweave if doing it
229 will correct data issues. The self.thorough flag controls whether
230 only data-loss causing issues (!self.thorough) or all issues
231 (self.thorough) are treated as requiring the reweave.
232 """
233 transaction = self.repo.get_transaction()
234 self.pb.update(gettext('Reading inventory data'))
235 self.inventory = self.repo.inventories
236 self.revisions = self.repo.revisions
237 # the total set of revisions to process
238 self.pending = {key[-1] for key in self.revisions.keys()}
239
240 # mapping from revision_id to parents
241 self._rev_graph = {}
242 # errors that we detect
243 self.inconsistent_parents = 0
244 # we need the revision id of each revision and its available parents list
245 self._setup_steps(len(self.pending))
246 for rev_id in self.pending:
247 # put a revision into the graph.
248 self._graph_revision(rev_id)
249 self._check_garbage_inventories()
250 # if there are no inconsistent_parents and
251 # (no garbage inventories or we are not doing a thorough check)
252 if (not self.inconsistent_parents
253 and (not self.garbage_inventories or not self.thorough)):
254 ui.ui_factory.note(gettext('Inventory ok.'))
255 return
256 self.pb.update(gettext('Backing up inventory'), 0, 0)
257 self.repo._backup_inventory()
258 ui.ui_factory.note(gettext('Backup inventory created.'))
259 new_inventories = self.repo._temp_inventories()
260
261 # we have topological order of revisions and non ghost parents ready.
262 self._setup_steps(len(self._rev_graph))
263 revision_keys = [(rev_id,) for rev_id in topo_sort(self._rev_graph)]
264 stream = self._change_inv_parents(
265 self.inventory.get_record_stream(revision_keys, 'unordered', True),
266 self._new_inv_parents,
267 set(revision_keys))
268 new_inventories.insert_record_stream(stream)
269 # if this worked, the set of new_inventories.keys should equal
270 # self.pending
271 if not (set(new_inventories.keys())
272 == {(revid,) for revid in self.pending}):
273 raise AssertionError()
274 self.pb.update(gettext('Writing weave'))
275 self.repo._activate_new_inventory()
276 self.inventory = None
277 ui.ui_factory.note(gettext('Inventory regenerated.'))
278
279 def _new_inv_parents(self, revision_key):
280 """Lookup ghost-filtered parents for revision_key."""
281 # Use the filtered ghostless parents list:
282 return tuple([(revid,) for revid in self._rev_graph[revision_key[-1]]])
283
284 def _change_inv_parents(self, stream, get_parents, all_revision_keys):
285 """Adapt a record stream to reconcile the parents."""
286 for record in stream:
287 wanted_parents = get_parents(record.key)
288 if wanted_parents and wanted_parents[0] not in all_revision_keys:
289 # The check for the left most parent only handles knit
290 # compressors, but this code only applies to knit and weave
291 # repositories anyway.
292 bytes = record.get_bytes_as('fulltext')
293 yield FulltextContentFactory(record.key, wanted_parents, record.sha1, bytes)
294 else:
295 adapted_record = AdapterFactory(
296 record.key, wanted_parents, record)
297 yield adapted_record
298 self._reweave_step('adding inventories')
299
300 def _setup_steps(self, new_total):
301 """Setup the markers we need to control the progress bar."""
302 self.total = new_total
303 self.count = 0
304
305 def _graph_revision(self, rev_id):
306 """Load a revision into the revision graph."""
307 # pick a random revision
308 # analyse revision id rev_id and put it in the stack.
309 self._reweave_step('loading revisions')
310 rev = self.repo.get_revision_reconcile(rev_id)
311 parents = []
312 for parent in rev.parent_ids:
313 if self._parent_is_available(parent):
314 parents.append(parent)
315 else:
316 mutter('found ghost %s', parent)
317 self._rev_graph[rev_id] = parents
318
319 def _check_garbage_inventories(self):
320 """Check for garbage inventories which we cannot trust
321
322 We cant trust them because their pre-requisite file data may not
323 be present - all we know is that their revision was not installed.
324 """
325 if not self.thorough:
326 return
327 inventories = set(self.inventory.keys())
328 revisions = set(self.revisions.keys())
329 garbage = inventories.difference(revisions)
330 self.garbage_inventories = len(garbage)
331 for revision_key in garbage:
332 mutter('Garbage inventory {%s} found.', revision_key[-1])
333
334 def _parent_is_available(self, parent):
335 """True if parent is a fully available revision
336
337 A fully available revision has a inventory and a revision object in the
338 repository.
339 """
340 if parent in self._rev_graph:
341 return True
342 inv_present = (1 == len(self.inventory.get_parent_map([(parent,)])))
343 return (inv_present and self.repo.has_revision(parent))
344
345 def _reweave_step(self, message):
346 """Mark a single step of regeneration complete."""
347 self.pb.update(message, self.count, self.total)
348 self.count += 1
349
350
351class KnitReconciler(RepoReconciler):
352 """Reconciler that reconciles a knit format repository.
353
354 This will detect garbage inventories and remove them in thorough mode.
355 """
356
357 def _reconcile_steps(self):
358 """Perform the steps to reconcile this repository."""
359 if self.thorough:
360 try:
361 self._load_indexes()
362 except errors.BzrCheckError:
363 self.aborted = True
364 return
365 # knits never suffer this
366 self._gc_inventory()
367 self._fix_text_parents()
368
369 def _load_indexes(self):
370 """Load indexes for the reconciliation."""
371 self.transaction = self.repo.get_transaction()
372 self.pb.update(gettext('Reading indexes'), 0, 2)
373 self.inventory = self.repo.inventories
374 self.pb.update(gettext('Reading indexes'), 1, 2)
375 self.repo._check_for_inconsistent_revision_parents()
376 self.revisions = self.repo.revisions
377 self.pb.update(gettext('Reading indexes'), 2, 2)
378
379 def _gc_inventory(self):
380 """Remove inventories that are not referenced from the revision store."""
381 self.pb.update(gettext('Checking unused inventories'), 0, 1)
382 self._check_garbage_inventories()
383 self.pb.update(gettext('Checking unused inventories'), 1, 3)
384 if not self.garbage_inventories:
385 ui.ui_factory.note(gettext('Inventory ok.'))
386 return
387 self.pb.update(gettext('Backing up inventory'), 0, 0)
388 self.repo._backup_inventory()
389 ui.ui_factory.note(gettext('Backup Inventory created'))
390 # asking for '' should never return a non-empty weave
391 new_inventories = self.repo._temp_inventories()
392 # we have topological order of revisions and non ghost parents ready.
393 graph = self.revisions.get_parent_map(self.revisions.keys())
394 revision_keys = topo_sort(graph)
395 revision_ids = [key[-1] for key in revision_keys]
396 self._setup_steps(len(revision_keys))
397 stream = self._change_inv_parents(
398 self.inventory.get_record_stream(revision_keys, 'unordered', True),
399 graph.__getitem__,
400 set(revision_keys))
401 new_inventories.insert_record_stream(stream)
402 # if this worked, the set of new_inventory_vf.names should equal
403 # the revisionds list
404 if not(set(new_inventories.keys()) == set(revision_keys)):
405 raise AssertionError()
406 self.pb.update(gettext('Writing weave'))
407 self.repo._activate_new_inventory()
408 self.inventory = None
409 ui.ui_factory.note(gettext('Inventory regenerated.'))
410
411 def _fix_text_parents(self):
412 """Fix bad versionedfile parent entries.
413
414 It is possible for the parents entry in a versionedfile entry to be
415 inconsistent with the values in the revision and inventory.
416
417 This method finds entries with such inconsistencies, corrects their
418 parent lists, and replaces the versionedfile with a corrected version.
419 """
420 transaction = self.repo.get_transaction()
421 versions = [key[-1] for key in self.revisions.keys()]
422 mutter('Prepopulating revision text cache with %d revisions',
423 len(versions))
424 vf_checker = self.repo._get_versioned_file_checker()
425 bad_parents, unused_versions = vf_checker.check_file_version_parents(
426 self.repo.texts, self.pb)
427 text_index = vf_checker.text_index
428 per_id_bad_parents = {}
429 for key in unused_versions:
430 # Ensure that every file with unused versions gets rewritten.
431 # NB: This is really not needed, reconcile != pack.
432 per_id_bad_parents[key[0]] = {}
433 # Generate per-knit/weave data.
434 for key, details in bad_parents.items():
435 file_id = key[0]
436 rev_id = key[1]
437 knit_parents = tuple([parent[-1] for parent in details[0]])
438 correct_parents = tuple([parent[-1] for parent in details[1]])
439 file_details = per_id_bad_parents.setdefault(file_id, {})
440 file_details[rev_id] = (knit_parents, correct_parents)
441 file_id_versions = {}
442 for text_key in text_index:
443 versions_list = file_id_versions.setdefault(text_key[0], [])
444 versions_list.append(text_key[1])
445 # Do the reconcile of individual weaves.
446 for num, file_id in enumerate(per_id_bad_parents):
447 self.pb.update(gettext('Fixing text parents'), num,
448 len(per_id_bad_parents))
449 versions_with_bad_parents = per_id_bad_parents[file_id]
450 id_unused_versions = set(key[-1] for key in unused_versions
451 if key[0] == file_id)
452 if file_id in file_id_versions:
453 file_versions = file_id_versions[file_id]
454 else:
455 # This id was present in the disk store but is not referenced
456 # by any revision at all.
457 file_versions = []
458 self._fix_text_parent(file_id, versions_with_bad_parents,
459 id_unused_versions, file_versions)
460
461 def _fix_text_parent(self, file_id, versions_with_bad_parents,
462 unused_versions, all_versions):
463 """Fix bad versionedfile entries in a single versioned file."""
464 mutter('fixing text parent: %r (%d versions)', file_id,
465 len(versions_with_bad_parents))
466 mutter('(%d are unused)', len(unused_versions))
467 new_file_id = b'temp:%s' % file_id
468 new_parents = {}
469 needed_keys = set()
470 for version in all_versions:
471 if version in unused_versions:
472 continue
473 elif version in versions_with_bad_parents:
474 parents = versions_with_bad_parents[version][1]
475 else:
476 pmap = self.repo.texts.get_parent_map([(file_id, version)])
477 parents = [key[-1] for key in pmap[(file_id, version)]]
478 new_parents[(new_file_id, version)] = [
479 (new_file_id, parent) for parent in parents]
480 needed_keys.add((file_id, version))
481
482 def fix_parents(stream):
483 for record in stream:
484 bytes = record.get_bytes_as('fulltext')
485 new_key = (new_file_id, record.key[-1])
486 parents = new_parents[new_key]
487 yield FulltextContentFactory(new_key, parents, record.sha1, bytes)
488 stream = self.repo.texts.get_record_stream(
489 needed_keys, 'topological', True)
490 self.repo._remove_file_id(new_file_id)
491 self.repo.texts.insert_record_stream(fix_parents(stream))
492 self.repo._remove_file_id(file_id)
493 if len(new_parents):
494 self.repo._move_file_id(new_file_id, file_id)
495
496
497class PackReconciler(RepoReconciler):
498 """Reconciler that reconciles a pack based repository.
499
500 Garbage inventories do not affect ancestry queries, and removal is
501 considerably more expensive as there is no separate versioned file for
502 them, so they are not cleaned. In short it is currently a no-op.
503
504 In future this may be a good place to hook in annotation cache checking,
505 index recreation etc.
506 """
507
508 # XXX: The index corruption that _fix_text_parents performs is needed for
509 # packs, but not yet implemented. The basic approach is to:
510 # - lock the names list
511 # - perform a customised pack() that regenerates data as needed
512 # - unlock the names list
513 # https://bugs.launchpad.net/bzr/+bug/154173
514
515 def __init__(self, repo, other=None, thorough=False,
516 canonicalize_chks=False):
517 super(PackReconciler, self).__init__(repo, other=other,
518 thorough=thorough)
519 self.canonicalize_chks = canonicalize_chks
520
521 def _reconcile_steps(self):
522 """Perform the steps to reconcile this repository."""
523 if not self.thorough:
524 return
525 collection = self.repo._pack_collection
526 collection.ensure_loaded()
527 collection.lock_names()
528 self.add_cleanup(collection._unlock_names)
529 packs = collection.all_packs()
530 all_revisions = self.repo.all_revision_ids()
531 total_inventories = len(list(
532 collection.inventory_index.combined_index.iter_all_entries()))
533 if len(all_revisions):
534 if self.canonicalize_chks:
535 reconcile_meth = self.repo._canonicalize_chks_pack
536 else:
537 reconcile_meth = self.repo._reconcile_pack
538 new_pack = reconcile_meth(collection, packs, ".reconcile",
539 all_revisions, self.pb)
540 if new_pack is not None:
541 self._discard_and_save(packs)
542 else:
543 # only make a new pack when there is data to copy.
544 self._discard_and_save(packs)
545 self.garbage_inventories = total_inventories - len(list(
546 collection.inventory_index.combined_index.iter_all_entries()))
547
548 def _discard_and_save(self, packs):
549 """Discard some packs from the repository.
550
551 This removes them from the memory index, saves the in-memory index
552 which makes the newly reconciled pack visible and hides the packs to be
553 discarded, and finally renames the packs being discarded into the
554 obsolete packs directory.
555
556 :param packs: The packs to discard.
557 """
558 for pack in packs:
559 self.repo._pack_collection._remove_pack_from_memory(pack)
560 self.repo._pack_collection._save_pack_names()
561 self.repo._pack_collection._obsolete_packs(packs)
562112
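
For readers tracking the API change in breezy/reconcile.py above: Reconciler.reconcile() now returns a ReconcileResult object instead of exposing statistics as attributes on the reconciler itself, and it copies over only those attributes that the underlying branch/repository results actually provide. A minimal caller-side sketch based on what this diff shows; the report_reconcile helper, its location argument and its printing are illustrative, not part of this branch:

    from breezy.controldir import ControlDir
    from breezy.reconcile import Reconciler

    def report_reconcile(location='.'):
        # Open whatever control directory lives at `location` and reconcile it.
        controldir = ControlDir.open(location)
        result = Reconciler(controldir).reconcile()
        if getattr(result, 'aborted', False):
            print('Reconcile aborted; run "brz check" for more details.')
            return result
        # Formats that do not track a given statistic leave it unset or None.
        for name in ('inconsistent_parents', 'garbage_inventories',
                     'fixed_branch_history'):
            value = getattr(result, name, None)
            if value is not None:
                print('%s: %s' % (name, value))
        return result
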
=== modified file 'breezy/repository.py'
--- breezy/repository.py 2018-11-29 23:42:41 +0000
+++ breezy/repository.py 2019-01-01 23:11:42 +0000
@@ -218,7 +218,7 @@
218 to basis_revision_id. The iterator must not include any items with218 to basis_revision_id. The iterator must not include any items with
219 a current kind of None - missing items must be either filtered out219 a current kind of None - missing items must be either filtered out
220 or errored-on beefore record_iter_changes sees the item.220 or errored-on beefore record_iter_changes sees the item.
221 :return: A generator of (file_id, relpath, fs_hash) tuples for use with221 :return: A generator of (relpath, fs_hash) tuples for use with
222 tree._observed_sha1.222 tree._observed_sha1.
223 """223 """
224 raise NotImplementedError(self.record_iter_changes)224 raise NotImplementedError(self.record_iter_changes)
@@ -953,11 +953,7 @@
953953
954 def reconcile(self, other=None, thorough=False):954 def reconcile(self, other=None, thorough=False):
955 """Reconcile this repository."""955 """Reconcile this repository."""
956 from .reconcile import RepoReconciler956 raise NotImplementedError(self.reconcile)
957 with self.lock_write():
958 reconciler = RepoReconciler(self, thorough=thorough)
959 reconciler.reconcile()
960 return reconciler
961957
962 def _refresh_data(self):958 def _refresh_data(self):
963 """Helper called from lock_* to ensure coherency with disk.959 """Helper called from lock_* to ensure coherency with disk.
964960
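
The docstring change above records that record_iter_changes now yields (relpath, fs_hash) pairs rather than (file_id, relpath, fs_hash) triples. A rough consumer-side sketch, mirroring the setup used by the per_repository commit-builder tests further down; the observed dict and the commit message are illustrative, and the try/abort error handling those tests use is omitted for brevity:

    # Assumes `tree` is a WorkingTree whose branch supports commit builders.
    with tree.lock_write():
        builder = tree.branch.get_commit_builder([])
        observed = {}
        for relpath, fs_hash in builder.record_iter_changes(
                tree, tree.last_revision(),
                tree.iter_changes(tree.basis_tree())):
            # fs_hash is a (sha1, stat_value) pair, as the updated
            # mini_commit_record_iter_changes assertions show.
            observed[relpath] = fs_hash
        builder.finish_inventory()
        builder.commit('record observed hashes')
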
=== modified file 'breezy/shelf_ui.py'
--- breezy/shelf_ui.py 2018-11-16 18:33:17 +0000
+++ breezy/shelf_ui.py 2019-01-01 23:11:42 +0000
@@ -250,7 +250,7 @@
250 path_encoding = osutils.get_terminal_encoding()250 path_encoding = osutils.get_terminal_encoding()
251 text_differ = diff.DiffText(old_tree, new_tree, diff_file,251 text_differ = diff.DiffText(old_tree, new_tree, diff_file,
252 path_encoding=path_encoding)252 path_encoding=path_encoding)
253 patch = text_differ.diff(file_id, old_path, new_path, 'file', 'file')253 patch = text_differ.diff(old_path, new_path, 'file', 'file')
254 diff_file.seek(0)254 diff_file.seek(0)
255 return patches.parse_patch(diff_file)255 return patches.parse_patch(diff_file)
256256
@@ -365,8 +365,7 @@
365 """365 """
366 lines = osutils.split_lines(self.change_editor.edit_file(366 lines = osutils.split_lines(self.change_editor.edit_file(
367 self.change_editor.old_tree.id2path(file_id),367 self.change_editor.old_tree.id2path(file_id),
368 self.change_editor.new_tree.id2path(file_id),368 self.change_editor.new_tree.id2path(file_id)))
369 file_id=file_id))
370 return lines, self._count_changed_regions(work_tree_lines, lines)369 return lines, self._count_changed_regions(work_tree_lines, lines)
371370
372 @staticmethod371 @staticmethod
373372
=== modified file 'breezy/tests/per_branch/test_reconcile.py'
--- breezy/tests/per_branch/test_reconcile.py 2018-11-11 04:08:32 +0000
+++ breezy/tests/per_branch/test_reconcile.py 2019-01-01 23:11:42 +0000
@@ -68,9 +68,9 @@
68 def test_reconcile_returns_reconciler(self):68 def test_reconcile_returns_reconciler(self):
69 a_branch = self.make_branch('a_branch')69 a_branch = self.make_branch('a_branch')
70 result = a_branch.reconcile()70 result = a_branch.reconcile()
71 self.assertIsInstance(result, reconcile.BranchReconciler)71 self.assertIsInstance(result, reconcile.ReconcileResult)
72 # No history to fix72 # No history to fix
73 self.assertIs(False, result.fixed_history)73 self.assertIs(False, getattr(result, 'fixed_history', False))
7474
75 def test_reconcile_supports_thorough(self):75 def test_reconcile_supports_thorough(self):
76 a_branch = self.make_branch('a_branch')76 a_branch = self.make_branch('a_branch')
7777
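
Note that the per-branch test above now reads fixed_history defensively: a branch implementation with nothing to report may return a ReconcileResult that simply lacks the attribute. A caller-side sketch under that assumption, where a_branch stands for any already-opened Branch:

    result = a_branch.reconcile(thorough=True)
    # Not every branch implementation reports history fixes; default to None.
    fixed = getattr(result, 'fixed_history', None)
    if fixed:
        print('branch history was repaired')
    elif fixed is False:
        print('branch history was already consistent')
    else:
        print('this branch format does not report history fixes')
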
=== modified file 'breezy/tests/per_intertree/test_compare.py'
--- breezy/tests/per_intertree/test_compare.py 2018-11-22 03:51:03 +0000
+++ breezy/tests/per_intertree/test_compare.py 2019-01-01 23:11:42 +0000
@@ -798,13 +798,15 @@
798 tree1.mkdir('changing', b'parent-id')798 tree1.mkdir('changing', b'parent-id')
799 tree1.mkdir('changing/unchanging', b'mid-id')799 tree1.mkdir('changing/unchanging', b'mid-id')
800 tree1.add(['changing/unchanging/file'], [b'file-id'], ['file'])800 tree1.add(['changing/unchanging/file'], [b'file-id'], ['file'])
801 tree1.put_file_bytes_non_atomic('changing/unchanging/file', b'a file')801 tree1.put_file_bytes_non_atomic(
802 'changing/unchanging/file', b'a file')
802 tree2 = self.make_to_branch_and_tree('2')803 tree2 = self.make_to_branch_and_tree('2')
803 tree2.set_root_id(tree1.get_root_id())804 tree2.set_root_id(tree1.get_root_id())
804 tree2.mkdir('changed', b'parent-id')805 tree2.mkdir('changed', b'parent-id')
805 tree2.mkdir('changed/unchanging', b'mid-id')806 tree2.mkdir('changed/unchanging', b'mid-id')
806 tree2.add(['changed/unchanging/file'], [b'file-id'], ['file'])807 tree2.add(['changed/unchanging/file'], [b'file-id'], ['file'])
807 tree2.put_file_bytes_non_atomic('changed/unchanging/file', b'changed content')808 tree2.put_file_bytes_non_atomic(
809 'changed/unchanging/file', b'changed content')
808 tree1, tree2 = self.mutable_trees_to_test_trees(self, tree1, tree2)810 tree1, tree2 = self.mutable_trees_to_test_trees(self, tree1, tree2)
809 # parent-id has changed, as has file-id811 # parent-id has changed, as has file-id
810 root_id = tree1.path2id('')812 root_id = tree1.path2id('')
811813
=== modified file 'breezy/tests/per_repository/test_commit_builder.py'
--- breezy/tests/per_repository/test_commit_builder.py 2018-11-16 18:33:17 +0000
+++ breezy/tests/per_repository/test_commit_builder.py 2019-01-01 23:11:42 +0000
@@ -49,8 +49,7 @@
4949
50 def test_finish_inventory_record_iter_changes(self):50 def test_finish_inventory_record_iter_changes(self):
51 tree = self.make_branch_and_tree(".")51 tree = self.make_branch_and_tree(".")
52 tree.lock_write()52 with tree.lock_write():
53 try:
54 builder = tree.branch.get_commit_builder([])53 builder = tree.branch.get_commit_builder([])
55 try:54 try:
56 list(builder.record_iter_changes(tree, tree.last_revision(),55 list(builder.record_iter_changes(tree, tree.last_revision(),
@@ -61,13 +60,10 @@
61 raise60 raise
62 repo = tree.branch.repository61 repo = tree.branch.repository
63 repo.commit_write_group()62 repo.commit_write_group()
64 finally:
65 tree.unlock()
6663
67 def test_abort_record_iter_changes(self):64 def test_abort_record_iter_changes(self):
68 tree = self.make_branch_and_tree(".")65 tree = self.make_branch_and_tree(".")
69 tree.lock_write()66 with tree.lock_write():
70 try:
71 builder = tree.branch.get_commit_builder([])67 builder = tree.branch.get_commit_builder([])
72 try:68 try:
73 basis = tree.basis_tree()69 basis = tree.basis_tree()
@@ -77,56 +73,44 @@
77 builder.finish_inventory()73 builder.finish_inventory()
78 finally:74 finally:
79 builder.abort()75 builder.abort()
80 finally:
81 tree.unlock()
8276
83 def test_commit_lossy(self):77 def test_commit_lossy(self):
84 tree = self.make_branch_and_tree(".")78 tree = self.make_branch_and_tree(".")
85 tree.lock_write()79 with tree.lock_write():
86 try:
87 builder = tree.branch.get_commit_builder([], lossy=True)80 builder = tree.branch.get_commit_builder([], lossy=True)
88 list(builder.record_iter_changes(tree, tree.last_revision(),81 list(builder.record_iter_changes(tree, tree.last_revision(),
89 tree.iter_changes(tree.basis_tree())))82 tree.iter_changes(tree.basis_tree())))
90 builder.finish_inventory()83 builder.finish_inventory()
91 rev_id = builder.commit('foo bar blah')84 rev_id = builder.commit('foo bar blah')
92 finally:
93 tree.unlock()
94 rev = tree.branch.repository.get_revision(rev_id)85 rev = tree.branch.repository.get_revision(rev_id)
95 self.assertEqual('foo bar blah', rev.message)86 self.assertEqual('foo bar blah', rev.message)
9687
97 def test_commit_message(self):88 def test_commit_message(self):
98 tree = self.make_branch_and_tree(".")89 tree = self.make_branch_and_tree(".")
99 tree.lock_write()90 with tree.lock_write():
100 try:
101 builder = tree.branch.get_commit_builder([])91 builder = tree.branch.get_commit_builder([])
102 list(builder.record_iter_changes(tree, tree.last_revision(),92 list(builder.record_iter_changes(tree, tree.last_revision(),
103 tree.iter_changes(tree.basis_tree())))93 tree.iter_changes(tree.basis_tree())))
104 builder.finish_inventory()94 builder.finish_inventory()
105 rev_id = builder.commit('foo bar blah')95 rev_id = builder.commit('foo bar blah')
106 finally:
107 tree.unlock()
108 rev = tree.branch.repository.get_revision(rev_id)96 rev = tree.branch.repository.get_revision(rev_id)
109 self.assertEqual('foo bar blah', rev.message)97 self.assertEqual('foo bar blah', rev.message)
11098
111 def test_updates_branch(self):99 def test_updates_branch(self):
112 tree = self.make_branch_and_tree(".")100 tree = self.make_branch_and_tree(".")
113 tree.lock_write()101 with tree.lock_write():
114 try:
115 builder = tree.branch.get_commit_builder([])102 builder = tree.branch.get_commit_builder([])
116 list(builder.record_iter_changes(tree, tree.last_revision(),103 list(builder.record_iter_changes(tree, tree.last_revision(),
117 tree.iter_changes(tree.basis_tree())))104 tree.iter_changes(tree.basis_tree())))
118 builder.finish_inventory()105 builder.finish_inventory()
119 will_update_branch = builder.updates_branch106 will_update_branch = builder.updates_branch
120 rev_id = builder.commit('might update the branch')107 rev_id = builder.commit('might update the branch')
121 finally:
122 tree.unlock()
123 actually_updated_branch = (tree.branch.last_revision() == rev_id)108 actually_updated_branch = (tree.branch.last_revision() == rev_id)
124 self.assertEqual(actually_updated_branch, will_update_branch)109 self.assertEqual(actually_updated_branch, will_update_branch)
125110
126 def test_commit_with_revision_id_record_iter_changes(self):111 def test_commit_with_revision_id_record_iter_changes(self):
127 tree = self.make_branch_and_tree(".")112 tree = self.make_branch_and_tree(".")
128 tree.lock_write()113 with tree.lock_write():
129 try:
130 # use a unicode revision id to test more corner cases.114 # use a unicode revision id to test more corner cases.
131 # The repository layer is meant to handle this.115 # The repository layer is meant to handle this.
132 revision_id = u'\xc8abc'.encode('utf8')116 revision_id = u'\xc8abc'.encode('utf8')
@@ -150,20 +134,18 @@
150 builder.abort()134 builder.abort()
151 raise135 raise
152 self.assertEqual(revision_id, builder.commit('foo bar'))136 self.assertEqual(revision_id, builder.commit('foo bar'))
153 finally:
154 tree.unlock()
155 self.assertTrue(tree.branch.repository.has_revision(revision_id))137 self.assertTrue(tree.branch.repository.has_revision(revision_id))
156 # the revision id must be set on the inventory when saving it. This138 # the revision id must be set on the inventory when saving it. This
157 # does not precisely test that - a repository that wants to can add it139 # does not precisely test that - a repository that wants to can add it
158 # on deserialisation, but thats all the current contract guarantees140 # on deserialisation, but thats all the current contract guarantees
159 # anyway.141 # anyway.
160 self.assertEqual(revision_id,142 self.assertEqual(
161 tree.branch.repository.revision_tree(revision_id).get_revision_id())143 revision_id,
144 tree.branch.repository.revision_tree(revision_id).get_revision_id())
162145
163 def test_commit_without_root_errors(self):146 def test_commit_without_root_errors(self):
164 tree = self.make_branch_and_tree(".")147 tree = self.make_branch_and_tree(".")
165 tree.lock_write()148 with tree.lock_write():
166 try:
167 builder = tree.branch.get_commit_builder([])149 builder = tree.branch.get_commit_builder([])
168150
169 def do_commit():151 def do_commit():
@@ -177,8 +159,6 @@
177 else:159 else:
178 builder.commit("msg")160 builder.commit("msg")
179 self.assertRaises(errors.RootMissing, do_commit)161 self.assertRaises(errors.RootMissing, do_commit)
180 finally:
181 tree.unlock()
182162
183 def test_commit_unchanged_root_record_iter_changes(self):163 def test_commit_unchanged_root_record_iter_changes(self):
184 tree = self.make_branch_and_tree(".")164 tree = self.make_branch_and_tree(".")
@@ -210,8 +190,7 @@
210 tree.add(["foo"])190 tree.add(["foo"])
211 foo_id = tree.path2id('foo')191 foo_id = tree.path2id('foo')
212 rev_id = tree.commit("added foo")192 rev_id = tree.commit("added foo")
213 tree.lock_write()193 with tree.lock_write():
214 try:
215 builder = tree.branch.get_commit_builder([rev_id])194 builder = tree.branch.get_commit_builder([rev_id])
216 try:195 try:
217 delete_change = (foo_id, ('foo', None), True, (True, False),196 delete_change = (foo_id, ('foo', None), True, (True, False),
@@ -228,8 +207,6 @@
228 except:207 except:
229 builder.abort()208 builder.abort()
230 raise209 raise
231 finally:
232 tree.unlock()
233 rev_tree = builder.revision_tree()210 rev_tree = builder.revision_tree()
234 rev_tree.lock_read()211 rev_tree.lock_read()
235 self.addCleanup(rev_tree.unlock)212 self.addCleanup(rev_tree.unlock)
@@ -462,7 +439,9 @@
462 self.assertFileGraph(expected_graph, tree, (file_id, rev2))439 self.assertFileGraph(expected_graph, tree, (file_id, rev2))
463440
464 def mini_commit_record_iter_changes(self, tree, name, new_name,441 def mini_commit_record_iter_changes(self, tree, name, new_name,
465 records_version=True, delta_against_basis=True, expect_fs_hash=False):442 records_version=True,
443 delta_against_basis=True,
444 expect_fs_hash=False):
466 """Perform a miniature commit looking for record entry results.445 """Perform a miniature commit looking for record entry results.
467446
468 This version uses the record_iter_changes interface.447 This version uses the record_iter_changes interface.
@@ -498,10 +477,10 @@
498 tree_file_stat[0].close()477 tree_file_stat[0].close()
499 self.assertLength(1, result)478 self.assertLength(1, result)
500 result = result[0]479 result = result[0]
501 self.assertEqual(result[:2], (file_id, new_name))480 self.assertEqual(result[0], new_name)
502 self.assertEqual(481 self.assertEqual(
503 result[2][0], tree.get_file_sha1(new_name))482 result[1][0], tree.get_file_sha1(new_name))
504 self.assertEqualStat(result[2][1], tree_file_stat[1])483 self.assertEqualStat(result[1][1], tree_file_stat[1])
505 else:484 else:
506 self.assertEqual([], result)485 self.assertEqual([], result)
507 builder.finish_inventory()486 builder.finish_inventory()
@@ -600,9 +579,10 @@
600 rev2 = self._rename_in_tree(tree1, name, 'rev2')579 rev2 = self._rename_in_tree(tree1, name, 'rev2')
601 rev3 = self._rename_in_tree(tree2, name, 'rev3')580 rev3 = self._rename_in_tree(tree2, name, 'rev3')
602 tree1.merge_from_branch(tree2.branch)581 tree1.merge_from_branch(tree2.branch)
603 rev4 = self.mini_commit_record_iter_changes(tree1, 'new_' + name, 'new_' + name,582 rev4 = self.mini_commit_record_iter_changes(
604 expect_fs_hash=expect_fs_hash,583 tree1, 'new_' + name, 'new_' + name,
605 delta_against_basis=tree1.supports_rename_tracking())584 expect_fs_hash=expect_fs_hash,
585 delta_against_basis=tree1.supports_rename_tracking())
606 tree3, = self._get_revtrees(tree1, [rev4])586 tree3, = self._get_revtrees(tree1, [rev4])
607 expected_graph = {}587 expected_graph = {}
608 if tree1.supports_rename_tracking():588 if tree1.supports_rename_tracking():
@@ -873,8 +853,7 @@
873 self.overrideAttr(config, '_auto_user_id',853 self.overrideAttr(config, '_auto_user_id',
874 lambda: (None, None))854 lambda: (None, None))
875 tree = self.make_branch_and_tree(".")855 tree = self.make_branch_and_tree(".")
876 tree.lock_write()856 with tree.lock_write():
877 try:
878 # Make sure no username is available.857 # Make sure no username is available.
879 self.assertRaises(config.NoWhoami, tree.branch.get_commit_builder,858 self.assertRaises(config.NoWhoami, tree.branch.get_commit_builder,
880 [])859 [])
@@ -889,5 +868,3 @@
889 raise868 raise
890 repo = tree.branch.repository869 repo = tree.branch.repository
891 repo.commit_write_group()870 repo.commit_write_group()
892 finally:
893 tree.unlock()
894871
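
Most of the churn in breezy/tests/per_repository/test_commit_builder.py above is mechanical: the explicit lock/try/finally scaffolding is replaced by the lock's context-manager form. The pattern in isolation, with do_something standing in for the test body:

    # Before: manual lock management.
    tree.lock_write()
    try:
        do_something(tree)
    finally:
        tree.unlock()

    # After: the lock object is used as a context manager, so the unlock
    # happens automatically even if do_something raises.
    with tree.lock_write():
        do_something(tree)
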
=== modified file 'breezy/tests/per_repository_vf/test_reconcile.py'
--- breezy/tests/per_repository_vf/test_reconcile.py 2018-11-11 04:08:32 +0000
+++ breezy/tests/per_repository_vf/test_reconcile.py 2019-01-01 23:11:42 +0000
@@ -73,14 +73,14 @@
73 make sure we safely detect this problem.73 make sure we safely detect this problem.
74 """74 """
75 repo = self.make_repo_with_extra_ghost_index()75 repo = self.make_repo_with_extra_ghost_index()
76 reconciler = repo.reconcile(thorough=True)76 result = repo.reconcile(thorough=True)
77 self.assertTrue(reconciler.aborted,77 self.assertTrue(result.aborted,
78 "reconcile should have aborted due to bad parents.")78 "reconcile should have aborted due to bad parents.")
7979
80 def test_does_not_abort_on_clean_repo(self):80 def test_does_not_abort_on_clean_repo(self):
81 repo = self.make_repository('.')81 repo = self.make_repository('.')
82 reconciler = repo.reconcile(thorough=True)82 result = repo.reconcile(thorough=True)
83 self.assertFalse(reconciler.aborted,83 self.assertFalse(result.aborted,
84 "reconcile should not have aborted on an unbroken repository.")84 "reconcile should not have aborted on an unbroken repository.")
8585
8686
@@ -147,11 +147,11 @@
147 self.make_repository('empty')147 self.make_repository('empty')
148 d = BzrDir.open(self.get_url('empty'))148 d = BzrDir.open(self.get_url('empty'))
149 # calling on a empty repository should do nothing149 # calling on a empty repository should do nothing
150 reconciler = d.find_repository().reconcile(**kwargs)150 result = d.find_repository().reconcile(**kwargs)
151 # no inconsistent parents should have been found151 # no inconsistent parents should have been found
152 self.assertEqual(0, reconciler.inconsistent_parents)152 self.assertEqual(0, result.inconsistent_parents)
153 # and no garbage inventories153 # and no garbage inventories
154 self.assertEqual(0, reconciler.garbage_inventories)154 self.assertEqual(0, result.garbage_inventories)
155 # and no backup weave should have been needed/made.155 # and no backup weave should have been needed/made.
156 self.checkNoBackupInventory(d)156 self.checkNoBackupInventory(d)
157157
@@ -187,11 +187,11 @@
187 if not repo._reconcile_does_inventory_gc:187 if not repo._reconcile_does_inventory_gc:
188 raise TestSkipped('Irrelevant test')188 raise TestSkipped('Irrelevant test')
189 self.checkUnreconciled(d, repo.reconcile())189 self.checkUnreconciled(d, repo.reconcile())
190 reconciler = repo.reconcile(thorough=True)190 result = repo.reconcile(thorough=True)
191 # no bad parents191 # no bad parents
192 self.assertEqual(0, reconciler.inconsistent_parents)192 self.assertEqual(0, result.inconsistent_parents)
193 # and one garbage inventory193 # and one garbage inventory
194 self.assertEqual(1, reconciler.garbage_inventories)194 self.assertEqual(1, result.garbage_inventories)
195 self.check_missing_was_removed(repo)195 self.check_missing_was_removed(repo)
196196
197 def check_thorough_reweave_missing_revision(self, aBzrDir, reconcile,197 def check_thorough_reweave_missing_revision(self, aBzrDir, reconcile,
@@ -241,8 +241,7 @@
241241
242 def reconcile():242 def reconcile():
243 reconciler = Reconciler(d)243 reconciler = Reconciler(d)
244 reconciler.reconcile()244 return reconciler.reconcile()
245 return reconciler
246 self.check_thorough_reweave_missing_revision(d, reconcile)245 self.check_thorough_reweave_missing_revision(d, reconcile)
247246
248 def test_reweave_inventory_without_revision_and_ghost(self):247 def test_reweave_inventory_without_revision_and_ghost(self):
249248
=== modified file 'breezy/tests/per_repository_vf/test_repository.py'
--- breezy/tests/per_repository_vf/test_repository.py 2018-11-18 01:02:16 +0000
+++ breezy/tests/per_repository_vf/test_repository.py 2019-01-01 23:11:42 +0000
@@ -271,7 +271,8 @@
271 with tree.lock_write():271 with tree.lock_write():
272 self.assertEqual(set(), set(repo.texts.keys()))272 self.assertEqual(set(), set(repo.texts.keys()))
273 tree.add(['foo'], [file_id], ['file'])273 tree.add(['foo'], [file_id], ['file'])
274 tree.put_file_bytes_non_atomic('foo', b'content\n')274 tree.put_file_bytes_non_atomic(
275 'foo', b'content\n')
275 try:276 try:
276 rev_key = (tree.commit("foo"),)277 rev_key = (tree.commit("foo"),)
277 except errors.IllegalPath:278 except errors.IllegalPath:
278279
=== modified file 'breezy/tests/per_workingtree/test_parents.py'
--- breezy/tests/per_workingtree/test_parents.py 2018-11-18 00:25:19 +0000
+++ breezy/tests/per_workingtree/test_parents.py 2019-01-01 23:11:42 +0000
@@ -466,8 +466,8 @@
466 _mod_revision.NULL_REVISION)466 _mod_revision.NULL_REVISION)
467 changes = shape_tree.iter_changes(467 changes = shape_tree.iter_changes(
468 base_tree)468 base_tree)
469 list(builder.record_iter_changes(shape_tree,469 list(builder.record_iter_changes(
470 base_tree.get_revision_id(), changes))470 shape_tree, base_tree.get_revision_id(), changes))
471 builder.finish_inventory()471 builder.finish_inventory()
472 builder.commit("Message")472 builder.commit("Message")
473473
474474
=== modified file 'breezy/tests/test_diff.py'
--- breezy/tests/test_diff.py 2018-11-11 04:08:32 +0000
+++ breezy/tests/test_diff.py 2019-01-01 23:11:42 +0000
@@ -699,7 +699,7 @@
699699
700class DiffWasIs(diff.DiffPath):700class DiffWasIs(diff.DiffPath):
701701
702 def diff(self, file_id, old_path, new_path, old_kind, new_kind):702 def diff(self, old_path, new_path, old_kind, new_kind):
703 self.to_file.write(b'was: ')703 self.to_file.write(b'was: ')
704 self.to_file.write(self.old_tree.get_file(old_path).read())704 self.to_file.write(self.old_tree.get_file(old_path).read())
705 self.to_file.write(b'is: ')705 self.to_file.write(b'is: ')
@@ -728,20 +728,19 @@
728 self.new_tree.add('newdir')728 self.new_tree.add('newdir')
729 self.new_tree.add('newdir/newfile', b'file-id')729 self.new_tree.add('newdir/newfile', b'file-id')
730 differ = diff.DiffText(self.old_tree, self.new_tree, BytesIO())730 differ = diff.DiffText(self.old_tree, self.new_tree, BytesIO())
731 differ.diff_text('olddir/oldfile', None, 'old label',731 differ.diff_text('olddir/oldfile', None, 'old label', 'new label')
732 'new label', b'file-id', None)
733 self.assertEqual(732 self.assertEqual(
734 b'--- old label\n+++ new label\n@@ -1,1 +0,0 @@\n-old\n\n',733 b'--- old label\n+++ new label\n@@ -1,1 +0,0 @@\n-old\n\n',
735 differ.to_file.getvalue())734 differ.to_file.getvalue())
736 differ.to_file.seek(0)735 differ.to_file.seek(0)
737 differ.diff_text(None, 'newdir/newfile',736 differ.diff_text(None, 'newdir/newfile',
738 'old label', 'new label', None, b'file-id')737 'old label', 'new label')
739 self.assertEqual(738 self.assertEqual(
740 b'--- old label\n+++ new label\n@@ -0,0 +1,1 @@\n+new\n\n',739 b'--- old label\n+++ new label\n@@ -0,0 +1,1 @@\n+new\n\n',
741 differ.to_file.getvalue())740 differ.to_file.getvalue())
742 differ.to_file.seek(0)741 differ.to_file.seek(0)
743 differ.diff_text('olddir/oldfile', 'newdir/newfile',742 differ.diff_text('olddir/oldfile', 'newdir/newfile',
744 'old label', 'new label', b'file-id', b'file-id')743 'old label', 'new label')
745 self.assertEqual(744 self.assertEqual(
746 b'--- old label\n+++ new label\n@@ -1,1 +1,1 @@\n-old\n+new\n\n',745 b'--- old label\n+++ new label\n@@ -1,1 +1,1 @@\n-old\n+new\n\n',
747 differ.to_file.getvalue())746 differ.to_file.getvalue())
@@ -789,7 +788,7 @@
789 ('new-tree/newdir/newfile', b'new\n')])788 ('new-tree/newdir/newfile', b'new\n')])
790 self.new_tree.add('newdir')789 self.new_tree.add('newdir')
791 self.new_tree.add('newdir/newfile', b'file-id')790 self.new_tree.add('newdir/newfile', b'file-id')
792 self.differ.diff(b'file-id', 'olddir/oldfile', 'newdir/newfile')791 self.differ.diff('olddir/oldfile', 'newdir/newfile')
793 self.assertContainsRe(792 self.assertContainsRe(
794 self.differ.to_file.getvalue(),793 self.differ.to_file.getvalue(),
795 br'--- olddir/oldfile.*\n\+\+\+ newdir/newfile.*\n\@\@ -1,1 \+1,1'794 br'--- olddir/oldfile.*\n\+\+\+ newdir/newfile.*\n\@\@ -1,1 \+1,1'
@@ -805,7 +804,7 @@
805 os.symlink('new', 'new-tree/newdir/newfile')804 os.symlink('new', 'new-tree/newdir/newfile')
806 self.new_tree.add('newdir')805 self.new_tree.add('newdir')
807 self.new_tree.add('newdir/newfile', b'file-id')806 self.new_tree.add('newdir/newfile', b'file-id')
808 self.differ.diff(b'file-id', 'olddir/oldfile', 'newdir/newfile')807 self.differ.diff('olddir/oldfile', 'newdir/newfile')
809 self.assertContainsRe(808 self.assertContainsRe(
810 self.differ.to_file.getvalue(),809 self.differ.to_file.getvalue(),
811 br'--- olddir/oldfile.*\n\+\+\+ newdir/newfile.*\n\@\@ -1,1 \+0,0'810 br'--- olddir/oldfile.*\n\+\+\+ newdir/newfile.*\n\@\@ -1,1 \+0,0'
@@ -816,7 +815,7 @@
816 def test_diff_directory(self):815 def test_diff_directory(self):
817 self.build_tree(['new-tree/new-dir/'])816 self.build_tree(['new-tree/new-dir/'])
818 self.new_tree.add('new-dir', b'new-dir-id')817 self.new_tree.add('new-dir', b'new-dir-id')
819 self.differ.diff(b'new-dir-id', None, 'new-dir')818 self.differ.diff(None, 'new-dir')
820 self.assertEqual(self.differ.to_file.getvalue(), b'')819 self.assertEqual(self.differ.to_file.getvalue(), b'')
821820
822 def create_old_new(self):821 def create_old_new(self):
@@ -838,7 +837,7 @@
838 differ = diff.DiffTree(self.old_tree, self.new_tree, BytesIO())837 differ = diff.DiffTree(self.old_tree, self.new_tree, BytesIO())
839 finally:838 finally:
840 diff.DiffTree.diff_factories = old_diff_factories839 diff.DiffTree.diff_factories = old_diff_factories
841 differ.diff(b'file-id', 'olddir/oldfile', 'newdir/newfile')840 differ.diff('olddir/oldfile', 'newdir/newfile')
842 self.assertNotContainsRe(841 self.assertNotContainsRe(
843 differ.to_file.getvalue(),842 differ.to_file.getvalue(),
844 br'--- olddir/oldfile.*\n\+\+\+ newdir/newfile.*\n\@\@ -1,1 \+1,1'843 br'--- olddir/oldfile.*\n\+\+\+ newdir/newfile.*\n\@\@ -1,1 \+1,1'
@@ -850,7 +849,7 @@
850 self.create_old_new()849 self.create_old_new()
851 differ = diff.DiffTree(self.old_tree, self.new_tree, BytesIO(),850 differ = diff.DiffTree(self.old_tree, self.new_tree, BytesIO(),
852 extra_factories=[DiffWasIs.from_diff_tree])851 extra_factories=[DiffWasIs.from_diff_tree])
853 differ.diff(b'file-id', 'olddir/oldfile', 'newdir/newfile')852 differ.diff('olddir/oldfile', 'newdir/newfile')
854 self.assertNotContainsRe(853 self.assertNotContainsRe(
855 differ.to_file.getvalue(),854 differ.to_file.getvalue(),
856 br'--- olddir/oldfile.*\n\+\+\+ newdir/newfile.*\n\@\@ -1,1 \+1,1'855 br'--- olddir/oldfile.*\n\+\+\+ newdir/newfile.*\n\@\@ -1,1 \+1,1'
@@ -1492,7 +1491,7 @@
1492 self.addCleanup(diff_obj.finish)1491 self.addCleanup(diff_obj.finish)
1493 self.assertContainsRe(diff_obj._root, 'brz-diff-[^/]*')1492 self.assertContainsRe(diff_obj._root, 'brz-diff-[^/]*')
1494 old_path, new_path = diff_obj._prepare_files(1493 old_path, new_path = diff_obj._prepare_files(
1495 'oldname', 'newname', file_id=b'file-id')1494 'oldname', 'newname')
1496 self.assertContainsRe(old_path, 'old/oldname$')1495 self.assertContainsRe(old_path, 'old/oldname$')
1497 self.assertEqual(315532800, os.stat(old_path).st_mtime)1496 self.assertEqual(315532800, os.stat(old_path).st_mtime)
1498 self.assertContainsRe(new_path, 'tree/newname$')1497 self.assertContainsRe(new_path, 'tree/newname$')
@@ -1501,7 +1500,7 @@
1501 if osutils.host_os_dereferences_symlinks():1500 if osutils.host_os_dereferences_symlinks():
1502 self.assertTrue(os.path.samefile('tree/newname', new_path))1501 self.assertTrue(os.path.samefile('tree/newname', new_path))
1503 # make sure we can create files with the same parent directories1502 # make sure we can create files with the same parent directories
1504 diff_obj._prepare_files('oldname2', 'newname2', file_id=b'file2-id')1503 diff_obj._prepare_files('oldname2', 'newname2')
15051504
15061505
1507class TestDiffFromToolEncodedFilename(tests.TestCaseWithTransport):1506class TestDiffFromToolEncodedFilename(tests.TestCaseWithTransport):
15081507
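
The test updates above follow the diff API dropping its file_id parameters: DiffPath.diff, DiffText.diff_text, DiffTree.diff and DiffFromTool._prepare_files are now keyed purely on paths. A sketch of a custom differ written against the new signature, modelled on the DiffWasIs helper in these tests; the class name and its output format are illustrative:

    from breezy import diff

    class DiffShowPaths(diff.DiffPath):
        """Toy differ that just records which paths were compared."""

        def diff(self, old_path, new_path, old_kind, new_kind):
            # Paths, not file ids, now identify the file on each side.
            self.to_file.write(
                ('compared %s -> %s\n' % (old_path, new_path)).encode('utf-8'))
            return self.CHANGED

    # Wired in the same way as DiffWasIs in the tests above, e.g.:
    #   differ = diff.DiffTree(old_tree, new_tree, BytesIO(),
    #                          extra_factories=[DiffShowPaths.from_diff_tree])
    #   differ.diff('olddir/oldfile', 'newdir/newfile')
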
=== modified file 'breezy/tests/test_foreign.py'
--- breezy/tests/test_foreign.py 2018-11-16 18:33:17 +0000
+++ breezy/tests/test_foreign.py 2019-01-01 23:11:42 +0000
@@ -205,10 +205,10 @@
205 parent_revids = []205 parent_revids = []
206 else:206 else:
207 parent_revids = [parent_revid]207 parent_revids = [parent_revid]
208 builder = self.target.get_commit_builder(parent_revids,208 builder = self.target.get_commit_builder(
209 self.target.get_config_stack(), rev.timestamp,209 parent_revids, self.target.get_config_stack(), rev.timestamp,
210 rev.timezone, rev.committer, rev.properties,210 rev.timezone, rev.committer, rev.properties,
211 new_revid)211 new_revid)
212 try:212 try:
213 parent_tree = self.target.repository.revision_tree(213 parent_tree = self.target.repository.revision_tree(
214 parent_revid)214 parent_revid)
215215
=== modified file 'breezy/tests/test_reconcile.py'
--- breezy/tests/test_reconcile.py 2017-06-10 16:40:42 +0000
+++ breezy/tests/test_reconcile.py 2019-01-01 23:11:42 +0000
@@ -38,14 +38,14 @@
38 child = bzrdir.BzrDirMetaFormat1().initialize('child')38 child = bzrdir.BzrDirMetaFormat1().initialize('child')
39 self.assertRaises(errors.NoRepositoryPresent, child.open_repository)39 self.assertRaises(errors.NoRepositoryPresent, child.open_repository)
40 reconciler = Reconciler(child)40 reconciler = Reconciler(child)
41 reconciler.reconcile()41 result = reconciler.reconcile()
42 # smoke test for reconcile appears to work too.42 # smoke test for reconcile appears to work too.
43 reconcile(child)43 reconcile(child)
44 # no inconsistent parents should have been found44 # no inconsistent parents should have been found
45 # but the values should have been set.45 # but the values should have been set.
46 self.assertEqual(0, reconciler.inconsistent_parents)46 self.assertEqual(0, result.inconsistent_parents)
47 # and no garbage inventories47 # and no garbage inventories
48 self.assertEqual(0, reconciler.garbage_inventories)48 self.assertEqual(0, result.garbage_inventories)
4949
5050
51class TestReconciler(tests.TestCaseWithTransport):51class TestReconciler(tests.TestCaseWithTransport):
@@ -53,20 +53,20 @@
53 def test_reconciler_with_no_branch(self):53 def test_reconciler_with_no_branch(self):
54 repo = self.make_repository('repo')54 repo = self.make_repository('repo')
55 reconciler = Reconciler(repo.controldir)55 reconciler = Reconciler(repo.controldir)
56 reconciler.reconcile()56 result = reconciler.reconcile()
57 # no inconsistent parents should have been found57 # no inconsistent parents should have been found
58 # but the values should have been set.58 # but the values should have been set.
59 self.assertEqual(0, reconciler.inconsistent_parents)59 self.assertEqual(0, result.inconsistent_parents)
60 # and no garbage inventories60 # and no garbage inventories
61 self.assertEqual(0, reconciler.garbage_inventories)61 self.assertEqual(0, result.garbage_inventories)
62 self.assertIs(None, reconciler.fixed_branch_history)62 self.assertIs(None, result.fixed_branch_history)
6363
64 def test_reconciler_finds_branch(self):64 def test_reconciler_finds_branch(self):
65 a_branch = self.make_branch('a_branch')65 a_branch = self.make_branch('a_branch')
66 reconciler = Reconciler(a_branch.controldir)66 reconciler = Reconciler(a_branch.controldir)
67 reconciler.reconcile()67 result = reconciler.reconcile()
6868
69 # It should have checked the repository, and the branch69 # It should have checked the repository, and the branch
70 self.assertEqual(0, reconciler.inconsistent_parents)70 self.assertEqual(0, result.inconsistent_parents)
71 self.assertEqual(0, reconciler.garbage_inventories)71 self.assertEqual(0, result.garbage_inventories)
72 self.assertIs(False, reconciler.fixed_branch_history)72 self.assertIs(False, result.fixed_branch_history)
