Merge lp:~jelmer/brz/move-reconcile-1 into lp:brz
- move-reconcile-1
- Merge into trunk
Proposed by
Jelmer Vernooij
Status: | Merged |
---|---|
Approved by: | Jelmer Vernooij |
Approved revision: | no longer in the source branch. |
Merge reported by: | The Breezy Bot |
Merged at revision: | not available |
Proposed branch: | lp:~jelmer/brz/move-reconcile-1 |
Merge into: | lp:brz |
Prerequisite: | lp:~jelmer/brz/python3.7 |
Diff against target: |
2182 lines (+682/-686) 31 files modified
breezy/annotate.py (+1/-1) breezy/branch.py (+5/-6) breezy/builtins.py (+4/-6) breezy/bzr/branch.py (+7/-0) breezy/bzr/groupcompress_repo.py (+2/-3) breezy/bzr/knitrepo.py (+2/-3) breezy/bzr/pack_repo.py (+2/-3) breezy/bzr/reconcile.py (+471/-0) breezy/bzr/remote.py (+12/-2) breezy/bzr/vf_repository.py (+9/-2) breezy/bzr/workingtree_4.py (+1/-1) breezy/commit.py (+1/-1) breezy/diff.py (+41/-54) breezy/git/branch.py (+6/-0) breezy/git/commit.py (+1/-1) breezy/git/repository.py (+4/-28) breezy/git/tests/test_blackbox.py (+15/-0) breezy/git/tree.py (+1/-1) breezy/plugins/fastimport/revision_store.py (+1/-1) breezy/reconcile.py (+23/-473) breezy/repository.py (+2/-6) breezy/shelf_ui.py (+2/-3) breezy/tests/per_branch/test_reconcile.py (+2/-2) breezy/tests/per_intertree/test_compare.py (+4/-2) breezy/tests/per_repository/test_commit_builder.py (+22/-45) breezy/tests/per_repository_vf/test_reconcile.py (+11/-12) breezy/tests/per_repository_vf/test_repository.py (+2/-1) breezy/tests/per_workingtree/test_parents.py (+2/-2) breezy/tests/test_diff.py (+11/-12) breezy/tests/test_foreign.py (+4/-4) breezy/tests/test_reconcile.py (+11/-11) |
To merge this branch: | bzr merge lp:~jelmer/brz/move-reconcile-1 |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Martin Packman | Approve | ||
Review via email: mp+359941@code.launchpad.net |
Commit message
Move bzr-specific reconcile bits to breezy.
Description of the change
Move bzr-specific reconcile bits to breezy.
To post a comment you must log in.
Revision history for this message
The Breezy Bot (the-breezy-bot) wrote : | # |
Running landing tests failed
https:/ (CI results link truncated in page capture)
Revision history for this message
The Breezy Bot (the-breezy-bot) wrote : | # |
Running landing tests failed
https:/ (CI results link truncated in page capture)
Revision history for this message
The Breezy Bot (the-breezy-bot) wrote : | # |
Running landing tests failed
https:/ (CI results link truncated in page capture)
Revision history for this message
The Breezy Bot (the-breezy-bot) wrote : | # |
Running landing tests failed
https:/ (CI results link truncated in page capture)
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'breezy/annotate.py' | |||
2 | --- breezy/annotate.py 2018-11-16 18:33:17 +0000 | |||
3 | +++ breezy/annotate.py 2019-01-01 23:11:42 +0000 | |||
4 | @@ -54,7 +54,7 @@ | |||
5 | 54 | 54 | ||
6 | 55 | def annotate_file_tree(tree, path, to_file, verbose=False, full=False, | 55 | def annotate_file_tree(tree, path, to_file, verbose=False, full=False, |
7 | 56 | show_ids=False, branch=None): | 56 | show_ids=False, branch=None): |
9 | 57 | """Annotate file_id in a tree. | 57 | """Annotate path in a tree. |
10 | 58 | 58 | ||
11 | 59 | The tree should already be read_locked() when annotate_file_tree is called. | 59 | The tree should already be read_locked() when annotate_file_tree is called. |
12 | 60 | 60 | ||
13 | 61 | 61 | ||
14 | === modified file 'breezy/branch.py' | |||
15 | --- breezy/branch.py 2018-11-16 18:40:46 +0000 | |||
16 | +++ breezy/branch.py 2019-01-01 23:11:42 +0000 | |||
17 | @@ -1414,12 +1414,11 @@ | |||
18 | 1414 | return tree | 1414 | return tree |
19 | 1415 | 1415 | ||
20 | 1416 | def reconcile(self, thorough=True): | 1416 | def reconcile(self, thorough=True): |
27 | 1417 | """Make sure the data stored in this branch is consistent.""" | 1417 | """Make sure the data stored in this branch is consistent. |
28 | 1418 | from breezy.reconcile import BranchReconciler | 1418 | |
29 | 1419 | with self.lock_write(): | 1419 | :return: A `ReconcileResult` object. |
30 | 1420 | reconciler = BranchReconciler(self, thorough=thorough) | 1420 | """ |
31 | 1421 | reconciler.reconcile() | 1421 | raise NotImplementedError(self.reconcile) |
26 | 1422 | return reconciler | ||
32 | 1423 | 1422 | ||
33 | 1424 | def reference_parent(self, path, file_id=None, possible_transports=None): | 1423 | def reference_parent(self, path, file_id=None, possible_transports=None): |
34 | 1425 | """Return the parent branch for a tree-reference file_id | 1424 | """Return the parent branch for a tree-reference file_id |
35 | 1426 | 1425 | ||
36 | === modified file 'breezy/builtins.py' | |||
37 | --- breezy/builtins.py 2019-01-01 21:53:56 +0000 | |||
38 | +++ breezy/builtins.py 2019-01-01 23:11:42 +0000 | |||
39 | @@ -5224,17 +5224,15 @@ | |||
40 | 5224 | tree = _get_one_revision_tree('annotate', revision, branch=branch) | 5224 | tree = _get_one_revision_tree('annotate', revision, branch=branch) |
41 | 5225 | self.add_cleanup(tree.lock_read().unlock) | 5225 | self.add_cleanup(tree.lock_read().unlock) |
42 | 5226 | if wt is not None and revision is None: | 5226 | if wt is not None and revision is None: |
49 | 5227 | file_id = wt.path2id(relpath) | 5227 | if not wt.is_versioned(relpath): |
50 | 5228 | else: | 5228 | raise errors.NotVersionedError(relpath) |
45 | 5229 | file_id = tree.path2id(relpath) | ||
46 | 5230 | if file_id is None: | ||
47 | 5231 | raise errors.NotVersionedError(filename) | ||
48 | 5232 | if wt is not None and revision is None: | ||
51 | 5233 | # If there is a tree and we're not annotating historical | 5229 | # If there is a tree and we're not annotating historical |
52 | 5234 | # versions, annotate the working tree's content. | 5230 | # versions, annotate the working tree's content. |
53 | 5235 | annotate_file_tree(wt, relpath, self.outf, long, all, | 5231 | annotate_file_tree(wt, relpath, self.outf, long, all, |
54 | 5236 | show_ids=show_ids) | 5232 | show_ids=show_ids) |
55 | 5237 | else: | 5233 | else: |
56 | 5234 | if not tree.is_versioned(relpath): | ||
57 | 5235 | raise errors.NotVersionedError(relpath) | ||
58 | 5238 | annotate_file_tree(tree, relpath, self.outf, long, all, | 5236 | annotate_file_tree(tree, relpath, self.outf, long, all, |
59 | 5239 | show_ids=show_ids, branch=branch) | 5237 | show_ids=show_ids, branch=branch) |
60 | 5240 | 5238 | ||
61 | 5241 | 5239 | ||
62 | === modified file 'breezy/bzr/branch.py' | |||
63 | --- breezy/bzr/branch.py 2018-11-11 15:40:12 +0000 | |||
64 | +++ breezy/bzr/branch.py 2019-01-01 23:11:42 +0000 | |||
65 | @@ -443,6 +443,13 @@ | |||
66 | 443 | super(BzrBranch, self)._clear_cached_state() | 443 | super(BzrBranch, self)._clear_cached_state() |
67 | 444 | self._tags_bytes = None | 444 | self._tags_bytes = None |
68 | 445 | 445 | ||
69 | 446 | def reconcile(self, thorough=True): | ||
70 | 447 | """Make sure the data stored in this branch is consistent.""" | ||
71 | 448 | from .reconcile import BranchReconciler | ||
72 | 449 | with self.lock_write(): | ||
73 | 450 | reconciler = BranchReconciler(self, thorough=thorough) | ||
74 | 451 | return reconciler.reconcile() | ||
75 | 452 | |||
76 | 446 | 453 | ||
77 | 447 | class BzrBranch8(BzrBranch): | 454 | class BzrBranch8(BzrBranch): |
78 | 448 | """A branch that stores tree-reference locations.""" | 455 | """A branch that stores tree-reference locations.""" |
79 | 449 | 456 | ||
80 | === modified file 'breezy/bzr/groupcompress_repo.py' | |||
81 | --- breezy/bzr/groupcompress_repo.py 2018-11-11 04:08:32 +0000 | |||
82 | +++ breezy/bzr/groupcompress_repo.py 2019-01-01 23:11:42 +0000 | |||
83 | @@ -1098,12 +1098,11 @@ | |||
84 | 1098 | """Reconcile this repository to make sure all CHKs are in canonical | 1098 | """Reconcile this repository to make sure all CHKs are in canonical |
85 | 1099 | form. | 1099 | form. |
86 | 1100 | """ | 1100 | """ |
88 | 1101 | from breezy.reconcile import PackReconciler | 1101 | from .reconcile import PackReconciler |
89 | 1102 | with self.lock_write(): | 1102 | with self.lock_write(): |
90 | 1103 | reconciler = PackReconciler( | 1103 | reconciler = PackReconciler( |
91 | 1104 | self, thorough=True, canonicalize_chks=True) | 1104 | self, thorough=True, canonicalize_chks=True) |
94 | 1105 | reconciler.reconcile() | 1105 | return reconciler.reconcile() |
93 | 1106 | return reconciler | ||
95 | 1107 | 1106 | ||
96 | 1108 | def _reconcile_pack(self, collection, packs, extension, revs, pb): | 1107 | def _reconcile_pack(self, collection, packs, extension, revs, pb): |
97 | 1109 | packer = GCCHKReconcilePacker(collection, packs, extension) | 1108 | packer = GCCHKReconcilePacker(collection, packs, extension) |
98 | 1110 | 1109 | ||
99 | === modified file 'breezy/bzr/knitrepo.py' | |||
100 | --- breezy/bzr/knitrepo.py 2018-11-11 04:08:32 +0000 | |||
101 | +++ breezy/bzr/knitrepo.py 2019-01-01 23:11:42 +0000 | |||
102 | @@ -206,11 +206,10 @@ | |||
103 | 206 | 206 | ||
104 | 207 | def reconcile(self, other=None, thorough=False): | 207 | def reconcile(self, other=None, thorough=False): |
105 | 208 | """Reconcile this repository.""" | 208 | """Reconcile this repository.""" |
107 | 209 | from breezy.reconcile import KnitReconciler | 209 | from .reconcile import KnitReconciler |
108 | 210 | with self.lock_write(): | 210 | with self.lock_write(): |
109 | 211 | reconciler = KnitReconciler(self, thorough=thorough) | 211 | reconciler = KnitReconciler(self, thorough=thorough) |
112 | 212 | reconciler.reconcile() | 212 | return reconciler.reconcile() |
111 | 213 | return reconciler | ||
113 | 214 | 213 | ||
114 | 215 | def _make_parents_provider(self): | 214 | def _make_parents_provider(self): |
115 | 216 | return _KnitsParentsProvider(self.revisions) | 215 | return _KnitsParentsProvider(self.revisions) |
116 | 217 | 216 | ||
117 | === modified file 'breezy/bzr/pack_repo.py' | |||
118 | --- breezy/bzr/pack_repo.py 2018-11-12 01:41:38 +0000 | |||
119 | +++ breezy/bzr/pack_repo.py 2019-01-01 23:11:42 +0000 | |||
120 | @@ -1819,11 +1819,10 @@ | |||
121 | 1819 | 1819 | ||
122 | 1820 | def reconcile(self, other=None, thorough=False): | 1820 | def reconcile(self, other=None, thorough=False): |
123 | 1821 | """Reconcile this repository.""" | 1821 | """Reconcile this repository.""" |
125 | 1822 | from breezy.reconcile import PackReconciler | 1822 | from .reconcile import PackReconciler |
126 | 1823 | with self.lock_write(): | 1823 | with self.lock_write(): |
127 | 1824 | reconciler = PackReconciler(self, thorough=thorough) | 1824 | reconciler = PackReconciler(self, thorough=thorough) |
130 | 1825 | reconciler.reconcile() | 1825 | return reconciler.reconcile() |
129 | 1826 | return reconciler | ||
131 | 1827 | 1826 | ||
132 | 1828 | def _reconcile_pack(self, collection, packs, extension, revs, pb): | 1827 | def _reconcile_pack(self, collection, packs, extension, revs, pb): |
133 | 1829 | raise NotImplementedError(self._reconcile_pack) | 1828 | raise NotImplementedError(self._reconcile_pack) |
134 | 1830 | 1829 | ||
135 | === added file 'breezy/bzr/reconcile.py' | |||
136 | --- breezy/bzr/reconcile.py 1970-01-01 00:00:00 +0000 | |||
137 | +++ breezy/bzr/reconcile.py 2019-01-01 23:11:42 +0000 | |||
138 | @@ -0,0 +1,471 @@ | |||
139 | 1 | # Copyright (C) 2006-2010 Canonical Ltd | ||
140 | 2 | # | ||
141 | 3 | # This program is free software; you can redistribute it and/or modify | ||
142 | 4 | # it under the terms of the GNU General Public License as published by | ||
143 | 5 | # the Free Software Foundation; either version 2 of the License, or | ||
144 | 6 | # (at your option) any later version. | ||
145 | 7 | # | ||
146 | 8 | # This program is distributed in the hope that it will be useful, | ||
147 | 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
148 | 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
149 | 11 | # GNU General Public License for more details. | ||
150 | 12 | # | ||
151 | 13 | # You should have received a copy of the GNU General Public License | ||
152 | 14 | # along with this program; if not, write to the Free Software | ||
153 | 15 | # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | ||
154 | 16 | |||
155 | 17 | """Reconcilers are able to fix some potential data errors in a branch.""" | ||
156 | 18 | |||
157 | 19 | from __future__ import absolute_import | ||
158 | 20 | |||
159 | 21 | __all__ = [ | ||
160 | 22 | 'BranchReconciler', | ||
161 | 23 | 'KnitReconciler', | ||
162 | 24 | 'PackReconciler', | ||
163 | 25 | 'VersionedFileRepoReconciler', | ||
164 | 26 | ] | ||
165 | 27 | |||
166 | 28 | from .. import ( | ||
167 | 29 | cleanup, | ||
168 | 30 | errors, | ||
169 | 31 | revision as _mod_revision, | ||
170 | 32 | ui, | ||
171 | 33 | ) | ||
172 | 34 | from ..reconcile import ReconcileResult | ||
173 | 35 | from ..i18n import gettext | ||
174 | 36 | from ..trace import mutter | ||
175 | 37 | from ..tsort import topo_sort | ||
176 | 38 | from .versionedfile import AdapterFactory, FulltextContentFactory | ||
177 | 39 | |||
178 | 40 | |||
179 | 41 | class VersionedFileRepoReconciler(object): | ||
180 | 42 | """Reconciler that reconciles a repository. | ||
181 | 43 | |||
182 | 44 | The goal of repository reconciliation is to make any derived data | ||
183 | 45 | consistent with the core data committed by a user. This can involve | ||
184 | 46 | reindexing, or removing unreferenced data if that can interfere with | ||
185 | 47 | queries in a given repository. | ||
186 | 48 | |||
187 | 49 | Currently this consists of an inventory reweave with revision cross-checks. | ||
188 | 50 | """ | ||
189 | 51 | |||
190 | 52 | def __init__(self, repo, other=None, thorough=False): | ||
191 | 53 | """Construct a RepoReconciler. | ||
192 | 54 | |||
193 | 55 | :param thorough: perform a thorough check which may take longer but | ||
194 | 56 | will correct non-data loss issues such as incorrect | ||
195 | 57 | cached data. | ||
196 | 58 | """ | ||
197 | 59 | self.garbage_inventories = 0 | ||
198 | 60 | self.inconsistent_parents = 0 | ||
199 | 61 | self.aborted = False | ||
200 | 62 | self.repo = repo | ||
201 | 63 | self.thorough = thorough | ||
202 | 64 | |||
203 | 65 | def reconcile(self): | ||
204 | 66 | """Perform reconciliation. | ||
205 | 67 | |||
206 | 68 | After reconciliation the following attributes document found issues: | ||
207 | 69 | |||
208 | 70 | * `inconsistent_parents`: The number of revisions in the repository | ||
209 | 71 | whose ancestry was being reported incorrectly. | ||
210 | 72 | * `garbage_inventories`: The number of inventory objects without | ||
211 | 73 | revisions that were garbage collected. | ||
212 | 74 | """ | ||
213 | 75 | with self.repo.lock_write(), \ | ||
214 | 76 | ui.ui_factory.nested_progress_bar() as self.pb: | ||
215 | 77 | self._reconcile_steps() | ||
216 | 78 | ret = ReconcileResult() | ||
217 | 79 | ret.aborted = self.aborted | ||
218 | 80 | ret.garbage_inventories = self.garbage_inventories | ||
219 | 81 | ret.inconsistent_parents = self.inconsistent_parents | ||
220 | 82 | return ret | ||
221 | 83 | |||
222 | 84 | def _reconcile_steps(self): | ||
223 | 85 | """Perform the steps to reconcile this repository.""" | ||
224 | 86 | self._reweave_inventory() | ||
225 | 87 | |||
226 | 88 | def _reweave_inventory(self): | ||
227 | 89 | """Regenerate the inventory weave for the repository from scratch. | ||
228 | 90 | |||
229 | 91 | This is a smart function: it will only do the reweave if doing it | ||
230 | 92 | will correct data issues. The self.thorough flag controls whether | ||
231 | 93 | only data-loss causing issues (!self.thorough) or all issues | ||
232 | 94 | (self.thorough) are treated as requiring the reweave. | ||
233 | 95 | """ | ||
234 | 96 | transaction = self.repo.get_transaction() | ||
235 | 97 | self.pb.update(gettext('Reading inventory data')) | ||
236 | 98 | self.inventory = self.repo.inventories | ||
237 | 99 | self.revisions = self.repo.revisions | ||
238 | 100 | # the total set of revisions to process | ||
239 | 101 | self.pending = {key[-1] for key in self.revisions.keys()} | ||
240 | 102 | |||
241 | 103 | # mapping from revision_id to parents | ||
242 | 104 | self._rev_graph = {} | ||
243 | 105 | # errors that we detect | ||
244 | 106 | self.inconsistent_parents = 0 | ||
245 | 107 | # we need the revision id of each revision and its available parents list | ||
246 | 108 | self._setup_steps(len(self.pending)) | ||
247 | 109 | for rev_id in self.pending: | ||
248 | 110 | # put a revision into the graph. | ||
249 | 111 | self._graph_revision(rev_id) | ||
250 | 112 | self._check_garbage_inventories() | ||
251 | 113 | # if there are no inconsistent_parents and | ||
252 | 114 | # (no garbage inventories or we are not doing a thorough check) | ||
253 | 115 | if (not self.inconsistent_parents | ||
254 | 116 | and (not self.garbage_inventories or not self.thorough)): | ||
255 | 117 | ui.ui_factory.note(gettext('Inventory ok.')) | ||
256 | 118 | return | ||
257 | 119 | self.pb.update(gettext('Backing up inventory'), 0, 0) | ||
258 | 120 | self.repo._backup_inventory() | ||
259 | 121 | ui.ui_factory.note(gettext('Backup inventory created.')) | ||
260 | 122 | new_inventories = self.repo._temp_inventories() | ||
261 | 123 | |||
262 | 124 | # we have topological order of revisions and non ghost parents ready. | ||
263 | 125 | self._setup_steps(len(self._rev_graph)) | ||
264 | 126 | revision_keys = [(rev_id,) for rev_id in topo_sort(self._rev_graph)] | ||
265 | 127 | stream = self._change_inv_parents( | ||
266 | 128 | self.inventory.get_record_stream(revision_keys, 'unordered', True), | ||
267 | 129 | self._new_inv_parents, | ||
268 | 130 | set(revision_keys)) | ||
269 | 131 | new_inventories.insert_record_stream(stream) | ||
270 | 132 | # if this worked, the set of new_inventories.keys should equal | ||
271 | 133 | # self.pending | ||
272 | 134 | if not (set(new_inventories.keys()) | ||
273 | 135 | == {(revid,) for revid in self.pending}): | ||
274 | 136 | raise AssertionError() | ||
275 | 137 | self.pb.update(gettext('Writing weave')) | ||
276 | 138 | self.repo._activate_new_inventory() | ||
277 | 139 | self.inventory = None | ||
278 | 140 | ui.ui_factory.note(gettext('Inventory regenerated.')) | ||
279 | 141 | |||
280 | 142 | def _new_inv_parents(self, revision_key): | ||
281 | 143 | """Lookup ghost-filtered parents for revision_key.""" | ||
282 | 144 | # Use the filtered ghostless parents list: | ||
283 | 145 | return tuple([(revid,) for revid in self._rev_graph[revision_key[-1]]]) | ||
284 | 146 | |||
285 | 147 | def _change_inv_parents(self, stream, get_parents, all_revision_keys): | ||
286 | 148 | """Adapt a record stream to reconcile the parents.""" | ||
287 | 149 | for record in stream: | ||
288 | 150 | wanted_parents = get_parents(record.key) | ||
289 | 151 | if wanted_parents and wanted_parents[0] not in all_revision_keys: | ||
290 | 152 | # The check for the left most parent only handles knit | ||
291 | 153 | # compressors, but this code only applies to knit and weave | ||
292 | 154 | # repositories anyway. | ||
293 | 155 | bytes = record.get_bytes_as('fulltext') | ||
294 | 156 | yield FulltextContentFactory(record.key, wanted_parents, record.sha1, bytes) | ||
295 | 157 | else: | ||
296 | 158 | adapted_record = AdapterFactory( | ||
297 | 159 | record.key, wanted_parents, record) | ||
298 | 160 | yield adapted_record | ||
299 | 161 | self._reweave_step('adding inventories') | ||
300 | 162 | |||
301 | 163 | def _setup_steps(self, new_total): | ||
302 | 164 | """Setup the markers we need to control the progress bar.""" | ||
303 | 165 | self.total = new_total | ||
304 | 166 | self.count = 0 | ||
305 | 167 | |||
306 | 168 | def _graph_revision(self, rev_id): | ||
307 | 169 | """Load a revision into the revision graph.""" | ||
308 | 170 | # pick a random revision | ||
309 | 171 | # analyse revision id rev_id and put it in the stack. | ||
310 | 172 | self._reweave_step('loading revisions') | ||
311 | 173 | rev = self.repo.get_revision_reconcile(rev_id) | ||
312 | 174 | parents = [] | ||
313 | 175 | for parent in rev.parent_ids: | ||
314 | 176 | if self._parent_is_available(parent): | ||
315 | 177 | parents.append(parent) | ||
316 | 178 | else: | ||
317 | 179 | mutter('found ghost %s', parent) | ||
318 | 180 | self._rev_graph[rev_id] = parents | ||
319 | 181 | |||
320 | 182 | def _check_garbage_inventories(self): | ||
321 | 183 | """Check for garbage inventories which we cannot trust | ||
322 | 184 | |||
323 | 185 | We cant trust them because their pre-requisite file data may not | ||
324 | 186 | be present - all we know is that their revision was not installed. | ||
325 | 187 | """ | ||
326 | 188 | if not self.thorough: | ||
327 | 189 | return | ||
328 | 190 | inventories = set(self.inventory.keys()) | ||
329 | 191 | revisions = set(self.revisions.keys()) | ||
330 | 192 | garbage = inventories.difference(revisions) | ||
331 | 193 | self.garbage_inventories = len(garbage) | ||
332 | 194 | for revision_key in garbage: | ||
333 | 195 | mutter('Garbage inventory {%s} found.', revision_key[-1]) | ||
334 | 196 | |||
335 | 197 | def _parent_is_available(self, parent): | ||
336 | 198 | """True if parent is a fully available revision | ||
337 | 199 | |||
338 | 200 | A fully available revision has a inventory and a revision object in the | ||
339 | 201 | repository. | ||
340 | 202 | """ | ||
341 | 203 | if parent in self._rev_graph: | ||
342 | 204 | return True | ||
343 | 205 | inv_present = (1 == len(self.inventory.get_parent_map([(parent,)]))) | ||
344 | 206 | return (inv_present and self.repo.has_revision(parent)) | ||
345 | 207 | |||
346 | 208 | def _reweave_step(self, message): | ||
347 | 209 | """Mark a single step of regeneration complete.""" | ||
348 | 210 | self.pb.update(message, self.count, self.total) | ||
349 | 211 | self.count += 1 | ||
350 | 212 | |||
351 | 213 | |||
352 | 214 | class KnitReconciler(VersionedFileRepoReconciler): | ||
353 | 215 | """Reconciler that reconciles a knit format repository. | ||
354 | 216 | |||
355 | 217 | This will detect garbage inventories and remove them in thorough mode. | ||
356 | 218 | """ | ||
357 | 219 | |||
358 | 220 | def _reconcile_steps(self): | ||
359 | 221 | """Perform the steps to reconcile this repository.""" | ||
360 | 222 | if self.thorough: | ||
361 | 223 | try: | ||
362 | 224 | self._load_indexes() | ||
363 | 225 | except errors.BzrCheckError: | ||
364 | 226 | self.aborted = True | ||
365 | 227 | return | ||
366 | 228 | # knits never suffer this | ||
367 | 229 | self._gc_inventory() | ||
368 | 230 | self._fix_text_parents() | ||
369 | 231 | |||
370 | 232 | def _load_indexes(self): | ||
371 | 233 | """Load indexes for the reconciliation.""" | ||
372 | 234 | self.transaction = self.repo.get_transaction() | ||
373 | 235 | self.pb.update(gettext('Reading indexes'), 0, 2) | ||
374 | 236 | self.inventory = self.repo.inventories | ||
375 | 237 | self.pb.update(gettext('Reading indexes'), 1, 2) | ||
376 | 238 | self.repo._check_for_inconsistent_revision_parents() | ||
377 | 239 | self.revisions = self.repo.revisions | ||
378 | 240 | self.pb.update(gettext('Reading indexes'), 2, 2) | ||
379 | 241 | |||
380 | 242 | def _gc_inventory(self): | ||
381 | 243 | """Remove inventories that are not referenced from the revision store.""" | ||
382 | 244 | self.pb.update(gettext('Checking unused inventories'), 0, 1) | ||
383 | 245 | self._check_garbage_inventories() | ||
384 | 246 | self.pb.update(gettext('Checking unused inventories'), 1, 3) | ||
385 | 247 | if not self.garbage_inventories: | ||
386 | 248 | ui.ui_factory.note(gettext('Inventory ok.')) | ||
387 | 249 | return | ||
388 | 250 | self.pb.update(gettext('Backing up inventory'), 0, 0) | ||
389 | 251 | self.repo._backup_inventory() | ||
390 | 252 | ui.ui_factory.note(gettext('Backup Inventory created')) | ||
391 | 253 | # asking for '' should never return a non-empty weave | ||
392 | 254 | new_inventories = self.repo._temp_inventories() | ||
393 | 255 | # we have topological order of revisions and non ghost parents ready. | ||
394 | 256 | graph = self.revisions.get_parent_map(self.revisions.keys()) | ||
395 | 257 | revision_keys = topo_sort(graph) | ||
396 | 258 | revision_ids = [key[-1] for key in revision_keys] | ||
397 | 259 | self._setup_steps(len(revision_keys)) | ||
398 | 260 | stream = self._change_inv_parents( | ||
399 | 261 | self.inventory.get_record_stream(revision_keys, 'unordered', True), | ||
400 | 262 | graph.__getitem__, | ||
401 | 263 | set(revision_keys)) | ||
402 | 264 | new_inventories.insert_record_stream(stream) | ||
403 | 265 | # if this worked, the set of new_inventory_vf.names should equal | ||
404 | 266 | # the revisionds list | ||
405 | 267 | if not(set(new_inventories.keys()) == set(revision_keys)): | ||
406 | 268 | raise AssertionError() | ||
407 | 269 | self.pb.update(gettext('Writing weave')) | ||
408 | 270 | self.repo._activate_new_inventory() | ||
409 | 271 | self.inventory = None | ||
410 | 272 | ui.ui_factory.note(gettext('Inventory regenerated.')) | ||
411 | 273 | |||
412 | 274 | def _fix_text_parents(self): | ||
413 | 275 | """Fix bad versionedfile parent entries. | ||
414 | 276 | |||
415 | 277 | It is possible for the parents entry in a versionedfile entry to be | ||
416 | 278 | inconsistent with the values in the revision and inventory. | ||
417 | 279 | |||
418 | 280 | This method finds entries with such inconsistencies, corrects their | ||
419 | 281 | parent lists, and replaces the versionedfile with a corrected version. | ||
420 | 282 | """ | ||
421 | 283 | transaction = self.repo.get_transaction() | ||
422 | 284 | versions = [key[-1] for key in self.revisions.keys()] | ||
423 | 285 | mutter('Prepopulating revision text cache with %d revisions', | ||
424 | 286 | len(versions)) | ||
425 | 287 | vf_checker = self.repo._get_versioned_file_checker() | ||
426 | 288 | bad_parents, unused_versions = vf_checker.check_file_version_parents( | ||
427 | 289 | self.repo.texts, self.pb) | ||
428 | 290 | text_index = vf_checker.text_index | ||
429 | 291 | per_id_bad_parents = {} | ||
430 | 292 | for key in unused_versions: | ||
431 | 293 | # Ensure that every file with unused versions gets rewritten. | ||
432 | 294 | # NB: This is really not needed, reconcile != pack. | ||
433 | 295 | per_id_bad_parents[key[0]] = {} | ||
434 | 296 | # Generate per-knit/weave data. | ||
435 | 297 | for key, details in bad_parents.items(): | ||
436 | 298 | file_id = key[0] | ||
437 | 299 | rev_id = key[1] | ||
438 | 300 | knit_parents = tuple([parent[-1] for parent in details[0]]) | ||
439 | 301 | correct_parents = tuple([parent[-1] for parent in details[1]]) | ||
440 | 302 | file_details = per_id_bad_parents.setdefault(file_id, {}) | ||
441 | 303 | file_details[rev_id] = (knit_parents, correct_parents) | ||
442 | 304 | file_id_versions = {} | ||
443 | 305 | for text_key in text_index: | ||
444 | 306 | versions_list = file_id_versions.setdefault(text_key[0], []) | ||
445 | 307 | versions_list.append(text_key[1]) | ||
446 | 308 | # Do the reconcile of individual weaves. | ||
447 | 309 | for num, file_id in enumerate(per_id_bad_parents): | ||
448 | 310 | self.pb.update(gettext('Fixing text parents'), num, | ||
449 | 311 | len(per_id_bad_parents)) | ||
450 | 312 | versions_with_bad_parents = per_id_bad_parents[file_id] | ||
451 | 313 | id_unused_versions = set(key[-1] for key in unused_versions | ||
452 | 314 | if key[0] == file_id) | ||
453 | 315 | if file_id in file_id_versions: | ||
454 | 316 | file_versions = file_id_versions[file_id] | ||
455 | 317 | else: | ||
456 | 318 | # This id was present in the disk store but is not referenced | ||
457 | 319 | # by any revision at all. | ||
458 | 320 | file_versions = [] | ||
459 | 321 | self._fix_text_parent(file_id, versions_with_bad_parents, | ||
460 | 322 | id_unused_versions, file_versions) | ||
461 | 323 | |||
462 | 324 | def _fix_text_parent(self, file_id, versions_with_bad_parents, | ||
463 | 325 | unused_versions, all_versions): | ||
464 | 326 | """Fix bad versionedfile entries in a single versioned file.""" | ||
465 | 327 | mutter('fixing text parent: %r (%d versions)', file_id, | ||
466 | 328 | len(versions_with_bad_parents)) | ||
467 | 329 | mutter('(%d are unused)', len(unused_versions)) | ||
468 | 330 | new_file_id = b'temp:%s' % file_id | ||
469 | 331 | new_parents = {} | ||
470 | 332 | needed_keys = set() | ||
471 | 333 | for version in all_versions: | ||
472 | 334 | if version in unused_versions: | ||
473 | 335 | continue | ||
474 | 336 | elif version in versions_with_bad_parents: | ||
475 | 337 | parents = versions_with_bad_parents[version][1] | ||
476 | 338 | else: | ||
477 | 339 | pmap = self.repo.texts.get_parent_map([(file_id, version)]) | ||
478 | 340 | parents = [key[-1] for key in pmap[(file_id, version)]] | ||
479 | 341 | new_parents[(new_file_id, version)] = [ | ||
480 | 342 | (new_file_id, parent) for parent in parents] | ||
481 | 343 | needed_keys.add((file_id, version)) | ||
482 | 344 | |||
483 | 345 | def fix_parents(stream): | ||
484 | 346 | for record in stream: | ||
485 | 347 | bytes = record.get_bytes_as('fulltext') | ||
486 | 348 | new_key = (new_file_id, record.key[-1]) | ||
487 | 349 | parents = new_parents[new_key] | ||
488 | 350 | yield FulltextContentFactory(new_key, parents, record.sha1, bytes) | ||
489 | 351 | stream = self.repo.texts.get_record_stream( | ||
490 | 352 | needed_keys, 'topological', True) | ||
491 | 353 | self.repo._remove_file_id(new_file_id) | ||
492 | 354 | self.repo.texts.insert_record_stream(fix_parents(stream)) | ||
493 | 355 | self.repo._remove_file_id(file_id) | ||
494 | 356 | if len(new_parents): | ||
495 | 357 | self.repo._move_file_id(new_file_id, file_id) | ||
496 | 358 | |||
497 | 359 | |||
498 | 360 | class PackReconciler(VersionedFileRepoReconciler): | ||
499 | 361 | """Reconciler that reconciles a pack based repository. | ||
500 | 362 | |||
501 | 363 | Garbage inventories do not affect ancestry queries, and removal is | ||
502 | 364 | considerably more expensive as there is no separate versioned file for | ||
503 | 365 | them, so they are not cleaned. In short it is currently a no-op. | ||
504 | 366 | |||
505 | 367 | In future this may be a good place to hook in annotation cache checking, | ||
506 | 368 | index recreation etc. | ||
507 | 369 | """ | ||
508 | 370 | |||
509 | 371 | # XXX: The index corruption that _fix_text_parents performs is needed for | ||
510 | 372 | # packs, but not yet implemented. The basic approach is to: | ||
511 | 373 | # - lock the names list | ||
512 | 374 | # - perform a customised pack() that regenerates data as needed | ||
513 | 375 | # - unlock the names list | ||
514 | 376 | # https://bugs.launchpad.net/bzr/+bug/154173 | ||
515 | 377 | |||
516 | 378 | def __init__(self, repo, other=None, thorough=False, | ||
517 | 379 | canonicalize_chks=False): | ||
518 | 380 | super(PackReconciler, self).__init__(repo, other=other, | ||
519 | 381 | thorough=thorough) | ||
520 | 382 | self.canonicalize_chks = canonicalize_chks | ||
521 | 383 | |||
522 | 384 | def _reconcile_steps(self): | ||
523 | 385 | """Perform the steps to reconcile this repository.""" | ||
524 | 386 | if not self.thorough: | ||
525 | 387 | return | ||
526 | 388 | collection = self.repo._pack_collection | ||
527 | 389 | collection.ensure_loaded() | ||
528 | 390 | collection.lock_names() | ||
529 | 391 | try: | ||
530 | 392 | packs = collection.all_packs() | ||
531 | 393 | all_revisions = self.repo.all_revision_ids() | ||
532 | 394 | total_inventories = len(list( | ||
533 | 395 | collection.inventory_index.combined_index.iter_all_entries())) | ||
534 | 396 | if len(all_revisions): | ||
535 | 397 | if self.canonicalize_chks: | ||
536 | 398 | reconcile_meth = self.repo._canonicalize_chks_pack | ||
537 | 399 | else: | ||
538 | 400 | reconcile_meth = self.repo._reconcile_pack | ||
539 | 401 | new_pack = reconcile_meth(collection, packs, ".reconcile", | ||
540 | 402 | all_revisions, self.pb) | ||
541 | 403 | if new_pack is not None: | ||
542 | 404 | self._discard_and_save(packs) | ||
543 | 405 | else: | ||
544 | 406 | # only make a new pack when there is data to copy. | ||
545 | 407 | self._discard_and_save(packs) | ||
546 | 408 | self.garbage_inventories = total_inventories - len(list( | ||
547 | 409 | collection.inventory_index.combined_index.iter_all_entries())) | ||
548 | 410 | finally: | ||
549 | 411 | collection._unlock_names() | ||
550 | 412 | |||
551 | 413 | def _discard_and_save(self, packs): | ||
552 | 414 | """Discard some packs from the repository. | ||
553 | 415 | |||
554 | 416 | This removes them from the memory index, saves the in-memory index | ||
555 | 417 | which makes the newly reconciled pack visible and hides the packs to be | ||
556 | 418 | discarded, and finally renames the packs being discarded into the | ||
557 | 419 | obsolete packs directory. | ||
558 | 420 | |||
559 | 421 | :param packs: The packs to discard. | ||
560 | 422 | """ | ||
561 | 423 | for pack in packs: | ||
562 | 424 | self.repo._pack_collection._remove_pack_from_memory(pack) | ||
563 | 425 | self.repo._pack_collection._save_pack_names() | ||
564 | 426 | self.repo._pack_collection._obsolete_packs(packs) | ||
565 | 427 | |||
566 | 428 | |||
567 | 429 | class BranchReconciler(object): | ||
568 | 430 | """Reconciler that works on a branch.""" | ||
569 | 431 | |||
570 | 432 | def __init__(self, a_branch, thorough=False): | ||
571 | 433 | self.fixed_history = None | ||
572 | 434 | self.thorough = thorough | ||
573 | 435 | self.branch = a_branch | ||
574 | 436 | |||
575 | 437 | def reconcile(self): | ||
576 | 438 | with self.branch.lock_write(), \ | ||
577 | 439 | ui.ui_factory.nested_progress_bar() as self.pb: | ||
578 | 440 | ret = ReconcileResult() | ||
579 | 441 | ret.fixed_history = self._reconcile_steps() | ||
580 | 442 | return ret | ||
581 | 443 | |||
582 | 444 | def _reconcile_steps(self): | ||
583 | 445 | return self._reconcile_revision_history() | ||
584 | 446 | |||
585 | 447 | def _reconcile_revision_history(self): | ||
586 | 448 | last_revno, last_revision_id = self.branch.last_revision_info() | ||
587 | 449 | real_history = [] | ||
588 | 450 | graph = self.branch.repository.get_graph() | ||
589 | 451 | try: | ||
590 | 452 | for revid in graph.iter_lefthand_ancestry( | ||
591 | 453 | last_revision_id, (_mod_revision.NULL_REVISION,)): | ||
592 | 454 | real_history.append(revid) | ||
593 | 455 | except errors.RevisionNotPresent: | ||
594 | 456 | pass # Hit a ghost left hand parent | ||
595 | 457 | real_history.reverse() | ||
596 | 458 | if last_revno != len(real_history): | ||
597 | 459 | # Technically for Branch5 formats, it is more efficient to use | ||
598 | 460 | # set_revision_history, as this will regenerate it again. | ||
599 | 461 | # Not really worth a whole BranchReconciler class just for this, | ||
600 | 462 | # though. | ||
601 | 463 | ui.ui_factory.note(gettext('Fixing last revision info {0} ' | ||
602 | 464 | ' => {1}').format( | ||
603 | 465 | last_revno, len(real_history))) | ||
604 | 466 | self.branch.set_last_revision_info(len(real_history), | ||
605 | 467 | last_revision_id) | ||
606 | 468 | return True | ||
607 | 469 | else: | ||
608 | 470 | ui.ui_factory.note(gettext('revision_history ok.')) | ||
609 | 471 | return False | ||
610 | 0 | 472 | ||
611 | === modified file 'breezy/bzr/remote.py' | |||
612 | --- breezy/bzr/remote.py 2019-01-01 21:23:40 +0000 | |||
613 | +++ breezy/bzr/remote.py 2019-01-01 23:11:42 +0000 | |||
614 | @@ -2474,7 +2474,7 @@ | |||
615 | 2474 | return self._real_repository._get_inventory_xml(revision_id) | 2474 | return self._real_repository._get_inventory_xml(revision_id) |
616 | 2475 | 2475 | ||
617 | 2476 | def reconcile(self, other=None, thorough=False): | 2476 | def reconcile(self, other=None, thorough=False): |
619 | 2477 | from ..reconcile import RepoReconciler | 2477 | from ..reconcile import ReconcileResult |
620 | 2478 | with self.lock_write(): | 2478 | with self.lock_write(): |
621 | 2479 | path = self.controldir._path_for_remote_call(self._client) | 2479 | path = self.controldir._path_for_remote_call(self._client) |
622 | 2480 | try: | 2480 | try: |
623 | @@ -2486,7 +2486,10 @@ | |||
624 | 2486 | if response != (b'ok', ): | 2486 | if response != (b'ok', ): |
625 | 2487 | raise errors.UnexpectedSmartServerResponse(response) | 2487 | raise errors.UnexpectedSmartServerResponse(response) |
626 | 2488 | body = handler.read_body_bytes() | 2488 | body = handler.read_body_bytes() |
628 | 2489 | result = RepoReconciler(self) | 2489 | result = ReconcileResult() |
629 | 2490 | result.garbage_inventories = None | ||
630 | 2491 | result.inconsistent_parents = None | ||
631 | 2492 | result.aborted = None | ||
632 | 2490 | for line in body.split(b'\n'): | 2493 | for line in body.split(b'\n'): |
633 | 2491 | if not line: | 2494 | if not line: |
634 | 2492 | continue | 2495 | continue |
635 | @@ -4139,6 +4142,13 @@ | |||
636 | 4139 | self._ensure_real() | 4142 | self._ensure_real() |
637 | 4140 | return self._real_branch.heads_to_fetch() | 4143 | return self._real_branch.heads_to_fetch() |
638 | 4141 | 4144 | ||
639 | 4145 | def reconcile(self, thorough=True): | ||
640 | 4146 | """Make sure the data stored in this branch is consistent.""" | ||
641 | 4147 | from .reconcile import BranchReconciler | ||
642 | 4148 | with self.lock_write(): | ||
643 | 4149 | reconciler = BranchReconciler(self, thorough=thorough) | ||
644 | 4150 | return reconciler.reconcile() | ||
645 | 4151 | |||
646 | 4142 | 4152 | ||
647 | 4143 | class RemoteConfig(object): | 4153 | class RemoteConfig(object): |
648 | 4144 | """A Config that reads and writes from smart verbs. | 4154 | """A Config that reads and writes from smart verbs. |
649 | 4145 | 4155 | ||
650 | === modified file 'breezy/bzr/vf_repository.py' | |||
651 | --- breezy/bzr/vf_repository.py 2018-11-29 23:42:41 +0000 | |||
652 | +++ breezy/bzr/vf_repository.py 2019-01-01 23:11:42 +0000 | |||
653 | @@ -294,7 +294,7 @@ | |||
654 | 294 | or errored-on before record_iter_changes sees the item. | 294 | or errored-on before record_iter_changes sees the item. |
655 | 295 | :param _entry_factory: Private method to bind entry_factory locally for | 295 | :param _entry_factory: Private method to bind entry_factory locally for |
656 | 296 | performance. | 296 | performance. |
658 | 297 | :return: A generator of (file_id, relpath, fs_hash) tuples for use with | 297 | :return: A generator of (relpath, fs_hash) tuples for use with |
659 | 298 | tree._observed_sha1. | 298 | tree._observed_sha1. |
660 | 299 | """ | 299 | """ |
661 | 300 | # Create an inventory delta based on deltas between all the parents and | 300 | # Create an inventory delta based on deltas between all the parents and |
662 | @@ -487,7 +487,7 @@ | |||
663 | 487 | try: | 487 | try: |
664 | 488 | entry.text_sha1, entry.text_size = self._add_file_to_weave( | 488 | entry.text_sha1, entry.text_size = self._add_file_to_weave( |
665 | 489 | file_id, file_obj, heads, nostore_sha) | 489 | file_id, file_obj, heads, nostore_sha) |
667 | 490 | yield file_id, change[1][1], (entry.text_sha1, stat_value) | 490 | yield change[1][1], (entry.text_sha1, stat_value) |
668 | 491 | except errors.ExistingContent: | 491 | except errors.ExistingContent: |
669 | 492 | # No content change against a carry_over parent | 492 | # No content change against a carry_over parent |
670 | 493 | # Perhaps this should also yield a fs hash update? | 493 | # Perhaps this should also yield a fs hash update? |
671 | @@ -1737,6 +1737,13 @@ | |||
672 | 1737 | """Return a source for streaming from this repository.""" | 1737 | """Return a source for streaming from this repository.""" |
673 | 1738 | return StreamSource(self, to_format) | 1738 | return StreamSource(self, to_format) |
674 | 1739 | 1739 | ||
675 | 1740 | def reconcile(self, other=None, thorough=False): | ||
676 | 1741 | """Reconcile this repository.""" | ||
677 | 1742 | from .reconcile import VersionedFileRepoReconciler | ||
678 | 1743 | with self.lock_write(): | ||
679 | 1744 | reconciler = VersionedFileRepoReconciler(self, thorough=thorough) | ||
680 | 1745 | return reconciler.reconcile() | ||
681 | 1746 | |||
682 | 1740 | 1747 | ||
683 | 1741 | class MetaDirVersionedFileRepository(MetaDirRepository, | 1748 | class MetaDirVersionedFileRepository(MetaDirRepository, |
684 | 1742 | VersionedFileRepository): | 1749 | VersionedFileRepository): |
685 | 1743 | 1750 | ||
686 | === modified file 'breezy/bzr/workingtree_4.py' | |||
687 | --- breezy/bzr/workingtree_4.py 2018-11-21 03:20:30 +0000 | |||
688 | +++ breezy/bzr/workingtree_4.py 2019-01-01 23:11:42 +0000 | |||
689 | @@ -1935,7 +1935,7 @@ | |||
690 | 1935 | for path, identifier in desired_files: | 1935 | for path, identifier in desired_files: |
691 | 1936 | entry = self._get_entry(path=path) | 1936 | entry = self._get_entry(path=path) |
692 | 1937 | if entry == (None, None): | 1937 | if entry == (None, None): |
694 | 1938 | raise errors.NoSuchFile(self, path) | 1938 | raise errors.NoSuchFile(path) |
695 | 1939 | repo_desired_files.append((entry[0][2], entry[1][parent_index][4], | 1939 | repo_desired_files.append((entry[0][2], entry[1][parent_index][4], |
696 | 1940 | identifier)) | 1940 | identifier)) |
697 | 1941 | return self._repository.iter_files_bytes(repo_desired_files) | 1941 | return self._repository.iter_files_bytes(repo_desired_files) |
698 | 1942 | 1942 | ||
699 | === modified file 'breezy/commit.py' | |||
700 | --- breezy/commit.py 2018-11-16 23:15:15 +0000 | |||
701 | +++ breezy/commit.py 2019-01-01 23:11:42 +0000 | |||
702 | @@ -681,7 +681,7 @@ | |||
703 | 681 | if self.exclude: | 681 | if self.exclude: |
704 | 682 | iter_changes = filter_excluded(iter_changes, self.exclude) | 682 | iter_changes = filter_excluded(iter_changes, self.exclude) |
705 | 683 | iter_changes = self._filter_iter_changes(iter_changes) | 683 | iter_changes = self._filter_iter_changes(iter_changes) |
707 | 684 | for file_id, path, fs_hash in self.builder.record_iter_changes( | 684 | for path, fs_hash in self.builder.record_iter_changes( |
708 | 685 | self.work_tree, self.basis_revid, iter_changes): | 685 | self.work_tree, self.basis_revid, iter_changes): |
709 | 686 | self.work_tree._observed_sha1(path, fs_hash) | 686 | self.work_tree._observed_sha1(path, fs_hash) |
710 | 687 | 687 | ||
711 | 688 | 688 | ||
712 | === modified file 'breezy/diff.py' | |||
713 | --- breezy/diff.py 2018-11-18 19:48:57 +0000 | |||
714 | +++ breezy/diff.py 2019-01-01 23:11:42 +0000 | |||
715 | @@ -477,7 +477,7 @@ | |||
716 | 477 | tree.unlock() | 477 | tree.unlock() |
717 | 478 | 478 | ||
718 | 479 | 479 | ||
720 | 480 | def _patch_header_date(tree, file_id, path): | 480 | def _patch_header_date(tree, path): |
721 | 481 | """Returns a timestamp suitable for use in a patch header.""" | 481 | """Returns a timestamp suitable for use in a patch header.""" |
722 | 482 | try: | 482 | try: |
723 | 483 | mtime = tree.get_file_mtime(path) | 483 | mtime = tree.get_file_mtime(path) |
724 | @@ -526,10 +526,9 @@ | |||
725 | 526 | diff_tree.to_file, diff_tree.path_encoding) | 526 | diff_tree.to_file, diff_tree.path_encoding) |
726 | 527 | 527 | ||
727 | 528 | @staticmethod | 528 | @staticmethod |
729 | 529 | def _diff_many(differs, file_id, old_path, new_path, old_kind, new_kind): | 529 | def _diff_many(differs, old_path, new_path, old_kind, new_kind): |
730 | 530 | for file_differ in differs: | 530 | for file_differ in differs: |
733 | 531 | result = file_differ.diff(file_id, old_path, new_path, old_kind, | 531 | result = file_differ.diff(old_path, new_path, old_kind, new_kind) |
732 | 532 | new_kind) | ||
734 | 533 | if result is not DiffPath.CANNOT_DIFF: | 532 | if result is not DiffPath.CANNOT_DIFF: |
735 | 534 | return result | 533 | return result |
736 | 535 | else: | 534 | else: |
737 | @@ -553,10 +552,9 @@ | |||
738 | 553 | def from_diff_tree(klass, diff_tree): | 552 | def from_diff_tree(klass, diff_tree): |
739 | 554 | return klass(diff_tree.differs) | 553 | return klass(diff_tree.differs) |
740 | 555 | 554 | ||
742 | 556 | def diff(self, file_id, old_path, new_path, old_kind, new_kind): | 555 | def diff(self, old_path, new_path, old_kind, new_kind): |
743 | 557 | """Perform comparison | 556 | """Perform comparison |
744 | 558 | 557 | ||
745 | 559 | :param file_id: The file_id of the file to compare | ||
746 | 560 | :param old_path: Path of the file in the old tree | 558 | :param old_path: Path of the file in the old tree |
747 | 561 | :param new_path: Path of the file in the new tree | 559 | :param new_path: Path of the file in the new tree |
748 | 562 | :param old_kind: Old file-kind of the file | 560 | :param old_kind: Old file-kind of the file |
749 | @@ -564,17 +562,17 @@ | |||
750 | 564 | """ | 562 | """ |
751 | 565 | if None in (old_kind, new_kind): | 563 | if None in (old_kind, new_kind): |
752 | 566 | return DiffPath.CANNOT_DIFF | 564 | return DiffPath.CANNOT_DIFF |
755 | 567 | result = DiffPath._diff_many(self.differs, file_id, old_path, | 565 | result = DiffPath._diff_many( |
756 | 568 | new_path, old_kind, None) | 566 | self.differs, old_path, new_path, old_kind, None) |
757 | 569 | if result is DiffPath.CANNOT_DIFF: | 567 | if result is DiffPath.CANNOT_DIFF: |
758 | 570 | return result | 568 | return result |
761 | 571 | return DiffPath._diff_many(self.differs, file_id, old_path, new_path, | 569 | return DiffPath._diff_many( |
762 | 572 | None, new_kind) | 570 | self.differs, old_path, new_path, None, new_kind) |
763 | 573 | 571 | ||
764 | 574 | 572 | ||
765 | 575 | class DiffDirectory(DiffPath): | 573 | class DiffDirectory(DiffPath): |
766 | 576 | 574 | ||
768 | 577 | def diff(self, file_id, old_path, new_path, old_kind, new_kind): | 575 | def diff(self, old_path, new_path, old_kind, new_kind): |
769 | 578 | """Perform comparison between two directories. (dummy) | 576 | """Perform comparison between two directories. (dummy) |
770 | 579 | 577 | ||
771 | 580 | """ | 578 | """ |
772 | @@ -589,10 +587,9 @@ | |||
773 | 589 | 587 | ||
774 | 590 | class DiffSymlink(DiffPath): | 588 | class DiffSymlink(DiffPath): |
775 | 591 | 589 | ||
777 | 592 | def diff(self, file_id, old_path, new_path, old_kind, new_kind): | 590 | def diff(self, old_path, new_path, old_kind, new_kind): |
778 | 593 | """Perform comparison between two symlinks | 591 | """Perform comparison between two symlinks |
779 | 594 | 592 | ||
780 | 595 | :param file_id: The file_id of the file to compare | ||
781 | 596 | :param old_path: Path of the file in the old tree | 593 | :param old_path: Path of the file in the old tree |
782 | 597 | :param new_path: Path of the file in the new tree | 594 | :param new_path: Path of the file in the new tree |
783 | 598 | :param old_kind: Old file-kind of the file | 595 | :param old_kind: Old file-kind of the file |
784 | @@ -644,10 +641,9 @@ | |||
785 | 644 | self.path_encoding = path_encoding | 641 | self.path_encoding = path_encoding |
786 | 645 | self.context_lines = context_lines | 642 | self.context_lines = context_lines |
787 | 646 | 643 | ||
789 | 647 | def diff(self, file_id, old_path, new_path, old_kind, new_kind): | 644 | def diff(self, old_path, new_path, old_kind, new_kind): |
790 | 648 | """Compare two files in unified diff format | 645 | """Compare two files in unified diff format |
791 | 649 | 646 | ||
792 | 650 | :param file_id: The file_id of the file to compare | ||
793 | 651 | :param old_path: Path of the file in the old tree | 647 | :param old_path: Path of the file in the old tree |
794 | 652 | :param new_path: Path of the file in the new tree | 648 | :param new_path: Path of the file in the new tree |
795 | 653 | :param old_kind: Old file-kind of the file | 649 | :param old_kind: Old file-kind of the file |
796 | @@ -655,30 +651,25 @@ | |||
797 | 655 | """ | 651 | """ |
798 | 656 | if 'file' not in (old_kind, new_kind): | 652 | if 'file' not in (old_kind, new_kind): |
799 | 657 | return self.CANNOT_DIFF | 653 | return self.CANNOT_DIFF |
800 | 658 | from_file_id = to_file_id = file_id | ||
801 | 659 | if old_kind == 'file': | 654 | if old_kind == 'file': |
803 | 660 | old_date = _patch_header_date(self.old_tree, file_id, old_path) | 655 | old_date = _patch_header_date(self.old_tree, old_path) |
804 | 661 | elif old_kind is None: | 656 | elif old_kind is None: |
805 | 662 | old_date = self.EPOCH_DATE | 657 | old_date = self.EPOCH_DATE |
806 | 663 | from_file_id = None | ||
807 | 664 | else: | 658 | else: |
808 | 665 | return self.CANNOT_DIFF | 659 | return self.CANNOT_DIFF |
809 | 666 | if new_kind == 'file': | 660 | if new_kind == 'file': |
811 | 667 | new_date = _patch_header_date(self.new_tree, file_id, new_path) | 661 | new_date = _patch_header_date(self.new_tree, new_path) |
812 | 668 | elif new_kind is None: | 662 | elif new_kind is None: |
813 | 669 | new_date = self.EPOCH_DATE | 663 | new_date = self.EPOCH_DATE |
814 | 670 | to_file_id = None | ||
815 | 671 | else: | 664 | else: |
816 | 672 | return self.CANNOT_DIFF | 665 | return self.CANNOT_DIFF |
817 | 673 | from_label = '%s%s\t%s' % (self.old_label, old_path, | 666 | from_label = '%s%s\t%s' % (self.old_label, old_path, |
818 | 674 | old_date) | 667 | old_date) |
819 | 675 | to_label = '%s%s\t%s' % (self.new_label, new_path, | 668 | to_label = '%s%s\t%s' % (self.new_label, new_path, |
820 | 676 | new_date) | 669 | new_date) |
823 | 677 | return self.diff_text(old_path, new_path, from_label, to_label, | 670 | return self.diff_text(old_path, new_path, from_label, to_label) |
822 | 678 | from_file_id, to_file_id) | ||
824 | 679 | 671 | ||
827 | 680 | def diff_text(self, from_path, to_path, from_label, to_label, | 672 | def diff_text(self, from_path, to_path, from_label, to_label): |
826 | 681 | from_file_id=None, to_file_id=None): | ||
828 | 682 | """Diff the content of given files in two trees | 673 | """Diff the content of given files in two trees |
829 | 683 | 674 | ||
830 | 684 | :param from_path: The path in the from tree. If None, | 675 | :param from_path: The path in the from tree. If None, |
831 | @@ -686,18 +677,17 @@ | |||
832 | 686 | :param to_path: The path in the to tree. This may refer | 677 | :param to_path: The path in the to tree. This may refer |
833 | 687 | to a different file from from_path. If None, | 678 | to a different file from from_path. If None, |
834 | 688 | the file is not present in the to tree. | 679 | the file is not present in the to tree. |
835 | 689 | :param from_file_id: The id of the file in the from tree or None if | ||
836 | 690 | unknown. | ||
837 | 691 | :param to_file_id: The id of the file in the to tree or None if | ||
838 | 692 | unknown. | ||
839 | 693 | """ | 680 | """ |
844 | 694 | def _get_text(tree, file_id, path): | 681 | def _get_text(tree, path): |
845 | 695 | if file_id is None: | 682 | if path is None: |
846 | 696 | return [] | 683 | return [] |
847 | 697 | return tree.get_file_lines(path) | 684 | try: |
848 | 685 | return tree.get_file_lines(path) | ||
849 | 686 | except errors.NoSuchFile: | ||
850 | 687 | return [] | ||
851 | 698 | try: | 688 | try: |
854 | 699 | from_text = _get_text(self.old_tree, from_file_id, from_path) | 689 | from_text = _get_text(self.old_tree, from_path) |
855 | 700 | to_text = _get_text(self.new_tree, to_file_id, to_path) | 690 | to_text = _get_text(self.new_tree, to_path) |
856 | 701 | self.text_differ(from_label, from_text, to_label, to_text, | 691 | self.text_differ(from_label, from_text, to_label, to_text, |
857 | 702 | self.to_file, path_encoding=self.path_encoding, | 692 | self.to_file, path_encoding=self.path_encoding, |
858 | 703 | context_lines=self.context_lines) | 693 | context_lines=self.context_lines) |
859 | @@ -804,7 +794,7 @@ | |||
860 | 804 | return osutils.pathjoin(self._root, prefix, relpath_tmp) | 794 | return osutils.pathjoin(self._root, prefix, relpath_tmp) |
861 | 805 | 795 | ||
862 | 806 | def _write_file(self, relpath, tree, prefix, force_temp=False, | 796 | def _write_file(self, relpath, tree, prefix, force_temp=False, |
864 | 807 | allow_write=False, file_id=None): | 797 | allow_write=False): |
865 | 808 | if not force_temp and isinstance(tree, WorkingTree): | 798 | if not force_temp and isinstance(tree, WorkingTree): |
866 | 809 | full_path = tree.abspath(relpath) | 799 | full_path = tree.abspath(relpath) |
867 | 810 | if self._is_safepath(full_path): | 800 | if self._is_safepath(full_path): |
868 | @@ -836,12 +826,12 @@ | |||
869 | 836 | return full_path | 826 | return full_path |
870 | 837 | 827 | ||
871 | 838 | def _prepare_files(self, old_path, new_path, force_temp=False, | 828 | def _prepare_files(self, old_path, new_path, force_temp=False, |
878 | 839 | allow_write_new=False, file_id=None): | 829 | allow_write_new=False): |
879 | 840 | old_disk_path = self._write_file(old_path, self.old_tree, 'old', | 830 | old_disk_path = self._write_file( |
880 | 841 | force_temp, file_id=file_id) | 831 | old_path, self.old_tree, 'old', force_temp) |
881 | 842 | new_disk_path = self._write_file(new_path, self.new_tree, 'new', | 832 | new_disk_path = self._write_file( |
882 | 843 | force_temp, file_id=file_id, | 833 | new_path, self.new_tree, 'new', force_temp, |
883 | 844 | allow_write=allow_write_new) | 834 | allow_write=allow_write_new) |
884 | 845 | return old_disk_path, new_disk_path | 835 | return old_disk_path, new_disk_path |
885 | 846 | 836 | ||
886 | 847 | def finish(self): | 837 | def finish(self): |
887 | @@ -852,25 +842,23 @@ | |||
888 | 852 | mutter("The temporary directory \"%s\" was not " | 842 | mutter("The temporary directory \"%s\" was not " |
889 | 853 | "cleanly removed: %s." % (self._root, e)) | 843 | "cleanly removed: %s." % (self._root, e)) |
890 | 854 | 844 | ||
892 | 855 | def diff(self, file_id, old_path, new_path, old_kind, new_kind): | 845 | def diff(self, old_path, new_path, old_kind, new_kind): |
893 | 856 | if (old_kind, new_kind) != ('file', 'file'): | 846 | if (old_kind, new_kind) != ('file', 'file'): |
894 | 857 | return DiffPath.CANNOT_DIFF | 847 | return DiffPath.CANNOT_DIFF |
895 | 858 | (old_disk_path, new_disk_path) = self._prepare_files( | 848 | (old_disk_path, new_disk_path) = self._prepare_files( |
897 | 859 | old_path, new_path, file_id=file_id) | 849 | old_path, new_path) |
898 | 860 | self._execute(old_disk_path, new_disk_path) | 850 | self._execute(old_disk_path, new_disk_path) |
899 | 861 | 851 | ||
901 | 862 | def edit_file(self, old_path, new_path, file_id=None): | 852 | def edit_file(self, old_path, new_path): |
902 | 863 | """Use this tool to edit a file. | 853 | """Use this tool to edit a file. |
903 | 864 | 854 | ||
904 | 865 | A temporary copy will be edited, and the new contents will be | 855 | A temporary copy will be edited, and the new contents will be |
905 | 866 | returned. | 856 | returned. |
906 | 867 | 857 | ||
907 | 868 | :param file_id: The id of the file to edit. | ||
908 | 869 | :return: The new contents of the file. | 858 | :return: The new contents of the file. |
909 | 870 | """ | 859 | """ |
910 | 871 | old_abs_path, new_abs_path = self._prepare_files( | 860 | old_abs_path, new_abs_path = self._prepare_files( |
913 | 872 | old_path, new_path, allow_write_new=True, force_temp=True, | 861 | old_path, new_path, allow_write_new=True, force_temp=True) |
912 | 873 | file_id=file_id) | ||
914 | 874 | command = self._get_command(old_abs_path, new_abs_path) | 862 | command = self._get_command(old_abs_path, new_abs_path) |
915 | 875 | subprocess.call(command, cwd=self._root) | 863 | subprocess.call(command, cwd=self._root) |
916 | 876 | with open(new_abs_path, 'rb') as new_file: | 864 | with open(new_abs_path, 'rb') as new_file: |
917 | @@ -1028,16 +1016,15 @@ | |||
918 | 1028 | self.to_file.write(b"=== modified %s '%s'%s\n" % (kind[0].encode('ascii'), | 1016 | self.to_file.write(b"=== modified %s '%s'%s\n" % (kind[0].encode('ascii'), |
919 | 1029 | newpath_encoded, prop_str)) | 1017 | newpath_encoded, prop_str)) |
920 | 1030 | if changed_content: | 1018 | if changed_content: |
922 | 1031 | self._diff(oldpath, newpath, kind[0], kind[1], file_id=file_id) | 1019 | self._diff(oldpath, newpath, kind[0], kind[1]) |
923 | 1032 | has_changes = 1 | 1020 | has_changes = 1 |
924 | 1033 | if renamed: | 1021 | if renamed: |
925 | 1034 | has_changes = 1 | 1022 | has_changes = 1 |
926 | 1035 | return has_changes | 1023 | return has_changes |
927 | 1036 | 1024 | ||
929 | 1037 | def diff(self, file_id, old_path, new_path): | 1025 | def diff(self, old_path, new_path): |
930 | 1038 | """Perform a diff of a single file | 1026 | """Perform a diff of a single file |
931 | 1039 | 1027 | ||
932 | 1040 | :param file_id: file-id of the file | ||
933 | 1041 | :param old_path: The path of the file in the old tree | 1028 | :param old_path: The path of the file in the old tree |
934 | 1042 | :param new_path: The path of the file in the new tree | 1029 | :param new_path: The path of the file in the new tree |
935 | 1043 | """ | 1030 | """ |
936 | @@ -1049,11 +1036,11 @@ | |||
937 | 1049 | new_kind = None | 1036 | new_kind = None |
938 | 1050 | else: | 1037 | else: |
939 | 1051 | new_kind = self.new_tree.kind(new_path) | 1038 | new_kind = self.new_tree.kind(new_path) |
941 | 1052 | self._diff(old_path, new_path, old_kind, new_kind, file_id=file_id) | 1039 | self._diff(old_path, new_path, old_kind, new_kind) |
942 | 1053 | 1040 | ||
946 | 1054 | def _diff(self, old_path, new_path, old_kind, new_kind, file_id): | 1041 | def _diff(self, old_path, new_path, old_kind, new_kind): |
947 | 1055 | result = DiffPath._diff_many(self.differs, file_id, old_path, | 1042 | result = DiffPath._diff_many( |
948 | 1056 | new_path, old_kind, new_kind) | 1043 | self.differs, old_path, new_path, old_kind, new_kind) |
949 | 1057 | if result is DiffPath.CANNOT_DIFF: | 1044 | if result is DiffPath.CANNOT_DIFF: |
950 | 1058 | error_path = new_path | 1045 | error_path = new_path |
951 | 1059 | if error_path is None: | 1046 | if error_path is None: |
952 | 1060 | 1047 | ||
953 | === modified file 'breezy/git/branch.py' | |||
954 | --- breezy/git/branch.py 2018-11-16 23:15:15 +0000 | |||
955 | +++ breezy/git/branch.py 2019-01-01 23:11:42 +0000 | |||
956 | @@ -615,6 +615,12 @@ | |||
957 | 615 | self, stop_revision=revid, lossy=lossy, _stop_revno=revno) | 615 | self, stop_revision=revid, lossy=lossy, _stop_revno=revno) |
958 | 616 | return (push_result.new_revno, push_result.new_revid) | 616 | return (push_result.new_revno, push_result.new_revid) |
959 | 617 | 617 | ||
960 | 618 | def reconcile(self, thorough=True): | ||
961 | 619 | """Make sure the data stored in this branch is consistent.""" | ||
962 | 620 | from ..reconcile import ReconcileResult | ||
963 | 621 | # Nothing to do here | ||
964 | 622 | return ReconcileResult() | ||
965 | 623 | |||
966 | 618 | 624 | ||
967 | 619 | class LocalGitBranch(GitBranch): | 625 | class LocalGitBranch(GitBranch): |
968 | 620 | """A local Git branch.""" | 626 | """A local Git branch.""" |
969 | 621 | 627 | ||
970 | === modified file 'breezy/git/commit.py' | |||
971 | --- breezy/git/commit.py 2018-11-22 03:04:59 +0000 | |||
972 | +++ breezy/git/commit.py 2019-01-01 23:11:42 +0000 | |||
973 | @@ -142,7 +142,7 @@ | |||
974 | 142 | encoded_new_path = path[1].encode("utf-8") | 142 | encoded_new_path = path[1].encode("utf-8") |
975 | 143 | self._blobs[encoded_new_path] = (mode, sha) | 143 | self._blobs[encoded_new_path] = (mode, sha) |
976 | 144 | if st is not None: | 144 | if st is not None: |
978 | 145 | yield file_id, path[1], (entry.text_sha1, st) | 145 | yield path[1], (entry.text_sha1, st) |
979 | 146 | if self._mapping.generate_file_id(encoded_new_path) != file_id: | 146 | if self._mapping.generate_file_id(encoded_new_path) != file_id: |
980 | 147 | self._override_fileids[encoded_new_path] = file_id | 147 | self._override_fileids[encoded_new_path] = file_id |
981 | 148 | else: | 148 | else: |
982 | 149 | 149 | ||
983 | === modified file 'breezy/git/repository.py' | |||
984 | --- breezy/git/repository.py 2018-11-11 14:23:06 +0000 | |||
985 | +++ breezy/git/repository.py 2019-01-01 23:11:42 +0000 | |||
986 | @@ -68,31 +68,6 @@ | |||
987 | 68 | ) | 68 | ) |
988 | 69 | 69 | ||
989 | 70 | 70 | ||
990 | 71 | class RepoReconciler(object): | ||
991 | 72 | """Reconciler that reconciles a repository. | ||
992 | 73 | |||
993 | 74 | """ | ||
994 | 75 | |||
995 | 76 | def __init__(self, repo, other=None, thorough=False): | ||
996 | 77 | """Construct a RepoReconciler. | ||
997 | 78 | |||
998 | 79 | :param thorough: perform a thorough check which may take longer but | ||
999 | 80 | will correct non-data loss issues such as incorrect | ||
1000 | 81 | cached data. | ||
1001 | 82 | """ | ||
1002 | 83 | self.repo = repo | ||
1003 | 84 | |||
1004 | 85 | def reconcile(self): | ||
1005 | 86 | """Perform reconciliation. | ||
1006 | 87 | |||
1007 | 88 | After reconciliation the following attributes document found issues: | ||
1008 | 89 | inconsistent_parents: The number of revisions in the repository whose | ||
1009 | 90 | ancestry was being reported incorrectly. | ||
1010 | 91 | garbage_inventories: The number of inventory objects without revisions | ||
1011 | 92 | that were garbage collected. | ||
1012 | 93 | """ | ||
1013 | 94 | |||
1014 | 95 | |||
1015 | 96 | class GitCheck(check.Check): | 71 | class GitCheck(check.Check): |
1016 | 97 | 72 | ||
1017 | 98 | def __init__(self, repository, check_repo=True): | 73 | def __init__(self, repository, check_repo=True): |
1018 | @@ -241,9 +216,10 @@ | |||
1019 | 241 | 216 | ||
1020 | 242 | def reconcile(self, other=None, thorough=False): | 217 | def reconcile(self, other=None, thorough=False): |
1021 | 243 | """Reconcile this repository.""" | 218 | """Reconcile this repository.""" |
1025 | 244 | reconciler = RepoReconciler(self, thorough=thorough) | 219 | from ..reconcile import ReconcileResult |
1026 | 245 | reconciler.reconcile() | 220 | ret = ReconcileResult() |
1027 | 246 | return reconciler | 221 | ret.aborted = False |
1028 | 222 | return ret | ||
1029 | 247 | 223 | ||
1030 | 248 | def supports_rich_root(self): | 224 | def supports_rich_root(self): |
1031 | 249 | return True | 225 | return True |
1032 | 250 | 226 | ||
1033 | === modified file 'breezy/git/tests/test_blackbox.py' | |||
1034 | --- breezy/git/tests/test_blackbox.py 2018-12-18 19:51:52 +0000 | |||
1035 | +++ breezy/git/tests/test_blackbox.py 2019-01-01 23:11:42 +0000 | |||
1036 | @@ -395,6 +395,21 @@ | |||
1037 | 395 | self.assertEqual(error, '') | 395 | self.assertEqual(error, '') |
1038 | 396 | 396 | ||
1039 | 397 | 397 | ||
1040 | 398 | class ReconcileTests(ExternalBase): | ||
1041 | 399 | |||
1042 | 400 | def test_simple_reconcile(self): | ||
1043 | 401 | tree = self.make_branch_and_tree('.', format='git') | ||
1044 | 402 | self.build_tree_contents([('a', 'text for a\n')]) | ||
1045 | 403 | tree.add(['a']) | ||
1046 | 404 | output, error = self.run_bzr('reconcile') | ||
1047 | 405 | self.assertContainsRe( | ||
1048 | 406 | output, | ||
1049 | 407 | 'Reconciling branch file://.*\n' | ||
1050 | 408 | 'Reconciling repository file://.*\n' | ||
1051 | 409 | 'Reconciliation complete.\n') | ||
1052 | 410 | self.assertEqual(error, '') | ||
1053 | 411 | |||
1054 | 412 | |||
1055 | 398 | class StatusTests(ExternalBase): | 413 | class StatusTests(ExternalBase): |
1056 | 399 | 414 | ||
1057 | 400 | def test_empty_dir(self): | 415 | def test_empty_dir(self): |
1058 | 401 | 416 | ||
1059 | === modified file 'breezy/git/tree.py' | |||
1060 | --- breezy/git/tree.py 2018-12-11 17:29:18 +0000 | |||
1061 | +++ breezy/git/tree.py 2019-01-01 23:11:42 +0000 | |||
1062 | @@ -995,7 +995,7 @@ | |||
1063 | 995 | raise errors.NoSuchId(self, file_id) | 995 | raise errors.NoSuchId(self, file_id) |
1064 | 996 | 996 | ||
1065 | 997 | def _set_root_id(self, file_id): | 997 | def _set_root_id(self, file_id): |
1067 | 998 | self._fileid_map.set_file_id("", file_id) | 998 | raise errors.UnsupportedOperation(self._set_root_id, self) |
1068 | 999 | 999 | ||
1069 | 1000 | def get_root_id(self): | 1000 | def get_root_id(self): |
1070 | 1001 | return self.path2id(u"") | 1001 | return self.path2id(u"") |
1071 | 1002 | 1002 | ||
1072 | === modified file 'breezy/plugins/fastimport/revision_store.py' | |||
1073 | --- breezy/plugins/fastimport/revision_store.py 2018-11-16 18:33:17 +0000 | |||
1074 | +++ breezy/plugins/fastimport/revision_store.py 2019-01-01 23:11:42 +0000 | |||
1075 | @@ -367,7 +367,7 @@ | |||
1076 | 367 | basis_rev_id = _mod_revision.NULL_REVISION | 367 | basis_rev_id = _mod_revision.NULL_REVISION |
1077 | 368 | tree = _TreeShim(self.repo, basis_inv, inv_delta, text_provider) | 368 | tree = _TreeShim(self.repo, basis_inv, inv_delta, text_provider) |
1078 | 369 | changes = tree._delta_to_iter_changes() | 369 | changes = tree._delta_to_iter_changes() |
1080 | 370 | for (file_id, path, fs_hash) in builder.record_iter_changes( | 370 | for (path, fs_hash) in builder.record_iter_changes( |
1081 | 371 | tree, basis_rev_id, changes): | 371 | tree, basis_rev_id, changes): |
1082 | 372 | # So far, we don't *do* anything with the result | 372 | # So far, we don't *do* anything with the result |
1083 | 373 | pass | 373 | pass |
1084 | 374 | 374 | ||
1085 | === modified file 'breezy/reconcile.py' | |||
1086 | --- breezy/reconcile.py 2018-11-11 04:08:32 +0000 | |||
1087 | +++ breezy/reconcile.py 2019-01-01 23:11:42 +0000 | |||
1088 | @@ -19,23 +19,17 @@ | |||
1089 | 19 | from __future__ import absolute_import | 19 | from __future__ import absolute_import |
1090 | 20 | 20 | ||
1091 | 21 | __all__ = [ | 21 | __all__ = [ |
1092 | 22 | 'KnitReconciler', | ||
1093 | 23 | 'PackReconciler', | ||
1094 | 24 | 'reconcile', | 22 | 'reconcile', |
1095 | 25 | 'Reconciler', | 23 | 'Reconciler', |
1096 | 26 | 'RepoReconciler', | ||
1097 | 27 | ] | 24 | ] |
1098 | 28 | 25 | ||
1099 | 29 | 26 | ||
1100 | 30 | from . import ( | 27 | from . import ( |
1101 | 31 | cleanup, | 28 | cleanup, |
1102 | 32 | errors, | 29 | errors, |
1103 | 33 | revision as _mod_revision, | ||
1104 | 34 | ui, | 30 | ui, |
1105 | 35 | ) | 31 | ) |
1106 | 36 | from .trace import mutter | 32 | from .trace import mutter |
1107 | 37 | from .tsort import topo_sort | ||
1108 | 38 | from .bzr.versionedfile import AdapterFactory, FulltextContentFactory | ||
1109 | 39 | from .i18n import gettext | 33 | from .i18n import gettext |
1110 | 40 | 34 | ||
1111 | 41 | 35 | ||
1112 | @@ -52,7 +46,11 @@ | |||
1113 | 52 | :param canonicalize_chks: Make sure CHKs are in canonical form. | 46 | :param canonicalize_chks: Make sure CHKs are in canonical form. |
1114 | 53 | """ | 47 | """ |
1115 | 54 | reconciler = Reconciler(dir, canonicalize_chks=canonicalize_chks) | 48 | reconciler = Reconciler(dir, canonicalize_chks=canonicalize_chks) |
1117 | 55 | reconciler.reconcile() | 49 | return reconciler.reconcile() |
1118 | 50 | |||
1119 | 51 | |||
1120 | 52 | class ReconcileResult(object): | ||
1121 | 53 | """Class describing the result of a reconcile operation.""" | ||
1122 | 56 | 54 | ||
1123 | 57 | 55 | ||
1124 | 58 | class Reconciler(object): | 56 | class Reconciler(object): |
1125 | @@ -65,38 +63,29 @@ | |||
1126 | 65 | 63 | ||
1127 | 66 | def reconcile(self): | 64 | def reconcile(self): |
1128 | 67 | """Perform reconciliation. | 65 | """Perform reconciliation. |
1129 | 68 | |||
1130 | 69 | After reconciliation the following attributes document found issues: | ||
1131 | 70 | |||
1132 | 71 | * `inconsistent_parents`: The number of revisions in the repository | ||
1133 | 72 | whose ancestry was being reported incorrectly. | ||
1134 | 73 | * `garbage_inventories`: The number of inventory objects without | ||
1135 | 74 | revisions that were garbage collected. | ||
1136 | 75 | * `fixed_branch_history`: None if there was no branch, False if the | ||
1137 | 76 | branch history was correct, True if the branch history needed to be | ||
1138 | 77 | re-normalized. | ||
1139 | 78 | """ | 66 | """ |
1150 | 79 | operation = cleanup.OperationWithCleanups(self._reconcile) | 67 | with ui.ui_factory.nested_progress_bar() as self.pb: |
1151 | 80 | self.add_cleanup = operation.add_cleanup | 68 | result = ReconcileResult() |
1152 | 81 | operation.run_simple() | 69 | branch_result = self._reconcile_branch() |
1153 | 82 | 70 | repo_result = self._reconcile_repository() | |
1154 | 83 | def _reconcile(self): | 71 | # TODO(jelmer): Don't hardcode supported attributes here |
1155 | 84 | """Helper function for performing reconciliation.""" | 72 | result.inconsistent_parents = getattr( |
1156 | 85 | self.pb = ui.ui_factory.nested_progress_bar() | 73 | repo_result, 'inconsistent_parents', None) |
1157 | 86 | self.add_cleanup(self.pb.finished) | 74 | result.aborted = getattr(repo_result, 'aborted', None) |
1158 | 87 | self._reconcile_branch() | 75 | result.garbage_inventories = getattr( |
1159 | 88 | self._reconcile_repository() | 76 | repo_result, 'garbage_inventories', None) |
1160 | 77 | result.fixed_branch_history = getattr( | ||
1161 | 78 | branch_result, 'fixed_history', None) | ||
1162 | 79 | return result | ||
1163 | 89 | 80 | ||
1164 | 90 | def _reconcile_branch(self): | 81 | def _reconcile_branch(self): |
1165 | 91 | try: | 82 | try: |
1166 | 92 | self.branch = self.controldir.open_branch() | 83 | self.branch = self.controldir.open_branch() |
1167 | 93 | except errors.NotBranchError: | 84 | except errors.NotBranchError: |
1168 | 94 | # Nothing to check here | 85 | # Nothing to check here |
1169 | 95 | self.fixed_branch_history = None | ||
1170 | 96 | return | 86 | return |
1171 | 97 | ui.ui_factory.note(gettext('Reconciling branch %s') % self.branch.base) | 87 | ui.ui_factory.note(gettext('Reconciling branch %s') % self.branch.base) |
1174 | 98 | branch_reconciler = self.branch.reconcile(thorough=True) | 88 | return self.branch.reconcile(thorough=True) |
1173 | 99 | self.fixed_branch_history = branch_reconciler.fixed_history | ||
1175 | 100 | 89 | ||
1176 | 101 | def _reconcile_repository(self): | 90 | def _reconcile_repository(self): |
1177 | 102 | self.repo = self.controldir.find_repository() | 91 | self.repo = self.controldir.find_repository() |
1178 | @@ -109,453 +98,14 @@ | |||
1179 | 109 | except AttributeError: | 98 | except AttributeError: |
1180 | 110 | raise errors.BzrError( | 99 | raise errors.BzrError( |
1181 | 111 | gettext("%s cannot canonicalize CHKs.") % (self.repo,)) | 100 | gettext("%s cannot canonicalize CHKs.") % (self.repo,)) |
1183 | 112 | repo_reconciler = self.repo.reconcile_canonicalize_chks() | 101 | reconcile_result = self.repo.reconcile_canonicalize_chks() |
1184 | 113 | else: | 102 | else: |
1189 | 114 | repo_reconciler = self.repo.reconcile(thorough=True) | 103 | reconcile_result = self.repo.reconcile(thorough=True) |
1190 | 115 | self.inconsistent_parents = repo_reconciler.inconsistent_parents | 104 | if reconcile_result.aborted: |
1187 | 116 | self.garbage_inventories = repo_reconciler.garbage_inventories | ||
1188 | 117 | if repo_reconciler.aborted: | ||
1191 | 118 | ui.ui_factory.note(gettext( | 105 | ui.ui_factory.note(gettext( |
1192 | 119 | 'Reconcile aborted: revision index has inconsistent parents.')) | 106 | 'Reconcile aborted: revision index has inconsistent parents.')) |
1193 | 120 | ui.ui_factory.note(gettext( | 107 | ui.ui_factory.note(gettext( |
1194 | 121 | 'Run "brz check" for more details.')) | 108 | 'Run "brz check" for more details.')) |
1195 | 122 | else: | 109 | else: |
1196 | 123 | ui.ui_factory.note(gettext('Reconciliation complete.')) | 110 | ui.ui_factory.note(gettext('Reconciliation complete.')) |
1635 | 124 | 111 | return reconcile_result | |
1198 | 125 | |||
1199 | 126 | class BranchReconciler(object): | ||
1200 | 127 | """Reconciler that works on a branch.""" | ||
1201 | 128 | |||
1202 | 129 | def __init__(self, a_branch, thorough=False): | ||
1203 | 130 | self.fixed_history = None | ||
1204 | 131 | self.thorough = thorough | ||
1205 | 132 | self.branch = a_branch | ||
1206 | 133 | |||
1207 | 134 | def reconcile(self): | ||
1208 | 135 | operation = cleanup.OperationWithCleanups(self._reconcile) | ||
1209 | 136 | self.add_cleanup = operation.add_cleanup | ||
1210 | 137 | operation.run_simple() | ||
1211 | 138 | |||
1212 | 139 | def _reconcile(self): | ||
1213 | 140 | self.branch.lock_write() | ||
1214 | 141 | self.add_cleanup(self.branch.unlock) | ||
1215 | 142 | self.pb = ui.ui_factory.nested_progress_bar() | ||
1216 | 143 | self.add_cleanup(self.pb.finished) | ||
1217 | 144 | self._reconcile_steps() | ||
1218 | 145 | |||
1219 | 146 | def _reconcile_steps(self): | ||
1220 | 147 | self._reconcile_revision_history() | ||
1221 | 148 | |||
1222 | 149 | def _reconcile_revision_history(self): | ||
1223 | 150 | last_revno, last_revision_id = self.branch.last_revision_info() | ||
1224 | 151 | real_history = [] | ||
1225 | 152 | graph = self.branch.repository.get_graph() | ||
1226 | 153 | try: | ||
1227 | 154 | for revid in graph.iter_lefthand_ancestry( | ||
1228 | 155 | last_revision_id, (_mod_revision.NULL_REVISION,)): | ||
1229 | 156 | real_history.append(revid) | ||
1230 | 157 | except errors.RevisionNotPresent: | ||
1231 | 158 | pass # Hit a ghost left hand parent | ||
1232 | 159 | real_history.reverse() | ||
1233 | 160 | if last_revno != len(real_history): | ||
1234 | 161 | self.fixed_history = True | ||
1235 | 162 | # Technically for Branch5 formats, it is more efficient to use | ||
1236 | 163 | # set_revision_history, as this will regenerate it again. | ||
1237 | 164 | # Not really worth a whole BranchReconciler class just for this, | ||
1238 | 165 | # though. | ||
1239 | 166 | ui.ui_factory.note(gettext('Fixing last revision info {0} ' | ||
1240 | 167 | ' => {1}').format( | ||
1241 | 168 | last_revno, len(real_history))) | ||
1242 | 169 | self.branch.set_last_revision_info(len(real_history), | ||
1243 | 170 | last_revision_id) | ||
1244 | 171 | else: | ||
1245 | 172 | self.fixed_history = False | ||
1246 | 173 | ui.ui_factory.note(gettext('revision_history ok.')) | ||
1247 | 174 | |||
1248 | 175 | |||
1249 | 176 | class RepoReconciler(object): | ||
1250 | 177 | """Reconciler that reconciles a repository. | ||
1251 | 178 | |||
1252 | 179 | The goal of repository reconciliation is to make any derived data | ||
1253 | 180 | consistent with the core data committed by a user. This can involve | ||
1254 | 181 | reindexing, or removing unreferenced data if that can interfere with | ||
1255 | 182 | queries in a given repository. | ||
1256 | 183 | |||
1257 | 184 | Currently this consists of an inventory reweave with revision cross-checks. | ||
1258 | 185 | """ | ||
1259 | 186 | |||
1260 | 187 | def __init__(self, repo, other=None, thorough=False): | ||
1261 | 188 | """Construct a RepoReconciler. | ||
1262 | 189 | |||
1263 | 190 | :param thorough: perform a thorough check which may take longer but | ||
1264 | 191 | will correct non-data loss issues such as incorrect | ||
1265 | 192 | cached data. | ||
1266 | 193 | """ | ||
1267 | 194 | self.garbage_inventories = 0 | ||
1268 | 195 | self.inconsistent_parents = 0 | ||
1269 | 196 | self.aborted = False | ||
1270 | 197 | self.repo = repo | ||
1271 | 198 | self.thorough = thorough | ||
1272 | 199 | |||
1273 | 200 | def reconcile(self): | ||
1274 | 201 | """Perform reconciliation. | ||
1275 | 202 | |||
1276 | 203 | After reconciliation the following attributes document found issues: | ||
1277 | 204 | |||
1278 | 205 | * `inconsistent_parents`: The number of revisions in the repository | ||
1279 | 206 | whose ancestry was being reported incorrectly. | ||
1280 | 207 | * `garbage_inventories`: The number of inventory objects without | ||
1281 | 208 | revisions that were garbage collected. | ||
1282 | 209 | """ | ||
1283 | 210 | operation = cleanup.OperationWithCleanups(self._reconcile) | ||
1284 | 211 | self.add_cleanup = operation.add_cleanup | ||
1285 | 212 | operation.run_simple() | ||
1286 | 213 | |||
1287 | 214 | def _reconcile(self): | ||
1288 | 215 | self.repo.lock_write() | ||
1289 | 216 | self.add_cleanup(self.repo.unlock) | ||
1290 | 217 | self.pb = ui.ui_factory.nested_progress_bar() | ||
1291 | 218 | self.add_cleanup(self.pb.finished) | ||
1292 | 219 | self._reconcile_steps() | ||
1293 | 220 | |||
1294 | 221 | def _reconcile_steps(self): | ||
1295 | 222 | """Perform the steps to reconcile this repository.""" | ||
1296 | 223 | self._reweave_inventory() | ||
1297 | 224 | |||
1298 | 225 | def _reweave_inventory(self): | ||
1299 | 226 | """Regenerate the inventory weave for the repository from scratch. | ||
1300 | 227 | |||
1301 | 228 | This is a smart function: it will only do the reweave if doing it | ||
1302 | 229 | will correct data issues. The self.thorough flag controls whether | ||
1303 | 230 | only data-loss causing issues (!self.thorough) or all issues | ||
1304 | 231 | (self.thorough) are treated as requiring the reweave. | ||
1305 | 232 | """ | ||
1306 | 233 | transaction = self.repo.get_transaction() | ||
1307 | 234 | self.pb.update(gettext('Reading inventory data')) | ||
1308 | 235 | self.inventory = self.repo.inventories | ||
1309 | 236 | self.revisions = self.repo.revisions | ||
1310 | 237 | # the total set of revisions to process | ||
1311 | 238 | self.pending = {key[-1] for key in self.revisions.keys()} | ||
1312 | 239 | |||
1313 | 240 | # mapping from revision_id to parents | ||
1314 | 241 | self._rev_graph = {} | ||
1315 | 242 | # errors that we detect | ||
1316 | 243 | self.inconsistent_parents = 0 | ||
1317 | 244 | # we need the revision id of each revision and its available parents list | ||
1318 | 245 | self._setup_steps(len(self.pending)) | ||
1319 | 246 | for rev_id in self.pending: | ||
1320 | 247 | # put a revision into the graph. | ||
1321 | 248 | self._graph_revision(rev_id) | ||
1322 | 249 | self._check_garbage_inventories() | ||
1323 | 250 | # if there are no inconsistent_parents and | ||
1324 | 251 | # (no garbage inventories or we are not doing a thorough check) | ||
1325 | 252 | if (not self.inconsistent_parents | ||
1326 | 253 | and (not self.garbage_inventories or not self.thorough)): | ||
1327 | 254 | ui.ui_factory.note(gettext('Inventory ok.')) | ||
1328 | 255 | return | ||
1329 | 256 | self.pb.update(gettext('Backing up inventory'), 0, 0) | ||
1330 | 257 | self.repo._backup_inventory() | ||
1331 | 258 | ui.ui_factory.note(gettext('Backup inventory created.')) | ||
1332 | 259 | new_inventories = self.repo._temp_inventories() | ||
1333 | 260 | |||
1334 | 261 | # we have topological order of revisions and non ghost parents ready. | ||
1335 | 262 | self._setup_steps(len(self._rev_graph)) | ||
1336 | 263 | revision_keys = [(rev_id,) for rev_id in topo_sort(self._rev_graph)] | ||
1337 | 264 | stream = self._change_inv_parents( | ||
1338 | 265 | self.inventory.get_record_stream(revision_keys, 'unordered', True), | ||
1339 | 266 | self._new_inv_parents, | ||
1340 | 267 | set(revision_keys)) | ||
1341 | 268 | new_inventories.insert_record_stream(stream) | ||
1342 | 269 | # if this worked, the set of new_inventories.keys should equal | ||
1343 | 270 | # self.pending | ||
1344 | 271 | if not (set(new_inventories.keys()) | ||
1345 | 272 | == {(revid,) for revid in self.pending}): | ||
1346 | 273 | raise AssertionError() | ||
1347 | 274 | self.pb.update(gettext('Writing weave')) | ||
1348 | 275 | self.repo._activate_new_inventory() | ||
1349 | 276 | self.inventory = None | ||
1350 | 277 | ui.ui_factory.note(gettext('Inventory regenerated.')) | ||
1351 | 278 | |||
1352 | 279 | def _new_inv_parents(self, revision_key): | ||
1353 | 280 | """Lookup ghost-filtered parents for revision_key.""" | ||
1354 | 281 | # Use the filtered ghostless parents list: | ||
1355 | 282 | return tuple([(revid,) for revid in self._rev_graph[revision_key[-1]]]) | ||
1356 | 283 | |||
1357 | 284 | def _change_inv_parents(self, stream, get_parents, all_revision_keys): | ||
1358 | 285 | """Adapt a record stream to reconcile the parents.""" | ||
1359 | 286 | for record in stream: | ||
1360 | 287 | wanted_parents = get_parents(record.key) | ||
1361 | 288 | if wanted_parents and wanted_parents[0] not in all_revision_keys: | ||
1362 | 289 | # The check for the left most parent only handles knit | ||
1363 | 290 | # compressors, but this code only applies to knit and weave | ||
1364 | 291 | # repositories anyway. | ||
1365 | 292 | bytes = record.get_bytes_as('fulltext') | ||
1366 | 293 | yield FulltextContentFactory(record.key, wanted_parents, record.sha1, bytes) | ||
1367 | 294 | else: | ||
1368 | 295 | adapted_record = AdapterFactory( | ||
1369 | 296 | record.key, wanted_parents, record) | ||
1370 | 297 | yield adapted_record | ||
1371 | 298 | self._reweave_step('adding inventories') | ||
1372 | 299 | |||
1373 | 300 | def _setup_steps(self, new_total): | ||
1374 | 301 | """Setup the markers we need to control the progress bar.""" | ||
1375 | 302 | self.total = new_total | ||
1376 | 303 | self.count = 0 | ||
1377 | 304 | |||
1378 | 305 | def _graph_revision(self, rev_id): | ||
1379 | 306 | """Load a revision into the revision graph.""" | ||
1380 | 307 | # pick a random revision | ||
1381 | 308 | # analyse revision id rev_id and put it in the stack. | ||
1382 | 309 | self._reweave_step('loading revisions') | ||
1383 | 310 | rev = self.repo.get_revision_reconcile(rev_id) | ||
1384 | 311 | parents = [] | ||
1385 | 312 | for parent in rev.parent_ids: | ||
1386 | 313 | if self._parent_is_available(parent): | ||
1387 | 314 | parents.append(parent) | ||
1388 | 315 | else: | ||
1389 | 316 | mutter('found ghost %s', parent) | ||
1390 | 317 | self._rev_graph[rev_id] = parents | ||
1391 | 318 | |||
1392 | 319 | def _check_garbage_inventories(self): | ||
1393 | 320 | """Check for garbage inventories which we cannot trust | ||
1394 | 321 | |||
1395 | 322 | We cant trust them because their pre-requisite file data may not | ||
1396 | 323 | be present - all we know is that their revision was not installed. | ||
1397 | 324 | """ | ||
1398 | 325 | if not self.thorough: | ||
1399 | 326 | return | ||
1400 | 327 | inventories = set(self.inventory.keys()) | ||
1401 | 328 | revisions = set(self.revisions.keys()) | ||
1402 | 329 | garbage = inventories.difference(revisions) | ||
1403 | 330 | self.garbage_inventories = len(garbage) | ||
1404 | 331 | for revision_key in garbage: | ||
1405 | 332 | mutter('Garbage inventory {%s} found.', revision_key[-1]) | ||
1406 | 333 | |||
1407 | 334 | def _parent_is_available(self, parent): | ||
1408 | 335 | """True if parent is a fully available revision | ||
1409 | 336 | |||
1410 | 337 | A fully available revision has a inventory and a revision object in the | ||
1411 | 338 | repository. | ||
1412 | 339 | """ | ||
1413 | 340 | if parent in self._rev_graph: | ||
1414 | 341 | return True | ||
1415 | 342 | inv_present = (1 == len(self.inventory.get_parent_map([(parent,)]))) | ||
1416 | 343 | return (inv_present and self.repo.has_revision(parent)) | ||
1417 | 344 | |||
1418 | 345 | def _reweave_step(self, message): | ||
1419 | 346 | """Mark a single step of regeneration complete.""" | ||
1420 | 347 | self.pb.update(message, self.count, self.total) | ||
1421 | 348 | self.count += 1 | ||
1422 | 349 | |||
1423 | 350 | |||
1424 | 351 | class KnitReconciler(RepoReconciler): | ||
1425 | 352 | """Reconciler that reconciles a knit format repository. | ||
1426 | 353 | |||
1427 | 354 | This will detect garbage inventories and remove them in thorough mode. | ||
1428 | 355 | """ | ||
1429 | 356 | |||
1430 | 357 | def _reconcile_steps(self): | ||
1431 | 358 | """Perform the steps to reconcile this repository.""" | ||
1432 | 359 | if self.thorough: | ||
1433 | 360 | try: | ||
1434 | 361 | self._load_indexes() | ||
1435 | 362 | except errors.BzrCheckError: | ||
1436 | 363 | self.aborted = True | ||
1437 | 364 | return | ||
1438 | 365 | # knits never suffer this | ||
1439 | 366 | self._gc_inventory() | ||
1440 | 367 | self._fix_text_parents() | ||
1441 | 368 | |||
1442 | 369 | def _load_indexes(self): | ||
1443 | 370 | """Load indexes for the reconciliation.""" | ||
1444 | 371 | self.transaction = self.repo.get_transaction() | ||
1445 | 372 | self.pb.update(gettext('Reading indexes'), 0, 2) | ||
1446 | 373 | self.inventory = self.repo.inventories | ||
1447 | 374 | self.pb.update(gettext('Reading indexes'), 1, 2) | ||
1448 | 375 | self.repo._check_for_inconsistent_revision_parents() | ||
1449 | 376 | self.revisions = self.repo.revisions | ||
1450 | 377 | self.pb.update(gettext('Reading indexes'), 2, 2) | ||
1451 | 378 | |||
1452 | 379 | def _gc_inventory(self): | ||
1453 | 380 | """Remove inventories that are not referenced from the revision store.""" | ||
1454 | 381 | self.pb.update(gettext('Checking unused inventories'), 0, 1) | ||
1455 | 382 | self._check_garbage_inventories() | ||
1456 | 383 | self.pb.update(gettext('Checking unused inventories'), 1, 3) | ||
1457 | 384 | if not self.garbage_inventories: | ||
1458 | 385 | ui.ui_factory.note(gettext('Inventory ok.')) | ||
1459 | 386 | return | ||
1460 | 387 | self.pb.update(gettext('Backing up inventory'), 0, 0) | ||
1461 | 388 | self.repo._backup_inventory() | ||
1462 | 389 | ui.ui_factory.note(gettext('Backup Inventory created')) | ||
1463 | 390 | # asking for '' should never return a non-empty weave | ||
1464 | 391 | new_inventories = self.repo._temp_inventories() | ||
1465 | 392 | # we have topological order of revisions and non ghost parents ready. | ||
1466 | 393 | graph = self.revisions.get_parent_map(self.revisions.keys()) | ||
1467 | 394 | revision_keys = topo_sort(graph) | ||
1468 | 395 | revision_ids = [key[-1] for key in revision_keys] | ||
1469 | 396 | self._setup_steps(len(revision_keys)) | ||
1470 | 397 | stream = self._change_inv_parents( | ||
1471 | 398 | self.inventory.get_record_stream(revision_keys, 'unordered', True), | ||
1472 | 399 | graph.__getitem__, | ||
1473 | 400 | set(revision_keys)) | ||
1474 | 401 | new_inventories.insert_record_stream(stream) | ||
1475 | 402 | # if this worked, the set of new_inventory_vf.names should equal | ||
1476 | 403 | # the revisionds list | ||
1477 | 404 | if not(set(new_inventories.keys()) == set(revision_keys)): | ||
1478 | 405 | raise AssertionError() | ||
1479 | 406 | self.pb.update(gettext('Writing weave')) | ||
1480 | 407 | self.repo._activate_new_inventory() | ||
1481 | 408 | self.inventory = None | ||
1482 | 409 | ui.ui_factory.note(gettext('Inventory regenerated.')) | ||
1483 | 410 | |||
1484 | 411 | def _fix_text_parents(self): | ||
1485 | 412 | """Fix bad versionedfile parent entries. | ||
1486 | 413 | |||
1487 | 414 | It is possible for the parents entry in a versionedfile entry to be | ||
1488 | 415 | inconsistent with the values in the revision and inventory. | ||
1489 | 416 | |||
1490 | 417 | This method finds entries with such inconsistencies, corrects their | ||
1491 | 418 | parent lists, and replaces the versionedfile with a corrected version. | ||
1492 | 419 | """ | ||
1493 | 420 | transaction = self.repo.get_transaction() | ||
1494 | 421 | versions = [key[-1] for key in self.revisions.keys()] | ||
1495 | 422 | mutter('Prepopulating revision text cache with %d revisions', | ||
1496 | 423 | len(versions)) | ||
1497 | 424 | vf_checker = self.repo._get_versioned_file_checker() | ||
1498 | 425 | bad_parents, unused_versions = vf_checker.check_file_version_parents( | ||
1499 | 426 | self.repo.texts, self.pb) | ||
1500 | 427 | text_index = vf_checker.text_index | ||
1501 | 428 | per_id_bad_parents = {} | ||
1502 | 429 | for key in unused_versions: | ||
1503 | 430 | # Ensure that every file with unused versions gets rewritten. | ||
1504 | 431 | # NB: This is really not needed, reconcile != pack. | ||
1505 | 432 | per_id_bad_parents[key[0]] = {} | ||
1506 | 433 | # Generate per-knit/weave data. | ||
1507 | 434 | for key, details in bad_parents.items(): | ||
1508 | 435 | file_id = key[0] | ||
1509 | 436 | rev_id = key[1] | ||
1510 | 437 | knit_parents = tuple([parent[-1] for parent in details[0]]) | ||
1511 | 438 | correct_parents = tuple([parent[-1] for parent in details[1]]) | ||
1512 | 439 | file_details = per_id_bad_parents.setdefault(file_id, {}) | ||
1513 | 440 | file_details[rev_id] = (knit_parents, correct_parents) | ||
1514 | 441 | file_id_versions = {} | ||
1515 | 442 | for text_key in text_index: | ||
1516 | 443 | versions_list = file_id_versions.setdefault(text_key[0], []) | ||
1517 | 444 | versions_list.append(text_key[1]) | ||
1518 | 445 | # Do the reconcile of individual weaves. | ||
1519 | 446 | for num, file_id in enumerate(per_id_bad_parents): | ||
1520 | 447 | self.pb.update(gettext('Fixing text parents'), num, | ||
1521 | 448 | len(per_id_bad_parents)) | ||
1522 | 449 | versions_with_bad_parents = per_id_bad_parents[file_id] | ||
1523 | 450 | id_unused_versions = set(key[-1] for key in unused_versions | ||
1524 | 451 | if key[0] == file_id) | ||
1525 | 452 | if file_id in file_id_versions: | ||
1526 | 453 | file_versions = file_id_versions[file_id] | ||
1527 | 454 | else: | ||
1528 | 455 | # This id was present in the disk store but is not referenced | ||
1529 | 456 | # by any revision at all. | ||
1530 | 457 | file_versions = [] | ||
1531 | 458 | self._fix_text_parent(file_id, versions_with_bad_parents, | ||
1532 | 459 | id_unused_versions, file_versions) | ||
1533 | 460 | |||
1534 | 461 | def _fix_text_parent(self, file_id, versions_with_bad_parents, | ||
1535 | 462 | unused_versions, all_versions): | ||
1536 | 463 | """Fix bad versionedfile entries in a single versioned file.""" | ||
1537 | 464 | mutter('fixing text parent: %r (%d versions)', file_id, | ||
1538 | 465 | len(versions_with_bad_parents)) | ||
1539 | 466 | mutter('(%d are unused)', len(unused_versions)) | ||
1540 | 467 | new_file_id = b'temp:%s' % file_id | ||
1541 | 468 | new_parents = {} | ||
1542 | 469 | needed_keys = set() | ||
1543 | 470 | for version in all_versions: | ||
1544 | 471 | if version in unused_versions: | ||
1545 | 472 | continue | ||
1546 | 473 | elif version in versions_with_bad_parents: | ||
1547 | 474 | parents = versions_with_bad_parents[version][1] | ||
1548 | 475 | else: | ||
1549 | 476 | pmap = self.repo.texts.get_parent_map([(file_id, version)]) | ||
1550 | 477 | parents = [key[-1] for key in pmap[(file_id, version)]] | ||
1551 | 478 | new_parents[(new_file_id, version)] = [ | ||
1552 | 479 | (new_file_id, parent) for parent in parents] | ||
1553 | 480 | needed_keys.add((file_id, version)) | ||
1554 | 481 | |||
1555 | 482 | def fix_parents(stream): | ||
1556 | 483 | for record in stream: | ||
1557 | 484 | bytes = record.get_bytes_as('fulltext') | ||
1558 | 485 | new_key = (new_file_id, record.key[-1]) | ||
1559 | 486 | parents = new_parents[new_key] | ||
1560 | 487 | yield FulltextContentFactory(new_key, parents, record.sha1, bytes) | ||
1561 | 488 | stream = self.repo.texts.get_record_stream( | ||
1562 | 489 | needed_keys, 'topological', True) | ||
1563 | 490 | self.repo._remove_file_id(new_file_id) | ||
1564 | 491 | self.repo.texts.insert_record_stream(fix_parents(stream)) | ||
1565 | 492 | self.repo._remove_file_id(file_id) | ||
1566 | 493 | if len(new_parents): | ||
1567 | 494 | self.repo._move_file_id(new_file_id, file_id) | ||
1568 | 495 | |||
1569 | 496 | |||
1570 | 497 | class PackReconciler(RepoReconciler): | ||
1571 | 498 | """Reconciler that reconciles a pack based repository. | ||
1572 | 499 | |||
1573 | 500 | Garbage inventories do not affect ancestry queries, and removal is | ||
1574 | 501 | considerably more expensive as there is no separate versioned file for | ||
1575 | 502 | them, so they are not cleaned. In short it is currently a no-op. | ||
1576 | 503 | |||
1577 | 504 | In future this may be a good place to hook in annotation cache checking, | ||
1578 | 505 | index recreation etc. | ||
1579 | 506 | """ | ||
1580 | 507 | |||
1581 | 508 | # XXX: The index corruption that _fix_text_parents performs is needed for | ||
1582 | 509 | # packs, but not yet implemented. The basic approach is to: | ||
1583 | 510 | # - lock the names list | ||
1584 | 511 | # - perform a customised pack() that regenerates data as needed | ||
1585 | 512 | # - unlock the names list | ||
1586 | 513 | # https://bugs.launchpad.net/bzr/+bug/154173 | ||
1587 | 514 | |||
1588 | 515 | def __init__(self, repo, other=None, thorough=False, | ||
1589 | 516 | canonicalize_chks=False): | ||
1590 | 517 | super(PackReconciler, self).__init__(repo, other=other, | ||
1591 | 518 | thorough=thorough) | ||
1592 | 519 | self.canonicalize_chks = canonicalize_chks | ||
1593 | 520 | |||
1594 | 521 | def _reconcile_steps(self): | ||
1595 | 522 | """Perform the steps to reconcile this repository.""" | ||
1596 | 523 | if not self.thorough: | ||
1597 | 524 | return | ||
1598 | 525 | collection = self.repo._pack_collection | ||
1599 | 526 | collection.ensure_loaded() | ||
1600 | 527 | collection.lock_names() | ||
1601 | 528 | self.add_cleanup(collection._unlock_names) | ||
1602 | 529 | packs = collection.all_packs() | ||
1603 | 530 | all_revisions = self.repo.all_revision_ids() | ||
1604 | 531 | total_inventories = len(list( | ||
1605 | 532 | collection.inventory_index.combined_index.iter_all_entries())) | ||
1606 | 533 | if len(all_revisions): | ||
1607 | 534 | if self.canonicalize_chks: | ||
1608 | 535 | reconcile_meth = self.repo._canonicalize_chks_pack | ||
1609 | 536 | else: | ||
1610 | 537 | reconcile_meth = self.repo._reconcile_pack | ||
1611 | 538 | new_pack = reconcile_meth(collection, packs, ".reconcile", | ||
1612 | 539 | all_revisions, self.pb) | ||
1613 | 540 | if new_pack is not None: | ||
1614 | 541 | self._discard_and_save(packs) | ||
1615 | 542 | else: | ||
1616 | 543 | # only make a new pack when there is data to copy. | ||
1617 | 544 | self._discard_and_save(packs) | ||
1618 | 545 | self.garbage_inventories = total_inventories - len(list( | ||
1619 | 546 | collection.inventory_index.combined_index.iter_all_entries())) | ||
1620 | 547 | |||
1621 | 548 | def _discard_and_save(self, packs): | ||
1622 | 549 | """Discard some packs from the repository. | ||
1623 | 550 | |||
1624 | 551 | This removes them from the memory index, saves the in-memory index | ||
1625 | 552 | which makes the newly reconciled pack visible and hides the packs to be | ||
1626 | 553 | discarded, and finally renames the packs being discarded into the | ||
1627 | 554 | obsolete packs directory. | ||
1628 | 555 | |||
1629 | 556 | :param packs: The packs to discard. | ||
1630 | 557 | """ | ||
1631 | 558 | for pack in packs: | ||
1632 | 559 | self.repo._pack_collection._remove_pack_from_memory(pack) | ||
1633 | 560 | self.repo._pack_collection._save_pack_names() | ||
1634 | 561 | self.repo._pack_collection._obsolete_packs(packs) | ||
1636 | 562 | 112 | ||
1637 | === modified file 'breezy/repository.py' | |||
1638 | --- breezy/repository.py 2018-11-29 23:42:41 +0000 | |||
1639 | +++ breezy/repository.py 2019-01-01 23:11:42 +0000 | |||
1640 | @@ -218,7 +218,7 @@ | |||
1641 | 218 | to basis_revision_id. The iterator must not include any items with | 218 | to basis_revision_id. The iterator must not include any items with |
1642 | 219 | a current kind of None - missing items must be either filtered out | 219 | a current kind of None - missing items must be either filtered out |
1643 | 220 | or errored-on beefore record_iter_changes sees the item. | 220 | or errored-on beefore record_iter_changes sees the item. |
1645 | 221 | :return: A generator of (file_id, relpath, fs_hash) tuples for use with | 221 | :return: A generator of (relpath, fs_hash) tuples for use with |
1646 | 222 | tree._observed_sha1. | 222 | tree._observed_sha1. |
1647 | 223 | """ | 223 | """ |
1648 | 224 | raise NotImplementedError(self.record_iter_changes) | 224 | raise NotImplementedError(self.record_iter_changes) |
1649 | @@ -953,11 +953,7 @@ | |||
1650 | 953 | 953 | ||
1651 | 954 | def reconcile(self, other=None, thorough=False): | 954 | def reconcile(self, other=None, thorough=False): |
1652 | 955 | """Reconcile this repository.""" | 955 | """Reconcile this repository.""" |
1658 | 956 | from .reconcile import RepoReconciler | 956 | raise NotImplementedError(self.reconcile) |
1654 | 957 | with self.lock_write(): | ||
1655 | 958 | reconciler = RepoReconciler(self, thorough=thorough) | ||
1656 | 959 | reconciler.reconcile() | ||
1657 | 960 | return reconciler | ||
1659 | 961 | 957 | ||
1660 | 962 | def _refresh_data(self): | 958 | def _refresh_data(self): |
1661 | 963 | """Helper called from lock_* to ensure coherency with disk. | 959 | """Helper called from lock_* to ensure coherency with disk. |
1662 | 964 | 960 | ||
1663 | === modified file 'breezy/shelf_ui.py' | |||
1664 | --- breezy/shelf_ui.py 2018-11-16 18:33:17 +0000 | |||
1665 | +++ breezy/shelf_ui.py 2019-01-01 23:11:42 +0000 | |||
1666 | @@ -250,7 +250,7 @@ | |||
1667 | 250 | path_encoding = osutils.get_terminal_encoding() | 250 | path_encoding = osutils.get_terminal_encoding() |
1668 | 251 | text_differ = diff.DiffText(old_tree, new_tree, diff_file, | 251 | text_differ = diff.DiffText(old_tree, new_tree, diff_file, |
1669 | 252 | path_encoding=path_encoding) | 252 | path_encoding=path_encoding) |
1671 | 253 | patch = text_differ.diff(file_id, old_path, new_path, 'file', 'file') | 253 | patch = text_differ.diff(old_path, new_path, 'file', 'file') |
1672 | 254 | diff_file.seek(0) | 254 | diff_file.seek(0) |
1673 | 255 | return patches.parse_patch(diff_file) | 255 | return patches.parse_patch(diff_file) |
1674 | 256 | 256 | ||
1675 | @@ -365,8 +365,7 @@ | |||
1676 | 365 | """ | 365 | """ |
1677 | 366 | lines = osutils.split_lines(self.change_editor.edit_file( | 366 | lines = osutils.split_lines(self.change_editor.edit_file( |
1678 | 367 | self.change_editor.old_tree.id2path(file_id), | 367 | self.change_editor.old_tree.id2path(file_id), |
1681 | 368 | self.change_editor.new_tree.id2path(file_id), | 368 | self.change_editor.new_tree.id2path(file_id))) |
1680 | 369 | file_id=file_id)) | ||
1682 | 370 | return lines, self._count_changed_regions(work_tree_lines, lines) | 369 | return lines, self._count_changed_regions(work_tree_lines, lines) |
1683 | 371 | 370 | ||
1684 | 372 | @staticmethod | 371 | @staticmethod |
1685 | 373 | 372 | ||
1686 | === modified file 'breezy/tests/per_branch/test_reconcile.py' | |||
1687 | --- breezy/tests/per_branch/test_reconcile.py 2018-11-11 04:08:32 +0000 | |||
1688 | +++ breezy/tests/per_branch/test_reconcile.py 2019-01-01 23:11:42 +0000 | |||
1689 | @@ -68,9 +68,9 @@ | |||
1690 | 68 | def test_reconcile_returns_reconciler(self): | 68 | def test_reconcile_returns_reconciler(self): |
1691 | 69 | a_branch = self.make_branch('a_branch') | 69 | a_branch = self.make_branch('a_branch') |
1692 | 70 | result = a_branch.reconcile() | 70 | result = a_branch.reconcile() |
1694 | 71 | self.assertIsInstance(result, reconcile.BranchReconciler) | 71 | self.assertIsInstance(result, reconcile.ReconcileResult) |
1695 | 72 | # No history to fix | 72 | # No history to fix |
1697 | 73 | self.assertIs(False, result.fixed_history) | 73 | self.assertIs(False, getattr(result, 'fixed_history', False)) |
1698 | 74 | 74 | ||
1699 | 75 | def test_reconcile_supports_thorough(self): | 75 | def test_reconcile_supports_thorough(self): |
1700 | 76 | a_branch = self.make_branch('a_branch') | 76 | a_branch = self.make_branch('a_branch') |
1701 | 77 | 77 | ||
1702 | === modified file 'breezy/tests/per_intertree/test_compare.py' | |||
1703 | --- breezy/tests/per_intertree/test_compare.py 2018-11-22 03:51:03 +0000 | |||
1704 | +++ breezy/tests/per_intertree/test_compare.py 2019-01-01 23:11:42 +0000 | |||
1705 | @@ -798,13 +798,15 @@ | |||
1706 | 798 | tree1.mkdir('changing', b'parent-id') | 798 | tree1.mkdir('changing', b'parent-id') |
1707 | 799 | tree1.mkdir('changing/unchanging', b'mid-id') | 799 | tree1.mkdir('changing/unchanging', b'mid-id') |
1708 | 800 | tree1.add(['changing/unchanging/file'], [b'file-id'], ['file']) | 800 | tree1.add(['changing/unchanging/file'], [b'file-id'], ['file']) |
1710 | 801 | tree1.put_file_bytes_non_atomic('changing/unchanging/file', b'a file') | 801 | tree1.put_file_bytes_non_atomic( |
1711 | 802 | 'changing/unchanging/file', b'a file') | ||
1712 | 802 | tree2 = self.make_to_branch_and_tree('2') | 803 | tree2 = self.make_to_branch_and_tree('2') |
1713 | 803 | tree2.set_root_id(tree1.get_root_id()) | 804 | tree2.set_root_id(tree1.get_root_id()) |
1714 | 804 | tree2.mkdir('changed', b'parent-id') | 805 | tree2.mkdir('changed', b'parent-id') |
1715 | 805 | tree2.mkdir('changed/unchanging', b'mid-id') | 806 | tree2.mkdir('changed/unchanging', b'mid-id') |
1716 | 806 | tree2.add(['changed/unchanging/file'], [b'file-id'], ['file']) | 807 | tree2.add(['changed/unchanging/file'], [b'file-id'], ['file']) |
1718 | 807 | tree2.put_file_bytes_non_atomic('changed/unchanging/file', b'changed content') | 808 | tree2.put_file_bytes_non_atomic( |
1719 | 809 | 'changed/unchanging/file', b'changed content') | ||
1720 | 808 | tree1, tree2 = self.mutable_trees_to_test_trees(self, tree1, tree2) | 810 | tree1, tree2 = self.mutable_trees_to_test_trees(self, tree1, tree2) |
1721 | 809 | # parent-id has changed, as has file-id | 811 | # parent-id has changed, as has file-id |
1722 | 810 | root_id = tree1.path2id('') | 812 | root_id = tree1.path2id('') |
1723 | 811 | 813 | ||
1724 | === modified file 'breezy/tests/per_repository/test_commit_builder.py' | |||
1725 | --- breezy/tests/per_repository/test_commit_builder.py 2018-11-16 18:33:17 +0000 | |||
1726 | +++ breezy/tests/per_repository/test_commit_builder.py 2019-01-01 23:11:42 +0000 | |||
1727 | @@ -49,8 +49,7 @@ | |||
1728 | 49 | 49 | ||
1729 | 50 | def test_finish_inventory_record_iter_changes(self): | 50 | def test_finish_inventory_record_iter_changes(self): |
1730 | 51 | tree = self.make_branch_and_tree(".") | 51 | tree = self.make_branch_and_tree(".") |
1733 | 52 | tree.lock_write() | 52 | with tree.lock_write(): |
1732 | 53 | try: | ||
1734 | 54 | builder = tree.branch.get_commit_builder([]) | 53 | builder = tree.branch.get_commit_builder([]) |
1735 | 55 | try: | 54 | try: |
1736 | 56 | list(builder.record_iter_changes(tree, tree.last_revision(), | 55 | list(builder.record_iter_changes(tree, tree.last_revision(), |
1737 | @@ -61,13 +60,10 @@ | |||
1738 | 61 | raise | 60 | raise |
1739 | 62 | repo = tree.branch.repository | 61 | repo = tree.branch.repository |
1740 | 63 | repo.commit_write_group() | 62 | repo.commit_write_group() |
1741 | 64 | finally: | ||
1742 | 65 | tree.unlock() | ||
1743 | 66 | 63 | ||
1744 | 67 | def test_abort_record_iter_changes(self): | 64 | def test_abort_record_iter_changes(self): |
1745 | 68 | tree = self.make_branch_and_tree(".") | 65 | tree = self.make_branch_and_tree(".") |
1748 | 69 | tree.lock_write() | 66 | with tree.lock_write(): |
1747 | 70 | try: | ||
1749 | 71 | builder = tree.branch.get_commit_builder([]) | 67 | builder = tree.branch.get_commit_builder([]) |
1750 | 72 | try: | 68 | try: |
1751 | 73 | basis = tree.basis_tree() | 69 | basis = tree.basis_tree() |
1752 | @@ -77,56 +73,44 @@ | |||
1753 | 77 | builder.finish_inventory() | 73 | builder.finish_inventory() |
1754 | 78 | finally: | 74 | finally: |
1755 | 79 | builder.abort() | 75 | builder.abort() |
1756 | 80 | finally: | ||
1757 | 81 | tree.unlock() | ||
1758 | 82 | 76 | ||
1759 | 83 | def test_commit_lossy(self): | 77 | def test_commit_lossy(self): |
1760 | 84 | tree = self.make_branch_and_tree(".") | 78 | tree = self.make_branch_and_tree(".") |
1763 | 85 | tree.lock_write() | 79 | with tree.lock_write(): |
1762 | 86 | try: | ||
1764 | 87 | builder = tree.branch.get_commit_builder([], lossy=True) | 80 | builder = tree.branch.get_commit_builder([], lossy=True) |
1765 | 88 | list(builder.record_iter_changes(tree, tree.last_revision(), | 81 | list(builder.record_iter_changes(tree, tree.last_revision(), |
1766 | 89 | tree.iter_changes(tree.basis_tree()))) | 82 | tree.iter_changes(tree.basis_tree()))) |
1767 | 90 | builder.finish_inventory() | 83 | builder.finish_inventory() |
1768 | 91 | rev_id = builder.commit('foo bar blah') | 84 | rev_id = builder.commit('foo bar blah') |
1769 | 92 | finally: | ||
1770 | 93 | tree.unlock() | ||
1771 | 94 | rev = tree.branch.repository.get_revision(rev_id) | 85 | rev = tree.branch.repository.get_revision(rev_id) |
1772 | 95 | self.assertEqual('foo bar blah', rev.message) | 86 | self.assertEqual('foo bar blah', rev.message) |
1773 | 96 | 87 | ||
1774 | 97 | def test_commit_message(self): | 88 | def test_commit_message(self): |
1775 | 98 | tree = self.make_branch_and_tree(".") | 89 | tree = self.make_branch_and_tree(".") |
1778 | 99 | tree.lock_write() | 90 | with tree.lock_write(): |
1777 | 100 | try: | ||
1779 | 101 | builder = tree.branch.get_commit_builder([]) | 91 | builder = tree.branch.get_commit_builder([]) |
1780 | 102 | list(builder.record_iter_changes(tree, tree.last_revision(), | 92 | list(builder.record_iter_changes(tree, tree.last_revision(), |
1781 | 103 | tree.iter_changes(tree.basis_tree()))) | 93 | tree.iter_changes(tree.basis_tree()))) |
1782 | 104 | builder.finish_inventory() | 94 | builder.finish_inventory() |
1783 | 105 | rev_id = builder.commit('foo bar blah') | 95 | rev_id = builder.commit('foo bar blah') |
1784 | 106 | finally: | ||
1785 | 107 | tree.unlock() | ||
1786 | 108 | rev = tree.branch.repository.get_revision(rev_id) | 96 | rev = tree.branch.repository.get_revision(rev_id) |
1787 | 109 | self.assertEqual('foo bar blah', rev.message) | 97 | self.assertEqual('foo bar blah', rev.message) |
1788 | 110 | 98 | ||
1789 | 111 | def test_updates_branch(self): | 99 | def test_updates_branch(self): |
1790 | 112 | tree = self.make_branch_and_tree(".") | 100 | tree = self.make_branch_and_tree(".") |
1793 | 113 | tree.lock_write() | 101 | with tree.lock_write(): |
1792 | 114 | try: | ||
1794 | 115 | builder = tree.branch.get_commit_builder([]) | 102 | builder = tree.branch.get_commit_builder([]) |
1795 | 116 | list(builder.record_iter_changes(tree, tree.last_revision(), | 103 | list(builder.record_iter_changes(tree, tree.last_revision(), |
1796 | 117 | tree.iter_changes(tree.basis_tree()))) | 104 | tree.iter_changes(tree.basis_tree()))) |
1797 | 118 | builder.finish_inventory() | 105 | builder.finish_inventory() |
1798 | 119 | will_update_branch = builder.updates_branch | 106 | will_update_branch = builder.updates_branch |
1799 | 120 | rev_id = builder.commit('might update the branch') | 107 | rev_id = builder.commit('might update the branch') |
1800 | 121 | finally: | ||
1801 | 122 | tree.unlock() | ||
1802 | 123 | actually_updated_branch = (tree.branch.last_revision() == rev_id) | 108 | actually_updated_branch = (tree.branch.last_revision() == rev_id) |
1803 | 124 | self.assertEqual(actually_updated_branch, will_update_branch) | 109 | self.assertEqual(actually_updated_branch, will_update_branch) |
1804 | 125 | 110 | ||
1805 | 126 | def test_commit_with_revision_id_record_iter_changes(self): | 111 | def test_commit_with_revision_id_record_iter_changes(self): |
1806 | 127 | tree = self.make_branch_and_tree(".") | 112 | tree = self.make_branch_and_tree(".") |
1809 | 128 | tree.lock_write() | 113 | with tree.lock_write(): |
1808 | 129 | try: | ||
1810 | 130 | # use a unicode revision id to test more corner cases. | 114 | # use a unicode revision id to test more corner cases. |
1811 | 131 | # The repository layer is meant to handle this. | 115 | # The repository layer is meant to handle this. |
1812 | 132 | revision_id = u'\xc8abc'.encode('utf8') | 116 | revision_id = u'\xc8abc'.encode('utf8') |
1813 | @@ -150,20 +134,18 @@ | |||
1814 | 150 | builder.abort() | 134 | builder.abort() |
1815 | 151 | raise | 135 | raise |
1816 | 152 | self.assertEqual(revision_id, builder.commit('foo bar')) | 136 | self.assertEqual(revision_id, builder.commit('foo bar')) |
1817 | 153 | finally: | ||
1818 | 154 | tree.unlock() | ||
1819 | 155 | self.assertTrue(tree.branch.repository.has_revision(revision_id)) | 137 | self.assertTrue(tree.branch.repository.has_revision(revision_id)) |
1820 | 156 | # the revision id must be set on the inventory when saving it. This | 138 | # the revision id must be set on the inventory when saving it. This |
1821 | 157 | # does not precisely test that - a repository that wants to can add it | 139 | # does not precisely test that - a repository that wants to can add it |
1822 | 158 | # on deserialisation, but thats all the current contract guarantees | 140 | # on deserialisation, but thats all the current contract guarantees |
1823 | 159 | # anyway. | 141 | # anyway. |
1826 | 160 | self.assertEqual(revision_id, | 142 | self.assertEqual( |
1827 | 161 | tree.branch.repository.revision_tree(revision_id).get_revision_id()) | 143 | revision_id, |
1828 | 144 | tree.branch.repository.revision_tree(revision_id).get_revision_id()) | ||
1829 | 162 | 145 | ||
1830 | 163 | def test_commit_without_root_errors(self): | 146 | def test_commit_without_root_errors(self): |
1831 | 164 | tree = self.make_branch_and_tree(".") | 147 | tree = self.make_branch_and_tree(".") |
1834 | 165 | tree.lock_write() | 148 | with tree.lock_write(): |
1833 | 166 | try: | ||
1835 | 167 | builder = tree.branch.get_commit_builder([]) | 149 | builder = tree.branch.get_commit_builder([]) |
1836 | 168 | 150 | ||
1837 | 169 | def do_commit(): | 151 | def do_commit(): |
1838 | @@ -177,8 +159,6 @@ | |||
1839 | 177 | else: | 159 | else: |
1840 | 178 | builder.commit("msg") | 160 | builder.commit("msg") |
1841 | 179 | self.assertRaises(errors.RootMissing, do_commit) | 161 | self.assertRaises(errors.RootMissing, do_commit) |
1842 | 180 | finally: | ||
1843 | 181 | tree.unlock() | ||
1844 | 182 | 162 | ||
1845 | 183 | def test_commit_unchanged_root_record_iter_changes(self): | 163 | def test_commit_unchanged_root_record_iter_changes(self): |
1846 | 184 | tree = self.make_branch_and_tree(".") | 164 | tree = self.make_branch_and_tree(".") |
1847 | @@ -210,8 +190,7 @@ | |||
1848 | 210 | tree.add(["foo"]) | 190 | tree.add(["foo"]) |
1849 | 211 | foo_id = tree.path2id('foo') | 191 | foo_id = tree.path2id('foo') |
1850 | 212 | rev_id = tree.commit("added foo") | 192 | rev_id = tree.commit("added foo") |
1853 | 213 | tree.lock_write() | 193 | with tree.lock_write(): |
1852 | 214 | try: | ||
1854 | 215 | builder = tree.branch.get_commit_builder([rev_id]) | 194 | builder = tree.branch.get_commit_builder([rev_id]) |
1855 | 216 | try: | 195 | try: |
1856 | 217 | delete_change = (foo_id, ('foo', None), True, (True, False), | 196 | delete_change = (foo_id, ('foo', None), True, (True, False), |
1857 | @@ -228,8 +207,6 @@ | |||
1858 | 228 | except: | 207 | except: |
1859 | 229 | builder.abort() | 208 | builder.abort() |
1860 | 230 | raise | 209 | raise |
1861 | 231 | finally: | ||
1862 | 232 | tree.unlock() | ||
1863 | 233 | rev_tree = builder.revision_tree() | 210 | rev_tree = builder.revision_tree() |
1864 | 234 | rev_tree.lock_read() | 211 | rev_tree.lock_read() |
1865 | 235 | self.addCleanup(rev_tree.unlock) | 212 | self.addCleanup(rev_tree.unlock) |
1866 | @@ -462,7 +439,9 @@ | |||
1867 | 462 | self.assertFileGraph(expected_graph, tree, (file_id, rev2)) | 439 | self.assertFileGraph(expected_graph, tree, (file_id, rev2)) |
1868 | 463 | 440 | ||
1869 | 464 | def mini_commit_record_iter_changes(self, tree, name, new_name, | 441 | def mini_commit_record_iter_changes(self, tree, name, new_name, |
1871 | 465 | records_version=True, delta_against_basis=True, expect_fs_hash=False): | 442 | records_version=True, |
1872 | 443 | delta_against_basis=True, | ||
1873 | 444 | expect_fs_hash=False): | ||
1874 | 466 | """Perform a miniature commit looking for record entry results. | 445 | """Perform a miniature commit looking for record entry results. |
1875 | 467 | 446 | ||
1876 | 468 | This version uses the record_iter_changes interface. | 447 | This version uses the record_iter_changes interface. |
1877 | @@ -498,10 +477,10 @@ | |||
1878 | 498 | tree_file_stat[0].close() | 477 | tree_file_stat[0].close() |
1879 | 499 | self.assertLength(1, result) | 478 | self.assertLength(1, result) |
1880 | 500 | result = result[0] | 479 | result = result[0] |
1882 | 501 | self.assertEqual(result[:2], (file_id, new_name)) | 480 | self.assertEqual(result[0], new_name) |
1883 | 502 | self.assertEqual( | 481 | self.assertEqual( |
1886 | 503 | result[2][0], tree.get_file_sha1(new_name)) | 482 | result[1][0], tree.get_file_sha1(new_name)) |
1887 | 504 | self.assertEqualStat(result[2][1], tree_file_stat[1]) | 483 | self.assertEqualStat(result[1][1], tree_file_stat[1]) |
1888 | 505 | else: | 484 | else: |
1889 | 506 | self.assertEqual([], result) | 485 | self.assertEqual([], result) |
1890 | 507 | builder.finish_inventory() | 486 | builder.finish_inventory() |
1891 | @@ -600,9 +579,10 @@ | |||
1892 | 600 | rev2 = self._rename_in_tree(tree1, name, 'rev2') | 579 | rev2 = self._rename_in_tree(tree1, name, 'rev2') |
1893 | 601 | rev3 = self._rename_in_tree(tree2, name, 'rev3') | 580 | rev3 = self._rename_in_tree(tree2, name, 'rev3') |
1894 | 602 | tree1.merge_from_branch(tree2.branch) | 581 | tree1.merge_from_branch(tree2.branch) |
1898 | 603 | rev4 = self.mini_commit_record_iter_changes(tree1, 'new_' + name, 'new_' + name, | 582 | rev4 = self.mini_commit_record_iter_changes( |
1899 | 604 | expect_fs_hash=expect_fs_hash, | 583 | tree1, 'new_' + name, 'new_' + name, |
1900 | 605 | delta_against_basis=tree1.supports_rename_tracking()) | 584 | expect_fs_hash=expect_fs_hash, |
1901 | 585 | delta_against_basis=tree1.supports_rename_tracking()) | ||
1902 | 606 | tree3, = self._get_revtrees(tree1, [rev4]) | 586 | tree3, = self._get_revtrees(tree1, [rev4]) |
1903 | 607 | expected_graph = {} | 587 | expected_graph = {} |
1904 | 608 | if tree1.supports_rename_tracking(): | 588 | if tree1.supports_rename_tracking(): |
1905 | @@ -873,8 +853,7 @@ | |||
1906 | 873 | self.overrideAttr(config, '_auto_user_id', | 853 | self.overrideAttr(config, '_auto_user_id', |
1907 | 874 | lambda: (None, None)) | 854 | lambda: (None, None)) |
1908 | 875 | tree = self.make_branch_and_tree(".") | 855 | tree = self.make_branch_and_tree(".") |
1911 | 876 | tree.lock_write() | 856 | with tree.lock_write(): |
1910 | 877 | try: | ||
1912 | 878 | # Make sure no username is available. | 857 | # Make sure no username is available. |
1913 | 879 | self.assertRaises(config.NoWhoami, tree.branch.get_commit_builder, | 858 | self.assertRaises(config.NoWhoami, tree.branch.get_commit_builder, |
1914 | 880 | []) | 859 | []) |
1915 | @@ -889,5 +868,3 @@ | |||
1916 | 889 | raise | 868 | raise |
1917 | 890 | repo = tree.branch.repository | 869 | repo = tree.branch.repository |
1918 | 891 | repo.commit_write_group() | 870 | repo.commit_write_group() |
1919 | 892 | finally: | ||
1920 | 893 | tree.unlock() | ||
1921 | 894 | 871 | ||
1922 | === modified file 'breezy/tests/per_repository_vf/test_reconcile.py' | |||
1923 | --- breezy/tests/per_repository_vf/test_reconcile.py 2018-11-11 04:08:32 +0000 | |||
1924 | +++ breezy/tests/per_repository_vf/test_reconcile.py 2019-01-01 23:11:42 +0000 | |||
1925 | @@ -73,14 +73,14 @@ | |||
1926 | 73 | make sure we safely detect this problem. | 73 | make sure we safely detect this problem. |
1927 | 74 | """ | 74 | """ |
1928 | 75 | repo = self.make_repo_with_extra_ghost_index() | 75 | repo = self.make_repo_with_extra_ghost_index() |
1931 | 76 | reconciler = repo.reconcile(thorough=True) | 76 | result = repo.reconcile(thorough=True) |
1932 | 77 | self.assertTrue(reconciler.aborted, | 77 | self.assertTrue(result.aborted, |
1933 | 78 | "reconcile should have aborted due to bad parents.") | 78 | "reconcile should have aborted due to bad parents.") |
1934 | 79 | 79 | ||
1935 | 80 | def test_does_not_abort_on_clean_repo(self): | 80 | def test_does_not_abort_on_clean_repo(self): |
1936 | 81 | repo = self.make_repository('.') | 81 | repo = self.make_repository('.') |
1939 | 82 | reconciler = repo.reconcile(thorough=True) | 82 | result = repo.reconcile(thorough=True) |
1940 | 83 | self.assertFalse(reconciler.aborted, | 83 | self.assertFalse(result.aborted, |
1941 | 84 | "reconcile should not have aborted on an unbroken repository.") | 84 | "reconcile should not have aborted on an unbroken repository.") |
1942 | 85 | 85 | ||
1943 | 86 | 86 | ||
1944 | @@ -147,11 +147,11 @@ | |||
1945 | 147 | self.make_repository('empty') | 147 | self.make_repository('empty') |
1946 | 148 | d = BzrDir.open(self.get_url('empty')) | 148 | d = BzrDir.open(self.get_url('empty')) |
1947 | 149 | # calling on a empty repository should do nothing | 149 | # calling on a empty repository should do nothing |
1949 | 150 | reconciler = d.find_repository().reconcile(**kwargs) | 150 | result = d.find_repository().reconcile(**kwargs) |
1950 | 151 | # no inconsistent parents should have been found | 151 | # no inconsistent parents should have been found |
1952 | 152 | self.assertEqual(0, reconciler.inconsistent_parents) | 152 | self.assertEqual(0, result.inconsistent_parents) |
1953 | 153 | # and no garbage inventories | 153 | # and no garbage inventories |
1955 | 154 | self.assertEqual(0, reconciler.garbage_inventories) | 154 | self.assertEqual(0, result.garbage_inventories) |
1956 | 155 | # and no backup weave should have been needed/made. | 155 | # and no backup weave should have been needed/made. |
1957 | 156 | self.checkNoBackupInventory(d) | 156 | self.checkNoBackupInventory(d) |
1958 | 157 | 157 | ||
1959 | @@ -187,11 +187,11 @@ | |||
1960 | 187 | if not repo._reconcile_does_inventory_gc: | 187 | if not repo._reconcile_does_inventory_gc: |
1961 | 188 | raise TestSkipped('Irrelevant test') | 188 | raise TestSkipped('Irrelevant test') |
1962 | 189 | self.checkUnreconciled(d, repo.reconcile()) | 189 | self.checkUnreconciled(d, repo.reconcile()) |
1964 | 190 | reconciler = repo.reconcile(thorough=True) | 190 | result = repo.reconcile(thorough=True) |
1965 | 191 | # no bad parents | 191 | # no bad parents |
1967 | 192 | self.assertEqual(0, reconciler.inconsistent_parents) | 192 | self.assertEqual(0, result.inconsistent_parents) |
1968 | 193 | # and one garbage inventory | 193 | # and one garbage inventory |
1970 | 194 | self.assertEqual(1, reconciler.garbage_inventories) | 194 | self.assertEqual(1, result.garbage_inventories) |
1971 | 195 | self.check_missing_was_removed(repo) | 195 | self.check_missing_was_removed(repo) |
1972 | 196 | 196 | ||
1973 | 197 | def check_thorough_reweave_missing_revision(self, aBzrDir, reconcile, | 197 | def check_thorough_reweave_missing_revision(self, aBzrDir, reconcile, |
1974 | @@ -241,8 +241,7 @@ | |||
1975 | 241 | 241 | ||
1976 | 242 | def reconcile(): | 242 | def reconcile(): |
1977 | 243 | reconciler = Reconciler(d) | 243 | reconciler = Reconciler(d) |
1980 | 244 | reconciler.reconcile() | 244 | return reconciler.reconcile() |
1979 | 245 | return reconciler | ||
1981 | 246 | self.check_thorough_reweave_missing_revision(d, reconcile) | 245 | self.check_thorough_reweave_missing_revision(d, reconcile) |
1982 | 247 | 246 | ||
1983 | 248 | def test_reweave_inventory_without_revision_and_ghost(self): | 247 | def test_reweave_inventory_without_revision_and_ghost(self): |
1984 | 249 | 248 | ||
1985 | === modified file 'breezy/tests/per_repository_vf/test_repository.py' | |||
1986 | --- breezy/tests/per_repository_vf/test_repository.py 2018-11-18 01:02:16 +0000 | |||
1987 | +++ breezy/tests/per_repository_vf/test_repository.py 2019-01-01 23:11:42 +0000 | |||
1988 | @@ -271,7 +271,8 @@ | |||
1989 | 271 | with tree.lock_write(): | 271 | with tree.lock_write(): |
1990 | 272 | self.assertEqual(set(), set(repo.texts.keys())) | 272 | self.assertEqual(set(), set(repo.texts.keys())) |
1991 | 273 | tree.add(['foo'], [file_id], ['file']) | 273 | tree.add(['foo'], [file_id], ['file']) |
1993 | 274 | tree.put_file_bytes_non_atomic('foo', b'content\n') | 274 | tree.put_file_bytes_non_atomic( |
1994 | 275 | 'foo', b'content\n') | ||
1995 | 275 | try: | 276 | try: |
1996 | 276 | rev_key = (tree.commit("foo"),) | 277 | rev_key = (tree.commit("foo"),) |
1997 | 277 | except errors.IllegalPath: | 278 | except errors.IllegalPath: |
1998 | 278 | 279 | ||
1999 | === modified file 'breezy/tests/per_workingtree/test_parents.py' | |||
2000 | --- breezy/tests/per_workingtree/test_parents.py 2018-11-18 00:25:19 +0000 | |||
2001 | +++ breezy/tests/per_workingtree/test_parents.py 2019-01-01 23:11:42 +0000 | |||
2002 | @@ -466,8 +466,8 @@ | |||
2003 | 466 | _mod_revision.NULL_REVISION) | 466 | _mod_revision.NULL_REVISION) |
2004 | 467 | changes = shape_tree.iter_changes( | 467 | changes = shape_tree.iter_changes( |
2005 | 468 | base_tree) | 468 | base_tree) |
2008 | 469 | list(builder.record_iter_changes(shape_tree, | 469 | list(builder.record_iter_changes( |
2009 | 470 | base_tree.get_revision_id(), changes)) | 470 | shape_tree, base_tree.get_revision_id(), changes)) |
2010 | 471 | builder.finish_inventory() | 471 | builder.finish_inventory() |
2011 | 472 | builder.commit("Message") | 472 | builder.commit("Message") |
2012 | 473 | 473 | ||
2013 | 474 | 474 | ||
2014 | === modified file 'breezy/tests/test_diff.py' | |||
2015 | --- breezy/tests/test_diff.py 2018-11-11 04:08:32 +0000 | |||
2016 | +++ breezy/tests/test_diff.py 2019-01-01 23:11:42 +0000 | |||
2017 | @@ -699,7 +699,7 @@ | |||
2018 | 699 | 699 | ||
2019 | 700 | class DiffWasIs(diff.DiffPath): | 700 | class DiffWasIs(diff.DiffPath): |
2020 | 701 | 701 | ||
2022 | 702 | def diff(self, file_id, old_path, new_path, old_kind, new_kind): | 702 | def diff(self, old_path, new_path, old_kind, new_kind): |
2023 | 703 | self.to_file.write(b'was: ') | 703 | self.to_file.write(b'was: ') |
2024 | 704 | self.to_file.write(self.old_tree.get_file(old_path).read()) | 704 | self.to_file.write(self.old_tree.get_file(old_path).read()) |
2025 | 705 | self.to_file.write(b'is: ') | 705 | self.to_file.write(b'is: ') |
2026 | @@ -728,20 +728,19 @@ | |||
2027 | 728 | self.new_tree.add('newdir') | 728 | self.new_tree.add('newdir') |
2028 | 729 | self.new_tree.add('newdir/newfile', b'file-id') | 729 | self.new_tree.add('newdir/newfile', b'file-id') |
2029 | 730 | differ = diff.DiffText(self.old_tree, self.new_tree, BytesIO()) | 730 | differ = diff.DiffText(self.old_tree, self.new_tree, BytesIO()) |
2032 | 731 | differ.diff_text('olddir/oldfile', None, 'old label', | 731 | differ.diff_text('olddir/oldfile', None, 'old label', 'new label') |
2031 | 732 | 'new label', b'file-id', None) | ||
2033 | 733 | self.assertEqual( | 732 | self.assertEqual( |
2034 | 734 | b'--- old label\n+++ new label\n@@ -1,1 +0,0 @@\n-old\n\n', | 733 | b'--- old label\n+++ new label\n@@ -1,1 +0,0 @@\n-old\n\n', |
2035 | 735 | differ.to_file.getvalue()) | 734 | differ.to_file.getvalue()) |
2036 | 736 | differ.to_file.seek(0) | 735 | differ.to_file.seek(0) |
2037 | 737 | differ.diff_text(None, 'newdir/newfile', | 736 | differ.diff_text(None, 'newdir/newfile', |
2039 | 738 | 'old label', 'new label', None, b'file-id') | 737 | 'old label', 'new label') |
2040 | 739 | self.assertEqual( | 738 | self.assertEqual( |
2041 | 740 | b'--- old label\n+++ new label\n@@ -0,0 +1,1 @@\n+new\n\n', | 739 | b'--- old label\n+++ new label\n@@ -0,0 +1,1 @@\n+new\n\n', |
2042 | 741 | differ.to_file.getvalue()) | 740 | differ.to_file.getvalue()) |
2043 | 742 | differ.to_file.seek(0) | 741 | differ.to_file.seek(0) |
2044 | 743 | differ.diff_text('olddir/oldfile', 'newdir/newfile', | 742 | differ.diff_text('olddir/oldfile', 'newdir/newfile', |
2046 | 744 | 'old label', 'new label', b'file-id', b'file-id') | 743 | 'old label', 'new label') |
2047 | 745 | self.assertEqual( | 744 | self.assertEqual( |
2048 | 746 | b'--- old label\n+++ new label\n@@ -1,1 +1,1 @@\n-old\n+new\n\n', | 745 | b'--- old label\n+++ new label\n@@ -1,1 +1,1 @@\n-old\n+new\n\n', |
2049 | 747 | differ.to_file.getvalue()) | 746 | differ.to_file.getvalue()) |
2050 | @@ -789,7 +788,7 @@ | |||
2051 | 789 | ('new-tree/newdir/newfile', b'new\n')]) | 788 | ('new-tree/newdir/newfile', b'new\n')]) |
2052 | 790 | self.new_tree.add('newdir') | 789 | self.new_tree.add('newdir') |
2053 | 791 | self.new_tree.add('newdir/newfile', b'file-id') | 790 | self.new_tree.add('newdir/newfile', b'file-id') |
2055 | 792 | self.differ.diff(b'file-id', 'olddir/oldfile', 'newdir/newfile') | 791 | self.differ.diff('olddir/oldfile', 'newdir/newfile') |
2056 | 793 | self.assertContainsRe( | 792 | self.assertContainsRe( |
2057 | 794 | self.differ.to_file.getvalue(), | 793 | self.differ.to_file.getvalue(), |
2058 | 795 | br'--- olddir/oldfile.*\n\+\+\+ newdir/newfile.*\n\@\@ -1,1 \+1,1' | 794 | br'--- olddir/oldfile.*\n\+\+\+ newdir/newfile.*\n\@\@ -1,1 \+1,1' |
2059 | @@ -805,7 +804,7 @@ | |||
2060 | 805 | os.symlink('new', 'new-tree/newdir/newfile') | 804 | os.symlink('new', 'new-tree/newdir/newfile') |
2061 | 806 | self.new_tree.add('newdir') | 805 | self.new_tree.add('newdir') |
2062 | 807 | self.new_tree.add('newdir/newfile', b'file-id') | 806 | self.new_tree.add('newdir/newfile', b'file-id') |
2064 | 808 | self.differ.diff(b'file-id', 'olddir/oldfile', 'newdir/newfile') | 807 | self.differ.diff('olddir/oldfile', 'newdir/newfile') |
2065 | 809 | self.assertContainsRe( | 808 | self.assertContainsRe( |
2066 | 810 | self.differ.to_file.getvalue(), | 809 | self.differ.to_file.getvalue(), |
2067 | 811 | br'--- olddir/oldfile.*\n\+\+\+ newdir/newfile.*\n\@\@ -1,1 \+0,0' | 810 | br'--- olddir/oldfile.*\n\+\+\+ newdir/newfile.*\n\@\@ -1,1 \+0,0' |
2068 | @@ -816,7 +815,7 @@ | |||
2069 | 816 | def test_diff_directory(self): | 815 | def test_diff_directory(self): |
2070 | 817 | self.build_tree(['new-tree/new-dir/']) | 816 | self.build_tree(['new-tree/new-dir/']) |
2071 | 818 | self.new_tree.add('new-dir', b'new-dir-id') | 817 | self.new_tree.add('new-dir', b'new-dir-id') |
2073 | 819 | self.differ.diff(b'new-dir-id', None, 'new-dir') | 818 | self.differ.diff(None, 'new-dir') |
2074 | 820 | self.assertEqual(self.differ.to_file.getvalue(), b'') | 819 | self.assertEqual(self.differ.to_file.getvalue(), b'') |
2075 | 821 | 820 | ||
2076 | 822 | def create_old_new(self): | 821 | def create_old_new(self): |
2077 | @@ -838,7 +837,7 @@ | |||
2078 | 838 | differ = diff.DiffTree(self.old_tree, self.new_tree, BytesIO()) | 837 | differ = diff.DiffTree(self.old_tree, self.new_tree, BytesIO()) |
2079 | 839 | finally: | 838 | finally: |
2080 | 840 | diff.DiffTree.diff_factories = old_diff_factories | 839 | diff.DiffTree.diff_factories = old_diff_factories |
2082 | 841 | differ.diff(b'file-id', 'olddir/oldfile', 'newdir/newfile') | 840 | differ.diff('olddir/oldfile', 'newdir/newfile') |
2083 | 842 | self.assertNotContainsRe( | 841 | self.assertNotContainsRe( |
2084 | 843 | differ.to_file.getvalue(), | 842 | differ.to_file.getvalue(), |
2085 | 844 | br'--- olddir/oldfile.*\n\+\+\+ newdir/newfile.*\n\@\@ -1,1 \+1,1' | 843 | br'--- olddir/oldfile.*\n\+\+\+ newdir/newfile.*\n\@\@ -1,1 \+1,1' |
2086 | @@ -850,7 +849,7 @@ | |||
2087 | 850 | self.create_old_new() | 849 | self.create_old_new() |
2088 | 851 | differ = diff.DiffTree(self.old_tree, self.new_tree, BytesIO(), | 850 | differ = diff.DiffTree(self.old_tree, self.new_tree, BytesIO(), |
2089 | 852 | extra_factories=[DiffWasIs.from_diff_tree]) | 851 | extra_factories=[DiffWasIs.from_diff_tree]) |
2091 | 853 | differ.diff(b'file-id', 'olddir/oldfile', 'newdir/newfile') | 852 | differ.diff('olddir/oldfile', 'newdir/newfile') |
2092 | 854 | self.assertNotContainsRe( | 853 | self.assertNotContainsRe( |
2093 | 855 | differ.to_file.getvalue(), | 854 | differ.to_file.getvalue(), |
2094 | 856 | br'--- olddir/oldfile.*\n\+\+\+ newdir/newfile.*\n\@\@ -1,1 \+1,1' | 855 | br'--- olddir/oldfile.*\n\+\+\+ newdir/newfile.*\n\@\@ -1,1 \+1,1' |
2095 | @@ -1492,7 +1491,7 @@ | |||
2096 | 1492 | self.addCleanup(diff_obj.finish) | 1491 | self.addCleanup(diff_obj.finish) |
2097 | 1493 | self.assertContainsRe(diff_obj._root, 'brz-diff-[^/]*') | 1492 | self.assertContainsRe(diff_obj._root, 'brz-diff-[^/]*') |
2098 | 1494 | old_path, new_path = diff_obj._prepare_files( | 1493 | old_path, new_path = diff_obj._prepare_files( |
2100 | 1495 | 'oldname', 'newname', file_id=b'file-id') | 1494 | 'oldname', 'newname') |
2101 | 1496 | self.assertContainsRe(old_path, 'old/oldname$') | 1495 | self.assertContainsRe(old_path, 'old/oldname$') |
2102 | 1497 | self.assertEqual(315532800, os.stat(old_path).st_mtime) | 1496 | self.assertEqual(315532800, os.stat(old_path).st_mtime) |
2103 | 1498 | self.assertContainsRe(new_path, 'tree/newname$') | 1497 | self.assertContainsRe(new_path, 'tree/newname$') |
2104 | @@ -1501,7 +1500,7 @@ | |||
2105 | 1501 | if osutils.host_os_dereferences_symlinks(): | 1500 | if osutils.host_os_dereferences_symlinks(): |
2106 | 1502 | self.assertTrue(os.path.samefile('tree/newname', new_path)) | 1501 | self.assertTrue(os.path.samefile('tree/newname', new_path)) |
2107 | 1503 | # make sure we can create files with the same parent directories | 1502 | # make sure we can create files with the same parent directories |
2109 | 1504 | diff_obj._prepare_files('oldname2', 'newname2', file_id=b'file2-id') | 1503 | diff_obj._prepare_files('oldname2', 'newname2') |
2110 | 1505 | 1504 | ||
2111 | 1506 | 1505 | ||
2112 | 1507 | class TestDiffFromToolEncodedFilename(tests.TestCaseWithTransport): | 1506 | class TestDiffFromToolEncodedFilename(tests.TestCaseWithTransport): |
2113 | 1508 | 1507 | ||
2114 | === modified file 'breezy/tests/test_foreign.py' | |||
2115 | --- breezy/tests/test_foreign.py 2018-11-16 18:33:17 +0000 | |||
2116 | +++ breezy/tests/test_foreign.py 2019-01-01 23:11:42 +0000 | |||
2117 | @@ -205,10 +205,10 @@ | |||
2118 | 205 | parent_revids = [] | 205 | parent_revids = [] |
2119 | 206 | else: | 206 | else: |
2120 | 207 | parent_revids = [parent_revid] | 207 | parent_revids = [parent_revid] |
2125 | 208 | builder = self.target.get_commit_builder(parent_revids, | 208 | builder = self.target.get_commit_builder( |
2126 | 209 | self.target.get_config_stack(), rev.timestamp, | 209 | parent_revids, self.target.get_config_stack(), rev.timestamp, |
2127 | 210 | rev.timezone, rev.committer, rev.properties, | 210 | rev.timezone, rev.committer, rev.properties, |
2128 | 211 | new_revid) | 211 | new_revid) |
2129 | 212 | try: | 212 | try: |
2130 | 213 | parent_tree = self.target.repository.revision_tree( | 213 | parent_tree = self.target.repository.revision_tree( |
2131 | 214 | parent_revid) | 214 | parent_revid) |
2132 | 215 | 215 | ||
2133 | === modified file 'breezy/tests/test_reconcile.py' | |||
2134 | --- breezy/tests/test_reconcile.py 2017-06-10 16:40:42 +0000 | |||
2135 | +++ breezy/tests/test_reconcile.py 2019-01-01 23:11:42 +0000 | |||
2136 | @@ -38,14 +38,14 @@ | |||
2137 | 38 | child = bzrdir.BzrDirMetaFormat1().initialize('child') | 38 | child = bzrdir.BzrDirMetaFormat1().initialize('child') |
2138 | 39 | self.assertRaises(errors.NoRepositoryPresent, child.open_repository) | 39 | self.assertRaises(errors.NoRepositoryPresent, child.open_repository) |
2139 | 40 | reconciler = Reconciler(child) | 40 | reconciler = Reconciler(child) |
2141 | 41 | reconciler.reconcile() | 41 | result = reconciler.reconcile() |
2142 | 42 | # smoke test for reconcile appears to work too. | 42 | # smoke test for reconcile appears to work too. |
2143 | 43 | reconcile(child) | 43 | reconcile(child) |
2144 | 44 | # no inconsistent parents should have been found | 44 | # no inconsistent parents should have been found |
2145 | 45 | # but the values should have been set. | 45 | # but the values should have been set. |
2147 | 46 | self.assertEqual(0, reconciler.inconsistent_parents) | 46 | self.assertEqual(0, result.inconsistent_parents) |
2148 | 47 | # and no garbage inventories | 47 | # and no garbage inventories |
2150 | 48 | self.assertEqual(0, reconciler.garbage_inventories) | 48 | self.assertEqual(0, result.garbage_inventories) |
2151 | 49 | 49 | ||
2152 | 50 | 50 | ||
2153 | 51 | class TestReconciler(tests.TestCaseWithTransport): | 51 | class TestReconciler(tests.TestCaseWithTransport): |
2154 | @@ -53,20 +53,20 @@ | |||
2155 | 53 | def test_reconciler_with_no_branch(self): | 53 | def test_reconciler_with_no_branch(self): |
2156 | 54 | repo = self.make_repository('repo') | 54 | repo = self.make_repository('repo') |
2157 | 55 | reconciler = Reconciler(repo.controldir) | 55 | reconciler = Reconciler(repo.controldir) |
2159 | 56 | reconciler.reconcile() | 56 | result = reconciler.reconcile() |
2160 | 57 | # no inconsistent parents should have been found | 57 | # no inconsistent parents should have been found |
2161 | 58 | # but the values should have been set. | 58 | # but the values should have been set. |
2163 | 59 | self.assertEqual(0, reconciler.inconsistent_parents) | 59 | self.assertEqual(0, result.inconsistent_parents) |
2164 | 60 | # and no garbage inventories | 60 | # and no garbage inventories |
2167 | 61 | self.assertEqual(0, reconciler.garbage_inventories) | 61 | self.assertEqual(0, result.garbage_inventories) |
2168 | 62 | self.assertIs(None, reconciler.fixed_branch_history) | 62 | self.assertIs(None, result.fixed_branch_history) |
2169 | 63 | 63 | ||
2170 | 64 | def test_reconciler_finds_branch(self): | 64 | def test_reconciler_finds_branch(self): |
2171 | 65 | a_branch = self.make_branch('a_branch') | 65 | a_branch = self.make_branch('a_branch') |
2172 | 66 | reconciler = Reconciler(a_branch.controldir) | 66 | reconciler = Reconciler(a_branch.controldir) |
2174 | 67 | reconciler.reconcile() | 67 | result = reconciler.reconcile() |
2175 | 68 | 68 | ||
2176 | 69 | # It should have checked the repository, and the branch | 69 | # It should have checked the repository, and the branch |
2180 | 70 | self.assertEqual(0, reconciler.inconsistent_parents) | 70 | self.assertEqual(0, result.inconsistent_parents) |
2181 | 71 | self.assertEqual(0, reconciler.garbage_inventories) | 71 | self.assertEqual(0, result.garbage_inventories) |
2182 | 72 | self.assertIs(False, reconciler.fixed_branch_history) | 72 | self.assertIs(False, result.fixed_branch_history) |
Somewhat rubber-stamping code move, motive seems reasonable.