Status: | Merged |
---|---|
Approved by: | Jelmer Vernooij |
Approved revision: | no longer in the source branch. |
Merge reported by: | The Breezy Bot |
Merged at revision: | not available |
Proposed branch: | lp:~jelmer/brz/git-merge |
Merge into: | lp:brz/3.1 |
Diff against target: |
2619 lines (+1173/-957) 16 files modified
breezy/bzr/tests/test_transform.py (+417/-0) breezy/bzr/transform.py (+262/-27) breezy/bzr/workingtree.py (+134/-0) breezy/bzr/workingtree_3.py (+2/-2) breezy/bzr/workingtree_4.py (+5/-4) breezy/git/tests/__init__.py (+1/-0) breezy/git/tests/test_blackbox.py (+14/-0) breezy/git/tests/test_transform.py (+41/-0) breezy/git/transform.py (+11/-27) breezy/git/workingtree.py (+126/-0) breezy/plugins/weave_fmt/workingtree.py (+2/-2) breezy/tests/per_tree/__init__.py (+4/-1) breezy/tests/per_workingtree/test_transform.py (+1/-1) breezy/tests/test_transform.py (+0/-406) breezy/transform.py (+151/-385) breezy/workingtree.py (+2/-102) |
To merge this branch: | bzr merge lp:~jelmer/brz/git-merge |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Jelmer Vernooij | Approve | ||
Review via email: mp+390155@code.launchpad.net |
Commit message
Some more improvements for merges in Git.
Description of the change
add test for git file merge.
To post a comment you must log in.
Revision history for this message
Jelmer Vernooij (jelmer) : | # |
review:
Approve
Revision history for this message
The Breezy Bot (the-breezy-bot) wrote : | # |
Revision history for this message
The Breezy Bot (the-breezy-bot) wrote : | # |
Running landing tests failed
https:/
Revision history for this message
The Breezy Bot (the-breezy-bot) wrote : | # |
Running landing tests failed
https:/
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'breezy/bzr/tests/test_transform.py' |
2 | --- breezy/bzr/tests/test_transform.py 2020-07-03 00:43:31 +0000 |
3 | +++ breezy/bzr/tests/test_transform.py 2020-09-02 17:42:36 +0000 |
4 | @@ -14,6 +14,16 @@ |
5 | # along with this program; if not, write to the Free Software |
6 | # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA |
7 | |
8 | +import codecs |
9 | +import os |
10 | +import time |
11 | + |
12 | +from ...tests import features |
13 | +from ... import errors, filters, osutils, rules |
14 | +from ...controldir import ControlDir |
15 | +from ..conflicts import DuplicateEntry |
16 | +from ..transform import build_tree |
17 | + |
18 | from . import TestCaseWithTransport |
19 | |
20 | |
21 | @@ -47,3 +57,410 @@ |
22 | tt.version_file(tt.root, file_id=tree.path2id('')) |
23 | tt.trans_id_tree_path('foo') |
24 | self.assertEqual([], tt._inventory_altered()) |
25 | + |
26 | + |
27 | +class TestBuildTree(TestCaseWithTransport): |
28 | + |
29 | + def test_build_tree_with_symlinks(self): |
30 | + self.requireFeature(features.SymlinkFeature) |
31 | + os.mkdir('a') |
32 | + a = ControlDir.create_standalone_workingtree('a') |
33 | + os.mkdir('a/foo') |
34 | + with open('a/foo/bar', 'wb') as f: |
35 | + f.write(b'contents') |
36 | + os.symlink('a/foo/bar', 'a/foo/baz') |
37 | + a.add(['foo', 'foo/bar', 'foo/baz']) |
38 | + a.commit('initial commit') |
39 | + b = ControlDir.create_standalone_workingtree('b') |
40 | + basis = a.basis_tree() |
41 | + basis.lock_read() |
42 | + self.addCleanup(basis.unlock) |
43 | + build_tree(basis, b) |
44 | + self.assertIs(os.path.isdir('b/foo'), True) |
45 | + with open('b/foo/bar', 'rb') as f: |
46 | + self.assertEqual(f.read(), b"contents") |
47 | + self.assertEqual(os.readlink('b/foo/baz'), 'a/foo/bar') |
48 | + |
49 | + def test_build_with_references(self): |
50 | + tree = self.make_branch_and_tree('source', |
51 | + format='development-subtree') |
52 | + subtree = self.make_branch_and_tree('source/subtree', |
53 | + format='development-subtree') |
54 | + tree.add_reference(subtree) |
55 | + tree.commit('a revision') |
56 | + tree.branch.create_checkout('target') |
57 | + self.assertPathExists('target') |
58 | + self.assertPathExists('target/subtree') |
59 | + |
60 | + def test_file_conflict_handling(self): |
61 | + """Ensure that when building trees, conflict handling is done""" |
62 | + source = self.make_branch_and_tree('source') |
63 | + target = self.make_branch_and_tree('target') |
64 | + self.build_tree(['source/file', 'target/file']) |
65 | + source.add('file', b'new-file') |
66 | + source.commit('added file') |
67 | + build_tree(source.basis_tree(), target) |
68 | + self.assertEqual( |
69 | + [DuplicateEntry('Moved existing file to', 'file.moved', |
70 | + 'file', None, 'new-file')], |
71 | + target.conflicts()) |
72 | + target2 = self.make_branch_and_tree('target2') |
73 | + with open('target2/file', 'wb') as target_file, \ |
74 | + open('source/file', 'rb') as source_file: |
75 | + target_file.write(source_file.read()) |
76 | + build_tree(source.basis_tree(), target2) |
77 | + self.assertEqual([], target2.conflicts()) |
78 | + |
79 | + def test_symlink_conflict_handling(self): |
80 | + """Ensure that when building trees, conflict handling is done""" |
81 | + self.requireFeature(features.SymlinkFeature) |
82 | + source = self.make_branch_and_tree('source') |
83 | + os.symlink('foo', 'source/symlink') |
84 | + source.add('symlink', b'new-symlink') |
85 | + source.commit('added file') |
86 | + target = self.make_branch_and_tree('target') |
87 | + os.symlink('bar', 'target/symlink') |
88 | + build_tree(source.basis_tree(), target) |
89 | + self.assertEqual( |
90 | + [DuplicateEntry('Moved existing file to', 'symlink.moved', |
91 | + 'symlink', None, 'new-symlink')], |
92 | + target.conflicts()) |
93 | + target = self.make_branch_and_tree('target2') |
94 | + os.symlink('foo', 'target2/symlink') |
95 | + build_tree(source.basis_tree(), target) |
96 | + self.assertEqual([], target.conflicts()) |
97 | + |
98 | + def test_directory_conflict_handling(self): |
99 | + """Ensure that when building trees, conflict handling is done""" |
100 | + source = self.make_branch_and_tree('source') |
101 | + target = self.make_branch_and_tree('target') |
102 | + self.build_tree(['source/dir1/', 'source/dir1/file', 'target/dir1/']) |
103 | + source.add(['dir1', 'dir1/file'], [b'new-dir1', b'new-file']) |
104 | + source.commit('added file') |
105 | + build_tree(source.basis_tree(), target) |
106 | + self.assertEqual([], target.conflicts()) |
107 | + self.assertPathExists('target/dir1/file') |
108 | + |
109 | + # Ensure contents are merged |
110 | + target = self.make_branch_and_tree('target2') |
111 | + self.build_tree(['target2/dir1/', 'target2/dir1/file2']) |
112 | + build_tree(source.basis_tree(), target) |
113 | + self.assertEqual([], target.conflicts()) |
114 | + self.assertPathExists('target2/dir1/file2') |
115 | + self.assertPathExists('target2/dir1/file') |
116 | + |
117 | + # Ensure new contents are suppressed for existing branches |
118 | + target = self.make_branch_and_tree('target3') |
119 | + self.make_branch('target3/dir1') |
120 | + self.build_tree(['target3/dir1/file2']) |
121 | + build_tree(source.basis_tree(), target) |
122 | + self.assertPathDoesNotExist('target3/dir1/file') |
123 | + self.assertPathExists('target3/dir1/file2') |
124 | + self.assertPathExists('target3/dir1.diverted/file') |
125 | + self.assertEqual( |
126 | + [DuplicateEntry('Diverted to', 'dir1.diverted', |
127 | + 'dir1', 'new-dir1', None)], |
128 | + target.conflicts()) |
129 | + |
130 | + target = self.make_branch_and_tree('target4') |
131 | + self.build_tree(['target4/dir1/']) |
132 | + self.make_branch('target4/dir1/file') |
133 | + build_tree(source.basis_tree(), target) |
134 | + self.assertPathExists('target4/dir1/file') |
135 | + self.assertEqual('directory', osutils.file_kind('target4/dir1/file')) |
136 | + self.assertPathExists('target4/dir1/file.diverted') |
137 | + self.assertEqual( |
138 | + [DuplicateEntry('Diverted to', 'dir1/file.diverted', |
139 | + 'dir1/file', 'new-file', None)], |
140 | + target.conflicts()) |
141 | + |
142 | + def test_mixed_conflict_handling(self): |
143 | + """Ensure that when building trees, conflict handling is done""" |
144 | + source = self.make_branch_and_tree('source') |
145 | + target = self.make_branch_and_tree('target') |
146 | + self.build_tree(['source/name', 'target/name/']) |
147 | + source.add('name', b'new-name') |
148 | + source.commit('added file') |
149 | + build_tree(source.basis_tree(), target) |
150 | + self.assertEqual( |
151 | + [DuplicateEntry('Moved existing file to', |
152 | + 'name.moved', 'name', None, 'new-name')], |
153 | + target.conflicts()) |
154 | + |
155 | + def test_raises_in_populated(self): |
156 | + source = self.make_branch_and_tree('source') |
157 | + self.build_tree(['source/name']) |
158 | + source.add('name') |
159 | + source.commit('added name') |
160 | + target = self.make_branch_and_tree('target') |
161 | + self.build_tree(['target/name']) |
162 | + target.add('name') |
163 | + self.assertRaises(errors.WorkingTreeAlreadyPopulated, |
164 | + build_tree, source.basis_tree(), target) |
165 | + |
166 | + def test_build_tree_rename_count(self): |
167 | + source = self.make_branch_and_tree('source') |
168 | + self.build_tree(['source/file1', 'source/dir1/']) |
169 | + source.add(['file1', 'dir1']) |
170 | + source.commit('add1') |
171 | + target1 = self.make_branch_and_tree('target1') |
172 | + transform_result = build_tree(source.basis_tree(), target1) |
173 | + self.assertEqual(2, transform_result.rename_count) |
174 | + |
175 | + self.build_tree(['source/dir1/file2']) |
176 | + source.add(['dir1/file2']) |
177 | + source.commit('add3') |
178 | + target2 = self.make_branch_and_tree('target2') |
179 | + transform_result = build_tree(source.basis_tree(), target2) |
180 | + # children of non-root directories should not be renamed |
181 | + self.assertEqual(2, transform_result.rename_count) |
182 | + |
183 | + def create_ab_tree(self): |
184 | + """Create a committed test tree with two files""" |
185 | + source = self.make_branch_and_tree('source') |
186 | + self.build_tree_contents([('source/file1', b'A')]) |
187 | + self.build_tree_contents([('source/file2', b'B')]) |
188 | + source.add(['file1', 'file2'], [b'file1-id', b'file2-id']) |
189 | + source.commit('commit files') |
190 | + source.lock_write() |
191 | + self.addCleanup(source.unlock) |
192 | + return source |
193 | + |
194 | + def test_build_tree_accelerator_tree(self): |
195 | + source = self.create_ab_tree() |
196 | + self.build_tree_contents([('source/file2', b'C')]) |
197 | + calls = [] |
198 | + real_source_get_file = source.get_file |
199 | + |
200 | + def get_file(path): |
201 | + calls.append(path) |
202 | + return real_source_get_file(path) |
203 | + source.get_file = get_file |
204 | + target = self.make_branch_and_tree('target') |
205 | + revision_tree = source.basis_tree() |
206 | + revision_tree.lock_read() |
207 | + self.addCleanup(revision_tree.unlock) |
208 | + build_tree(revision_tree, target, source) |
209 | + self.assertEqual(['file1'], calls) |
210 | + target.lock_read() |
211 | + self.addCleanup(target.unlock) |
212 | + self.assertEqual([], list(target.iter_changes(revision_tree))) |
213 | + |
214 | + def test_build_tree_accelerator_tree_observes_sha1(self): |
215 | + source = self.create_ab_tree() |
216 | + sha1 = osutils.sha_string(b'A') |
217 | + target = self.make_branch_and_tree('target') |
218 | + target.lock_write() |
219 | + self.addCleanup(target.unlock) |
220 | + state = target.current_dirstate() |
221 | + state._cutoff_time = time.time() + 60 |
222 | + build_tree(source.basis_tree(), target, source) |
223 | + entry = state._get_entry(0, path_utf8=b'file1') |
224 | + self.assertEqual(sha1, entry[1][0][1]) |
225 | + |
226 | + def test_build_tree_accelerator_tree_missing_file(self): |
227 | + source = self.create_ab_tree() |
228 | + os.unlink('source/file1') |
229 | + source.remove(['file2']) |
230 | + target = self.make_branch_and_tree('target') |
231 | + revision_tree = source.basis_tree() |
232 | + revision_tree.lock_read() |
233 | + self.addCleanup(revision_tree.unlock) |
234 | + build_tree(revision_tree, target, source) |
235 | + target.lock_read() |
236 | + self.addCleanup(target.unlock) |
237 | + self.assertEqual([], list(target.iter_changes(revision_tree))) |
238 | + |
239 | + def test_build_tree_accelerator_wrong_kind(self): |
240 | + self.requireFeature(features.SymlinkFeature) |
241 | + source = self.make_branch_and_tree('source') |
242 | + self.build_tree_contents([('source/file1', b'')]) |
243 | + self.build_tree_contents([('source/file2', b'')]) |
244 | + source.add(['file1', 'file2'], [b'file1-id', b'file2-id']) |
245 | + source.commit('commit files') |
246 | + os.unlink('source/file2') |
247 | + self.build_tree_contents([('source/file2/', b'C')]) |
248 | + os.unlink('source/file1') |
249 | + os.symlink('file2', 'source/file1') |
250 | + calls = [] |
251 | + real_source_get_file = source.get_file |
252 | + |
253 | + def get_file(path): |
254 | + calls.append(path) |
255 | + return real_source_get_file(path) |
256 | + source.get_file = get_file |
257 | + target = self.make_branch_and_tree('target') |
258 | + revision_tree = source.basis_tree() |
259 | + revision_tree.lock_read() |
260 | + self.addCleanup(revision_tree.unlock) |
261 | + build_tree(revision_tree, target, source) |
262 | + self.assertEqual([], calls) |
263 | + target.lock_read() |
264 | + self.addCleanup(target.unlock) |
265 | + self.assertEqual([], list(target.iter_changes(revision_tree))) |
266 | + |
267 | + def test_build_tree_hardlink(self): |
268 | + self.requireFeature(features.HardlinkFeature) |
269 | + source = self.create_ab_tree() |
270 | + target = self.make_branch_and_tree('target') |
271 | + revision_tree = source.basis_tree() |
272 | + revision_tree.lock_read() |
273 | + self.addCleanup(revision_tree.unlock) |
274 | + build_tree(revision_tree, target, source, hardlink=True) |
275 | + target.lock_read() |
276 | + self.addCleanup(target.unlock) |
277 | + self.assertEqual([], list(target.iter_changes(revision_tree))) |
278 | + source_stat = os.stat('source/file1') |
279 | + target_stat = os.stat('target/file1') |
280 | + self.assertEqual(source_stat, target_stat) |
281 | + |
282 | + # Explicitly disallowing hardlinks should prevent them. |
283 | + target2 = self.make_branch_and_tree('target2') |
284 | + build_tree(revision_tree, target2, source, hardlink=False) |
285 | + target2.lock_read() |
286 | + self.addCleanup(target2.unlock) |
287 | + self.assertEqual([], list(target2.iter_changes(revision_tree))) |
288 | + source_stat = os.stat('source/file1') |
289 | + target2_stat = os.stat('target2/file1') |
290 | + self.assertNotEqual(source_stat, target2_stat) |
291 | + |
292 | + def test_build_tree_accelerator_tree_moved(self): |
293 | + source = self.make_branch_and_tree('source') |
294 | + self.build_tree_contents([('source/file1', b'A')]) |
295 | + source.add(['file1'], [b'file1-id']) |
296 | + source.commit('commit files') |
297 | + source.rename_one('file1', 'file2') |
298 | + source.lock_read() |
299 | + self.addCleanup(source.unlock) |
300 | + target = self.make_branch_and_tree('target') |
301 | + revision_tree = source.basis_tree() |
302 | + revision_tree.lock_read() |
303 | + self.addCleanup(revision_tree.unlock) |
304 | + build_tree(revision_tree, target, source) |
305 | + target.lock_read() |
306 | + self.addCleanup(target.unlock) |
307 | + self.assertEqual([], list(target.iter_changes(revision_tree))) |
308 | + |
309 | + def test_build_tree_hardlinks_preserve_execute(self): |
310 | + self.requireFeature(features.HardlinkFeature) |
311 | + source = self.create_ab_tree() |
312 | + tt = source.transform() |
313 | + trans_id = tt.trans_id_tree_path('file1') |
314 | + tt.set_executability(True, trans_id) |
315 | + tt.apply() |
316 | + self.assertTrue(source.is_executable('file1')) |
317 | + target = self.make_branch_and_tree('target') |
318 | + revision_tree = source.basis_tree() |
319 | + revision_tree.lock_read() |
320 | + self.addCleanup(revision_tree.unlock) |
321 | + build_tree(revision_tree, target, source, hardlink=True) |
322 | + target.lock_read() |
323 | + self.addCleanup(target.unlock) |
324 | + self.assertEqual([], list(target.iter_changes(revision_tree))) |
325 | + self.assertTrue(source.is_executable('file1')) |
326 | + |
327 | + def install_rot13_content_filter(self, pattern): |
328 | + # We could use |
329 | + # self.addCleanup(filters._reset_registry, filters._reset_registry()) |
330 | + # below, but that looks a bit... hard to read even if it's exactly |
331 | + # the same thing. |
332 | + original_registry = filters._reset_registry() |
333 | + |
334 | + def restore_registry(): |
335 | + filters._reset_registry(original_registry) |
336 | + self.addCleanup(restore_registry) |
337 | + |
338 | + def rot13(chunks, context=None): |
339 | + return [ |
340 | + codecs.encode(chunk.decode('ascii'), 'rot13').encode('ascii') |
341 | + for chunk in chunks] |
342 | + rot13filter = filters.ContentFilter(rot13, rot13) |
343 | + filters.filter_stacks_registry.register( |
344 | + 'rot13', {'yes': [rot13filter]}.get) |
345 | + os.mkdir(self.test_home_dir + '/.bazaar') |
346 | + rules_filename = self.test_home_dir + '/.bazaar/rules' |
347 | + with open(rules_filename, 'wb') as f: |
348 | + f.write(b'[name %s]\nrot13=yes\n' % (pattern,)) |
349 | + |
350 | + def uninstall_rules(): |
351 | + os.remove(rules_filename) |
352 | + rules.reset_rules() |
353 | + self.addCleanup(uninstall_rules) |
354 | + rules.reset_rules() |
355 | + |
356 | + def test_build_tree_content_filtered_files_are_not_hardlinked(self): |
357 | + """build_tree will not hardlink files that have content filtering rules |
358 | + applied to them (but will still hardlink other files from the same tree |
359 | + if it can). |
360 | + """ |
361 | + self.requireFeature(features.HardlinkFeature) |
362 | + self.install_rot13_content_filter(b'file1') |
363 | + source = self.create_ab_tree() |
364 | + target = self.make_branch_and_tree('target') |
365 | + revision_tree = source.basis_tree() |
366 | + revision_tree.lock_read() |
367 | + self.addCleanup(revision_tree.unlock) |
368 | + build_tree(revision_tree, target, source, hardlink=True) |
369 | + target.lock_read() |
370 | + self.addCleanup(target.unlock) |
371 | + self.assertEqual([], list(target.iter_changes(revision_tree))) |
372 | + source_stat = os.stat('source/file1') |
373 | + target_stat = os.stat('target/file1') |
374 | + self.assertNotEqual(source_stat, target_stat) |
375 | + source_stat = os.stat('source/file2') |
376 | + target_stat = os.stat('target/file2') |
377 | + self.assertEqualStat(source_stat, target_stat) |
378 | + |
379 | + def test_case_insensitive_build_tree_inventory(self): |
380 | + if (features.CaseInsensitiveFilesystemFeature.available() |
381 | + or features.CaseInsCasePresFilenameFeature.available()): |
382 | + raise tests.UnavailableFeature('Fully case sensitive filesystem') |
383 | + source = self.make_branch_and_tree('source') |
384 | + self.build_tree(['source/file', 'source/FILE']) |
385 | + source.add(['file', 'FILE'], [b'lower-id', b'upper-id']) |
386 | + source.commit('added files') |
387 | + # Don't try this at home, kids! |
388 | + # Force the tree to report that it is case insensitive |
389 | + target = self.make_branch_and_tree('target') |
390 | + target.case_sensitive = False |
391 | + build_tree(source.basis_tree(), target, source, delta_from_tree=True) |
392 | + self.assertEqual('file.moved', target.id2path(b'lower-id')) |
393 | + self.assertEqual('FILE', target.id2path(b'upper-id')) |
394 | + |
395 | + def test_build_tree_observes_sha(self): |
396 | + source = self.make_branch_and_tree('source') |
397 | + self.build_tree(['source/file1', 'source/dir/', 'source/dir/file2']) |
398 | + source.add(['file1', 'dir', 'dir/file2'], |
399 | + [b'file1-id', b'dir-id', b'file2-id']) |
400 | + source.commit('new files') |
401 | + target = self.make_branch_and_tree('target') |
402 | + target.lock_write() |
403 | + self.addCleanup(target.unlock) |
404 | + # We make use of the fact that DirState caches its cutoff time. So we |
405 | + # set the 'safe' time to one minute in the future. |
406 | + state = target.current_dirstate() |
407 | + state._cutoff_time = time.time() + 60 |
408 | + build_tree(source.basis_tree(), target) |
409 | + entry1_sha = osutils.sha_file_by_name('source/file1') |
410 | + entry2_sha = osutils.sha_file_by_name('source/dir/file2') |
411 | + # entry[1] is the state information, entry[1][0] is the state of the |
412 | + # working tree, entry[1][0][1] is the sha value for the current working |
413 | + # tree |
414 | + entry1 = state._get_entry(0, path_utf8=b'file1') |
415 | + self.assertEqual(entry1_sha, entry1[1][0][1]) |
416 | + # The 'size' field must also be set. |
417 | + self.assertEqual(25, entry1[1][0][2]) |
418 | + entry1_state = entry1[1][0] |
419 | + entry2 = state._get_entry(0, path_utf8=b'dir/file2') |
420 | + self.assertEqual(entry2_sha, entry2[1][0][1]) |
421 | + self.assertEqual(29, entry2[1][0][2]) |
422 | + entry2_state = entry2[1][0] |
423 | + # Now, make sure that we don't have to re-read the content. The |
424 | + # packed_stat should match exactly. |
425 | + self.assertEqual(entry1_sha, target.get_file_sha1('file1')) |
426 | + self.assertEqual(entry2_sha, target.get_file_sha1('dir/file2')) |
427 | + self.assertEqual(entry1_state, entry1[1][0]) |
428 | + self.assertEqual(entry2_state, entry2[1][0]) |
429 | + |
430 | + |
431 | + |
432 | |
433 | === modified file 'breezy/bzr/transform.py' |
434 | --- breezy/bzr/transform.py 2020-08-15 15:06:56 +0000 |
435 | +++ breezy/bzr/transform.py 2020-09-02 17:42:36 +0000 |
436 | @@ -24,7 +24,9 @@ |
437 | |
438 | from .. import ( |
439 | annotate, |
440 | + cleanup, |
441 | conflicts, |
442 | + controldir, |
443 | errors, |
444 | lock, |
445 | multiparent, |
446 | @@ -36,8 +38,10 @@ |
447 | urlutils, |
448 | ) |
449 | |
450 | +from ..filters import filtered_output_bytes, ContentFilterContext |
451 | from ..i18n import gettext |
452 | from ..mutabletree import MutableTree |
453 | +from ..progress import ProgressPhase |
454 | from ..sixish import text_type, viewvalues, viewitems |
455 | from ..transform import ( |
456 | ROOT_PARENT, |
457 | @@ -53,7 +57,11 @@ |
458 | ReusingTransform, |
459 | MalformedTransform, |
460 | PreviewTree, |
461 | + new_by_entry, |
462 | + _reparent_children, |
463 | + resolve_conflicts, |
464 | ) |
465 | +from ..tree import find_previous_path |
466 | from .conflicts import Conflict |
467 | |
468 | from . import ( |
469 | @@ -62,6 +70,22 @@ |
470 | ) |
471 | |
472 | |
473 | +def _content_match(tree, entry, tree_path, kind, target_path): |
474 | + if entry.kind != kind: |
475 | + return False |
476 | + if entry.kind == "directory": |
477 | + return True |
478 | + if entry.kind == "file": |
479 | + with open(target_path, 'rb') as f1, \ |
480 | + tree.get_file(tree_path) as f2: |
481 | + if osutils.compare_files(f1, f2): |
482 | + return True |
483 | + elif entry.kind == "symlink": |
484 | + if tree.get_symlink_target(tree_path) == os.readlink(target_path): |
485 | + return True |
486 | + return False |
487 | + |
488 | + |
489 | class TreeTransformBase(TreeTransform): |
490 | """The base class for TreeTransform and its kin.""" |
491 | |
492 | @@ -388,7 +412,6 @@ |
493 | |
494 | def _parent_loops(self): |
495 | """No entry should be its own ancestor""" |
496 | - conflicts = [] |
497 | for trans_id in self._new_parent: |
498 | seen = set() |
499 | parent_id = trans_id |
500 | @@ -399,14 +422,12 @@ |
501 | except KeyError: |
502 | break |
503 | if parent_id == trans_id: |
504 | - conflicts.append(('parent loop', trans_id)) |
505 | + yield ('parent loop', trans_id) |
506 | if parent_id in seen: |
507 | break |
508 | - return conflicts |
509 | |
510 | def _unversioned_parents(self, by_parent): |
511 | """If parent directories are versioned, children must be versioned.""" |
512 | - conflicts = [] |
513 | for parent_id, children in viewitems(by_parent): |
514 | if parent_id == ROOT_PARENT: |
515 | continue |
516 | @@ -414,27 +435,24 @@ |
517 | continue |
518 | for child_id in children: |
519 | if self.final_is_versioned(child_id): |
520 | - conflicts.append(('unversioned parent', parent_id)) |
521 | + yield ('unversioned parent', parent_id) |
522 | break |
523 | - return conflicts |
524 | |
525 | def _improper_versioning(self): |
526 | """Cannot version a file with no contents, or a bad type. |
527 | |
528 | However, existing entries with no contents are okay. |
529 | """ |
530 | - conflicts = [] |
531 | for trans_id in self._new_id: |
532 | kind = self.final_kind(trans_id) |
533 | if kind == 'symlink' and not self._tree.supports_symlinks(): |
534 | # Ignore symlinks as they are not supported on this platform |
535 | continue |
536 | if kind is None: |
537 | - conflicts.append(('versioning no contents', trans_id)) |
538 | + yield ('versioning no contents', trans_id) |
539 | continue |
540 | if not self._tree.versionable_kind(kind): |
541 | - conflicts.append(('versioning bad kind', trans_id, kind)) |
542 | - return conflicts |
543 | + yield ('versioning bad kind', trans_id, kind) |
544 | |
545 | def _executability_conflicts(self): |
546 | """Check for bad executability changes. |
547 | @@ -444,31 +462,25 @@ |
548 | 2. only files can be executable. (The execute bit on a directory |
549 | does not indicate searchability) |
550 | """ |
551 | - conflicts = [] |
552 | for trans_id in self._new_executability: |
553 | if not self.final_is_versioned(trans_id): |
554 | - conflicts.append(('unversioned executability', trans_id)) |
555 | + yield ('unversioned executability', trans_id) |
556 | else: |
557 | if self.final_kind(trans_id) != "file": |
558 | - conflicts.append(('non-file executability', trans_id)) |
559 | - return conflicts |
560 | + yield ('non-file executability', trans_id) |
561 | |
562 | def _overwrite_conflicts(self): |
563 | """Check for overwrites (not permitted on Win32)""" |
564 | - conflicts = [] |
565 | for trans_id in self._new_contents: |
566 | if self.tree_kind(trans_id) is None: |
567 | continue |
568 | if trans_id not in self._removed_contents: |
569 | - conflicts.append(('overwrite', trans_id, |
570 | - self.final_name(trans_id))) |
571 | - return conflicts |
572 | + yield ('overwrite', trans_id, self.final_name(trans_id)) |
573 | |
574 | def _duplicate_entries(self, by_parent): |
575 | """No directory may have two entries with the same name.""" |
576 | - conflicts = [] |
577 | if (self._new_name, self._new_parent) == ({}, {}): |
578 | - return conflicts |
579 | + return |
580 | for children in viewvalues(by_parent): |
581 | name_ids = [] |
582 | for child_tid in children: |
583 | @@ -486,15 +498,12 @@ |
584 | if kind is None and not self.final_is_versioned(trans_id): |
585 | continue |
586 | if name == last_name: |
587 | - conflicts.append(('duplicate', last_trans_id, trans_id, |
588 | - name)) |
589 | + yield ('duplicate', last_trans_id, trans_id, name) |
590 | last_name = name |
591 | last_trans_id = trans_id |
592 | - return conflicts |
593 | |
594 | def _parent_type_conflicts(self, by_parent): |
595 | """Children must have a directory parent""" |
596 | - conflicts = [] |
597 | for parent_id, children in viewitems(by_parent): |
598 | if parent_id == ROOT_PARENT: |
599 | continue |
600 | @@ -510,11 +519,10 @@ |
601 | kind = self.final_kind(parent_id) |
602 | if kind is None: |
603 | # The directory will be deleted |
604 | - conflicts.append(('missing parent', parent_id)) |
605 | + yield ('missing parent', parent_id) |
606 | elif kind != "directory": |
607 | # Meh, we need a *directory* to put something in it |
608 | - conflicts.append(('non-directory parent', parent_id)) |
609 | - return conflicts |
610 | + yield ('non-directory parent', parent_id) |
611 | |
612 | def _set_executability(self, path, trans_id): |
613 | """Set the executability of versioned files """ |
614 | @@ -2253,3 +2261,230 @@ |
615 | trans_id = self._path2trans_id(path) |
616 | name = self._transform._limbo_name(trans_id) |
617 | return open(name, 'rb') |
618 | + |
619 | + |
620 | +def build_tree(tree, wt, accelerator_tree=None, hardlink=False, |
621 | + delta_from_tree=False): |
622 | + """Create working tree for a branch, using a TreeTransform. |
623 | + |
624 | + This function should be used on empty trees, having a tree root at most. |
625 | + (see merge and revert functionality for working with existing trees) |
626 | + |
627 | + Existing files are handled like so: |
628 | + |
629 | + - Existing bzrdirs take precedence over creating new items. They are |
630 | + created as '%s.diverted' % name. |
631 | + - Otherwise, if the content on disk matches the content we are building, |
632 | + it is silently replaced. |
633 | + - Otherwise, conflict resolution will move the old file to 'oldname.moved'. |
634 | + |
635 | + :param tree: The tree to convert wt into a copy of |
636 | + :param wt: The working tree that files will be placed into |
637 | + :param accelerator_tree: A tree which can be used for retrieving file |
638 | + contents more quickly than tree itself, i.e. a workingtree. tree |
639 | + will be used for cases where accelerator_tree's content is different. |
640 | + :param hardlink: If true, hard-link files to accelerator_tree, where |
641 | + possible. accelerator_tree must implement abspath, i.e. be a |
642 | + working tree. |
643 | + :param delta_from_tree: If true, build_tree may use the input Tree to |
644 | + generate the inventory delta. |
645 | + """ |
646 | + with cleanup.ExitStack() as exit_stack: |
647 | + exit_stack.enter_context(wt.lock_tree_write()) |
648 | + exit_stack.enter_context(tree.lock_read()) |
649 | + if accelerator_tree is not None: |
650 | + exit_stack.enter_context(accelerator_tree.lock_read()) |
651 | + return _build_tree(tree, wt, accelerator_tree, hardlink, |
652 | + delta_from_tree) |
653 | + |
654 | + |
655 | +def resolve_checkout(tt, conflicts, divert): |
656 | + new_conflicts = set() |
657 | + for c_type, conflict in ((c[0], c) for c in conflicts): |
658 | + # Anything but a 'duplicate' would indicate programmer error |
659 | + if c_type != 'duplicate': |
660 | + raise AssertionError(c_type) |
661 | + # Now figure out which is new and which is old |
662 | + if tt.new_contents(conflict[1]): |
663 | + new_file = conflict[1] |
664 | + old_file = conflict[2] |
665 | + else: |
666 | + new_file = conflict[2] |
667 | + old_file = conflict[1] |
668 | + |
669 | + # We should only get here if the conflict wasn't completely |
670 | + # resolved |
671 | + final_parent = tt.final_parent(old_file) |
672 | + if new_file in divert: |
673 | + new_name = tt.final_name(old_file) + '.diverted' |
674 | + tt.adjust_path(new_name, final_parent, new_file) |
675 | + new_conflicts.add((c_type, 'Diverted to', |
676 | + new_file, old_file)) |
677 | + else: |
678 | + new_name = tt.final_name(old_file) + '.moved' |
679 | + tt.adjust_path(new_name, final_parent, old_file) |
680 | + new_conflicts.add((c_type, 'Moved existing file to', |
681 | + old_file, new_file)) |
682 | + return new_conflicts |
683 | + |
684 | + |
685 | +def _build_tree(tree, wt, accelerator_tree, hardlink, delta_from_tree): |
686 | + """See build_tree.""" |
687 | + for num, _unused in enumerate(wt.all_versioned_paths()): |
688 | + if num > 0: # more than just a root |
689 | + raise errors.WorkingTreeAlreadyPopulated(base=wt.basedir) |
690 | + file_trans_id = {} |
691 | + top_pb = ui.ui_factory.nested_progress_bar() |
692 | + pp = ProgressPhase("Build phase", 2, top_pb) |
693 | + if tree.path2id('') is not None: |
694 | + # This is kind of a hack: we should be altering the root |
695 | + # as part of the regular tree shape diff logic. |
696 | + # The conditional test here is to avoid doing an |
697 | + # expensive operation (flush) every time the root id |
698 | + # is set within the tree, nor setting the root and thus |
699 | + # marking the tree as dirty, because we use two different |
700 | + # idioms here: tree interfaces and inventory interfaces. |
701 | + if wt.path2id('') != tree.path2id(''): |
702 | + wt.set_root_id(tree.path2id('')) |
703 | + wt.flush() |
704 | + tt = wt.transform() |
705 | + divert = set() |
706 | + try: |
707 | + pp.next_phase() |
708 | + file_trans_id[find_previous_path(wt, tree, '')] = tt.trans_id_tree_path('') |
709 | + with ui.ui_factory.nested_progress_bar() as pb: |
710 | + deferred_contents = [] |
711 | + num = 0 |
712 | + total = len(tree.all_versioned_paths()) |
713 | + if delta_from_tree: |
714 | + precomputed_delta = [] |
715 | + else: |
716 | + precomputed_delta = None |
717 | + # Check if tree inventory has content. If so, we populate |
718 | + # existing_files with the directory content. If there are no |
719 | +        # entries we skip populating existing_files as it's not used.
720 | +        # This improves performance and avoids unnecessary work on large
721 | + # directory trees. (#501307) |
722 | + if total > 0: |
723 | + existing_files = set() |
724 | + for dir, files in wt.walkdirs(): |
725 | + existing_files.update(f[0] for f in files) |
726 | + for num, (tree_path, entry) in \ |
727 | + enumerate(tree.iter_entries_by_dir()): |
728 | + pb.update(gettext("Building tree"), num |
729 | + - len(deferred_contents), total) |
730 | + if entry.parent_id is None: |
731 | + continue |
732 | + reparent = False |
733 | + file_id = entry.file_id |
734 | + if delta_from_tree: |
735 | + precomputed_delta.append((None, tree_path, file_id, entry)) |
736 | + if tree_path in existing_files: |
737 | + target_path = wt.abspath(tree_path) |
738 | + kind = osutils.file_kind(target_path) |
739 | + if kind == "directory": |
740 | + try: |
741 | + controldir.ControlDir.open(target_path) |
742 | + except errors.NotBranchError: |
743 | + pass |
744 | + else: |
745 | + divert.add(tree_path) |
746 | + if (tree_path not in divert |
747 | + and _content_match( |
748 | + tree, entry, tree_path, kind, target_path)): |
749 | + tt.delete_contents(tt.trans_id_tree_path(tree_path)) |
750 | + if kind == 'directory': |
751 | + reparent = True |
752 | + parent_id = file_trans_id[osutils.dirname(tree_path)] |
753 | + if entry.kind == 'file': |
754 | + # We *almost* replicate new_by_entry, so that we can defer |
755 | + # getting the file text, and get them all at once. |
756 | + trans_id = tt.create_path(entry.name, parent_id) |
757 | + file_trans_id[tree_path] = trans_id |
758 | + tt.version_file(trans_id, file_id=file_id) |
759 | + executable = tree.is_executable(tree_path) |
760 | + if executable: |
761 | + tt.set_executability(executable, trans_id) |
762 | + trans_data = (trans_id, tree_path, entry.text_sha1) |
763 | + deferred_contents.append((tree_path, trans_data)) |
764 | + else: |
765 | + file_trans_id[tree_path] = new_by_entry( |
766 | + tree_path, tt, entry, parent_id, tree) |
767 | + if reparent: |
768 | + new_trans_id = file_trans_id[tree_path] |
769 | + old_parent = tt.trans_id_tree_path(tree_path) |
770 | + _reparent_children(tt, old_parent, new_trans_id) |
771 | + offset = num + 1 - len(deferred_contents) |
772 | + _create_files(tt, tree, deferred_contents, pb, offset, |
773 | + accelerator_tree, hardlink) |
774 | + pp.next_phase() |
775 | + divert_trans = set(file_trans_id[f] for f in divert) |
776 | + |
777 | + def resolver(t, c): |
778 | + return resolve_checkout(t, c, divert_trans) |
779 | + raw_conflicts = resolve_conflicts(tt, pass_func=resolver) |
780 | + if len(raw_conflicts) > 0: |
781 | + precomputed_delta = None |
782 | + conflicts = tt.cook_conflicts(raw_conflicts) |
783 | + for conflict in conflicts: |
784 | + trace.warning(text_type(conflict)) |
785 | + try: |
786 | + wt.add_conflicts(conflicts) |
787 | + except errors.UnsupportedOperation: |
788 | + pass |
789 | + result = tt.apply(no_conflicts=True, |
790 | + precomputed_delta=precomputed_delta) |
791 | + finally: |
792 | + tt.finalize() |
793 | + top_pb.finished() |
794 | + return result |
795 | + |
796 | + |
797 | +def _create_files(tt, tree, desired_files, pb, offset, accelerator_tree, |
798 | + hardlink): |
799 | + total = len(desired_files) + offset |
800 | + wt = tt._tree |
801 | + if accelerator_tree is None: |
802 | + new_desired_files = desired_files |
803 | + else: |
804 | + iter = accelerator_tree.iter_changes(tree, include_unchanged=True) |
805 | + unchanged = [ |
806 | + change.path for change in iter |
807 | + if not (change.changed_content or change.executable[0] != change.executable[1])] |
808 | + if accelerator_tree.supports_content_filtering(): |
809 | + unchanged = [(tp, ap) for (tp, ap) in unchanged |
810 | + if not next(accelerator_tree.iter_search_rules([ap]))] |
811 | + unchanged = dict(unchanged) |
812 | + new_desired_files = [] |
813 | + count = 0 |
814 | + for unused_tree_path, (trans_id, tree_path, text_sha1) in desired_files: |
815 | + accelerator_path = unchanged.get(tree_path) |
816 | + if accelerator_path is None: |
817 | + new_desired_files.append((tree_path, |
818 | + (trans_id, tree_path, text_sha1))) |
819 | + continue |
820 | + pb.update(gettext('Adding file contents'), count + offset, total) |
821 | + if hardlink: |
822 | + tt.create_hardlink(accelerator_tree.abspath(accelerator_path), |
823 | + trans_id) |
824 | + else: |
825 | + with accelerator_tree.get_file(accelerator_path) as f: |
826 | + chunks = osutils.file_iterator(f) |
827 | + if wt.supports_content_filtering(): |
828 | + filters = wt._content_filter_stack(tree_path) |
829 | + chunks = filtered_output_bytes(chunks, filters, |
830 | + ContentFilterContext(tree_path, tree)) |
831 | + tt.create_file(chunks, trans_id, sha1=text_sha1) |
832 | + count += 1 |
833 | + offset += count |
834 | + for count, ((trans_id, tree_path, text_sha1), contents) in enumerate( |
835 | + tree.iter_files_bytes(new_desired_files)): |
836 | + if wt.supports_content_filtering(): |
837 | + filters = wt._content_filter_stack(tree_path) |
838 | + contents = filtered_output_bytes(contents, filters, |
839 | + ContentFilterContext(tree_path, tree)) |
840 | + tt.create_file(contents, trans_id, sha1=text_sha1) |
841 | + pb.update(gettext('Adding file contents'), count + offset, total) |
842 | + |
843 | + |
844 | + |
845 | |
846 | === modified file 'breezy/bzr/workingtree.py' |
847 | --- breezy/bzr/workingtree.py 2020-08-09 19:38:19 +0000 |
848 | +++ breezy/bzr/workingtree.py 2020-09-02 17:42:36 +0000 |
849 | @@ -57,6 +57,7 @@ |
850 | conflicts as _mod_conflicts, |
851 | globbing, |
852 | ignores, |
853 | + merge, |
854 | revision as _mod_revision, |
855 | rio as _mod_rio, |
856 | ) |
857 | @@ -1867,6 +1868,139 @@ |
858 | return False |
859 | return True |
860 | |
861 | + _marker = object() |
862 | + |
863 | + def update(self, change_reporter=None, possible_transports=None, |
864 | + revision=None, old_tip=_marker, show_base=False): |
865 | + """Update a working tree along its branch. |
866 | + |
867 | +        This will update the branch if it's bound too, which means we have
868 | + multiple trees involved: |
869 | + |
870 | + - The new basis tree of the master. |
871 | + - The old basis tree of the branch. |
872 | + - The old basis tree of the working tree. |
873 | + - The current working tree state. |
874 | + |
875 | + Pathologically, all three may be different, and non-ancestors of each |
876 | + other. Conceptually we want to: |
877 | + |
878 | + - Preserve the wt.basis->wt.state changes |
879 | + - Transform the wt.basis to the new master basis. |
880 | + - Apply a merge of the old branch basis to get any 'local' changes from |
881 | + it into the tree. |
882 | + - Restore the wt.basis->wt.state changes. |
883 | + |
884 | + There isn't a single operation at the moment to do that, so we: |
885 | + |
886 | + - Merge current state -> basis tree of the master w.r.t. the old tree |
887 | + basis. |
888 | + - Do a 'normal' merge of the old branch basis if it is relevant. |
889 | + |
890 | + :param revision: The target revision to update to. Must be in the |
891 | + revision history. |
892 | + :param old_tip: If branch.update() has already been run, the value it |
893 | + returned (old tip of the branch or None). _marker is used |
894 | + otherwise. |
895 | + """ |
896 | + if self.branch.get_bound_location() is not None: |
897 | + self.lock_write() |
898 | + update_branch = (old_tip is self._marker) |
899 | + else: |
900 | + self.lock_tree_write() |
901 | + update_branch = False |
902 | + try: |
903 | + if update_branch: |
904 | + old_tip = self.branch.update(possible_transports) |
905 | + else: |
906 | + if old_tip is self._marker: |
907 | + old_tip = None |
908 | + return self._update_tree(old_tip, change_reporter, revision, show_base) |
909 | + finally: |
910 | + self.unlock() |
911 | + |
912 | + def _update_tree(self, old_tip=None, change_reporter=None, revision=None, |
913 | + show_base=False): |
914 | + """Update a tree to the master branch. |
915 | + |
916 | +        :param old_tip: if supplied, the previous tip revision of the branch,
917 | + before it was changed to the master branch's tip. |
918 | + """ |
919 | + # here if old_tip is not None, it is the old tip of the branch before |
920 | + # it was updated from the master branch. This should become a pending |
921 | +        # merge in the working tree to preserve the user's existing work. We
922 | +        # can't set that until we update the working tree's last revision to be
923 | + # one from the new branch, because it will just get absorbed by the |
924 | + # parent de-duplication logic. |
925 | + # |
926 | +        # We MUST save it even if an error occurs, because otherwise the user's
927 | + # local work is unreferenced and will appear to have been lost. |
928 | + # |
929 | + with self.lock_tree_write(): |
930 | + nb_conflicts = 0 |
931 | + try: |
932 | + last_rev = self.get_parent_ids()[0] |
933 | + except IndexError: |
934 | + last_rev = _mod_revision.NULL_REVISION |
935 | + if revision is None: |
936 | + revision = self.branch.last_revision() |
937 | + |
938 | + old_tip = old_tip or _mod_revision.NULL_REVISION |
939 | + |
940 | + if not _mod_revision.is_null(old_tip) and old_tip != last_rev: |
941 | + # the branch we are bound to was updated |
942 | + # merge those changes in first |
943 | + base_tree = self.basis_tree() |
944 | + other_tree = self.branch.repository.revision_tree(old_tip) |
945 | + nb_conflicts = merge.merge_inner(self.branch, other_tree, |
946 | + base_tree, this_tree=self, |
947 | + change_reporter=change_reporter, |
948 | + show_base=show_base) |
949 | + if nb_conflicts: |
950 | + self.add_parent_tree((old_tip, other_tree)) |
951 | + return nb_conflicts |
952 | + |
953 | + if last_rev != _mod_revision.ensure_null(revision): |
954 | + # the working tree is up to date with the branch |
955 | + # we can merge the specified revision from master |
956 | + to_tree = self.branch.repository.revision_tree(revision) |
957 | + to_root_id = to_tree.path2id('') |
958 | + |
959 | + basis = self.basis_tree() |
960 | + with basis.lock_read(): |
961 | + if (basis.path2id('') is None or basis.path2id('') != to_root_id): |
962 | + self.set_root_id(to_root_id) |
963 | + self.flush() |
964 | + |
965 | + # determine the branch point |
966 | + graph = self.branch.repository.get_graph() |
967 | + base_rev_id = graph.find_unique_lca(self.branch.last_revision(), |
968 | + last_rev) |
969 | + base_tree = self.branch.repository.revision_tree(base_rev_id) |
970 | + |
971 | + nb_conflicts = merge.merge_inner(self.branch, to_tree, base_tree, |
972 | + this_tree=self, |
973 | + change_reporter=change_reporter, |
974 | + show_base=show_base) |
975 | + self.set_last_revision(revision) |
976 | + # TODO - dedup parents list with things merged by pull ? |
977 | +            # reuse the tree we've updated to, to set the basis:
978 | + parent_trees = [(revision, to_tree)] |
979 | + merges = self.get_parent_ids()[1:] |
980 | + # Ideally we ask the tree for the trees here, that way the working |
981 | + # tree can decide whether to give us the entire tree or give us a |
982 | + # lazy initialised tree. dirstate for instance will have the trees |
983 | + # in ram already, whereas a last-revision + basis-inventory tree |
984 | + # will not, but also does not need them when setting parents. |
985 | + for parent in merges: |
986 | + parent_trees.append( |
987 | + (parent, self.branch.repository.revision_tree(parent))) |
988 | + if not _mod_revision.is_null(old_tip): |
989 | + parent_trees.append( |
990 | + (old_tip, self.branch.repository.revision_tree(old_tip))) |
991 | + self.set_parent_trees(parent_trees) |
992 | + last_rev = parent_trees[0][0] |
993 | + return nb_conflicts |
994 | |
995 | |
996 | class WorkingTreeFormatMetaDir(bzrdir.BzrFormat, WorkingTreeFormat): |
997 | |
998 | === modified file 'breezy/bzr/workingtree_3.py' |
999 | --- breezy/bzr/workingtree_3.py 2019-06-29 13:16:26 +0000 |
1000 | +++ breezy/bzr/workingtree_3.py 2020-09-02 17:42:36 +0000 |
1001 | @@ -25,6 +25,7 @@ |
1002 | from . import ( |
1003 | bzrdir, |
1004 | inventory, |
1005 | + transform as bzr_transform, |
1006 | ) |
1007 | |
1008 | from .. import ( |
1009 | @@ -33,7 +34,6 @@ |
1010 | osutils, |
1011 | revision as _mod_revision, |
1012 | trace, |
1013 | - transform, |
1014 | ) |
1015 | from ..lockable_files import LockableFiles |
1016 | from ..lockdir import LockDir |
1017 | @@ -230,7 +230,7 @@ |
1018 | wt.set_parent_trees([]) |
1019 | else: |
1020 | wt.set_parent_trees([(revision_id, basis_tree)]) |
1021 | - transform.build_tree(basis_tree, wt) |
1022 | + bzr_transform.build_tree(basis_tree, wt) |
1023 | for hook in MutableTree.hooks['post_build_tree']: |
1024 | hook(wt) |
1025 | finally: |
1026 | |
1027 | === modified file 'breezy/bzr/workingtree_4.py' |
1028 | --- breezy/bzr/workingtree_4.py 2020-08-15 12:10:03 +0000 |
1029 | +++ breezy/bzr/workingtree_4.py 2020-09-02 17:42:36 +0000 |
1030 | @@ -43,12 +43,12 @@ |
1031 | revision as _mod_revision, |
1032 | revisiontree, |
1033 | trace, |
1034 | - transform, |
1035 | views, |
1036 | ) |
1037 | from breezy.bzr import ( |
1038 | dirstate, |
1039 | generate_ids, |
1040 | + transform as bzr_transform, |
1041 | ) |
1042 | """) |
1043 | |
1044 | @@ -1577,9 +1577,10 @@ |
1045 | # delta_from_tree is safe even for DirStateRevisionTrees, |
1046 | # because wt4.apply_inventory_delta does not mutate the input |
1047 | # inventory entries. |
1048 | - transform.build_tree(basis, wt, accelerator_tree, |
1049 | - hardlink=hardlink, |
1050 | - delta_from_tree=delta_from_tree) |
1051 | + bzr_transform.build_tree( |
1052 | + basis, wt, accelerator_tree, |
1053 | + hardlink=hardlink, |
1054 | + delta_from_tree=delta_from_tree) |
1055 | for hook in MutableTree.hooks['post_build_tree']: |
1056 | hook(wt) |
1057 | finally: |
1058 | |
1059 | === modified file 'breezy/git/tests/__init__.py' |
1060 | --- breezy/git/tests/__init__.py 2020-06-21 22:00:40 +0000 |
1061 | +++ breezy/git/tests/__init__.py 2020-09-02 17:42:36 +0000 |
1062 | @@ -229,6 +229,7 @@ |
1063 | 'test_revspec', |
1064 | 'test_roundtrip', |
1065 | 'test_server', |
1066 | + 'test_transform', |
1067 | 'test_transportgit', |
1068 | 'test_tree', |
1069 | 'test_unpeel_map', |
1070 | |
1071 | === modified file 'breezy/git/tests/test_blackbox.py' |
1072 | --- breezy/git/tests/test_blackbox.py 2020-03-22 20:02:36 +0000 |
1073 | +++ breezy/git/tests/test_blackbox.py 2020-09-02 17:42:36 +0000 |
1074 | @@ -177,6 +177,20 @@ |
1075 | error, |
1076 | 'Pushed up to revision id git(.*).\n') |
1077 | |
1078 | + def test_merge(self): |
1079 | + self.run_bzr(['init', '--git', 'orig']) |
1080 | + self.build_tree_contents([('orig/a', 'orig contents\n')]) |
1081 | + self.run_bzr(['add', 'orig/a']) |
1082 | + self.run_bzr(['commit', '-m', 'add orig', 'orig']) |
1083 | + self.run_bzr(['clone', 'orig', 'other']) |
1084 | + self.build_tree_contents([('other/a', 'new contents\n')]) |
1085 | + self.run_bzr(['commit', '-m', 'modify', 'other']) |
1086 | + self.build_tree_contents([('orig/b', 'more\n')]) |
1087 | + self.run_bzr(['add', 'orig/b']) |
1088 | + self.build_tree_contents([('orig/a', 'new contents\n')]) |
1089 | + self.run_bzr(['commit', '-m', 'more', 'orig']) |
1090 | + self.run_bzr(['merge', '-d', 'orig', 'other']) |
1091 | + |
1092 | def test_push_lossy_non_mainline(self): |
1093 | self.run_bzr(['init', '--git', 'bla']) |
1094 | self.run_bzr(['init', 'foo']) |
1095 | |
1096 | === added file 'breezy/git/tests/test_transform.py' |
1097 | --- breezy/git/tests/test_transform.py 1970-01-01 00:00:00 +0000 |
1098 | +++ breezy/git/tests/test_transform.py 2020-09-02 17:42:36 +0000 |
1099 | @@ -0,0 +1,41 @@ |
1100 | +# Copyright (C) 2020 Jelmer Vernooij <jelmer@jelmer.uk> |
1101 | +# |
1102 | +# This program is free software; you can redistribute it and/or modify |
1103 | +# it under the terms of the GNU General Public License as published by |
1104 | +# the Free Software Foundation; either version 2 of the License, or |
1105 | +# (at your option) any later version. |
1106 | +# |
1107 | +# This program is distributed in the hope that it will be useful, |
1108 | +# but WITHOUT ANY WARRANTY; without even the implied warranty of |
1109 | +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
1110 | +# GNU General Public License for more details. |
1111 | +# |
1112 | +# You should have received a copy of the GNU General Public License |
1113 | +# along with this program; if not, write to the Free Software |
1114 | +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA |
1115 | + |
1116 | +"""Tests for tree transform.""" |
1117 | + |
1118 | +from __future__ import absolute_import |
1119 | + |
1120 | +import os |
1121 | + |
1122 | +from ...transform import ROOT_PARENT, conflict_pass, resolve_conflicts |
1123 | +from . import TestCaseWithTransport |
1124 | + |
1125 | + |
1126 | +class GitTransformTests(TestCaseWithTransport): |
1127 | + |
1128 | + def test_directory_exists(self): |
1129 | + tree = self.make_branch_and_tree('.', format='git') |
1130 | + tt = tree.transform() |
1131 | + dir1 = tt.new_directory('dir', ROOT_PARENT) |
1132 | + tt.new_file('name1', dir1, [b'content1']) |
1133 | + dir2 = tt.new_directory('dir', ROOT_PARENT) |
1134 | + tt.new_file('name2', dir2, [b'content2']) |
1135 | + raw_conflicts = resolve_conflicts( |
1136 | + tt, None, lambda t, c: conflict_pass(t, c)) |
1137 | + conflicts = tt.cook_conflicts(raw_conflicts) |
1138 | + self.assertEqual([], list(conflicts)) |
1139 | + tt.apply() |
1140 | + self.assertEqual(set(['name1', 'name2']), set(os.listdir('dir'))) |
1141 | |
1142 | === modified file 'breezy/git/transform.py' |
1143 | --- breezy/git/transform.py 2020-08-22 15:44:15 +0000 |
1144 | +++ breezy/git/transform.py 2020-09-02 17:42:36 +0000 |
1145 | @@ -301,7 +301,6 @@ |
1146 | |
1147 | def _parent_loops(self): |
1148 | """No entry should be its own ancestor""" |
1149 | - conflicts = [] |
1150 | for trans_id in self._new_parent: |
1151 | seen = set() |
1152 | parent_id = trans_id |
1153 | @@ -312,28 +311,25 @@ |
1154 | except KeyError: |
1155 | break |
1156 | if parent_id == trans_id: |
1157 | - conflicts.append(('parent loop', trans_id)) |
1158 | + yield ('parent loop', trans_id) |
1159 | if parent_id in seen: |
1160 | break |
1161 | - return conflicts |
1162 | |
1163 | def _improper_versioning(self): |
1164 | """Cannot version a file with no contents, or a bad type. |
1165 | |
1166 | However, existing entries with no contents are okay. |
1167 | """ |
1168 | - conflicts = [] |
1169 | for trans_id in self._versioned: |
1170 | kind = self.final_kind(trans_id) |
1171 | if kind == 'symlink' and not self._tree.supports_symlinks(): |
1172 | # Ignore symlinks as they are not supported on this platform |
1173 | continue |
1174 | if kind is None: |
1175 | - conflicts.append(('versioning no contents', trans_id)) |
1176 | + yield ('versioning no contents', trans_id) |
1177 | continue |
1178 | if not self._tree.versionable_kind(kind): |
1179 | - conflicts.append(('versioning bad kind', trans_id, kind)) |
1180 | - return conflicts |
1181 | + yield ('versioning bad kind', trans_id, kind) |
1182 | |
1183 | def _executability_conflicts(self): |
1184 | """Check for bad executability changes. |
1185 | @@ -343,31 +339,25 @@ |
1186 | 2. only files can be executable. (The execute bit on a directory |
1187 | does not indicate searchability) |
1188 | """ |
1189 | - conflicts = [] |
1190 | for trans_id in self._new_executability: |
1191 | if not self.final_is_versioned(trans_id): |
1192 | - conflicts.append(('unversioned executability', trans_id)) |
1193 | + yield ('unversioned executability', trans_id) |
1194 | else: |
1195 | if self.final_kind(trans_id) != "file": |
1196 | - conflicts.append(('non-file executability', trans_id)) |
1197 | - return conflicts |
1198 | + yield ('non-file executability', trans_id) |
1199 | |
1200 | def _overwrite_conflicts(self): |
1201 | """Check for overwrites (not permitted on Win32)""" |
1202 | - conflicts = [] |
1203 | for trans_id in self._new_contents: |
1204 | if self.tree_kind(trans_id) is None: |
1205 | continue |
1206 | if trans_id not in self._removed_contents: |
1207 | - conflicts.append(('overwrite', trans_id, |
1208 | - self.final_name(trans_id))) |
1209 | - return conflicts |
1210 | + yield ('overwrite', trans_id, self.final_name(trans_id)) |
1211 | |
1212 | def _duplicate_entries(self, by_parent): |
1213 | """No directory may have two entries with the same name.""" |
1214 | - conflicts = [] |
1215 | if (self._new_name, self._new_parent) == ({}, {}): |
1216 | - return conflicts |
1217 | + return |
1218 | for children in viewvalues(by_parent): |
1219 | name_ids = [] |
1220 | for child_tid in children: |
1221 | @@ -385,15 +375,12 @@ |
1222 | if kind is None and not self.final_is_versioned(trans_id): |
1223 | continue |
1224 | if name == last_name: |
1225 | - conflicts.append(('duplicate', last_trans_id, trans_id, |
1226 | - name)) |
1227 | + yield ('duplicate', last_trans_id, trans_id, name) |
1228 | last_name = name |
1229 | last_trans_id = trans_id |
1230 | - return conflicts |
1231 | |
1232 | def _parent_type_conflicts(self, by_parent): |
1233 | """Children must have a directory parent""" |
1234 | - conflicts = [] |
1235 | for parent_id, children in viewitems(by_parent): |
1236 | if parent_id == ROOT_PARENT: |
1237 | continue |
1238 | @@ -409,11 +396,10 @@ |
1239 | kind = self.final_kind(parent_id) |
1240 | if kind is None: |
1241 | # The directory will be deleted |
1242 | - conflicts.append(('missing parent', parent_id)) |
1243 | + yield ('missing parent', parent_id) |
1244 | elif kind != "directory": |
1245 | # Meh, we need a *directory* to put something in it |
1246 | - conflicts.append(('non-directory parent', parent_id)) |
1247 | - return conflicts |
1248 | + yield ('non-directory parent', parent_id) |
1249 | |
1250 | def _set_executability(self, path, trans_id): |
1251 | """Set the executability of versioned files """ |
1252 | @@ -746,7 +732,7 @@ |
1253 | """Cancel the creation of new file contents.""" |
1254 | raise NotImplementedError(self.cancel_creation) |
1255 | |
1256 | - def apply(self, no_conflicts=False, precomputed_delta=None, _mover=None): |
1257 | + def apply(self, no_conflicts=False, _mover=None): |
1258 | """Apply all changes to the inventory and filesystem. |
1259 | |
1260 | If filesystem or inventory conflicts are present, MalformedTransform |
1261 | @@ -756,8 +742,6 @@ |
1262 | |
1263 | :param no_conflicts: if True, the caller guarantees there are no |
1264 | conflicts, so no check is made. |
1265 | - :param precomputed_delta: An inventory delta to use instead of |
1266 | - calculating one. |
1267 | :param _mover: Supply an alternate FileMover, for testing |
1268 | """ |
1269 | raise NotImplementedError(self.apply) |
1270 | |
1271 | === modified file 'breezy/git/workingtree.py' |
1272 | --- breezy/git/workingtree.py 2020-08-16 22:04:52 +0000 |
1273 | +++ breezy/git/workingtree.py 2020-09-02 17:42:36 +0000 |
1274 | @@ -1451,6 +1451,132 @@ |
1275 | config.write_to_path(path) |
1276 | self.add('.gitmodules') |
1277 | |
1278 | + _marker = object() |
1279 | + |
1280 | + def update(self, change_reporter=None, possible_transports=None, |
1281 | + revision=None, old_tip=_marker, show_base=False): |
1282 | + """Update a working tree along its branch. |
1283 | + |
1284 | +        This will update the branch if it's bound too, which means we have
1285 | + multiple trees involved: |
1286 | + |
1287 | + - The new basis tree of the master. |
1288 | + - The old basis tree of the branch. |
1289 | + - The old basis tree of the working tree. |
1290 | + - The current working tree state. |
1291 | + |
1292 | + Pathologically, all three may be different, and non-ancestors of each |
1293 | + other. Conceptually we want to: |
1294 | + |
1295 | + - Preserve the wt.basis->wt.state changes |
1296 | + - Transform the wt.basis to the new master basis. |
1297 | + - Apply a merge of the old branch basis to get any 'local' changes from |
1298 | + it into the tree. |
1299 | + - Restore the wt.basis->wt.state changes. |
1300 | + |
1301 | + There isn't a single operation at the moment to do that, so we: |
1302 | + |
1303 | + - Merge current state -> basis tree of the master w.r.t. the old tree |
1304 | + basis. |
1305 | + - Do a 'normal' merge of the old branch basis if it is relevant. |
1306 | + |
1307 | + :param revision: The target revision to update to. Must be in the |
1308 | + revision history. |
1309 | + :param old_tip: If branch.update() has already been run, the value it |
1310 | + returned (old tip of the branch or None). _marker is used |
1311 | + otherwise. |
1312 | + """ |
1313 | + if self.branch.get_bound_location() is not None: |
1314 | + self.lock_write() |
1315 | + update_branch = (old_tip is self._marker) |
1316 | + else: |
1317 | + self.lock_tree_write() |
1318 | + update_branch = False |
1319 | + try: |
1320 | + if update_branch: |
1321 | + old_tip = self.branch.update(possible_transports) |
1322 | + else: |
1323 | + if old_tip is self._marker: |
1324 | + old_tip = None |
1325 | + return self._update_tree(old_tip, change_reporter, revision, show_base) |
1326 | + finally: |
1327 | + self.unlock() |
1328 | + |
1329 | + def _update_tree(self, old_tip=None, change_reporter=None, revision=None, |
1330 | + show_base=False): |
1331 | + """Update a tree to the master branch. |
1332 | + |
1333 | +        :param old_tip: if supplied, the previous tip revision of the branch,
1334 | + before it was changed to the master branch's tip. |
1335 | + """ |
1336 | + # here if old_tip is not None, it is the old tip of the branch before |
1337 | + # it was updated from the master branch. This should become a pending |
1338 | +        # merge in the working tree to preserve the user's existing work. We
1339 | +        # can't set that until we update the working tree's last revision to be
1340 | + # one from the new branch, because it will just get absorbed by the |
1341 | + # parent de-duplication logic. |
1342 | + # |
1343 | +        # We MUST save it even if an error occurs, because otherwise the user's
1344 | + # local work is unreferenced and will appear to have been lost. |
1345 | + # |
1346 | + with self.lock_tree_write(): |
1347 | + from .. import merge |
1348 | + nb_conflicts = 0 |
1349 | + try: |
1350 | + last_rev = self.get_parent_ids()[0] |
1351 | + except IndexError: |
1352 | + last_rev = _mod_revision.NULL_REVISION |
1353 | + if revision is None: |
1354 | + revision = self.branch.last_revision() |
1355 | + |
1356 | + old_tip = old_tip or _mod_revision.NULL_REVISION |
1357 | + |
1358 | + if not _mod_revision.is_null(old_tip) and old_tip != last_rev: |
1359 | + # the branch we are bound to was updated |
1360 | + # merge those changes in first |
1361 | + base_tree = self.basis_tree() |
1362 | + other_tree = self.branch.repository.revision_tree(old_tip) |
1363 | + nb_conflicts = merge.merge_inner(self.branch, other_tree, |
1364 | + base_tree, this_tree=self, |
1365 | + change_reporter=change_reporter, |
1366 | + show_base=show_base) |
1367 | + if nb_conflicts: |
1368 | + self.add_parent_tree((old_tip, other_tree)) |
1369 | + return nb_conflicts |
1370 | + |
1371 | + if last_rev != _mod_revision.ensure_null(revision): |
1372 | + to_tree = self.branch.repository.revision_tree(revision) |
1373 | + |
1374 | + # determine the branch point |
1375 | + graph = self.branch.repository.get_graph() |
1376 | + base_rev_id = graph.find_unique_lca(self.branch.last_revision(), |
1377 | + last_rev) |
1378 | + base_tree = self.branch.repository.revision_tree(base_rev_id) |
1379 | + |
1380 | + nb_conflicts = merge.merge_inner(self.branch, to_tree, base_tree, |
1381 | + this_tree=self, |
1382 | + change_reporter=change_reporter, |
1383 | + show_base=show_base) |
1384 | + self.set_last_revision(revision) |
1385 | + # TODO - dedup parents list with things merged by pull ? |
1386 | +            # reuse the tree we've updated to, to set the basis:
1387 | + parent_trees = [(revision, to_tree)] |
1388 | + merges = self.get_parent_ids()[1:] |
1389 | + # Ideally we ask the tree for the trees here, that way the working |
1390 | + # tree can decide whether to give us the entire tree or give us a |
1391 | + # lazy initialised tree. dirstate for instance will have the trees |
1392 | + # in ram already, whereas a last-revision + basis-inventory tree |
1393 | + # will not, but also does not need them when setting parents. |
1394 | + for parent in merges: |
1395 | + parent_trees.append( |
1396 | + (parent, self.branch.repository.revision_tree(parent))) |
1397 | + if not _mod_revision.is_null(old_tip): |
1398 | + parent_trees.append( |
1399 | + (old_tip, self.branch.repository.revision_tree(old_tip))) |
1400 | + self.set_parent_trees(parent_trees) |
1401 | + last_rev = parent_trees[0][0] |
1402 | + return nb_conflicts |
1403 | + |
1404 | |
1405 | class GitWorkingTreeFormat(workingtree.WorkingTreeFormat): |
1406 | |
1407 | |
1408 | === modified file 'breezy/plugins/weave_fmt/workingtree.py' |
1409 | --- breezy/plugins/weave_fmt/workingtree.py 2020-08-09 18:10:01 +0000 |
1410 | +++ breezy/plugins/weave_fmt/workingtree.py 2020-09-02 17:42:36 +0000 |
1411 | @@ -25,11 +25,11 @@ |
1412 | lock, |
1413 | osutils, |
1414 | revision as _mod_revision, |
1415 | - transform, |
1416 | ) |
1417 | from ...bzr import ( |
1418 | conflicts as _mod_bzr_conflicts, |
1419 | inventory, |
1420 | + transform as bzr_transform, |
1421 | xml5, |
1422 | ) |
1423 | from ...mutabletree import MutableTree |
1424 | @@ -117,7 +117,7 @@ |
1425 | else: |
1426 | parent_trees = [(revision_id, basis_tree)] |
1427 | wt.set_parent_trees(parent_trees) |
1428 | - transform.build_tree(basis_tree, wt) |
1429 | + bzr_transform.build_tree(basis_tree, wt) |
1430 | for hook in MutableTree.hooks['post_build_tree']: |
1431 | hook(wt) |
1432 | return wt |
1433 | |
1434 | === modified file 'breezy/tests/per_tree/__init__.py' |
1435 | --- breezy/tests/per_tree/__init__.py 2020-07-05 13:18:03 +0000 |
1436 | +++ breezy/tests/per_tree/__init__.py 2020-09-02 17:42:36 +0000 |
1437 | @@ -26,6 +26,7 @@ |
1438 | """ |
1439 | |
1440 | from breezy import ( |
1441 | + cleanup, |
1442 | errors, |
1443 | tests, |
1444 | transform, |
1445 | @@ -83,7 +84,9 @@ |
1446 | tree.lock_read() |
1447 | testcase.addCleanup(tree.unlock) |
1448 | pp = None |
1449 | - transform._prepare_revert_transform(basis, tree, tt, None, False, None, |
1450 | + es = cleanup.ExitStack() |
1451 | + testcase.addCleanup(es.close) |
1452 | + transform._prepare_revert_transform(es, basis, tree, tt, None, False, None, |
1453 | basis, {}) |
1454 | preview_tree = tt.get_preview_tree() |
1455 | preview_tree.set_parent_ids(tree.get_parent_ids()) |
1456 | |
1457 | === modified file 'breezy/tests/per_workingtree/test_transform.py' |
1458 | --- breezy/tests/per_workingtree/test_transform.py 2020-08-20 19:27:00 +0000 |
1459 | +++ breezy/tests/per_workingtree/test_transform.py 2020-09-02 17:42:36 +0000 |
1460 | @@ -64,7 +64,6 @@ |
1461 | create_from_tree, |
1462 | FinalPaths, |
1463 | resolve_conflicts, |
1464 | - resolve_checkout, |
1465 | ROOT_PARENT, |
1466 | ImmortalLimbo, |
1467 | MalformedTransform, |
1468 | @@ -74,6 +73,7 @@ |
1469 | ) |
1470 | |
1471 | from breezy.bzr.inventorytree import InventoryTreeChange |
1472 | +from breezy.bzr.transform import resolve_checkout |
1473 | |
1474 | from breezy.tests.per_workingtree import TestCaseWithWorkingTree |
1475 | |
1476 | |
1477 | === modified file 'breezy/tests/test_transform.py' |
1478 | --- breezy/tests/test_transform.py 2020-08-09 18:10:01 +0000 |
1479 | +++ breezy/tests/test_transform.py 2020-09-02 17:42:36 +0000 |
1480 | @@ -75,12 +75,10 @@ |
1481 | SymlinkFeature, |
1482 | ) |
1483 | from ..transform import ( |
1484 | - build_tree, |
1485 | create_from_tree, |
1486 | _FileMover, |
1487 | FinalPaths, |
1488 | resolve_conflicts, |
1489 | - resolve_checkout, |
1490 | ROOT_PARENT, |
1491 | ImmortalLimbo, |
1492 | MalformedTransform, |
1493 | @@ -301,410 +299,6 @@ |
1494 | self.assertEqual(this.wt.id2path(b'i'), pathjoin('b/i1.OTHER')) |
1495 | |
1496 | |
1497 | -class TestBuildTree(tests.TestCaseWithTransport): |
1498 | - |
1499 | - def test_build_tree_with_symlinks(self): |
1500 | - self.requireFeature(SymlinkFeature) |
1501 | - os.mkdir('a') |
1502 | - a = ControlDir.create_standalone_workingtree('a') |
1503 | - os.mkdir('a/foo') |
1504 | - with open('a/foo/bar', 'wb') as f: |
1505 | - f.write(b'contents') |
1506 | - os.symlink('a/foo/bar', 'a/foo/baz') |
1507 | - a.add(['foo', 'foo/bar', 'foo/baz']) |
1508 | - a.commit('initial commit') |
1509 | - b = ControlDir.create_standalone_workingtree('b') |
1510 | - basis = a.basis_tree() |
1511 | - basis.lock_read() |
1512 | - self.addCleanup(basis.unlock) |
1513 | - build_tree(basis, b) |
1514 | - self.assertIs(os.path.isdir('b/foo'), True) |
1515 | - with open('b/foo/bar', 'rb') as f: |
1516 | - self.assertEqual(f.read(), b"contents") |
1517 | - self.assertEqual(os.readlink('b/foo/baz'), 'a/foo/bar') |
1518 | - |
1519 | - def test_build_with_references(self): |
1520 | - tree = self.make_branch_and_tree('source', |
1521 | - format='development-subtree') |
1522 | - subtree = self.make_branch_and_tree('source/subtree', |
1523 | - format='development-subtree') |
1524 | - tree.add_reference(subtree) |
1525 | - tree.commit('a revision') |
1526 | - tree.branch.create_checkout('target') |
1527 | - self.assertPathExists('target') |
1528 | - self.assertPathExists('target/subtree') |
1529 | - |
1530 | - def test_file_conflict_handling(self): |
1531 | - """Ensure that when building trees, conflict handling is done""" |
1532 | - source = self.make_branch_and_tree('source') |
1533 | - target = self.make_branch_and_tree('target') |
1534 | - self.build_tree(['source/file', 'target/file']) |
1535 | - source.add('file', b'new-file') |
1536 | - source.commit('added file') |
1537 | - build_tree(source.basis_tree(), target) |
1538 | - self.assertEqual( |
1539 | - [DuplicateEntry('Moved existing file to', 'file.moved', |
1540 | - 'file', None, 'new-file')], |
1541 | - target.conflicts()) |
1542 | - target2 = self.make_branch_and_tree('target2') |
1543 | - with open('target2/file', 'wb') as target_file, \ |
1544 | - open('source/file', 'rb') as source_file: |
1545 | - target_file.write(source_file.read()) |
1546 | - build_tree(source.basis_tree(), target2) |
1547 | - self.assertEqual([], target2.conflicts()) |
1548 | - |
1549 | - def test_symlink_conflict_handling(self): |
1550 | - """Ensure that when building trees, conflict handling is done""" |
1551 | - self.requireFeature(SymlinkFeature) |
1552 | - source = self.make_branch_and_tree('source') |
1553 | - os.symlink('foo', 'source/symlink') |
1554 | - source.add('symlink', b'new-symlink') |
1555 | - source.commit('added file') |
1556 | - target = self.make_branch_and_tree('target') |
1557 | - os.symlink('bar', 'target/symlink') |
1558 | - build_tree(source.basis_tree(), target) |
1559 | - self.assertEqual( |
1560 | - [DuplicateEntry('Moved existing file to', 'symlink.moved', |
1561 | - 'symlink', None, 'new-symlink')], |
1562 | - target.conflicts()) |
1563 | - target = self.make_branch_and_tree('target2') |
1564 | - os.symlink('foo', 'target2/symlink') |
1565 | - build_tree(source.basis_tree(), target) |
1566 | - self.assertEqual([], target.conflicts()) |
1567 | - |
1568 | - def test_directory_conflict_handling(self): |
1569 | - """Ensure that when building trees, conflict handling is done""" |
1570 | - source = self.make_branch_and_tree('source') |
1571 | - target = self.make_branch_and_tree('target') |
1572 | - self.build_tree(['source/dir1/', 'source/dir1/file', 'target/dir1/']) |
1573 | - source.add(['dir1', 'dir1/file'], [b'new-dir1', b'new-file']) |
1574 | - source.commit('added file') |
1575 | - build_tree(source.basis_tree(), target) |
1576 | - self.assertEqual([], target.conflicts()) |
1577 | - self.assertPathExists('target/dir1/file') |
1578 | - |
1579 | - # Ensure contents are merged |
1580 | - target = self.make_branch_and_tree('target2') |
1581 | - self.build_tree(['target2/dir1/', 'target2/dir1/file2']) |
1582 | - build_tree(source.basis_tree(), target) |
1583 | - self.assertEqual([], target.conflicts()) |
1584 | - self.assertPathExists('target2/dir1/file2') |
1585 | - self.assertPathExists('target2/dir1/file') |
1586 | - |
1587 | - # Ensure new contents are suppressed for existing branches |
1588 | - target = self.make_branch_and_tree('target3') |
1589 | - self.make_branch('target3/dir1') |
1590 | - self.build_tree(['target3/dir1/file2']) |
1591 | - build_tree(source.basis_tree(), target) |
1592 | - self.assertPathDoesNotExist('target3/dir1/file') |
1593 | - self.assertPathExists('target3/dir1/file2') |
1594 | - self.assertPathExists('target3/dir1.diverted/file') |
1595 | - self.assertEqual( |
1596 | - [DuplicateEntry('Diverted to', 'dir1.diverted', |
1597 | - 'dir1', 'new-dir1', None)], |
1598 | - target.conflicts()) |
1599 | - |
1600 | - target = self.make_branch_and_tree('target4') |
1601 | - self.build_tree(['target4/dir1/']) |
1602 | - self.make_branch('target4/dir1/file') |
1603 | - build_tree(source.basis_tree(), target) |
1604 | - self.assertPathExists('target4/dir1/file') |
1605 | - self.assertEqual('directory', file_kind('target4/dir1/file')) |
1606 | - self.assertPathExists('target4/dir1/file.diverted') |
1607 | - self.assertEqual( |
1608 | - [DuplicateEntry('Diverted to', 'dir1/file.diverted', |
1609 | - 'dir1/file', 'new-file', None)], |
1610 | - target.conflicts()) |
1611 | - |
1612 | - def test_mixed_conflict_handling(self): |
1613 | - """Ensure that when building trees, conflict handling is done""" |
1614 | - source = self.make_branch_and_tree('source') |
1615 | - target = self.make_branch_and_tree('target') |
1616 | - self.build_tree(['source/name', 'target/name/']) |
1617 | - source.add('name', b'new-name') |
1618 | - source.commit('added file') |
1619 | - build_tree(source.basis_tree(), target) |
1620 | - self.assertEqual( |
1621 | - [DuplicateEntry('Moved existing file to', |
1622 | - 'name.moved', 'name', None, 'new-name')], |
1623 | - target.conflicts()) |
1624 | - |
1625 | - def test_raises_in_populated(self): |
1626 | - source = self.make_branch_and_tree('source') |
1627 | - self.build_tree(['source/name']) |
1628 | - source.add('name') |
1629 | - source.commit('added name') |
1630 | - target = self.make_branch_and_tree('target') |
1631 | - self.build_tree(['target/name']) |
1632 | - target.add('name') |
1633 | - self.assertRaises(errors.WorkingTreeAlreadyPopulated, |
1634 | - build_tree, source.basis_tree(), target) |
1635 | - |
1636 | - def test_build_tree_rename_count(self): |
1637 | - source = self.make_branch_and_tree('source') |
1638 | - self.build_tree(['source/file1', 'source/dir1/']) |
1639 | - source.add(['file1', 'dir1']) |
1640 | - source.commit('add1') |
1641 | - target1 = self.make_branch_and_tree('target1') |
1642 | - transform_result = build_tree(source.basis_tree(), target1) |
1643 | - self.assertEqual(2, transform_result.rename_count) |
1644 | - |
1645 | - self.build_tree(['source/dir1/file2']) |
1646 | - source.add(['dir1/file2']) |
1647 | - source.commit('add3') |
1648 | - target2 = self.make_branch_and_tree('target2') |
1649 | - transform_result = build_tree(source.basis_tree(), target2) |
1650 | - # children of non-root directories should not be renamed |
1651 | - self.assertEqual(2, transform_result.rename_count) |
1652 | - |
1653 | - def create_ab_tree(self): |
1654 | - """Create a committed test tree with two files""" |
1655 | - source = self.make_branch_and_tree('source') |
1656 | - self.build_tree_contents([('source/file1', b'A')]) |
1657 | - self.build_tree_contents([('source/file2', b'B')]) |
1658 | - source.add(['file1', 'file2'], [b'file1-id', b'file2-id']) |
1659 | - source.commit('commit files') |
1660 | - source.lock_write() |
1661 | - self.addCleanup(source.unlock) |
1662 | - return source |
1663 | - |
1664 | - def test_build_tree_accelerator_tree(self): |
1665 | - source = self.create_ab_tree() |
1666 | - self.build_tree_contents([('source/file2', b'C')]) |
1667 | - calls = [] |
1668 | - real_source_get_file = source.get_file |
1669 | - |
1670 | - def get_file(path): |
1671 | - calls.append(path) |
1672 | - return real_source_get_file(path) |
1673 | - source.get_file = get_file |
1674 | - target = self.make_branch_and_tree('target') |
1675 | - revision_tree = source.basis_tree() |
1676 | - revision_tree.lock_read() |
1677 | - self.addCleanup(revision_tree.unlock) |
1678 | - build_tree(revision_tree, target, source) |
1679 | - self.assertEqual(['file1'], calls) |
1680 | - target.lock_read() |
1681 | - self.addCleanup(target.unlock) |
1682 | - self.assertEqual([], list(target.iter_changes(revision_tree))) |
1683 | - |
1684 | - def test_build_tree_accelerator_tree_observes_sha1(self): |
1685 | - source = self.create_ab_tree() |
1686 | - sha1 = osutils.sha_string(b'A') |
1687 | - target = self.make_branch_and_tree('target') |
1688 | - target.lock_write() |
1689 | - self.addCleanup(target.unlock) |
1690 | - state = target.current_dirstate() |
1691 | - state._cutoff_time = time.time() + 60 |
1692 | - build_tree(source.basis_tree(), target, source) |
1693 | - entry = state._get_entry(0, path_utf8=b'file1') |
1694 | - self.assertEqual(sha1, entry[1][0][1]) |
1695 | - |
1696 | - def test_build_tree_accelerator_tree_missing_file(self): |
1697 | - source = self.create_ab_tree() |
1698 | - os.unlink('source/file1') |
1699 | - source.remove(['file2']) |
1700 | - target = self.make_branch_and_tree('target') |
1701 | - revision_tree = source.basis_tree() |
1702 | - revision_tree.lock_read() |
1703 | - self.addCleanup(revision_tree.unlock) |
1704 | - build_tree(revision_tree, target, source) |
1705 | - target.lock_read() |
1706 | - self.addCleanup(target.unlock) |
1707 | - self.assertEqual([], list(target.iter_changes(revision_tree))) |
1708 | - |
1709 | - def test_build_tree_accelerator_wrong_kind(self): |
1710 | - self.requireFeature(SymlinkFeature) |
1711 | - source = self.make_branch_and_tree('source') |
1712 | - self.build_tree_contents([('source/file1', b'')]) |
1713 | - self.build_tree_contents([('source/file2', b'')]) |
1714 | - source.add(['file1', 'file2'], [b'file1-id', b'file2-id']) |
1715 | - source.commit('commit files') |
1716 | - os.unlink('source/file2') |
1717 | - self.build_tree_contents([('source/file2/', b'C')]) |
1718 | - os.unlink('source/file1') |
1719 | - os.symlink('file2', 'source/file1') |
1720 | - calls = [] |
1721 | - real_source_get_file = source.get_file |
1722 | - |
1723 | - def get_file(path): |
1724 | - calls.append(path) |
1725 | - return real_source_get_file(path) |
1726 | - source.get_file = get_file |
1727 | - target = self.make_branch_and_tree('target') |
1728 | - revision_tree = source.basis_tree() |
1729 | - revision_tree.lock_read() |
1730 | - self.addCleanup(revision_tree.unlock) |
1731 | - build_tree(revision_tree, target, source) |
1732 | - self.assertEqual([], calls) |
1733 | - target.lock_read() |
1734 | - self.addCleanup(target.unlock) |
1735 | - self.assertEqual([], list(target.iter_changes(revision_tree))) |
1736 | - |
1737 | - def test_build_tree_hardlink(self): |
1738 | - self.requireFeature(HardlinkFeature) |
1739 | - source = self.create_ab_tree() |
1740 | - target = self.make_branch_and_tree('target') |
1741 | - revision_tree = source.basis_tree() |
1742 | - revision_tree.lock_read() |
1743 | - self.addCleanup(revision_tree.unlock) |
1744 | - build_tree(revision_tree, target, source, hardlink=True) |
1745 | - target.lock_read() |
1746 | - self.addCleanup(target.unlock) |
1747 | - self.assertEqual([], list(target.iter_changes(revision_tree))) |
1748 | - source_stat = os.stat('source/file1') |
1749 | - target_stat = os.stat('target/file1') |
1750 | - self.assertEqual(source_stat, target_stat) |
1751 | - |
1752 | - # Explicitly disallowing hardlinks should prevent them. |
1753 | - target2 = self.make_branch_and_tree('target2') |
1754 | - build_tree(revision_tree, target2, source, hardlink=False) |
1755 | - target2.lock_read() |
1756 | - self.addCleanup(target2.unlock) |
1757 | - self.assertEqual([], list(target2.iter_changes(revision_tree))) |
1758 | - source_stat = os.stat('source/file1') |
1759 | - target2_stat = os.stat('target2/file1') |
1760 | - self.assertNotEqual(source_stat, target2_stat) |
1761 | - |
1762 | - def test_build_tree_accelerator_tree_moved(self): |
1763 | - source = self.make_branch_and_tree('source') |
1764 | - self.build_tree_contents([('source/file1', b'A')]) |
1765 | - source.add(['file1'], [b'file1-id']) |
1766 | - source.commit('commit files') |
1767 | - source.rename_one('file1', 'file2') |
1768 | - source.lock_read() |
1769 | - self.addCleanup(source.unlock) |
1770 | - target = self.make_branch_and_tree('target') |
1771 | - revision_tree = source.basis_tree() |
1772 | - revision_tree.lock_read() |
1773 | - self.addCleanup(revision_tree.unlock) |
1774 | - build_tree(revision_tree, target, source) |
1775 | - target.lock_read() |
1776 | - self.addCleanup(target.unlock) |
1777 | - self.assertEqual([], list(target.iter_changes(revision_tree))) |
1778 | - |
1779 | - def test_build_tree_hardlinks_preserve_execute(self): |
1780 | - self.requireFeature(HardlinkFeature) |
1781 | - source = self.create_ab_tree() |
1782 | - tt = source.transform() |
1783 | - trans_id = tt.trans_id_tree_path('file1') |
1784 | - tt.set_executability(True, trans_id) |
1785 | - tt.apply() |
1786 | - self.assertTrue(source.is_executable('file1')) |
1787 | - target = self.make_branch_and_tree('target') |
1788 | - revision_tree = source.basis_tree() |
1789 | - revision_tree.lock_read() |
1790 | - self.addCleanup(revision_tree.unlock) |
1791 | - build_tree(revision_tree, target, source, hardlink=True) |
1792 | - target.lock_read() |
1793 | - self.addCleanup(target.unlock) |
1794 | - self.assertEqual([], list(target.iter_changes(revision_tree))) |
1795 | - self.assertTrue(source.is_executable('file1')) |
1796 | - |
1797 | - def install_rot13_content_filter(self, pattern): |
1798 | - # We could use |
1799 | - # self.addCleanup(filters._reset_registry, filters._reset_registry()) |
1800 | - # below, but that looks a bit... hard to read even if it's exactly |
1801 | - # the same thing. |
1802 | - original_registry = filters._reset_registry() |
1803 | - |
1804 | - def restore_registry(): |
1805 | - filters._reset_registry(original_registry) |
1806 | - self.addCleanup(restore_registry) |
1807 | - |
1808 | - def rot13(chunks, context=None): |
1809 | - return [ |
1810 | - codecs.encode(chunk.decode('ascii'), 'rot13').encode('ascii') |
1811 | - for chunk in chunks] |
1812 | - rot13filter = filters.ContentFilter(rot13, rot13) |
1813 | - filters.filter_stacks_registry.register( |
1814 | - 'rot13', {'yes': [rot13filter]}.get) |
1815 | - os.mkdir(self.test_home_dir + '/.bazaar') |
1816 | - rules_filename = self.test_home_dir + '/.bazaar/rules' |
1817 | - with open(rules_filename, 'wb') as f: |
1818 | - f.write(b'[name %s]\nrot13=yes\n' % (pattern,)) |
1819 | - |
1820 | - def uninstall_rules(): |
1821 | - os.remove(rules_filename) |
1822 | - rules.reset_rules() |
1823 | - self.addCleanup(uninstall_rules) |
1824 | - rules.reset_rules() |
1825 | - |
1826 | - def test_build_tree_content_filtered_files_are_not_hardlinked(self): |
1827 | - """build_tree will not hardlink files that have content filtering rules |
1828 | - applied to them (but will still hardlink other files from the same tree |
1829 | - if it can). |
1830 | - """ |
1831 | - self.requireFeature(HardlinkFeature) |
1832 | - self.install_rot13_content_filter(b'file1') |
1833 | - source = self.create_ab_tree() |
1834 | - target = self.make_branch_and_tree('target') |
1835 | - revision_tree = source.basis_tree() |
1836 | - revision_tree.lock_read() |
1837 | - self.addCleanup(revision_tree.unlock) |
1838 | - build_tree(revision_tree, target, source, hardlink=True) |
1839 | - target.lock_read() |
1840 | - self.addCleanup(target.unlock) |
1841 | - self.assertEqual([], list(target.iter_changes(revision_tree))) |
1842 | - source_stat = os.stat('source/file1') |
1843 | - target_stat = os.stat('target/file1') |
1844 | - self.assertNotEqual(source_stat, target_stat) |
1845 | - source_stat = os.stat('source/file2') |
1846 | - target_stat = os.stat('target/file2') |
1847 | - self.assertEqualStat(source_stat, target_stat) |
1848 | - |
1849 | - def test_case_insensitive_build_tree_inventory(self): |
1850 | - if (features.CaseInsensitiveFilesystemFeature.available() |
1851 | - or features.CaseInsCasePresFilenameFeature.available()): |
1852 | - raise tests.UnavailableFeature('Fully case sensitive filesystem') |
1853 | - source = self.make_branch_and_tree('source') |
1854 | - self.build_tree(['source/file', 'source/FILE']) |
1855 | - source.add(['file', 'FILE'], [b'lower-id', b'upper-id']) |
1856 | - source.commit('added files') |
1857 | - # Don't try this at home, kids! |
1858 | - # Force the tree to report that it is case insensitive |
1859 | - target = self.make_branch_and_tree('target') |
1860 | - target.case_sensitive = False |
1861 | - build_tree(source.basis_tree(), target, source, delta_from_tree=True) |
1862 | - self.assertEqual('file.moved', target.id2path(b'lower-id')) |
1863 | - self.assertEqual('FILE', target.id2path(b'upper-id')) |
1864 | - |
1865 | - def test_build_tree_observes_sha(self): |
1866 | - source = self.make_branch_and_tree('source') |
1867 | - self.build_tree(['source/file1', 'source/dir/', 'source/dir/file2']) |
1868 | - source.add(['file1', 'dir', 'dir/file2'], |
1869 | - [b'file1-id', b'dir-id', b'file2-id']) |
1870 | - source.commit('new files') |
1871 | - target = self.make_branch_and_tree('target') |
1872 | - target.lock_write() |
1873 | - self.addCleanup(target.unlock) |
1874 | - # We make use of the fact that DirState caches its cutoff time. So we |
1875 | - # set the 'safe' time to one minute in the future. |
1876 | - state = target.current_dirstate() |
1877 | - state._cutoff_time = time.time() + 60 |
1878 | - build_tree(source.basis_tree(), target) |
1879 | - entry1_sha = osutils.sha_file_by_name('source/file1') |
1880 | - entry2_sha = osutils.sha_file_by_name('source/dir/file2') |
1881 | - # entry[1] is the state information, entry[1][0] is the state of the |
1882 | - # working tree, entry[1][0][1] is the sha value for the current working |
1883 | - # tree |
1884 | - entry1 = state._get_entry(0, path_utf8=b'file1') |
1885 | - self.assertEqual(entry1_sha, entry1[1][0][1]) |
1886 | - # The 'size' field must also be set. |
1887 | - self.assertEqual(25, entry1[1][0][2]) |
1888 | - entry1_state = entry1[1][0] |
1889 | - entry2 = state._get_entry(0, path_utf8=b'dir/file2') |
1890 | - self.assertEqual(entry2_sha, entry2[1][0][1]) |
1891 | - self.assertEqual(29, entry2[1][0][2]) |
1892 | - entry2_state = entry2[1][0] |
1893 | - # Now, make sure that we don't have to re-read the content. The |
1894 | - # packed_stat should match exactly. |
1895 | - self.assertEqual(entry1_sha, target.get_file_sha1('file1')) |
1896 | - self.assertEqual(entry2_sha, target.get_file_sha1('dir/file2')) |
1897 | - self.assertEqual(entry1_state, entry1[1][0]) |
1898 | - self.assertEqual(entry2_state, entry2[1][0]) |
1899 | - |
1900 | - |
1901 | class TestCommitTransform(tests.TestCaseWithTransport): |
1902 | |
1903 | def get_branch(self): |
1904 | |
1905 | === modified file 'breezy/transform.py' |
1906 | --- breezy/transform.py 2020-08-15 17:47:31 +0000 |
1907 | +++ breezy/transform.py 2020-09-02 17:42:36 +0000 |
1908 | @@ -646,200 +646,6 @@ |
1909 | return [(self.get_path(t), t) for t in trans_ids] |
1910 | |
1911 | |
1912 | -def build_tree(tree, wt, accelerator_tree=None, hardlink=False, |
1913 | - delta_from_tree=False): |
1914 | - """Create working tree for a branch, using a TreeTransform. |
1915 | - |
1916 | - This function should be used on empty trees, having a tree root at most. |
1917 | - (see merge and revert functionality for working with existing trees) |
1918 | - |
1919 | - Existing files are handled like so: |
1920 | - |
1921 | - - Existing bzrdirs take precedence over creating new items. They are |
1922 | - created as '%s.diverted' % name. |
1923 | - - Otherwise, if the content on disk matches the content we are building, |
1924 | - it is silently replaced. |
1925 | - - Otherwise, conflict resolution will move the old file to 'oldname.moved'. |
1926 | - |
1927 | - :param tree: The tree to convert wt into a copy of |
1928 | - :param wt: The working tree that files will be placed into |
1929 | - :param accelerator_tree: A tree which can be used for retrieving file |
1930 | - contents more quickly than tree itself, i.e. a workingtree. tree |
1931 | - will be used for cases where accelerator_tree's content is different. |
1932 | - :param hardlink: If true, hard-link files to accelerator_tree, where |
1933 | - possible. accelerator_tree must implement abspath, i.e. be a |
1934 | - working tree. |
1935 | - :param delta_from_tree: If true, build_tree may use the input Tree to |
1936 | - generate the inventory delta. |
1937 | - """ |
1938 | - with cleanup.ExitStack() as exit_stack: |
1939 | - exit_stack.enter_context(wt.lock_tree_write()) |
1940 | - exit_stack.enter_context(tree.lock_read()) |
1941 | - if accelerator_tree is not None: |
1942 | - exit_stack.enter_context(accelerator_tree.lock_read()) |
1943 | - return _build_tree(tree, wt, accelerator_tree, hardlink, |
1944 | - delta_from_tree) |
1945 | - |
1946 | - |
1947 | -def _build_tree(tree, wt, accelerator_tree, hardlink, delta_from_tree): |
1948 | - """See build_tree.""" |
1949 | - for num, _unused in enumerate(wt.all_versioned_paths()): |
1950 | - if num > 0: # more than just a root |
1951 | - raise errors.WorkingTreeAlreadyPopulated(base=wt.basedir) |
1952 | - file_trans_id = {} |
1953 | - top_pb = ui.ui_factory.nested_progress_bar() |
1954 | - pp = ProgressPhase("Build phase", 2, top_pb) |
1955 | - if tree.path2id('') is not None: |
1956 | - # This is kind of a hack: we should be altering the root |
1957 | - # as part of the regular tree shape diff logic. |
1958 | - # The conditional test here is to avoid doing an |
1959 | - # expensive operation (flush) every time the root id |
1960 | - # is set within the tree, nor setting the root and thus |
1961 | - # marking the tree as dirty, because we use two different |
1962 | - # idioms here: tree interfaces and inventory interfaces. |
1963 | - if wt.path2id('') != tree.path2id(''): |
1964 | - wt.set_root_id(tree.path2id('')) |
1965 | - wt.flush() |
1966 | - tt = wt.transform() |
1967 | - divert = set() |
1968 | - try: |
1969 | - pp.next_phase() |
1970 | - file_trans_id[find_previous_path(wt, tree, '')] = tt.trans_id_tree_path('') |
1971 | - with ui.ui_factory.nested_progress_bar() as pb: |
1972 | - deferred_contents = [] |
1973 | - num = 0 |
1974 | - total = len(tree.all_versioned_paths()) |
1975 | - if delta_from_tree: |
1976 | - precomputed_delta = [] |
1977 | - else: |
1978 | - precomputed_delta = None |
1979 | - # Check if tree inventory has content. If so, we populate |
1980 | - # existing_files with the directory content. If there are no |
1981 | - # entries we skip populating existing_files as its not used. |
1982 | - # This improves performance and unncessary work on large |
1983 | - # directory trees. (#501307) |
1984 | - if total > 0: |
1985 | - existing_files = set() |
1986 | - for dir, files in wt.walkdirs(): |
1987 | - existing_files.update(f[0] for f in files) |
1988 | - for num, (tree_path, entry) in \ |
1989 | - enumerate(tree.iter_entries_by_dir()): |
1990 | - pb.update(gettext("Building tree"), num |
1991 | - - len(deferred_contents), total) |
1992 | - if entry.parent_id is None: |
1993 | - continue |
1994 | - reparent = False |
1995 | - file_id = entry.file_id |
1996 | - if delta_from_tree: |
1997 | - precomputed_delta.append((None, tree_path, file_id, entry)) |
1998 | - if tree_path in existing_files: |
1999 | - target_path = wt.abspath(tree_path) |
2000 | - kind = file_kind(target_path) |
2001 | - if kind == "directory": |
2002 | - try: |
2003 | - controldir.ControlDir.open(target_path) |
2004 | - except errors.NotBranchError: |
2005 | - pass |
2006 | - else: |
2007 | - divert.add(tree_path) |
2008 | - if (tree_path not in divert |
2009 | - and _content_match( |
2010 | - tree, entry, tree_path, kind, target_path)): |
2011 | - tt.delete_contents(tt.trans_id_tree_path(tree_path)) |
2012 | - if kind == 'directory': |
2013 | - reparent = True |
2014 | - parent_id = file_trans_id[osutils.dirname(tree_path)] |
2015 | - if entry.kind == 'file': |
2016 | - # We *almost* replicate new_by_entry, so that we can defer |
2017 | - # getting the file text, and get them all at once. |
2018 | - trans_id = tt.create_path(entry.name, parent_id) |
2019 | - file_trans_id[tree_path] = trans_id |
2020 | - tt.version_file(trans_id, file_id=file_id) |
2021 | - executable = tree.is_executable(tree_path) |
2022 | - if executable: |
2023 | - tt.set_executability(executable, trans_id) |
2024 | - trans_data = (trans_id, tree_path, entry.text_sha1) |
2025 | - deferred_contents.append((tree_path, trans_data)) |
2026 | - else: |
2027 | - file_trans_id[tree_path] = new_by_entry( |
2028 | - tree_path, tt, entry, parent_id, tree) |
2029 | - if reparent: |
2030 | - new_trans_id = file_trans_id[tree_path] |
2031 | - old_parent = tt.trans_id_tree_path(tree_path) |
2032 | - _reparent_children(tt, old_parent, new_trans_id) |
2033 | - offset = num + 1 - len(deferred_contents) |
2034 | - _create_files(tt, tree, deferred_contents, pb, offset, |
2035 | - accelerator_tree, hardlink) |
2036 | - pp.next_phase() |
2037 | - divert_trans = set(file_trans_id[f] for f in divert) |
2038 | - |
2039 | - def resolver(t, c): |
2040 | - return resolve_checkout(t, c, divert_trans) |
2041 | - raw_conflicts = resolve_conflicts(tt, pass_func=resolver) |
2042 | - if len(raw_conflicts) > 0: |
2043 | - precomputed_delta = None |
2044 | - conflicts = tt.cook_conflicts(raw_conflicts) |
2045 | - for conflict in conflicts: |
2046 | - trace.warning(text_type(conflict)) |
2047 | - try: |
2048 | - wt.add_conflicts(conflicts) |
2049 | - except errors.UnsupportedOperation: |
2050 | - pass |
2051 | - result = tt.apply(no_conflicts=True, |
2052 | - precomputed_delta=precomputed_delta) |
2053 | - finally: |
2054 | - tt.finalize() |
2055 | - top_pb.finished() |
2056 | - return result |
2057 | - |
2058 | - |
2059 | -def _create_files(tt, tree, desired_files, pb, offset, accelerator_tree, |
2060 | - hardlink): |
2061 | - total = len(desired_files) + offset |
2062 | - wt = tt._tree |
2063 | - if accelerator_tree is None: |
2064 | - new_desired_files = desired_files |
2065 | - else: |
2066 | - iter = accelerator_tree.iter_changes(tree, include_unchanged=True) |
2067 | - unchanged = [ |
2068 | - change.path for change in iter |
2069 | - if not (change.changed_content or change.executable[0] != change.executable[1])] |
2070 | - if accelerator_tree.supports_content_filtering(): |
2071 | - unchanged = [(tp, ap) for (tp, ap) in unchanged |
2072 | - if not next(accelerator_tree.iter_search_rules([ap]))] |
2073 | - unchanged = dict(unchanged) |
2074 | - new_desired_files = [] |
2075 | - count = 0 |
2076 | - for unused_tree_path, (trans_id, tree_path, text_sha1) in desired_files: |
2077 | - accelerator_path = unchanged.get(tree_path) |
2078 | - if accelerator_path is None: |
2079 | - new_desired_files.append((tree_path, |
2080 | - (trans_id, tree_path, text_sha1))) |
2081 | - continue |
2082 | - pb.update(gettext('Adding file contents'), count + offset, total) |
2083 | - if hardlink: |
2084 | - tt.create_hardlink(accelerator_tree.abspath(accelerator_path), |
2085 | - trans_id) |
2086 | - else: |
2087 | - with accelerator_tree.get_file(accelerator_path) as f: |
2088 | - chunks = osutils.file_iterator(f) |
2089 | - if wt.supports_content_filtering(): |
2090 | - filters = wt._content_filter_stack(tree_path) |
2091 | - chunks = filtered_output_bytes(chunks, filters, |
2092 | - ContentFilterContext(tree_path, tree)) |
2093 | - tt.create_file(chunks, trans_id, sha1=text_sha1) |
2094 | - count += 1 |
2095 | - offset += count |
2096 | - for count, ((trans_id, tree_path, text_sha1), contents) in enumerate( |
2097 | - tree.iter_files_bytes(new_desired_files)): |
2098 | - if wt.supports_content_filtering(): |
2099 | - filters = wt._content_filter_stack(tree_path) |
2100 | - contents = filtered_output_bytes(contents, filters, |
2101 | - ContentFilterContext(tree_path, tree)) |
2102 | - tt.create_file(contents, trans_id, sha1=text_sha1) |
2103 | - pb.update(gettext('Adding file contents'), count + offset, total) |
2104 | - |
2105 | - |
2106 | def _reparent_children(tt, old_parent, new_parent): |
2107 | for child in tt.iter_tree_children(old_parent): |
2108 | tt.adjust_path(tt.final_name(child), new_parent, child) |
2109 | @@ -852,52 +658,6 @@ |
2110 | return by_parent[old_parent] |
2111 | |
2112 | |
2113 | -def _content_match(tree, entry, tree_path, kind, target_path): |
2114 | - if entry.kind != kind: |
2115 | - return False |
2116 | - if entry.kind == "directory": |
2117 | - return True |
2118 | - if entry.kind == "file": |
2119 | - with open(target_path, 'rb') as f1, \ |
2120 | - tree.get_file(tree_path) as f2: |
2121 | - if osutils.compare_files(f1, f2): |
2122 | - return True |
2123 | - elif entry.kind == "symlink": |
2124 | - if tree.get_symlink_target(tree_path) == os.readlink(target_path): |
2125 | - return True |
2126 | - return False |
2127 | - |
2128 | - |
2129 | -def resolve_checkout(tt, conflicts, divert): |
2130 | - new_conflicts = set() |
2131 | - for c_type, conflict in ((c[0], c) for c in conflicts): |
2132 | - # Anything but a 'duplicate' would indicate programmer error |
2133 | - if c_type != 'duplicate': |
2134 | - raise AssertionError(c_type) |
2135 | - # Now figure out which is new and which is old |
2136 | - if tt.new_contents(conflict[1]): |
2137 | - new_file = conflict[1] |
2138 | - old_file = conflict[2] |
2139 | - else: |
2140 | - new_file = conflict[2] |
2141 | - old_file = conflict[1] |
2142 | - |
2143 | - # We should only get here if the conflict wasn't completely |
2144 | - # resolved |
2145 | - final_parent = tt.final_parent(old_file) |
2146 | - if new_file in divert: |
2147 | - new_name = tt.final_name(old_file) + '.diverted' |
2148 | - tt.adjust_path(new_name, final_parent, new_file) |
2149 | - new_conflicts.add((c_type, 'Diverted to', |
2150 | - new_file, old_file)) |
2151 | - else: |
2152 | - new_name = tt.final_name(old_file) + '.moved' |
2153 | - tt.adjust_path(new_name, final_parent, old_file) |
2154 | - new_conflicts.add((c_type, 'Moved existing file to', |
2155 | - old_file, new_file)) |
2156 | - return new_conflicts |
2157 | - |
2158 | - |
2159 | def new_by_entry(path, tt, entry, parent_id, tree): |
2160 | """Create a new file according to its inventory entry""" |
2161 | name = entry.name |
2162 | @@ -960,38 +720,13 @@ |
2163 | tt.set_executability(entry.executable, trans_id) |
2164 | |
2165 | |
2166 | -def revert(working_tree, target_tree, filenames, backups=False, |
2167 | - pb=None, change_reporter=None): |
2168 | - """Revert a working tree's contents to those of a target tree.""" |
2169 | - pb = ui.ui_factory.nested_progress_bar() |
2170 | - try: |
2171 | - with target_tree.lock_read(), working_tree.transform(pb) as tt: |
2172 | - pp = ProgressPhase("Revert phase", 3, pb) |
2173 | - conflicts, merge_modified = _prepare_revert_transform( |
2174 | - working_tree, target_tree, tt, filenames, backups, pp) |
2175 | - if change_reporter: |
2176 | - from . import delta |
2177 | - change_reporter = delta._ChangeReporter( |
2178 | - unversioned_filter=working_tree.is_ignored) |
2179 | - delta.report_changes(tt.iter_changes(), change_reporter) |
2180 | - for conflict in conflicts: |
2181 | - trace.warning(text_type(conflict)) |
2182 | - pp.next_phase() |
2183 | - tt.apply() |
2184 | - if working_tree.supports_merge_modified(): |
2185 | - working_tree.set_merge_modified(merge_modified) |
2186 | - finally: |
2187 | - pb.clear() |
2188 | - return conflicts |
2189 | - |
2190 | - |
2191 | -def _prepare_revert_transform(working_tree, target_tree, tt, filenames, |
2192 | +def _prepare_revert_transform(es, working_tree, target_tree, tt, filenames, |
2193 | backups, pp, basis_tree=None, |
2194 | merge_modified=None): |
2195 | with ui.ui_factory.nested_progress_bar() as child_pb: |
2196 | if merge_modified is None: |
2197 | merge_modified = working_tree.merge_modified() |
2198 | - merge_modified = _alter_files(working_tree, target_tree, tt, |
2199 | + merge_modified = _alter_files(es, working_tree, target_tree, tt, |
2200 | child_pb, filenames, backups, |
2201 | merge_modified, basis_tree) |
2202 | with ui.ui_factory.nested_progress_bar() as child_pb: |
2203 | @@ -1001,10 +736,34 @@ |
2204 | return conflicts, merge_modified |
2205 | |
2206 | |
2207 | -def _alter_files(working_tree, target_tree, tt, pb, specific_files, |
2208 | +def revert(working_tree, target_tree, filenames, backups=False, |
2209 | + pb=None, change_reporter=None, merge_modified=None, basis_tree=None): |
2210 | + """Revert a working tree's contents to those of a target tree.""" |
2211 | + with cleanup.ExitStack() as es: |
2212 | + pb = es.enter_context(ui.ui_factory.nested_progress_bar()) |
2213 | + es.enter_context(target_tree.lock_read()) |
2214 | + tt = es.enter_context(working_tree.transform(pb)) |
2215 | + pp = ProgressPhase("Revert phase", 3, pb) |
2216 | + conflicts, merge_modified = _prepare_revert_transform( |
2217 | + es, working_tree, target_tree, tt, filenames, backups, pp) |
2218 | + if change_reporter: |
2219 | + from . import delta |
2220 | + change_reporter = delta._ChangeReporter( |
2221 | + unversioned_filter=working_tree.is_ignored) |
2222 | + delta.report_changes(tt.iter_changes(), change_reporter) |
2223 | + for conflict in conflicts: |
2224 | + trace.warning(text_type(conflict)) |
2225 | + pp.next_phase() |
2226 | + tt.apply() |
2227 | + if working_tree.supports_merge_modified(): |
2228 | + working_tree.set_merge_modified(merge_modified) |
2229 | + return conflicts |
2230 | + |
2231 | + |
2232 | +def _alter_files(es, working_tree, target_tree, tt, pb, specific_files, |
2233 | backups, merge_modified, basis_tree=None): |
2234 | if basis_tree is not None: |
2235 | - basis_tree.lock_read() |
2236 | + es.enter_context(basis_tree.lock_read()) |
2237 | # We ask the working_tree for its changes relative to the target, rather |
2238 | # than the target changes relative to the working tree. Because WT4 has an |
2239 | # optimizer to compare itself to a target, but no optimizer for the |
2240 | @@ -1015,123 +774,122 @@ |
2241 | skip_root = True |
2242 | else: |
2243 | skip_root = False |
2244 | - try: |
2245 | - deferred_files = [] |
2246 | - for id_num, change in enumerate(change_list): |
2247 | - target_path, wt_path = change.path |
2248 | - target_versioned, wt_versioned = change.versioned |
2249 | - target_parent = change.parent_id[0] |
2250 | - target_name, wt_name = change.name |
2251 | - target_kind, wt_kind = change.kind |
2252 | - target_executable, wt_executable = change.executable |
2253 | - if skip_root and wt_path == '': |
2254 | - continue |
2255 | - trans_id = tt.trans_id_file_id(change.file_id) |
2256 | - mode_id = None |
2257 | - if change.changed_content: |
2258 | - keep_content = False |
2259 | - if wt_kind == 'file' and (backups or target_kind is None): |
2260 | - wt_sha1 = working_tree.get_file_sha1(wt_path) |
2261 | - if merge_modified.get(wt_path) != wt_sha1: |
2262 | - # acquire the basis tree lazily to prevent the |
2263 | - # expense of accessing it when it's not needed ? |
2264 | - # (Guessing, RBC, 200702) |
2265 | - if basis_tree is None: |
2266 | - basis_tree = working_tree.basis_tree() |
2267 | - basis_tree.lock_read() |
2268 | - basis_inter = InterTree.get(basis_tree, working_tree) |
2269 | - basis_path = basis_inter.find_source_path(wt_path) |
2270 | - if basis_path is None: |
2271 | - if target_kind is None and not target_versioned: |
2272 | - keep_content = True |
2273 | - else: |
2274 | - if wt_sha1 != basis_tree.get_file_sha1(basis_path): |
2275 | - keep_content = True |
2276 | - if wt_kind is not None: |
2277 | - if not keep_content: |
2278 | - tt.delete_contents(trans_id) |
2279 | - elif target_kind is not None: |
2280 | - parent_trans_id = tt.trans_id_tree_path(osutils.dirname(wt_path)) |
2281 | - backup_name = tt._available_backup_name( |
2282 | - wt_name, parent_trans_id) |
2283 | - tt.adjust_path(backup_name, parent_trans_id, trans_id) |
2284 | - new_trans_id = tt.create_path(wt_name, parent_trans_id) |
2285 | - if wt_versioned and target_versioned: |
2286 | - tt.unversion_file(trans_id) |
2287 | - tt.version_file( |
2288 | - new_trans_id, file_id=getattr(change, 'file_id', None)) |
2289 | - # New contents should have the same unix perms as old |
2290 | - # contents |
2291 | - mode_id = trans_id |
2292 | - trans_id = new_trans_id |
2293 | - if target_kind in ('directory', 'tree-reference'): |
2294 | - tt.create_directory(trans_id) |
2295 | - if target_kind == 'tree-reference': |
2296 | - revision = target_tree.get_reference_revision( |
2297 | - target_path) |
2298 | - tt.set_tree_reference(revision, trans_id) |
2299 | - elif target_kind == 'symlink': |
2300 | - tt.create_symlink(target_tree.get_symlink_target( |
2301 | - target_path), trans_id) |
2302 | - elif target_kind == 'file': |
2303 | - deferred_files.append( |
2304 | - (target_path, (trans_id, mode_id, target_path))) |
2305 | + deferred_files = [] |
2306 | + for id_num, change in enumerate(change_list): |
2307 | + target_path, wt_path = change.path |
2308 | + target_versioned, wt_versioned = change.versioned |
2309 | + target_parent = change.parent_id[0] |
2310 | + target_name, wt_name = change.name |
2311 | + target_kind, wt_kind = change.kind |
2312 | + target_executable, wt_executable = change.executable |
2313 | + if skip_root and wt_path == '': |
2314 | + continue |
2315 | + mode_id = None |
2316 | + if wt_path is not None: |
2317 | + trans_id = tt.trans_id_tree_path(wt_path) |
2318 | + else: |
2319 | + trans_id = tt.assign_id() |
2320 | + if change.changed_content: |
2321 | + keep_content = False |
2322 | + if wt_kind == 'file' and (backups or target_kind is None): |
2323 | + wt_sha1 = working_tree.get_file_sha1(wt_path) |
2324 | + if merge_modified.get(wt_path) != wt_sha1: |
2325 | + # acquire the basis tree lazily to prevent the |
2326 | + # expense of accessing it when it's not needed ? |
2327 | + # (Guessing, RBC, 200702) |
2328 | if basis_tree is None: |
2329 | basis_tree = working_tree.basis_tree() |
2330 | - basis_tree.lock_read() |
2331 | - new_sha1 = target_tree.get_file_sha1(target_path) |
2332 | - basis_inter = InterTree.get(basis_tree, target_tree) |
2333 | - basis_path = basis_inter.find_source_path(target_path) |
2334 | - if (basis_path is not None and |
2335 | - new_sha1 == basis_tree.get_file_sha1(basis_path)): |
2336 | - # If the new contents of the file match what is in basis, |
2337 | - # then there is no need to store in merge_modified. |
2338 | - if basis_path in merge_modified: |
2339 | - del merge_modified[basis_path] |
2340 | + es.enter_context(basis_tree.lock_read()) |
2341 | + basis_inter = InterTree.get(basis_tree, working_tree) |
2342 | + basis_path = basis_inter.find_source_path(wt_path) |
2343 | + if basis_path is None: |
2344 | + if target_kind is None and not target_versioned: |
2345 | + keep_content = True |
2346 | else: |
2347 | - merge_modified[target_path] = new_sha1 |
2348 | - |
2349 | - # preserve the execute bit when backing up |
2350 | - if keep_content and wt_executable == target_executable: |
2351 | - tt.set_executability(target_executable, trans_id) |
2352 | + if wt_sha1 != basis_tree.get_file_sha1(basis_path): |
2353 | + keep_content = True |
2354 | + if wt_kind is not None: |
2355 | + if not keep_content: |
2356 | + tt.delete_contents(trans_id) |
2357 | elif target_kind is not None: |
2358 | - raise AssertionError(target_kind) |
2359 | - if not wt_versioned and target_versioned: |
2360 | - tt.version_file( |
2361 | - trans_id, file_id=getattr(change, 'file_id', None)) |
2362 | - if wt_versioned and not target_versioned: |
2363 | - tt.unversion_file(trans_id) |
2364 | - if (target_name is not None |
2365 | - and (wt_name != target_name or change.is_reparented())): |
2366 | - if target_path == '': |
2367 | - parent_trans = ROOT_PARENT |
2368 | - else: |
2369 | - parent_trans = tt.trans_id_file_id(target_parent) |
2370 | - if wt_path == '' and wt_versioned: |
2371 | - tt.adjust_root_path(target_name, parent_trans) |
2372 | - else: |
2373 | - tt.adjust_path(target_name, parent_trans, trans_id) |
2374 | - if wt_executable != target_executable and target_kind == "file": |
2375 | - tt.set_executability(target_executable, trans_id) |
2376 | - if working_tree.supports_content_filtering(): |
2377 | - for (trans_id, mode_id, target_path), bytes in ( |
2378 | - target_tree.iter_files_bytes(deferred_files)): |
2379 | - # We're reverting a tree to the target tree so using the |
2380 | - # target tree to find the file path seems the best choice |
2381 | - # here IMO - Ian C 27/Oct/2009 |
2382 | - filters = working_tree._content_filter_stack(target_path) |
2383 | - bytes = filtered_output_bytes( |
2384 | - bytes, filters, |
2385 | - ContentFilterContext(target_path, working_tree)) |
2386 | - tt.create_file(bytes, trans_id, mode_id) |
2387 | - else: |
2388 | - for (trans_id, mode_id, target_path), bytes in target_tree.iter_files_bytes( |
2389 | - deferred_files): |
2390 | - tt.create_file(bytes, trans_id, mode_id) |
2391 | - tt.fixup_new_roots() |
2392 | - finally: |
2393 | - if basis_tree is not None: |
2394 | - basis_tree.unlock() |
2395 | + parent_trans_id = tt.trans_id_tree_path(osutils.dirname(wt_path)) |
2396 | + backup_name = tt._available_backup_name( |
2397 | + wt_name, parent_trans_id) |
2398 | + tt.adjust_path(backup_name, parent_trans_id, trans_id) |
2399 | + new_trans_id = tt.create_path(wt_name, parent_trans_id) |
2400 | + if wt_versioned and target_versioned: |
2401 | + tt.unversion_file(trans_id) |
2402 | + tt.version_file( |
2403 | + new_trans_id, file_id=getattr(change, 'file_id', None)) |
2404 | + # New contents should have the same unix perms as old |
2405 | + # contents |
2406 | + mode_id = trans_id |
2407 | + trans_id = new_trans_id |
2408 | + if target_kind in ('directory', 'tree-reference'): |
2409 | + tt.create_directory(trans_id) |
2410 | + if target_kind == 'tree-reference': |
2411 | + revision = target_tree.get_reference_revision( |
2412 | + target_path) |
2413 | + tt.set_tree_reference(revision, trans_id) |
2414 | + elif target_kind == 'symlink': |
2415 | + tt.create_symlink(target_tree.get_symlink_target( |
2416 | + target_path), trans_id) |
2417 | + elif target_kind == 'file': |
2418 | + deferred_files.append( |
2419 | + (target_path, (trans_id, mode_id, target_path))) |
2420 | + if basis_tree is None: |
2421 | + basis_tree = working_tree.basis_tree() |
2422 | + es.enter_context(basis_tree.lock_read()) |
2423 | + new_sha1 = target_tree.get_file_sha1(target_path) |
2424 | + basis_inter = InterTree.get(basis_tree, target_tree) |
2425 | + basis_path = basis_inter.find_source_path(target_path) |
2426 | + if (basis_path is not None and |
2427 | + new_sha1 == basis_tree.get_file_sha1(basis_path)): |
2428 | + # If the new contents of the file match what is in basis, |
2429 | + # then there is no need to store in merge_modified. |
2430 | + if basis_path in merge_modified: |
2431 | + del merge_modified[basis_path] |
2432 | + else: |
2433 | + merge_modified[target_path] = new_sha1 |
2434 | + |
2435 | + # preserve the execute bit when backing up |
2436 | + if keep_content and wt_executable == target_executable: |
2437 | + tt.set_executability(target_executable, trans_id) |
2438 | + elif target_kind is not None: |
2439 | + raise AssertionError(target_kind) |
2440 | + if not wt_versioned and target_versioned: |
2441 | + tt.version_file( |
2442 | + trans_id, file_id=getattr(change, 'file_id', None)) |
2443 | + if wt_versioned and not target_versioned: |
2444 | + tt.unversion_file(trans_id) |
2445 | + if (target_name is not None |
2446 | + and (wt_name != target_name or change.is_reparented())): |
2447 | + if target_path == '': |
2448 | + parent_trans = ROOT_PARENT |
2449 | + else: |
2450 | + parent_trans = tt.trans_id_file_id(target_parent) |
2451 | + if wt_path == '' and wt_versioned: |
2452 | + tt.adjust_root_path(target_name, parent_trans) |
2453 | + else: |
2454 | + tt.adjust_path(target_name, parent_trans, trans_id) |
2455 | + if wt_executable != target_executable and target_kind == "file": |
2456 | + tt.set_executability(target_executable, trans_id) |
2457 | + if working_tree.supports_content_filtering(): |
2458 | + for (trans_id, mode_id, target_path), bytes in ( |
2459 | + target_tree.iter_files_bytes(deferred_files)): |
2460 | + # We're reverting a tree to the target tree so using the |
2461 | + # target tree to find the file path seems the best choice |
2462 | + # here IMO - Ian C 27/Oct/2009 |
2463 | + filters = working_tree._content_filter_stack(target_path) |
2464 | + bytes = filtered_output_bytes( |
2465 | + bytes, filters, |
2466 | + ContentFilterContext(target_path, working_tree)) |
2467 | + tt.create_file(bytes, trans_id, mode_id) |
2468 | + else: |
2469 | + for (trans_id, mode_id, target_path), bytes in target_tree.iter_files_bytes( |
2470 | + deferred_files): |
2471 | + tt.create_file(bytes, trans_id, mode_id) |
2472 | + tt.fixup_new_roots() |
2473 | return merge_modified |
2474 | |
2475 | |
2476 | @@ -1162,9 +920,17 @@ |
2477 | existing_file, new_file = trans_id, last_trans_id |
2478 | else: |
2479 | existing_file, new_file = last_trans_id, trans_id |
2480 | - new_name = tt.final_name(existing_file) + '.moved' |
2481 | - tt.adjust_path(new_name, final_parent, existing_file) |
2482 | - yield (c_type, 'Moved existing file to', existing_file, new_file) |
2483 | + if (not tt._tree.has_versioned_directories() and |
2484 | + tt.final_kind(trans_id) == 'directory' and |
2485 | + tt.final_kind(last_trans_id) == 'directory'): |
2486 | + _reparent_transform_children(tt, existing_file, new_file) |
2487 | + tt.delete_contents(existing_file) |
2488 | + tt.unversion_file(existing_file) |
2489 | + tt.cancel_creation(existing_file) |
2490 | + else: |
2491 | + new_name = tt.final_name(existing_file) + '.moved' |
2492 | + tt.adjust_path(new_name, final_parent, existing_file) |
2493 | + yield (c_type, 'Moved existing file to', existing_file, new_file) |
2494 | |
2495 | |
2496 | def resolve_parent_loop(tt, path_tree, c_type, cur): |
2497 | |
2498 | === modified file 'breezy/workingtree.py' |
2499 | --- breezy/workingtree.py 2020-08-15 17:47:31 +0000 |
2500 | +++ breezy/workingtree.py 2020-09-02 17:42:36 +0000 |
2501 | @@ -1077,10 +1077,8 @@ |
2502 | """ |
2503 | raise NotImplementedError(self.unlock) |
2504 | |
2505 | - _marker = object() |
2506 | - |
2507 | def update(self, change_reporter=None, possible_transports=None, |
2508 | - revision=None, old_tip=_marker, show_base=False): |
2509 | + revision=None, old_tip=None, show_base=False): |
2510 | """Update a working tree along its branch. |
2511 | |
2512 | This will update the branch if its bound too, which means we have |
2513 | @@ -1112,105 +1110,7 @@ |
2514 | returned (old tip of the branch or None). _marker is used |
2515 | otherwise. |
2516 | """ |
2517 | - if self.branch.get_bound_location() is not None: |
2518 | - self.lock_write() |
2519 | - update_branch = (old_tip is self._marker) |
2520 | - else: |
2521 | - self.lock_tree_write() |
2522 | - update_branch = False |
2523 | - try: |
2524 | - if update_branch: |
2525 | - old_tip = self.branch.update(possible_transports) |
2526 | - else: |
2527 | - if old_tip is self._marker: |
2528 | - old_tip = None |
2529 | - return self._update_tree(old_tip, change_reporter, revision, show_base) |
2530 | - finally: |
2531 | - self.unlock() |
2532 | - |
2533 | - def _update_tree(self, old_tip=None, change_reporter=None, revision=None, |
2534 | - show_base=False): |
2535 | - """Update a tree to the master branch. |
2536 | - |
2537 | - :param old_tip: if supplied, the previous tip revision the branch, |
2538 | - before it was changed to the master branch's tip. |
2539 | - """ |
2540 | - # here if old_tip is not None, it is the old tip of the branch before |
2541 | - # it was updated from the master branch. This should become a pending |
2542 | - # merge in the working tree to preserve the user existing work. we |
2543 | - # cant set that until we update the working trees last revision to be |
2544 | - # one from the new branch, because it will just get absorbed by the |
2545 | - # parent de-duplication logic. |
2546 | - # |
2547 | - # We MUST save it even if an error occurs, because otherwise the users |
2548 | - # local work is unreferenced and will appear to have been lost. |
2549 | - # |
2550 | - with self.lock_tree_write(): |
2551 | - nb_conflicts = 0 |
2552 | - try: |
2553 | - last_rev = self.get_parent_ids()[0] |
2554 | - except IndexError: |
2555 | - last_rev = _mod_revision.NULL_REVISION |
2556 | - if revision is None: |
2557 | - revision = self.branch.last_revision() |
2558 | - |
2559 | - old_tip = old_tip or _mod_revision.NULL_REVISION |
2560 | - |
2561 | - if not _mod_revision.is_null(old_tip) and old_tip != last_rev: |
2562 | - # the branch we are bound to was updated |
2563 | - # merge those changes in first |
2564 | - base_tree = self.basis_tree() |
2565 | - other_tree = self.branch.repository.revision_tree(old_tip) |
2566 | - nb_conflicts = merge.merge_inner(self.branch, other_tree, |
2567 | - base_tree, this_tree=self, |
2568 | - change_reporter=change_reporter, |
2569 | - show_base=show_base) |
2570 | - if nb_conflicts: |
2571 | - self.add_parent_tree((old_tip, other_tree)) |
2572 | - note(gettext('Rerun update after fixing the conflicts.')) |
2573 | - return nb_conflicts |
2574 | - |
2575 | - if last_rev != _mod_revision.ensure_null(revision): |
2576 | - # the working tree is up to date with the branch |
2577 | - # we can merge the specified revision from master |
2578 | - to_tree = self.branch.repository.revision_tree(revision) |
2579 | - to_root_id = to_tree.path2id('') |
2580 | - |
2581 | - basis = self.basis_tree() |
2582 | - with basis.lock_read(): |
2583 | - if (basis.path2id('') is None or basis.path2id('') != to_root_id): |
2584 | - self.set_root_id(to_root_id) |
2585 | - self.flush() |
2586 | - |
2587 | - # determine the branch point |
2588 | - graph = self.branch.repository.get_graph() |
2589 | - base_rev_id = graph.find_unique_lca(self.branch.last_revision(), |
2590 | - last_rev) |
2591 | - base_tree = self.branch.repository.revision_tree(base_rev_id) |
2592 | - |
2593 | - nb_conflicts = merge.merge_inner(self.branch, to_tree, base_tree, |
2594 | - this_tree=self, |
2595 | - change_reporter=change_reporter, |
2596 | - show_base=show_base) |
2597 | - self.set_last_revision(revision) |
2598 | - # TODO - dedup parents list with things merged by pull ? |
2599 | - # reuse the tree we've updated to to set the basis: |
2600 | - parent_trees = [(revision, to_tree)] |
2601 | - merges = self.get_parent_ids()[1:] |
2602 | - # Ideally we ask the tree for the trees here, that way the working |
2603 | - # tree can decide whether to give us the entire tree or give us a |
2604 | - # lazy initialised tree. dirstate for instance will have the trees |
2605 | - # in ram already, whereas a last-revision + basis-inventory tree |
2606 | - # will not, but also does not need them when setting parents. |
2607 | - for parent in merges: |
2608 | - parent_trees.append( |
2609 | - (parent, self.branch.repository.revision_tree(parent))) |
2610 | - if not _mod_revision.is_null(old_tip): |
2611 | - parent_trees.append( |
2612 | - (old_tip, self.branch.repository.revision_tree(old_tip))) |
2613 | - self.set_parent_trees(parent_trees) |
2614 | - last_rev = parent_trees[0][0] |
2615 | - return nb_conflicts |
2616 | + raise NotImplementedError(self.update) |
2617 | |
2618 | def set_conflicts(self, arg): |
2619 | raise errors.UnsupportedOperation(self.set_conflicts, self) |
Running landing tests failed
https://ci.breezy-vcs.org/job/brz-3.1/job/brz-3.1-land/303/