Merge lp:~jelmer/brz/merge-3.1 into lp:brz

Proposed by Jelmer Vernooij on 2020-11-18
Status: Merged
Approved by: Jelmer Vernooij on 2020-11-19
Approved revision: no longer in the source branch.
Merge reported by: The Breezy Bot
Merged at revision: not available
Proposed branch: lp:~jelmer/brz/merge-3.1
Merge into: lp:brz
Diff against target: 3183 lines (+1374/-1010)
26 files modified
MANIFEST.in (+1/-1)
breezy/bzr/tests/test_transform.py (+417/-0)
breezy/bzr/transform.py (+262/-27)
breezy/bzr/workingtree.py (+134/-0)
breezy/bzr/workingtree_3.py (+2/-2)
breezy/bzr/workingtree_4.py (+5/-4)
breezy/git/interrepo.py (+33/-0)
breezy/git/repository.py (+1/-0)
breezy/git/tests/__init__.py (+1/-0)
breezy/git/tests/test_blackbox.py (+14/-0)
breezy/git/tests/test_transform.py (+41/-0)
breezy/git/transform.py (+11/-27)
breezy/git/workingtree.py (+126/-0)
breezy/patches.py (+6/-1)
breezy/plugins/github/hoster.py (+10/-4)
breezy/plugins/gitlab/hoster.py (+65/-18)
breezy/plugins/launchpad/hoster.py (+14/-8)
breezy/plugins/propose/cmds.py (+39/-15)
breezy/plugins/weave_fmt/workingtree.py (+2/-2)
breezy/propose.py (+21/-6)
breezy/tests/per_tree/__init__.py (+5/-1)
breezy/tests/per_workingtree/test_transform.py (+1/-1)
breezy/tests/test_propose.py (+10/-0)
breezy/tests/test_transform.py (+0/-406)
breezy/transform.py (+151/-385)
breezy/workingtree.py (+2/-102)
To merge this branch: bzr merge lp:~jelmer/brz/merge-3.1
Reviewer Review Type Date Requested Status
Jelmer Vernooij Approve on 2020-11-18
Review via email: mp+394038@code.launchpad.net

Commit message

Merge lp:brz/3.1.

Description of the change

Merge lp:brz/3.1.

To post a comment you must log in.
Jelmer Vernooij (jelmer) :
review: Approve
lp:~jelmer/brz/merge-3.1 updated on 2020-11-19
7526. By Jelmer Vernooij on 2020-11-19

Merge lp:brz/3.1.

Merged from https://code.launchpad.net/~jelmer/brz/merge-3.1/+merge/394038

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1=== modified file 'MANIFEST.in'
2--- MANIFEST.in 2018-04-01 17:37:15 +0000
3+++ MANIFEST.in 2020-11-19 18:29:10 +0000
4@@ -1,6 +1,6 @@
5 include brz README.rst setup.py COPYING.txt
6 # FIXME: Not needed, remove after 2.7.0 -- vila 2016-02-07
7-include BRANCH.TODO INSTALL Makefile MANIFEST.in NEWS profile_imports.py README_BDIST_RPM .rsyncexclude .testr.conf TODO tools/bzr_epydoc tools/packaging/lp-upload-release tools/subunit-sum breezy/plugins/news_merge/README breezy/plugins/po_merge/README breezy/tests/ssl_certs/ca.key breezy/tests/ssl_certs/server.csr breezy/plugins/grep/.bzrignore breezy/plugins/grep/NEWS
8+include BRANCH.TODO INSTALL Makefile MANIFEST.in NEWS profile_imports.py README_BDIST_RPM .rsyncexclude .testr.conf TODO tools/brz_epydoc tools/packaging/lp-upload-release tools/subunit-sum breezy/plugins/news_merge/README breezy/plugins/po_merge/README breezy/tests/ssl_certs/ca.key breezy/tests/ssl_certs/server.csr
9 # bzr export still create some empty dirs that need to be removed
10 # breezy/plugins/weave_fmt/tests/ breezy/store/revision/ doc/ja/_templates/ man1/ man1
11 recursive-include tools/win32 *
12
13=== modified file 'breezy/bzr/tests/test_transform.py'
14--- breezy/bzr/tests/test_transform.py 2020-07-03 00:43:31 +0000
15+++ breezy/bzr/tests/test_transform.py 2020-11-19 18:29:10 +0000
16@@ -14,6 +14,16 @@
17 # along with this program; if not, write to the Free Software
18 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
20+import codecs
21+import os
22+import time
23+
24+from ...tests import features
25+from ... import errors, filters, osutils, rules
26+from ...controldir import ControlDir
27+from ..conflicts import DuplicateEntry
28+from ..transform import build_tree
29+
30 from . import TestCaseWithTransport
31
32
33@@ -47,3 +57,410 @@
34 tt.version_file(tt.root, file_id=tree.path2id(''))
35 tt.trans_id_tree_path('foo')
36 self.assertEqual([], tt._inventory_altered())
37+
38+
39+class TestBuildTree(TestCaseWithTransport):
40+
41+ def test_build_tree_with_symlinks(self):
42+ self.requireFeature(features.SymlinkFeature)
43+ os.mkdir('a')
44+ a = ControlDir.create_standalone_workingtree('a')
45+ os.mkdir('a/foo')
46+ with open('a/foo/bar', 'wb') as f:
47+ f.write(b'contents')
48+ os.symlink('a/foo/bar', 'a/foo/baz')
49+ a.add(['foo', 'foo/bar', 'foo/baz'])
50+ a.commit('initial commit')
51+ b = ControlDir.create_standalone_workingtree('b')
52+ basis = a.basis_tree()
53+ basis.lock_read()
54+ self.addCleanup(basis.unlock)
55+ build_tree(basis, b)
56+ self.assertIs(os.path.isdir('b/foo'), True)
57+ with open('b/foo/bar', 'rb') as f:
58+ self.assertEqual(f.read(), b"contents")
59+ self.assertEqual(os.readlink('b/foo/baz'), 'a/foo/bar')
60+
61+ def test_build_with_references(self):
62+ tree = self.make_branch_and_tree('source',
63+ format='development-subtree')
64+ subtree = self.make_branch_and_tree('source/subtree',
65+ format='development-subtree')
66+ tree.add_reference(subtree)
67+ tree.commit('a revision')
68+ tree.branch.create_checkout('target')
69+ self.assertPathExists('target')
70+ self.assertPathExists('target/subtree')
71+
72+ def test_file_conflict_handling(self):
73+ """Ensure that when building trees, conflict handling is done"""
74+ source = self.make_branch_and_tree('source')
75+ target = self.make_branch_and_tree('target')
76+ self.build_tree(['source/file', 'target/file'])
77+ source.add('file', b'new-file')
78+ source.commit('added file')
79+ build_tree(source.basis_tree(), target)
80+ self.assertEqual(
81+ [DuplicateEntry('Moved existing file to', 'file.moved',
82+ 'file', None, 'new-file')],
83+ target.conflicts())
84+ target2 = self.make_branch_and_tree('target2')
85+ with open('target2/file', 'wb') as target_file, \
86+ open('source/file', 'rb') as source_file:
87+ target_file.write(source_file.read())
88+ build_tree(source.basis_tree(), target2)
89+ self.assertEqual([], target2.conflicts())
90+
91+ def test_symlink_conflict_handling(self):
92+ """Ensure that when building trees, conflict handling is done"""
93+ self.requireFeature(features.SymlinkFeature)
94+ source = self.make_branch_and_tree('source')
95+ os.symlink('foo', 'source/symlink')
96+ source.add('symlink', b'new-symlink')
97+ source.commit('added file')
98+ target = self.make_branch_and_tree('target')
99+ os.symlink('bar', 'target/symlink')
100+ build_tree(source.basis_tree(), target)
101+ self.assertEqual(
102+ [DuplicateEntry('Moved existing file to', 'symlink.moved',
103+ 'symlink', None, 'new-symlink')],
104+ target.conflicts())
105+ target = self.make_branch_and_tree('target2')
106+ os.symlink('foo', 'target2/symlink')
107+ build_tree(source.basis_tree(), target)
108+ self.assertEqual([], target.conflicts())
109+
110+ def test_directory_conflict_handling(self):
111+ """Ensure that when building trees, conflict handling is done"""
112+ source = self.make_branch_and_tree('source')
113+ target = self.make_branch_and_tree('target')
114+ self.build_tree(['source/dir1/', 'source/dir1/file', 'target/dir1/'])
115+ source.add(['dir1', 'dir1/file'], [b'new-dir1', b'new-file'])
116+ source.commit('added file')
117+ build_tree(source.basis_tree(), target)
118+ self.assertEqual([], target.conflicts())
119+ self.assertPathExists('target/dir1/file')
120+
121+ # Ensure contents are merged
122+ target = self.make_branch_and_tree('target2')
123+ self.build_tree(['target2/dir1/', 'target2/dir1/file2'])
124+ build_tree(source.basis_tree(), target)
125+ self.assertEqual([], target.conflicts())
126+ self.assertPathExists('target2/dir1/file2')
127+ self.assertPathExists('target2/dir1/file')
128+
129+ # Ensure new contents are suppressed for existing branches
130+ target = self.make_branch_and_tree('target3')
131+ self.make_branch('target3/dir1')
132+ self.build_tree(['target3/dir1/file2'])
133+ build_tree(source.basis_tree(), target)
134+ self.assertPathDoesNotExist('target3/dir1/file')
135+ self.assertPathExists('target3/dir1/file2')
136+ self.assertPathExists('target3/dir1.diverted/file')
137+ self.assertEqual(
138+ [DuplicateEntry('Diverted to', 'dir1.diverted',
139+ 'dir1', 'new-dir1', None)],
140+ target.conflicts())
141+
142+ target = self.make_branch_and_tree('target4')
143+ self.build_tree(['target4/dir1/'])
144+ self.make_branch('target4/dir1/file')
145+ build_tree(source.basis_tree(), target)
146+ self.assertPathExists('target4/dir1/file')
147+ self.assertEqual('directory', osutils.file_kind('target4/dir1/file'))
148+ self.assertPathExists('target4/dir1/file.diverted')
149+ self.assertEqual(
150+ [DuplicateEntry('Diverted to', 'dir1/file.diverted',
151+ 'dir1/file', 'new-file', None)],
152+ target.conflicts())
153+
154+ def test_mixed_conflict_handling(self):
155+ """Ensure that when building trees, conflict handling is done"""
156+ source = self.make_branch_and_tree('source')
157+ target = self.make_branch_and_tree('target')
158+ self.build_tree(['source/name', 'target/name/'])
159+ source.add('name', b'new-name')
160+ source.commit('added file')
161+ build_tree(source.basis_tree(), target)
162+ self.assertEqual(
163+ [DuplicateEntry('Moved existing file to',
164+ 'name.moved', 'name', None, 'new-name')],
165+ target.conflicts())
166+
167+ def test_raises_in_populated(self):
168+ source = self.make_branch_and_tree('source')
169+ self.build_tree(['source/name'])
170+ source.add('name')
171+ source.commit('added name')
172+ target = self.make_branch_and_tree('target')
173+ self.build_tree(['target/name'])
174+ target.add('name')
175+ self.assertRaises(errors.WorkingTreeAlreadyPopulated,
176+ build_tree, source.basis_tree(), target)
177+
178+ def test_build_tree_rename_count(self):
179+ source = self.make_branch_and_tree('source')
180+ self.build_tree(['source/file1', 'source/dir1/'])
181+ source.add(['file1', 'dir1'])
182+ source.commit('add1')
183+ target1 = self.make_branch_and_tree('target1')
184+ transform_result = build_tree(source.basis_tree(), target1)
185+ self.assertEqual(2, transform_result.rename_count)
186+
187+ self.build_tree(['source/dir1/file2'])
188+ source.add(['dir1/file2'])
189+ source.commit('add3')
190+ target2 = self.make_branch_and_tree('target2')
191+ transform_result = build_tree(source.basis_tree(), target2)
192+ # children of non-root directories should not be renamed
193+ self.assertEqual(2, transform_result.rename_count)
194+
195+ def create_ab_tree(self):
196+ """Create a committed test tree with two files"""
197+ source = self.make_branch_and_tree('source')
198+ self.build_tree_contents([('source/file1', b'A')])
199+ self.build_tree_contents([('source/file2', b'B')])
200+ source.add(['file1', 'file2'], [b'file1-id', b'file2-id'])
201+ source.commit('commit files')
202+ source.lock_write()
203+ self.addCleanup(source.unlock)
204+ return source
205+
206+ def test_build_tree_accelerator_tree(self):
207+ source = self.create_ab_tree()
208+ self.build_tree_contents([('source/file2', b'C')])
209+ calls = []
210+ real_source_get_file = source.get_file
211+
212+ def get_file(path):
213+ calls.append(path)
214+ return real_source_get_file(path)
215+ source.get_file = get_file
216+ target = self.make_branch_and_tree('target')
217+ revision_tree = source.basis_tree()
218+ revision_tree.lock_read()
219+ self.addCleanup(revision_tree.unlock)
220+ build_tree(revision_tree, target, source)
221+ self.assertEqual(['file1'], calls)
222+ target.lock_read()
223+ self.addCleanup(target.unlock)
224+ self.assertEqual([], list(target.iter_changes(revision_tree)))
225+
226+ def test_build_tree_accelerator_tree_observes_sha1(self):
227+ source = self.create_ab_tree()
228+ sha1 = osutils.sha_string(b'A')
229+ target = self.make_branch_and_tree('target')
230+ target.lock_write()
231+ self.addCleanup(target.unlock)
232+ state = target.current_dirstate()
233+ state._cutoff_time = time.time() + 60
234+ build_tree(source.basis_tree(), target, source)
235+ entry = state._get_entry(0, path_utf8=b'file1')
236+ self.assertEqual(sha1, entry[1][0][1])
237+
238+ def test_build_tree_accelerator_tree_missing_file(self):
239+ source = self.create_ab_tree()
240+ os.unlink('source/file1')
241+ source.remove(['file2'])
242+ target = self.make_branch_and_tree('target')
243+ revision_tree = source.basis_tree()
244+ revision_tree.lock_read()
245+ self.addCleanup(revision_tree.unlock)
246+ build_tree(revision_tree, target, source)
247+ target.lock_read()
248+ self.addCleanup(target.unlock)
249+ self.assertEqual([], list(target.iter_changes(revision_tree)))
250+
251+ def test_build_tree_accelerator_wrong_kind(self):
252+ self.requireFeature(features.SymlinkFeature)
253+ source = self.make_branch_and_tree('source')
254+ self.build_tree_contents([('source/file1', b'')])
255+ self.build_tree_contents([('source/file2', b'')])
256+ source.add(['file1', 'file2'], [b'file1-id', b'file2-id'])
257+ source.commit('commit files')
258+ os.unlink('source/file2')
259+ self.build_tree_contents([('source/file2/', b'C')])
260+ os.unlink('source/file1')
261+ os.symlink('file2', 'source/file1')
262+ calls = []
263+ real_source_get_file = source.get_file
264+
265+ def get_file(path):
266+ calls.append(path)
267+ return real_source_get_file(path)
268+ source.get_file = get_file
269+ target = self.make_branch_and_tree('target')
270+ revision_tree = source.basis_tree()
271+ revision_tree.lock_read()
272+ self.addCleanup(revision_tree.unlock)
273+ build_tree(revision_tree, target, source)
274+ self.assertEqual([], calls)
275+ target.lock_read()
276+ self.addCleanup(target.unlock)
277+ self.assertEqual([], list(target.iter_changes(revision_tree)))
278+
279+ def test_build_tree_hardlink(self):
280+ self.requireFeature(features.HardlinkFeature)
281+ source = self.create_ab_tree()
282+ target = self.make_branch_and_tree('target')
283+ revision_tree = source.basis_tree()
284+ revision_tree.lock_read()
285+ self.addCleanup(revision_tree.unlock)
286+ build_tree(revision_tree, target, source, hardlink=True)
287+ target.lock_read()
288+ self.addCleanup(target.unlock)
289+ self.assertEqual([], list(target.iter_changes(revision_tree)))
290+ source_stat = os.stat('source/file1')
291+ target_stat = os.stat('target/file1')
292+ self.assertEqual(source_stat, target_stat)
293+
294+ # Explicitly disallowing hardlinks should prevent them.
295+ target2 = self.make_branch_and_tree('target2')
296+ build_tree(revision_tree, target2, source, hardlink=False)
297+ target2.lock_read()
298+ self.addCleanup(target2.unlock)
299+ self.assertEqual([], list(target2.iter_changes(revision_tree)))
300+ source_stat = os.stat('source/file1')
301+ target2_stat = os.stat('target2/file1')
302+ self.assertNotEqual(source_stat, target2_stat)
303+
304+ def test_build_tree_accelerator_tree_moved(self):
305+ source = self.make_branch_and_tree('source')
306+ self.build_tree_contents([('source/file1', b'A')])
307+ source.add(['file1'], [b'file1-id'])
308+ source.commit('commit files')
309+ source.rename_one('file1', 'file2')
310+ source.lock_read()
311+ self.addCleanup(source.unlock)
312+ target = self.make_branch_and_tree('target')
313+ revision_tree = source.basis_tree()
314+ revision_tree.lock_read()
315+ self.addCleanup(revision_tree.unlock)
316+ build_tree(revision_tree, target, source)
317+ target.lock_read()
318+ self.addCleanup(target.unlock)
319+ self.assertEqual([], list(target.iter_changes(revision_tree)))
320+
321+ def test_build_tree_hardlinks_preserve_execute(self):
322+ self.requireFeature(features.HardlinkFeature)
323+ source = self.create_ab_tree()
324+ tt = source.transform()
325+ trans_id = tt.trans_id_tree_path('file1')
326+ tt.set_executability(True, trans_id)
327+ tt.apply()
328+ self.assertTrue(source.is_executable('file1'))
329+ target = self.make_branch_and_tree('target')
330+ revision_tree = source.basis_tree()
331+ revision_tree.lock_read()
332+ self.addCleanup(revision_tree.unlock)
333+ build_tree(revision_tree, target, source, hardlink=True)
334+ target.lock_read()
335+ self.addCleanup(target.unlock)
336+ self.assertEqual([], list(target.iter_changes(revision_tree)))
337+ self.assertTrue(source.is_executable('file1'))
338+
339+ def install_rot13_content_filter(self, pattern):
340+ # We could use
341+ # self.addCleanup(filters._reset_registry, filters._reset_registry())
342+ # below, but that looks a bit... hard to read even if it's exactly
343+ # the same thing.
344+ original_registry = filters._reset_registry()
345+
346+ def restore_registry():
347+ filters._reset_registry(original_registry)
348+ self.addCleanup(restore_registry)
349+
350+ def rot13(chunks, context=None):
351+ return [
352+ codecs.encode(chunk.decode('ascii'), 'rot13').encode('ascii')
353+ for chunk in chunks]
354+ rot13filter = filters.ContentFilter(rot13, rot13)
355+ filters.filter_stacks_registry.register(
356+ 'rot13', {'yes': [rot13filter]}.get)
357+ os.mkdir(self.test_home_dir + '/.bazaar')
358+ rules_filename = self.test_home_dir + '/.bazaar/rules'
359+ with open(rules_filename, 'wb') as f:
360+ f.write(b'[name %s]\nrot13=yes\n' % (pattern,))
361+
362+ def uninstall_rules():
363+ os.remove(rules_filename)
364+ rules.reset_rules()
365+ self.addCleanup(uninstall_rules)
366+ rules.reset_rules()
367+
368+ def test_build_tree_content_filtered_files_are_not_hardlinked(self):
369+ """build_tree will not hardlink files that have content filtering rules
370+ applied to them (but will still hardlink other files from the same tree
371+ if it can).
372+ """
373+ self.requireFeature(features.HardlinkFeature)
374+ self.install_rot13_content_filter(b'file1')
375+ source = self.create_ab_tree()
376+ target = self.make_branch_and_tree('target')
377+ revision_tree = source.basis_tree()
378+ revision_tree.lock_read()
379+ self.addCleanup(revision_tree.unlock)
380+ build_tree(revision_tree, target, source, hardlink=True)
381+ target.lock_read()
382+ self.addCleanup(target.unlock)
383+ self.assertEqual([], list(target.iter_changes(revision_tree)))
384+ source_stat = os.stat('source/file1')
385+ target_stat = os.stat('target/file1')
386+ self.assertNotEqual(source_stat, target_stat)
387+ source_stat = os.stat('source/file2')
388+ target_stat = os.stat('target/file2')
389+ self.assertEqualStat(source_stat, target_stat)
390+
391+ def test_case_insensitive_build_tree_inventory(self):
392+ if (features.CaseInsensitiveFilesystemFeature.available()
393+ or features.CaseInsCasePresFilenameFeature.available()):
394+ raise tests.UnavailableFeature('Fully case sensitive filesystem')
395+ source = self.make_branch_and_tree('source')
396+ self.build_tree(['source/file', 'source/FILE'])
397+ source.add(['file', 'FILE'], [b'lower-id', b'upper-id'])
398+ source.commit('added files')
399+ # Don't try this at home, kids!
400+ # Force the tree to report that it is case insensitive
401+ target = self.make_branch_and_tree('target')
402+ target.case_sensitive = False
403+ build_tree(source.basis_tree(), target, source, delta_from_tree=True)
404+ self.assertEqual('file.moved', target.id2path(b'lower-id'))
405+ self.assertEqual('FILE', target.id2path(b'upper-id'))
406+
407+ def test_build_tree_observes_sha(self):
408+ source = self.make_branch_and_tree('source')
409+ self.build_tree(['source/file1', 'source/dir/', 'source/dir/file2'])
410+ source.add(['file1', 'dir', 'dir/file2'],
411+ [b'file1-id', b'dir-id', b'file2-id'])
412+ source.commit('new files')
413+ target = self.make_branch_and_tree('target')
414+ target.lock_write()
415+ self.addCleanup(target.unlock)
416+ # We make use of the fact that DirState caches its cutoff time. So we
417+ # set the 'safe' time to one minute in the future.
418+ state = target.current_dirstate()
419+ state._cutoff_time = time.time() + 60
420+ build_tree(source.basis_tree(), target)
421+ entry1_sha = osutils.sha_file_by_name('source/file1')
422+ entry2_sha = osutils.sha_file_by_name('source/dir/file2')
423+ # entry[1] is the state information, entry[1][0] is the state of the
424+ # working tree, entry[1][0][1] is the sha value for the current working
425+ # tree
426+ entry1 = state._get_entry(0, path_utf8=b'file1')
427+ self.assertEqual(entry1_sha, entry1[1][0][1])
428+ # The 'size' field must also be set.
429+ self.assertEqual(25, entry1[1][0][2])
430+ entry1_state = entry1[1][0]
431+ entry2 = state._get_entry(0, path_utf8=b'dir/file2')
432+ self.assertEqual(entry2_sha, entry2[1][0][1])
433+ self.assertEqual(29, entry2[1][0][2])
434+ entry2_state = entry2[1][0]
435+ # Now, make sure that we don't have to re-read the content. The
436+ # packed_stat should match exactly.
437+ self.assertEqual(entry1_sha, target.get_file_sha1('file1'))
438+ self.assertEqual(entry2_sha, target.get_file_sha1('dir/file2'))
439+ self.assertEqual(entry1_state, entry1[1][0])
440+ self.assertEqual(entry2_state, entry2[1][0])
441+
442+
443+
444
445=== modified file 'breezy/bzr/transform.py'
446--- breezy/bzr/transform.py 2020-08-22 22:46:24 +0000
447+++ breezy/bzr/transform.py 2020-11-19 18:29:10 +0000
448@@ -17,6 +17,7 @@
449
450 from __future__ import absolute_import
451
452+import contextlib
453 import errno
454 import os
455 from stat import S_IEXEC, S_ISREG
456@@ -25,6 +26,7 @@
457 from .. import (
458 annotate,
459 conflicts,
460+ controldir,
461 errors,
462 lock,
463 multiparent,
464@@ -36,8 +38,10 @@
465 urlutils,
466 )
467
468+from ..filters import filtered_output_bytes, ContentFilterContext
469 from ..i18n import gettext
470 from ..mutabletree import MutableTree
471+from ..progress import ProgressPhase
472 from ..transform import (
473 ROOT_PARENT,
474 _FileMover,
475@@ -52,7 +56,11 @@
476 ReusingTransform,
477 MalformedTransform,
478 PreviewTree,
479+ new_by_entry,
480+ _reparent_children,
481+ resolve_conflicts,
482 )
483+from ..tree import find_previous_path
484 from .conflicts import Conflict
485
486 from . import (
487@@ -61,6 +69,22 @@
488 )
489
490
491+def _content_match(tree, entry, tree_path, kind, target_path):
492+ if entry.kind != kind:
493+ return False
494+ if entry.kind == "directory":
495+ return True
496+ if entry.kind == "file":
497+ with open(target_path, 'rb') as f1, \
498+ tree.get_file(tree_path) as f2:
499+ if osutils.compare_files(f1, f2):
500+ return True
501+ elif entry.kind == "symlink":
502+ if tree.get_symlink_target(tree_path) == os.readlink(target_path):
503+ return True
504+ return False
505+
506+
507 class TreeTransformBase(TreeTransform):
508 """The base class for TreeTransform and its kin."""
509
510@@ -387,7 +411,6 @@
511
512 def _parent_loops(self):
513 """No entry should be its own ancestor"""
514- conflicts = []
515 for trans_id in self._new_parent:
516 seen = set()
517 parent_id = trans_id
518@@ -398,14 +421,12 @@
519 except KeyError:
520 break
521 if parent_id == trans_id:
522- conflicts.append(('parent loop', trans_id))
523+ yield ('parent loop', trans_id)
524 if parent_id in seen:
525 break
526- return conflicts
527
528 def _unversioned_parents(self, by_parent):
529 """If parent directories are versioned, children must be versioned."""
530- conflicts = []
531 for parent_id, children in by_parent.items():
532 if parent_id == ROOT_PARENT:
533 continue
534@@ -413,27 +434,24 @@
535 continue
536 for child_id in children:
537 if self.final_is_versioned(child_id):
538- conflicts.append(('unversioned parent', parent_id))
539+ yield ('unversioned parent', parent_id)
540 break
541- return conflicts
542
543 def _improper_versioning(self):
544 """Cannot version a file with no contents, or a bad type.
545
546 However, existing entries with no contents are okay.
547 """
548- conflicts = []
549 for trans_id in self._new_id:
550 kind = self.final_kind(trans_id)
551 if kind == 'symlink' and not self._tree.supports_symlinks():
552 # Ignore symlinks as they are not supported on this platform
553 continue
554 if kind is None:
555- conflicts.append(('versioning no contents', trans_id))
556+ yield ('versioning no contents', trans_id)
557 continue
558 if not self._tree.versionable_kind(kind):
559- conflicts.append(('versioning bad kind', trans_id, kind))
560- return conflicts
561+ yield ('versioning bad kind', trans_id, kind)
562
563 def _executability_conflicts(self):
564 """Check for bad executability changes.
565@@ -443,31 +461,25 @@
566 2. only files can be executable. (The execute bit on a directory
567 does not indicate searchability)
568 """
569- conflicts = []
570 for trans_id in self._new_executability:
571 if not self.final_is_versioned(trans_id):
572- conflicts.append(('unversioned executability', trans_id))
573+ yield ('unversioned executability', trans_id)
574 else:
575 if self.final_kind(trans_id) != "file":
576- conflicts.append(('non-file executability', trans_id))
577- return conflicts
578+ yield ('non-file executability', trans_id)
579
580 def _overwrite_conflicts(self):
581 """Check for overwrites (not permitted on Win32)"""
582- conflicts = []
583 for trans_id in self._new_contents:
584 if self.tree_kind(trans_id) is None:
585 continue
586 if trans_id not in self._removed_contents:
587- conflicts.append(('overwrite', trans_id,
588- self.final_name(trans_id)))
589- return conflicts
590+ yield ('overwrite', trans_id, self.final_name(trans_id))
591
592 def _duplicate_entries(self, by_parent):
593 """No directory may have two entries with the same name."""
594- conflicts = []
595 if (self._new_name, self._new_parent) == ({}, {}):
596- return conflicts
597+ return
598 for children in by_parent.values():
599 name_ids = []
600 for child_tid in children:
601@@ -485,15 +497,12 @@
602 if kind is None and not self.final_is_versioned(trans_id):
603 continue
604 if name == last_name:
605- conflicts.append(('duplicate', last_trans_id, trans_id,
606- name))
607+ yield ('duplicate', last_trans_id, trans_id, name)
608 last_name = name
609 last_trans_id = trans_id
610- return conflicts
611
612 def _parent_type_conflicts(self, by_parent):
613 """Children must have a directory parent"""
614- conflicts = []
615 for parent_id, children in by_parent.items():
616 if parent_id == ROOT_PARENT:
617 continue
618@@ -509,11 +518,10 @@
619 kind = self.final_kind(parent_id)
620 if kind is None:
621 # The directory will be deleted
622- conflicts.append(('missing parent', parent_id))
623+ yield ('missing parent', parent_id)
624 elif kind != "directory":
625 # Meh, we need a *directory* to put something in it
626- conflicts.append(('non-directory parent', parent_id))
627- return conflicts
628+ yield ('non-directory parent', parent_id)
629
630 def _set_executability(self, path, trans_id):
631 """Set the executability of versioned files """
632@@ -2252,3 +2260,230 @@
633 trans_id = self._path2trans_id(path)
634 name = self._transform._limbo_name(trans_id)
635 return open(name, 'rb')
636+
637+
638+def build_tree(tree, wt, accelerator_tree=None, hardlink=False,
639+ delta_from_tree=False):
640+ """Create working tree for a branch, using a TreeTransform.
641+
642+ This function should be used on empty trees, having a tree root at most.
643+ (see merge and revert functionality for working with existing trees)
644+
645+ Existing files are handled like so:
646+
647+ - Existing bzrdirs take precedence over creating new items. They are
648+ created as '%s.diverted' % name.
649+ - Otherwise, if the content on disk matches the content we are building,
650+ it is silently replaced.
651+ - Otherwise, conflict resolution will move the old file to 'oldname.moved'.
652+
653+ :param tree: The tree to convert wt into a copy of
654+ :param wt: The working tree that files will be placed into
655+ :param accelerator_tree: A tree which can be used for retrieving file
656+ contents more quickly than tree itself, i.e. a workingtree. tree
657+ will be used for cases where accelerator_tree's content is different.
658+ :param hardlink: If true, hard-link files to accelerator_tree, where
659+ possible. accelerator_tree must implement abspath, i.e. be a
660+ working tree.
661+ :param delta_from_tree: If true, build_tree may use the input Tree to
662+ generate the inventory delta.
663+ """
664+ with contextlib.ExitStack() as exit_stack:
665+ exit_stack.enter_context(wt.lock_tree_write())
666+ exit_stack.enter_context(tree.lock_read())
667+ if accelerator_tree is not None:
668+ exit_stack.enter_context(accelerator_tree.lock_read())
669+ return _build_tree(tree, wt, accelerator_tree, hardlink,
670+ delta_from_tree)
671+
672+
673+def resolve_checkout(tt, conflicts, divert):
674+ new_conflicts = set()
675+ for c_type, conflict in ((c[0], c) for c in conflicts):
676+ # Anything but a 'duplicate' would indicate programmer error
677+ if c_type != 'duplicate':
678+ raise AssertionError(c_type)
679+ # Now figure out which is new and which is old
680+ if tt.new_contents(conflict[1]):
681+ new_file = conflict[1]
682+ old_file = conflict[2]
683+ else:
684+ new_file = conflict[2]
685+ old_file = conflict[1]
686+
687+ # We should only get here if the conflict wasn't completely
688+ # resolved
689+ final_parent = tt.final_parent(old_file)
690+ if new_file in divert:
691+ new_name = tt.final_name(old_file) + '.diverted'
692+ tt.adjust_path(new_name, final_parent, new_file)
693+ new_conflicts.add((c_type, 'Diverted to',
694+ new_file, old_file))
695+ else:
696+ new_name = tt.final_name(old_file) + '.moved'
697+ tt.adjust_path(new_name, final_parent, old_file)
698+ new_conflicts.add((c_type, 'Moved existing file to',
699+ old_file, new_file))
700+ return new_conflicts
701+
702+
703+def _build_tree(tree, wt, accelerator_tree, hardlink, delta_from_tree):
704+ """See build_tree."""
705+ for num, _unused in enumerate(wt.all_versioned_paths()):
706+ if num > 0: # more than just a root
707+ raise errors.WorkingTreeAlreadyPopulated(base=wt.basedir)
708+ file_trans_id = {}
709+ top_pb = ui.ui_factory.nested_progress_bar()
710+ pp = ProgressPhase("Build phase", 2, top_pb)
711+ if tree.path2id('') is not None:
712+ # This is kind of a hack: we should be altering the root
713+ # as part of the regular tree shape diff logic.
714+ # The conditional test here is to avoid doing an
715+ # expensive operation (flush) every time the root id
716+ # is set within the tree, nor setting the root and thus
717+ # marking the tree as dirty, because we use two different
718+ # idioms here: tree interfaces and inventory interfaces.
719+ if wt.path2id('') != tree.path2id(''):
720+ wt.set_root_id(tree.path2id(''))
721+ wt.flush()
722+ tt = wt.transform()
723+ divert = set()
724+ try:
725+ pp.next_phase()
726+ file_trans_id[find_previous_path(wt, tree, '')] = tt.trans_id_tree_path('')
727+ with ui.ui_factory.nested_progress_bar() as pb:
728+ deferred_contents = []
729+ num = 0
730+ total = len(tree.all_versioned_paths())
731+ if delta_from_tree:
732+ precomputed_delta = []
733+ else:
734+ precomputed_delta = None
735+ # Check if tree inventory has content. If so, we populate
736+ # existing_files with the directory content. If there are no
737+ # entries we skip populating existing_files as its not used.
738+ # This improves performance and unncessary work on large
739+ # directory trees. (#501307)
740+ if total > 0:
741+ existing_files = set()
742+ for dir, files in wt.walkdirs():
743+ existing_files.update(f[0] for f in files)
744+ for num, (tree_path, entry) in \
745+ enumerate(tree.iter_entries_by_dir()):
746+ pb.update(gettext("Building tree"), num
747+ - len(deferred_contents), total)
748+ if entry.parent_id is None:
749+ continue
750+ reparent = False
751+ file_id = entry.file_id
752+ if delta_from_tree:
753+ precomputed_delta.append((None, tree_path, file_id, entry))
754+ if tree_path in existing_files:
755+ target_path = wt.abspath(tree_path)
756+ kind = osutils.file_kind(target_path)
757+ if kind == "directory":
758+ try:
759+ controldir.ControlDir.open(target_path)
760+ except errors.NotBranchError:
761+ pass
762+ else:
763+ divert.add(tree_path)
764+ if (tree_path not in divert
765+ and _content_match(
766+ tree, entry, tree_path, kind, target_path)):
767+ tt.delete_contents(tt.trans_id_tree_path(tree_path))
768+ if kind == 'directory':
769+ reparent = True
770+ parent_id = file_trans_id[osutils.dirname(tree_path)]
771+ if entry.kind == 'file':
772+ # We *almost* replicate new_by_entry, so that we can defer
773+ # getting the file text, and get them all at once.
774+ trans_id = tt.create_path(entry.name, parent_id)
775+ file_trans_id[tree_path] = trans_id
776+ tt.version_file(trans_id, file_id=file_id)
777+ executable = tree.is_executable(tree_path)
778+ if executable:
779+ tt.set_executability(executable, trans_id)
780+ trans_data = (trans_id, tree_path, entry.text_sha1)
781+ deferred_contents.append((tree_path, trans_data))
782+ else:
783+ file_trans_id[tree_path] = new_by_entry(
784+ tree_path, tt, entry, parent_id, tree)
785+ if reparent:
786+ new_trans_id = file_trans_id[tree_path]
787+ old_parent = tt.trans_id_tree_path(tree_path)
788+ _reparent_children(tt, old_parent, new_trans_id)
789+ offset = num + 1 - len(deferred_contents)
790+ _create_files(tt, tree, deferred_contents, pb, offset,
791+ accelerator_tree, hardlink)
792+ pp.next_phase()
793+ divert_trans = set(file_trans_id[f] for f in divert)
794+
795+ def resolver(t, c):
796+ return resolve_checkout(t, c, divert_trans)
797+ raw_conflicts = resolve_conflicts(tt, pass_func=resolver)
798+ if len(raw_conflicts) > 0:
799+ precomputed_delta = None
800+ conflicts = tt.cook_conflicts(raw_conflicts)
801+ for conflict in conflicts:
802+ trace.warning(str(conflict))
803+ try:
804+ wt.add_conflicts(conflicts)
805+ except errors.UnsupportedOperation:
806+ pass
807+ result = tt.apply(no_conflicts=True,
808+ precomputed_delta=precomputed_delta)
809+ finally:
810+ tt.finalize()
811+ top_pb.finished()
812+ return result
813+
814+
815+def _create_files(tt, tree, desired_files, pb, offset, accelerator_tree,
816+ hardlink):
817+ total = len(desired_files) + offset
818+ wt = tt._tree
819+ if accelerator_tree is None:
820+ new_desired_files = desired_files
821+ else:
822+ iter = accelerator_tree.iter_changes(tree, include_unchanged=True)
823+ unchanged = [
824+ change.path for change in iter
825+ if not (change.changed_content or change.executable[0] != change.executable[1])]
826+ if accelerator_tree.supports_content_filtering():
827+ unchanged = [(tp, ap) for (tp, ap) in unchanged
828+ if not next(accelerator_tree.iter_search_rules([ap]))]
829+ unchanged = dict(unchanged)
830+ new_desired_files = []
831+ count = 0
832+ for unused_tree_path, (trans_id, tree_path, text_sha1) in desired_files:
833+ accelerator_path = unchanged.get(tree_path)
834+ if accelerator_path is None:
835+ new_desired_files.append((tree_path,
836+ (trans_id, tree_path, text_sha1)))
837+ continue
838+ pb.update(gettext('Adding file contents'), count + offset, total)
839+ if hardlink:
840+ tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
841+ trans_id)
842+ else:
843+ with accelerator_tree.get_file(accelerator_path) as f:
844+ chunks = osutils.file_iterator(f)
845+ if wt.supports_content_filtering():
846+ filters = wt._content_filter_stack(tree_path)
847+ chunks = filtered_output_bytes(chunks, filters,
848+ ContentFilterContext(tree_path, tree))
849+ tt.create_file(chunks, trans_id, sha1=text_sha1)
850+ count += 1
851+ offset += count
852+ for count, ((trans_id, tree_path, text_sha1), contents) in enumerate(
853+ tree.iter_files_bytes(new_desired_files)):
854+ if wt.supports_content_filtering():
855+ filters = wt._content_filter_stack(tree_path)
856+ contents = filtered_output_bytes(contents, filters,
857+ ContentFilterContext(tree_path, tree))
858+ tt.create_file(contents, trans_id, sha1=text_sha1)
859+ pb.update(gettext('Adding file contents'), count + offset, total)
860+
861+
862+
863
864=== modified file 'breezy/bzr/workingtree.py'
865--- breezy/bzr/workingtree.py 2020-08-22 22:46:24 +0000
866+++ breezy/bzr/workingtree.py 2020-11-19 18:29:10 +0000
867@@ -56,6 +56,7 @@
868 conflicts as _mod_conflicts,
869 globbing,
870 ignores,
871+ merge,
872 revision as _mod_revision,
873 rio as _mod_rio,
874 )
875@@ -1862,6 +1863,139 @@
876 return False
877 return True
878
879+ _marker = object()
880+
881+ def update(self, change_reporter=None, possible_transports=None,
882+ revision=None, old_tip=_marker, show_base=False):
883+ """Update a working tree along its branch.
884+
885+ This will update the branch if it's bound too, which means we have
886+ multiple trees involved:
887+
888+ - The new basis tree of the master.
889+ - The old basis tree of the branch.
890+ - The old basis tree of the working tree.
891+ - The current working tree state.
892+
893+ Pathologically, all of these may be different, and non-ancestors of each
894+ other. Conceptually we want to:
895+
896+ - Preserve the wt.basis->wt.state changes
897+ - Transform the wt.basis to the new master basis.
898+ - Apply a merge of the old branch basis to get any 'local' changes from
899+ it into the tree.
900+ - Restore the wt.basis->wt.state changes.
901+
902+ There isn't a single operation at the moment to do that, so we:
903+
904+ - Merge current state -> basis tree of the master w.r.t. the old tree
905+ basis.
906+ - Do a 'normal' merge of the old branch basis if it is relevant.
907+
908+ :param revision: The target revision to update to. Must be in the
909+ revision history.
910+ :param old_tip: If branch.update() has already been run, the value it
911+ returned (old tip of the branch or None). _marker is used
912+ otherwise.
913+ """
914+ if self.branch.get_bound_location() is not None:
915+ self.lock_write()
916+ update_branch = (old_tip is self._marker)
917+ else:
918+ self.lock_tree_write()
919+ update_branch = False
920+ try:
921+ if update_branch:
922+ old_tip = self.branch.update(possible_transports)
923+ else:
924+ if old_tip is self._marker:
925+ old_tip = None
926+ return self._update_tree(old_tip, change_reporter, revision, show_base)
927+ finally:
928+ self.unlock()
929+
930+ def _update_tree(self, old_tip=None, change_reporter=None, revision=None,
931+ show_base=False):
932+ """Update a tree to the master branch.
933+
934+ :param old_tip: if supplied, the previous tip revision of the branch,
935+ before it was changed to the master branch's tip.
936+ """
937+ # here if old_tip is not None, it is the old tip of the branch before
938+ # it was updated from the master branch. This should become a pending
939+ # merge in the working tree to preserve the user's existing work. We
940+ # can't set that until we update the working tree's last revision to be
941+ # one from the new branch, because it will just get absorbed by the
942+ # parent de-duplication logic.
943+ #
944+ # We MUST save it even if an error occurs, because otherwise the users
945+ # local work is unreferenced and will appear to have been lost.
946+ #
947+ with self.lock_tree_write():
948+ nb_conflicts = 0
949+ try:
950+ last_rev = self.get_parent_ids()[0]
951+ except IndexError:
952+ last_rev = _mod_revision.NULL_REVISION
953+ if revision is None:
954+ revision = self.branch.last_revision()
955+
956+ old_tip = old_tip or _mod_revision.NULL_REVISION
957+
958+ if not _mod_revision.is_null(old_tip) and old_tip != last_rev:
959+ # the branch we are bound to was updated
960+ # merge those changes in first
961+ base_tree = self.basis_tree()
962+ other_tree = self.branch.repository.revision_tree(old_tip)
963+ nb_conflicts = merge.merge_inner(self.branch, other_tree,
964+ base_tree, this_tree=self,
965+ change_reporter=change_reporter,
966+ show_base=show_base)
967+ if nb_conflicts:
968+ self.add_parent_tree((old_tip, other_tree))
969+ return nb_conflicts
970+
971+ if last_rev != _mod_revision.ensure_null(revision):
972+ # the working tree is up to date with the branch
973+ # we can merge the specified revision from master
974+ to_tree = self.branch.repository.revision_tree(revision)
975+ to_root_id = to_tree.path2id('')
976+
977+ basis = self.basis_tree()
978+ with basis.lock_read():
979+ if (basis.path2id('') is None or basis.path2id('') != to_root_id):
980+ self.set_root_id(to_root_id)
981+ self.flush()
982+
983+ # determine the branch point
984+ graph = self.branch.repository.get_graph()
985+ base_rev_id = graph.find_unique_lca(self.branch.last_revision(),
986+ last_rev)
987+ base_tree = self.branch.repository.revision_tree(base_rev_id)
988+
989+ nb_conflicts = merge.merge_inner(self.branch, to_tree, base_tree,
990+ this_tree=self,
991+ change_reporter=change_reporter,
992+ show_base=show_base)
993+ self.set_last_revision(revision)
994+ # TODO - dedup parents list with things merged by pull ?
995+ # reuse the tree we've updated to to set the basis:
996+ parent_trees = [(revision, to_tree)]
997+ merges = self.get_parent_ids()[1:]
998+ # Ideally we ask the tree for the trees here, that way the working
999+ # tree can decide whether to give us the entire tree or give us a
1000+ # lazy initialised tree. dirstate for instance will have the trees
1001+ # in ram already, whereas a last-revision + basis-inventory tree
1002+ # will not, but also does not need them when setting parents.
1003+ for parent in merges:
1004+ parent_trees.append(
1005+ (parent, self.branch.repository.revision_tree(parent)))
1006+ if not _mod_revision.is_null(old_tip):
1007+ parent_trees.append(
1008+ (old_tip, self.branch.repository.revision_tree(old_tip)))
1009+ self.set_parent_trees(parent_trees)
1010+ last_rev = parent_trees[0][0]
1011+ return nb_conflicts
1012
1013
1014 class WorkingTreeFormatMetaDir(bzrdir.BzrFormat, WorkingTreeFormat):
1015
1016=== modified file 'breezy/bzr/workingtree_3.py'
1017--- breezy/bzr/workingtree_3.py 2020-02-18 01:57:45 +0000
1018+++ breezy/bzr/workingtree_3.py 2020-11-19 18:29:10 +0000
1019@@ -23,6 +23,7 @@
1020 from . import (
1021 bzrdir,
1022 inventory,
1023+ transform as bzr_transform,
1024 )
1025
1026 from .. import (
1027@@ -31,7 +32,6 @@
1028 osutils,
1029 revision as _mod_revision,
1030 trace,
1031- transform,
1032 )
1033 from ..lockable_files import LockableFiles
1034 from ..lockdir import LockDir
1035@@ -228,7 +228,7 @@
1036 wt.set_parent_trees([])
1037 else:
1038 wt.set_parent_trees([(revision_id, basis_tree)])
1039- transform.build_tree(basis_tree, wt)
1040+ bzr_transform.build_tree(basis_tree, wt)
1041 for hook in MutableTree.hooks['post_build_tree']:
1042 hook(wt)
1043 finally:
1044
1045=== modified file 'breezy/bzr/workingtree_4.py'
1046--- breezy/bzr/workingtree_4.py 2020-08-22 22:46:24 +0000
1047+++ breezy/bzr/workingtree_4.py 2020-11-19 18:29:10 +0000
1048@@ -41,12 +41,12 @@
1049 revision as _mod_revision,
1050 revisiontree,
1051 trace,
1052- transform,
1053 views,
1054 )
1055 from breezy.bzr import (
1056 dirstate,
1057 generate_ids,
1058+ transform as bzr_transform,
1059 )
1060 """)
1061
1062@@ -1572,9 +1572,10 @@
1063 # delta_from_tree is safe even for DirStateRevisionTrees,
1064 # because wt4.apply_inventory_delta does not mutate the input
1065 # inventory entries.
1066- transform.build_tree(basis, wt, accelerator_tree,
1067- hardlink=hardlink,
1068- delta_from_tree=delta_from_tree)
1069+ bzr_transform.build_tree(
1070+ basis, wt, accelerator_tree,
1071+ hardlink=hardlink,
1072+ delta_from_tree=delta_from_tree)
1073 for hook in MutableTree.hooks['post_build_tree']:
1074 hook(wt)
1075 finally:
1076
1077=== modified file 'breezy/git/interrepo.py'
1078--- breezy/git/interrepo.py 2020-07-18 23:14:00 +0000
1079+++ breezy/git/interrepo.py 2020-11-19 18:29:10 +0000
1080@@ -788,3 +788,36 @@
1081 """Be compatible with GitRepository."""
1082 return (isinstance(source, RemoteGitRepository) and
1083 isinstance(target, LocalGitRepository))
1084+
1085+
1086+
1087+class InterLocalGitRemoteGitRepository(InterToGitRepository):
1088+
1089+ def fetch_refs(self, update_refs, lossy=False, overwrite=False):
1090+ """Import the gist of the ancestry of a particular revision."""
1091+ if lossy:
1092+ raise LossyPushToSameVCS(self.source, self.target)
1093+
1094+ def git_update_refs(old_refs):
1095+ ret = {}
1096+ self.old_refs = {
1097+ k: (v, None) for (k, v) in viewitems(old_refs)}
1098+ new_refs = update_refs(self.old_refs)
1099+ for name, (gitid, revid) in viewitems(new_refs):
1100+ if gitid is None:
1101+ gitid = self.source_store._lookup_revision_sha1(revid)
1102+ if not overwrite:
1103+ if remote_divergence(
1104+ old_refs.get(name), gitid, self.source_store):
1105+ raise DivergedBranches(self.source, self.target)
1106+ ret[name] = gitid
1107+ return ret
1108+ new_refs = self.target.send_pack(
1109+ git_update_refs,
1110+ self.source._git.generate_pack_data)
1111+ return None, self.old_refs, new_refs
1112+
1113+ @staticmethod
1114+ def is_compatible(source, target):
1115+ return (isinstance(source, LocalGitRepository) and
1116+ isinstance(target, RemoteGitRepository))
1117
1118=== modified file 'breezy/git/repository.py'
1119--- breezy/git/repository.py 2020-07-18 23:14:00 +0000
1120+++ breezy/git/repository.py 2020-11-19 18:29:10 +0000
1121@@ -101,6 +101,7 @@
1122 for optimiser in ['InterRemoteGitNonGitRepository',
1123 'InterLocalGitNonGitRepository',
1124 'InterLocalGitLocalGitRepository',
1125+ 'InterLocalGitRemoteGitRepository',
1126 'InterRemoteGitLocalGitRepository',
1127 'InterToLocalGitRepository',
1128 'InterToRemoteGitRepository',
1129
1130=== modified file 'breezy/git/tests/__init__.py'
1131--- breezy/git/tests/__init__.py 2020-06-23 01:02:30 +0000
1132+++ breezy/git/tests/__init__.py 2020-11-19 18:29:10 +0000
1133@@ -227,6 +227,7 @@
1134 'test_revspec',
1135 'test_roundtrip',
1136 'test_server',
1137+ 'test_transform',
1138 'test_transportgit',
1139 'test_tree',
1140 'test_unpeel_map',
1141
1142=== modified file 'breezy/git/tests/test_blackbox.py'
1143--- breezy/git/tests/test_blackbox.py 2020-05-06 02:13:25 +0000
1144+++ breezy/git/tests/test_blackbox.py 2020-11-19 18:29:10 +0000
1145@@ -175,6 +175,20 @@
1146 error,
1147 'Pushed up to revision id git(.*).\n')
1148
1149+ def test_merge(self):
1150+ self.run_bzr(['init', '--git', 'orig'])
1151+ self.build_tree_contents([('orig/a', 'orig contents\n')])
1152+ self.run_bzr(['add', 'orig/a'])
1153+ self.run_bzr(['commit', '-m', 'add orig', 'orig'])
1154+ self.run_bzr(['clone', 'orig', 'other'])
1155+ self.build_tree_contents([('other/a', 'new contents\n')])
1156+ self.run_bzr(['commit', '-m', 'modify', 'other'])
1157+ self.build_tree_contents([('orig/b', 'more\n')])
1158+ self.run_bzr(['add', 'orig/b'])
1159+ self.build_tree_contents([('orig/a', 'new contents\n')])
1160+ self.run_bzr(['commit', '-m', 'more', 'orig'])
1161+ self.run_bzr(['merge', '-d', 'orig', 'other'])
1162+
1163 def test_push_lossy_non_mainline(self):
1164 self.run_bzr(['init', '--git', 'bla'])
1165 self.run_bzr(['init', 'foo'])
1166
1167=== added file 'breezy/git/tests/test_transform.py'
1168--- breezy/git/tests/test_transform.py 1970-01-01 00:00:00 +0000
1169+++ breezy/git/tests/test_transform.py 2020-11-19 18:29:10 +0000
1170@@ -0,0 +1,41 @@
1171+# Copyright (C) 2020 Jelmer Vernooij <jelmer@jelmer.uk>
1172+#
1173+# This program is free software; you can redistribute it and/or modify
1174+# it under the terms of the GNU General Public License as published by
1175+# the Free Software Foundation; either version 2 of the License, or
1176+# (at your option) any later version.
1177+#
1178+# This program is distributed in the hope that it will be useful,
1179+# but WITHOUT ANY WARRANTY; without even the implied warranty of
1180+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
1181+# GNU General Public License for more details.
1182+#
1183+# You should have received a copy of the GNU General Public License
1184+# along with this program; if not, write to the Free Software
1185+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
1186+
1187+"""Tests for tree transform."""
1188+
1189+from __future__ import absolute_import
1190+
1191+import os
1192+
1193+from ...transform import ROOT_PARENT, conflict_pass, resolve_conflicts
1194+from . import TestCaseWithTransport
1195+
1196+
1197+class GitTransformTests(TestCaseWithTransport):
1198+
1199+ def test_directory_exists(self):
1200+ tree = self.make_branch_and_tree('.', format='git')
1201+ tt = tree.transform()
1202+ dir1 = tt.new_directory('dir', ROOT_PARENT)
1203+ tt.new_file('name1', dir1, [b'content1'])
1204+ dir2 = tt.new_directory('dir', ROOT_PARENT)
1205+ tt.new_file('name2', dir2, [b'content2'])
1206+ raw_conflicts = resolve_conflicts(
1207+ tt, None, lambda t, c: conflict_pass(t, c))
1208+ conflicts = tt.cook_conflicts(raw_conflicts)
1209+ self.assertEqual([], list(conflicts))
1210+ tt.apply()
1211+ self.assertEqual(set(['name1', 'name2']), set(os.listdir('dir')))
1212
1213=== modified file 'breezy/git/transform.py'
1214--- breezy/git/transform.py 2020-08-22 22:46:24 +0000
1215+++ breezy/git/transform.py 2020-11-19 18:29:10 +0000
1216@@ -300,7 +300,6 @@
1217
1218 def _parent_loops(self):
1219 """No entry should be its own ancestor"""
1220- conflicts = []
1221 for trans_id in self._new_parent:
1222 seen = set()
1223 parent_id = trans_id
1224@@ -311,28 +310,25 @@
1225 except KeyError:
1226 break
1227 if parent_id == trans_id:
1228- conflicts.append(('parent loop', trans_id))
1229+ yield ('parent loop', trans_id)
1230 if parent_id in seen:
1231 break
1232- return conflicts
1233
1234 def _improper_versioning(self):
1235 """Cannot version a file with no contents, or a bad type.
1236
1237 However, existing entries with no contents are okay.
1238 """
1239- conflicts = []
1240 for trans_id in self._versioned:
1241 kind = self.final_kind(trans_id)
1242 if kind == 'symlink' and not self._tree.supports_symlinks():
1243 # Ignore symlinks as they are not supported on this platform
1244 continue
1245 if kind is None:
1246- conflicts.append(('versioning no contents', trans_id))
1247+ yield ('versioning no contents', trans_id)
1248 continue
1249 if not self._tree.versionable_kind(kind):
1250- conflicts.append(('versioning bad kind', trans_id, kind))
1251- return conflicts
1252+ yield ('versioning bad kind', trans_id, kind)
1253
1254 def _executability_conflicts(self):
1255 """Check for bad executability changes.
1256@@ -342,31 +338,25 @@
1257 2. only files can be executable. (The execute bit on a directory
1258 does not indicate searchability)
1259 """
1260- conflicts = []
1261 for trans_id in self._new_executability:
1262 if not self.final_is_versioned(trans_id):
1263- conflicts.append(('unversioned executability', trans_id))
1264+ yield ('unversioned executability', trans_id)
1265 else:
1266 if self.final_kind(trans_id) != "file":
1267- conflicts.append(('non-file executability', trans_id))
1268- return conflicts
1269+ yield ('non-file executability', trans_id)
1270
1271 def _overwrite_conflicts(self):
1272 """Check for overwrites (not permitted on Win32)"""
1273- conflicts = []
1274 for trans_id in self._new_contents:
1275 if self.tree_kind(trans_id) is None:
1276 continue
1277 if trans_id not in self._removed_contents:
1278- conflicts.append(('overwrite', trans_id,
1279- self.final_name(trans_id)))
1280- return conflicts
1281+ yield ('overwrite', trans_id, self.final_name(trans_id))
1282
1283 def _duplicate_entries(self, by_parent):
1284 """No directory may have two entries with the same name."""
1285- conflicts = []
1286 if (self._new_name, self._new_parent) == ({}, {}):
1287- return conflicts
1288+ return
1289 for children in by_parent.values():
1290 name_ids = []
1291 for child_tid in children:
1292@@ -384,15 +374,12 @@
1293 if kind is None and not self.final_is_versioned(trans_id):
1294 continue
1295 if name == last_name:
1296- conflicts.append(('duplicate', last_trans_id, trans_id,
1297- name))
1298+ yield ('duplicate', last_trans_id, trans_id, name)
1299 last_name = name
1300 last_trans_id = trans_id
1301- return conflicts
1302
1303 def _parent_type_conflicts(self, by_parent):
1304 """Children must have a directory parent"""
1305- conflicts = []
1306 for parent_id, children in by_parent.items():
1307 if parent_id == ROOT_PARENT:
1308 continue
1309@@ -408,11 +395,10 @@
1310 kind = self.final_kind(parent_id)
1311 if kind is None:
1312 # The directory will be deleted
1313- conflicts.append(('missing parent', parent_id))
1314+ yield ('missing parent', parent_id)
1315 elif kind != "directory":
1316 # Meh, we need a *directory* to put something in it
1317- conflicts.append(('non-directory parent', parent_id))
1318- return conflicts
1319+ yield ('non-directory parent', parent_id)
1320
1321 def _set_executability(self, path, trans_id):
1322 """Set the executability of versioned files """
1323@@ -745,7 +731,7 @@
1324 """Cancel the creation of new file contents."""
1325 raise NotImplementedError(self.cancel_creation)
1326
1327- def apply(self, no_conflicts=False, precomputed_delta=None, _mover=None):
1328+ def apply(self, no_conflicts=False, _mover=None):
1329 """Apply all changes to the inventory and filesystem.
1330
1331 If filesystem or inventory conflicts are present, MalformedTransform
1332@@ -755,8 +741,6 @@
1333
1334 :param no_conflicts: if True, the caller guarantees there are no
1335 conflicts, so no check is made.
1336- :param precomputed_delta: An inventory delta to use instead of
1337- calculating one.
1338 :param _mover: Supply an alternate FileMover, for testing
1339 """
1340 raise NotImplementedError(self.apply)
1341
1342=== modified file 'breezy/git/workingtree.py'
1343--- breezy/git/workingtree.py 2020-08-22 22:46:24 +0000
1344+++ breezy/git/workingtree.py 2020-11-19 18:29:10 +0000
1345@@ -1448,6 +1448,132 @@
1346 config.write_to_path(path)
1347 self.add('.gitmodules')
1348
1349+ _marker = object()
1350+
1351+ def update(self, change_reporter=None, possible_transports=None,
1352+ revision=None, old_tip=_marker, show_base=False):
1353+ """Update a working tree along its branch.
1354+
1355+ This will update the branch if it's bound too, which means we have
1356+ multiple trees involved:
1357+
1358+ - The new basis tree of the master.
1359+ - The old basis tree of the branch.
1360+ - The old basis tree of the working tree.
1361+ - The current working tree state.
1362+
1363+ Pathologically, all of these may be different, and non-ancestors of each
1364+ other. Conceptually we want to:
1365+
1366+ - Preserve the wt.basis->wt.state changes
1367+ - Transform the wt.basis to the new master basis.
1368+ - Apply a merge of the old branch basis to get any 'local' changes from
1369+ it into the tree.
1370+ - Restore the wt.basis->wt.state changes.
1371+
1372+ There isn't a single operation at the moment to do that, so we:
1373+
1374+ - Merge current state -> basis tree of the master w.r.t. the old tree
1375+ basis.
1376+ - Do a 'normal' merge of the old branch basis if it is relevant.
1377+
1378+ :param revision: The target revision to update to. Must be in the
1379+ revision history.
1380+ :param old_tip: If branch.update() has already been run, the value it
1381+ returned (old tip of the branch or None). _marker is used
1382+ otherwise.
1383+ """
1384+ if self.branch.get_bound_location() is not None:
1385+ self.lock_write()
1386+ update_branch = (old_tip is self._marker)
1387+ else:
1388+ self.lock_tree_write()
1389+ update_branch = False
1390+ try:
1391+ if update_branch:
1392+ old_tip = self.branch.update(possible_transports)
1393+ else:
1394+ if old_tip is self._marker:
1395+ old_tip = None
1396+ return self._update_tree(old_tip, change_reporter, revision, show_base)
1397+ finally:
1398+ self.unlock()
1399+
1400+ def _update_tree(self, old_tip=None, change_reporter=None, revision=None,
1401+ show_base=False):
1402+ """Update a tree to the master branch.
1403+
1404+ :param old_tip: if supplied, the previous tip revision of the branch,
1405+ before it was changed to the master branch's tip.
1406+ """
1407+ # here if old_tip is not None, it is the old tip of the branch before
1408+ # it was updated from the master branch. This should become a pending
1409+ # merge in the working tree to preserve the user's existing work. We
1410+ # can't set that until we update the working tree's last revision to be
1411+ # one from the new branch, because it will just get absorbed by the
1412+ # parent de-duplication logic.
1413+ #
1414+ # We MUST save it even if an error occurs, because otherwise the users
1415+ # local work is unreferenced and will appear to have been lost.
1416+ #
1417+ with self.lock_tree_write():
1418+ from .. import merge
1419+ nb_conflicts = 0
1420+ try:
1421+ last_rev = self.get_parent_ids()[0]
1422+ except IndexError:
1423+ last_rev = _mod_revision.NULL_REVISION
1424+ if revision is None:
1425+ revision = self.branch.last_revision()
1426+
1427+ old_tip = old_tip or _mod_revision.NULL_REVISION
1428+
1429+ if not _mod_revision.is_null(old_tip) and old_tip != last_rev:
1430+ # the branch we are bound to was updated
1431+ # merge those changes in first
1432+ base_tree = self.basis_tree()
1433+ other_tree = self.branch.repository.revision_tree(old_tip)
1434+ nb_conflicts = merge.merge_inner(self.branch, other_tree,
1435+ base_tree, this_tree=self,
1436+ change_reporter=change_reporter,
1437+ show_base=show_base)
1438+ if nb_conflicts:
1439+ self.add_parent_tree((old_tip, other_tree))
1440+ return nb_conflicts
1441+
1442+ if last_rev != _mod_revision.ensure_null(revision):
1443+ to_tree = self.branch.repository.revision_tree(revision)
1444+
1445+ # determine the branch point
1446+ graph = self.branch.repository.get_graph()
1447+ base_rev_id = graph.find_unique_lca(self.branch.last_revision(),
1448+ last_rev)
1449+ base_tree = self.branch.repository.revision_tree(base_rev_id)
1450+
1451+ nb_conflicts = merge.merge_inner(self.branch, to_tree, base_tree,
1452+ this_tree=self,
1453+ change_reporter=change_reporter,
1454+ show_base=show_base)
1455+ self.set_last_revision(revision)
1456+ # TODO - dedup parents list with things merged by pull ?
1457+ # reuse the tree we've updated to to set the basis:
1458+ parent_trees = [(revision, to_tree)]
1459+ merges = self.get_parent_ids()[1:]
1460+ # Ideally we ask the tree for the trees here, that way the working
1461+ # tree can decide whether to give us the entire tree or give us a
1462+ # lazy initialised tree. dirstate for instance will have the trees
1463+ # in ram already, whereas a last-revision + basis-inventory tree
1464+ # will not, but also does not need them when setting parents.
1465+ for parent in merges:
1466+ parent_trees.append(
1467+ (parent, self.branch.repository.revision_tree(parent)))
1468+ if not _mod_revision.is_null(old_tip):
1469+ parent_trees.append(
1470+ (old_tip, self.branch.repository.revision_tree(old_tip)))
1471+ self.set_parent_trees(parent_trees)
1472+ last_rev = parent_trees[0][0]
1473+ return nb_conflicts
1474+
1475
1476 class GitWorkingTreeFormat(workingtree.WorkingTreeFormat):
1477
1478
1479=== modified file 'breezy/patches.py'
1480--- breezy/patches.py 2020-07-18 23:14:00 +0000
1481+++ breezy/patches.py 2020-11-19 18:29:10 +0000
1482@@ -417,7 +417,12 @@
1483
1484 for line in iter_lines:
1485 if line.startswith(b'=== '):
1486- if len(saved_lines) > 0:
1487+ if allow_dirty and beginning:
1488+ # Patches can have "junk" at the beginning
1489+ # Stripping junk from the end of patches is handled when we
1490+ # parse the patch
1491+ pass
1492+ elif len(saved_lines) > 0:
1493 if keep_dirty and len(dirty_head) > 0:
1494 yield {'saved_lines': saved_lines,
1495 'dirty_head': dirty_head}
1496
1497=== modified file 'breezy/plugins/github/hoster.py'
1498--- breezy/plugins/github/hoster.py 2020-08-10 15:00:17 +0000
1499+++ breezy/plugins/github/hoster.py 2020-11-19 18:29:10 +0000
1500@@ -524,7 +524,7 @@
1501 def iter_instances(cls):
1502 yield cls(get_transport(API_GITHUB_URL))
1503
1504- def iter_my_proposals(self, status='open'):
1505+ def iter_my_proposals(self, status='open', author=None):
1506 query = ['is:pr']
1507 if status == 'open':
1508 query.append('is:open')
1509@@ -535,7 +535,9 @@
1510 query.append('is:closed')
1511 elif status == 'merged':
1512 query.append('is:merged')
1513- query.append('author:%s' % self.current_user['login'])
1514+ if author is None:
1515+ author = self.current_user['login']
1516+ query.append('author:%s' % author)
1517 for issue in self._search_issues(query=' '.join(query)):
1518 url = issue['pull_request']['url']
1519 response = self._api_request('GET', url)
1520@@ -546,8 +548,12 @@
1521 def get_proposal_by_url(self, url):
1522 raise UnsupportedHoster(url)
1523
1524- def iter_my_forks(self):
1525- response = self._api_request('GET', '/user/repos')
1526+ def iter_my_forks(self, owner=None):
1527+ if owner:
1528+ path = '/users/%s/repos' % owner
1529+ else:
1530+ path = '/user/repos'
1531+ response = self._api_request('GET', path)
1532 if response.status != 200:
1533 raise UnexpectedHttpStatus(self.transport.user_url, response.status)
1534 for project in json.loads(response.text):
1535
1536=== modified file 'breezy/plugins/gitlab/hoster.py'
1537--- breezy/plugins/gitlab/hoster.py 2020-08-10 15:00:17 +0000
1538+++ breezy/plugins/gitlab/hoster.py 2020-11-19 18:29:10 +0000
1539@@ -18,6 +18,7 @@
1540
1541 import json
1542 import os
1543+import re
1544 import time
1545
1546 from ... import (
1547@@ -122,8 +123,21 @@
1548 self.project = project
1549
1550
1551-class MergeRequestExists(Exception):
1552- """Raised when a merge requests already exists."""
1553+class MergeRequestConflict(Exception):
1554+ """Raised when a merge request conflicts."""
1555+
1556+ def __init__(self, reason):
1557+ self.reason = reason
1558+
1559+
1560+class ProjectCreationTimeout(errors.BzrError):
1561+
1562+ _fmt = ("Timeout (%(timeout)ds) while waiting for project "
1563+ "%(project)s to be created.")
1564+
1565+ def __init__(self, project, timeout):
1566+ self.project = project
1567+ self.timeout = timeout
1568
1569
1570 def default_config_path():
1571@@ -338,6 +352,17 @@
1572 def base_hostname(self):
1573 return urlutils.parse_url(self.base_url)[3]
1574
1575+ def _find_correct_project_name(self, path):
1576+ try:
1577+ resp = self.transport.request(
1578+ 'GET', urlutils.join(self.base_url, path),
1579+ headers=self.headers)
1580+ except errors.RedirectRequested as e:
1581+ return urlutils.parse_url(e.target)[-1].strip('/')
1582+ if resp.status != 200:
1583+ _unexpected_status(path, resp)
1584+ return None
1585+
1586 def _api_request(self, method, path, fields=None, body=None):
1587 return self.transport.request(
1588 method, urlutils.join(self.base_url, 'api', 'v4', path),
1589@@ -369,10 +394,14 @@
1590 return ret[0]
1591 _unexpected_status(path, response)
1592
1593- def _get_project(self, project_name):
1594+ def _get_project(self, project_name, _redirect_checked=False):
1595 path = 'projects/%s' % urlutils.quote(str(project_name), '')
1596 response = self._api_request('GET', path)
1597 if response.status == 404:
1598+ if not _redirect_checked:
1599+ project_name = self._find_correct_project_name(project_name)
1600+ if project_name is not None:
1601+ return self._get_project(project_name, _redirect_checked=True)
1602 raise NoSuchProject(project_name)
1603 if response.status == 200:
1604 return json.loads(response.data)
1605@@ -399,7 +428,8 @@
1606 while project['import_status'] not in ('finished', 'none'):
1607 mutter('import status is %s', project['import_status'])
1608 if time.time() > deadline:
1609- raise Exception('timeout waiting for project to become available')
1610+ raise ProjectCreationTimeout(
1611+ project['path_with_namespace'], timeout)
1612 time.sleep(interval)
1613 project = self._get_project(project['path_with_namespace'])
1614 return project
1615@@ -431,7 +461,7 @@
1616 for entry in json.loads(response.data):
1617 yield entry
1618
1619- def _list_merge_requests(self, owner=None, project=None, state=None):
1620+ def _list_merge_requests(self, author=None, project=None, state=None):
1621 if project is not None:
1622 path = 'projects/%s/merge_requests' % urlutils.quote(str(project), '')
1623 else:
1624@@ -439,8 +469,8 @@
1625 parameters = {}
1626 if state:
1627 parameters['state'] = state
1628- if owner:
1629- parameters['owner_id'] = urlutils.quote(owner, '')
1630+ if author:
1631+ parameters['author_username'] = urlutils.quote(author, '')
1632 return self._list_paged(path, parameters, per_page=DEFAULT_PAGE_SIZE)
1633
1634 def _get_merge_request(self, project, merge_id):
1635@@ -497,7 +527,7 @@
1636 if response.status == 403:
1637 raise errors.PermissionDenied(response.text)
1638 if response.status == 409:
1639- raise MergeRequestExists()
1640+ raise MergeRequestConflict(json.loads(response.data))
1641 if response.status == 422:
1642 data = json.loads(response.data)
1643 raise GitLabUnprocessable(data['error'])
1644@@ -514,15 +544,19 @@
1645 def publish_derived(self, local_branch, base_branch, name, project=None,
1646 owner=None, revision_id=None, overwrite=False,
1647 allow_lossy=True, tag_selector=None):
1648- (host, base_project, base_branch_name) = parse_gitlab_branch_url(base_branch)
1649+ (host, base_project_name, base_branch_name) = parse_gitlab_branch_url(base_branch)
1650+ if owner is None:
1651+ owner = base_branch.get_config_stack().get('fork-namespace')
1652 if owner is None:
1653 owner = self.get_current_user()
1654+ base_project = self._get_project(base_project_name)
1655 if project is None:
1656- project = self._get_project(base_project)['path']
1657+ project = base_project['path']
1658 try:
1659 target_project = self._get_project('%s/%s' % (owner, project))
1660 except NoSuchProject:
1661- target_project = self._fork_project(base_project, owner=owner)
1662+ target_project = self._fork_project(
1663+ base_project['path_with_namespace'], owner=owner)
1664 remote_repo_url = git_url_to_bzr_url(target_project['ssh_url_to_repo'])
1665 remote_dir = controldir.ControlDir.open(remote_repo_url)
1666 try:
1667@@ -615,14 +649,17 @@
1668 get_transport(credentials['url']),
1669 private_token=credentials.get('private_token'))
1670
1671- def iter_my_proposals(self, status='open'):
1672+ def iter_my_proposals(self, status='open', author=None):
1673+ if author is None:
1674+ author = self.get_current_user()
1675 state = mp_status_to_status(status)
1676- for mp in self._list_merge_requests(
1677- owner=self.get_current_user(), state=state):
1678+ for mp in self._list_merge_requests(author=author, state=state):
1679 yield GitLabMergeProposal(self, mp)
1680
1681- def iter_my_forks(self):
1682- for project in self._list_projects(owner=self.get_current_user()):
1683+ def iter_my_forks(self, owner=None):
1684+ if owner is None:
1685+ owner = self.get_current_user()
1686+ for project in self._list_projects(owner=owner):
1687 base_project = project.get('forked_from_project')
1688 if not base_project:
1689 continue
1690@@ -717,8 +754,18 @@
1691 kwargs['assignee_ids'].append(user['id'])
1692 try:
1693 merge_request = self.gl._create_mergerequest(**kwargs)
1694- except MergeRequestExists:
1695- raise MergeProposalExists(self.source_branch.user_url)
1696+ except MergeRequestConflict as e:
1697+ m = re.fullmatch(
1698+ r'Another open merge request already exists for '
1699+ r'this source branch: \!([0-9]+)',
1700+ e.reason['message'][0])
1701+ if m:
1702+ merge_id = int(m.group(1))
1703+ mr = self.gl._get_merge_request(
1704+ target_project['path_with_namespace'], merge_id)
1705+ raise MergeProposalExists(
1706+ self.source_branch.user_url, GitLabMergeProposal(self.gl, mr))
1707+ raise Exception('conflict: %r' % e.reason)
1708 except GitLabUnprocessable as e:
1709 if e.error == [
1710 "Source project is not a fork of the target project"]:
1711
1712=== modified file 'breezy/plugins/launchpad/hoster.py'
1713--- breezy/plugins/launchpad/hoster.py 2020-08-10 15:00:17 +0000
1714+++ breezy/plugins/launchpad/hoster.py 2020-11-19 18:29:10 +0000
1715@@ -476,15 +476,25 @@
1716 if creds is not None:
1717 yield cls(service_root)
1718
1719- def iter_my_proposals(self, status='open'):
1720+ def iter_my_proposals(self, status='open', author=None):
1721 statuses = status_to_lp_mp_statuses(status)
1722- for mp in self.launchpad.me.getMergeProposals(status=statuses):
1723+ if author is None:
1724+ author_obj = self.launchpad.me
1725+ else:
1726+ author_obj = self._getPerson(author)
1727+ for mp in author_obj.getMergeProposals(status=statuses):
1728 yield LaunchpadMergeProposal(mp)
1729
1730- def iter_my_forks(self):
1731+ def iter_my_forks(self, owner=None):
1732 # Launchpad doesn't really have the concept of "forks"
1733 return iter([])
1734
1735+ def _getPerson(self, person):
1736+ if '@' in person:
1737+ return self.launchpad.people.getByEmail(email=person)
1738+ else:
1739+ return self.launchpad.people[person]
1740+
1741 def get_proposal_by_url(self, url):
1742 # Launchpad doesn't have a way to find a merge proposal by URL.
1743 (scheme, user, password, host, port, path) = urlutils.parse_url(
1744@@ -602,11 +612,7 @@
1745 else:
1746 reviewer_objs = []
1747 for reviewer in reviewers:
1748- if '@' in reviewer:
1749- reviewer_obj = self.launchpad.people.getByEmail(email=reviewer)
1750- else:
1751- reviewer_obj = self.launchpad.people[reviewer]
1752- reviewer_objs.append(reviewer_obj)
1753+ reviewer_objs.append(self.lp_host._getPerson(reviewer))
1754 try:
1755 mp = _call_webservice(
1756 self.source_branch_lp.createMergeProposal,
1757
1758=== modified file 'breezy/plugins/propose/cmds.py'
1759--- breezy/plugins/propose/cmds.py 2020-08-10 15:00:17 +0000
1760+++ breezy/plugins/propose/cmds.py 2020-11-19 18:29:10 +0000
1761@@ -143,7 +143,7 @@
1762 RegistryOption(
1763 'hoster',
1764 help='Use the hoster.',
1765- lazy_registry=('breezy.plugins.propose.propose', 'hosters')),
1766+ lazy_registry=('breezy.propose', 'hosters')),
1767 ListOption('reviewers', short_name='R', type=str,
1768 help='Requested reviewers.'),
1769 Option('name', help='Name of the new remote branch.', type=str),
1770@@ -161,6 +161,7 @@
1771 help='Allow collaboration from target branch maintainer(s)'),
1772 Option('allow-empty',
1773 help='Do not prevent empty merge proposals.'),
1774+ Option('overwrite', help="Overwrite existing commits."),
1775 ]
1776 takes_args = ['submit_branch?']
1777
1778@@ -169,7 +170,7 @@
1779 def run(self, submit_branch=None, directory='.', hoster=None,
1780 reviewers=None, name=None, no_allow_lossy=False, description=None,
1781 labels=None, prerequisite=None, commit_message=None, wip=False,
1782- allow_collaboration=False, allow_empty=False):
1783+ allow_collaboration=False, allow_empty=False, overwrite=False):
1784 tree, branch, relpath = (
1785 controldir.ControlDir.open_containing_tree_or_branch(directory))
1786 if submit_branch is None:
1787@@ -189,7 +190,8 @@
1788 if name is None:
1789 name = branch_name(branch)
1790 remote_branch, public_branch_url = hoster.publish_derived(
1791- branch, target, name=name, allow_lossy=not no_allow_lossy)
1792+ branch, target, name=name, allow_lossy=not no_allow_lossy,
1793+ overwrite=overwrite)
1794 branch.set_push_location(remote_branch.user_url)
1795 branch.set_submit_branch(target.user_url)
1796 note(gettext('Published branch to %s') % public_branch_url)
1797@@ -253,6 +255,7 @@
1798
1799 hidden = True
1800
1801+ takes_args = ['base-url?']
1802 takes_options = [
1803 'verbose',
1804 RegistryOption.from_kwargs(
1805@@ -264,18 +267,32 @@
1806 all='All merge proposals',
1807 open='Open merge proposals',
1808 merged='Merged merge proposals',
1809- closed='Closed merge proposals')]
1810-
1811- def run(self, status='open', verbose=False):
1812- for instance in _mod_propose.iter_hoster_instances():
1813+ closed='Closed merge proposals'),
1814+ RegistryOption(
1815+ 'hoster',
1816+ help='Use the hoster.',
1817+ lazy_registry=('breezy.propose', 'hosters')),
1818+ ]
1819+
1820+ def run(self, status='open', verbose=False, hoster=None, base_url=None):
1821+
1822+ for instance in _mod_propose.iter_hoster_instances(hoster=hoster):
1823+ if base_url is not None and instance.base_url != base_url:
1824+ continue
1825 try:
1826 for mp in instance.iter_my_proposals(status=status):
1827 self.outf.write('%s\n' % mp.url)
1828 if verbose:
1829- self.outf.write(
1830- '(Merging %s into %s)\n' %
1831- (mp.get_source_branch_url(),
1832- mp.get_target_branch_url()))
1833+ source_branch_url = mp.get_source_branch_url()
1834+ if source_branch_url:
1835+ self.outf.write(
1836+ '(Merging %s into %s)\n' %
1837+ (source_branch_url,
1838+ mp.get_target_branch_url()))
1839+ else:
1840+ self.outf.write(
1841+ '(Merging into %s)\n' %
1842+ mp.get_target_branch_url())
1843 description = mp.get_description()
1844 if description:
1845 self.outf.writelines(
1846@@ -307,10 +324,17 @@
1847 for instance in _mod_propose.iter_hoster_instances():
1848 current_user = instance.get_current_user()
1849 if current_user is not None:
1850- self.outf.write(
1851- gettext('%s (%s) - user: %s (%s)\n') % (
1852- instance.name, instance.base_url,
1853- current_user, instance.get_user_url(current_user)))
1854+ current_user_url = instance.get_user_url(current_user)
1855+ if current_user_url is not None:
1856+ self.outf.write(
1857+ gettext('%s (%s) - user: %s (%s)\n') % (
1858+ instance.name, instance.base_url,
1859+ current_user, current_user_url))
1860+ else:
1861+ self.outf.write(
1862+ gettext('%s (%s) - user: %s\n') % (
1863+ instance.name, instance.base_url,
1864+ current_user))
1865 else:
1866 self.outf.write(
1867 gettext('%s (%s) - not logged in\n') % (
1868
1869=== modified file 'breezy/plugins/weave_fmt/workingtree.py'
1870--- breezy/plugins/weave_fmt/workingtree.py 2020-08-10 15:00:17 +0000
1871+++ breezy/plugins/weave_fmt/workingtree.py 2020-11-19 18:29:10 +0000
1872@@ -24,11 +24,11 @@
1873 lock,
1874 osutils,
1875 revision as _mod_revision,
1876- transform,
1877 )
1878 from ...bzr import (
1879 conflicts as _mod_bzr_conflicts,
1880 inventory,
1881+ transform as bzr_transform,
1882 xml5,
1883 )
1884 from ...mutabletree import MutableTree
1885@@ -113,7 +113,7 @@
1886 else:
1887 parent_trees = [(revision_id, basis_tree)]
1888 wt.set_parent_trees(parent_trees)
1889- transform.build_tree(basis_tree, wt)
1890+ bzr_transform.build_tree(basis_tree, wt)
1891 for hook in MutableTree.hooks['post_build_tree']:
1892 hook(wt)
1893 return wt
1894
1895=== modified file 'breezy/propose.py'
1896--- breezy/propose.py 2020-08-10 15:00:17 +0000
1897+++ breezy/propose.py 2020-11-19 18:29:10 +0000
1898@@ -16,6 +16,8 @@
1899
1900 """Helper functions for proposing merges."""
1901
1902+import re
1903+
1904 from . import (
1905 errors,
1906 hooks,
1907@@ -37,9 +39,10 @@
1908
1909 _fmt = "A merge proposal already exists: %(url)s."
1910
1911- def __init__(self, url):
1912+ def __init__(self, url, existing_proposal=None):
1913 errors.BzrError.__init__(self)
1914 self.url = url
1915+ self.existing_proposal = existing_proposal
1916
1917
1918 class UnsupportedHoster(errors.BzrError):
1919@@ -338,20 +341,22 @@
1920 """Create a Hoster object if this hoster knows about a URL."""
1921 raise NotImplementedError(cls.probe_from_url)
1922
1923- def iter_my_proposals(self, status='open'):
1924+ def iter_my_proposals(self, status='open', author=None):
1925 """Iterate over the proposals created by the currently logged in user.
1926
1927 :param status: Only yield proposals with this status
1928 (one of: 'open', 'closed', 'merged', 'all')
1929+ :param author: Name of author to query (defaults to current user)
1930 :return: Iterator over MergeProposal objects
1931 :raise HosterLoginRequired: Action requires a hoster login, but none is
1932 known.
1933 """
1934 raise NotImplementedError(self.iter_my_proposals)
1935
1936- def iter_my_forks(self):
1937+ def iter_my_forks(self, owner=None):
1938 """Iterate over the currently logged in users' forks.
1939
1940+ :param owner: Name of owner to query (defaults to current user)
1941 :return: Iterator over project_name
1942 """
1943 raise NotImplementedError(self.iter_my_forks)
1944@@ -383,7 +388,13 @@
1945
1946 def determine_title(description):
1947 """Determine the title for a merge proposal based on full description."""
1948- return description.splitlines()[0].split('.')[0]
1949+ firstline = description.splitlines()[0]
1950+ try:
1951+ i = firstline.index('. ')
1952+ except ValueError:
1953+ return firstline.rstrip('.')
1954+ else:
1955+ return firstline[:i]
1956
1957
1958 def get_hoster(branch, possible_hosters=None):
1959@@ -410,12 +421,16 @@
1960 raise UnsupportedHoster(branch)
1961
1962
1963-def iter_hoster_instances():
1964+def iter_hoster_instances(hoster=None):
1965 """Iterate over all known hoster instances.
1966
1967 :return: Iterator over Hoster instances
1968 """
1969- for name, hoster_cls in hosters.items():
1970+ if hoster is None:
1971+ hoster_clses = [hoster_cls for name, hoster_cls in hosters.items()]
1972+ else:
1973+ hoster_clses = [hoster]
1974+ for hoster_cls in hoster_clses:
1975 for instance in hoster_cls.iter_instances():
1976 yield instance
1977
1978
1979=== modified file 'breezy/tests/per_tree/__init__.py'
1980--- breezy/tests/per_tree/__init__.py 2020-07-05 13:18:03 +0000
1981+++ breezy/tests/per_tree/__init__.py 2020-11-19 18:29:10 +0000
1982@@ -25,6 +25,8 @@
1983 - tests/test_workingtree.py
1984 """
1985
1986+import contextlib
1987+
1988 from breezy import (
1989 errors,
1990 tests,
1991@@ -83,7 +85,9 @@
1992 tree.lock_read()
1993 testcase.addCleanup(tree.unlock)
1994 pp = None
1995- transform._prepare_revert_transform(basis, tree, tt, None, False, None,
1996+ es = contextlib.ExitStack()
1997+ testcase.addCleanup(es.close)
1998+ transform._prepare_revert_transform(es, basis, tree, tt, None, False, None,
1999 basis, {})
2000 preview_tree = tt.get_preview_tree()
2001 preview_tree.set_parent_ids(tree.get_parent_ids())
2002
2003=== modified file 'breezy/tests/per_workingtree/test_transform.py'
2004--- breezy/tests/per_workingtree/test_transform.py 2020-08-22 22:46:24 +0000
2005+++ breezy/tests/per_workingtree/test_transform.py 2020-11-19 18:29:10 +0000
2006@@ -61,7 +61,6 @@
2007 create_from_tree,
2008 FinalPaths,
2009 resolve_conflicts,
2010- resolve_checkout,
2011 ROOT_PARENT,
2012 ImmortalLimbo,
2013 MalformedTransform,
2014@@ -71,6 +70,7 @@
2015 )
2016
2017 from breezy.bzr.inventorytree import InventoryTreeChange
2018+from breezy.bzr.transform import resolve_checkout
2019
2020 from breezy.tests.per_workingtree import TestCaseWithWorkingTree
2021
2022
2023=== modified file 'breezy/tests/test_propose.py'
2024--- breezy/tests/test_propose.py 2020-01-15 23:33:01 +0000
2025+++ breezy/tests/test_propose.py 2020-11-19 18:29:10 +0000
2026@@ -117,3 +117,13 @@
2027
2028 And here are some more details.
2029 """))
2030+ self.assertEqual('Make some change', determine_title("""\
2031+Make some change. And another one.
2032+
2033+With details.
2034+"""))
2035+ self.assertEqual('Release version 5.1', determine_title("""\
2036+Release version 5.1
2037+
2038+And here are some more details.
2039+"""))
2040
2041=== modified file 'breezy/tests/test_transform.py'
2042--- breezy/tests/test_transform.py 2020-08-10 15:00:17 +0000
2043+++ breezy/tests/test_transform.py 2020-11-19 18:29:10 +0000
2044@@ -70,12 +70,10 @@
2045 SymlinkFeature,
2046 )
2047 from ..transform import (
2048- build_tree,
2049 create_from_tree,
2050 _FileMover,
2051 FinalPaths,
2052 resolve_conflicts,
2053- resolve_checkout,
2054 ROOT_PARENT,
2055 ImmortalLimbo,
2056 MalformedTransform,
2057@@ -293,410 +291,6 @@
2058 self.assertEqual(this.wt.id2path(b'i'), pathjoin('b/i1.OTHER'))
2059
2060
2061-class TestBuildTree(tests.TestCaseWithTransport):
2062-
2063- def test_build_tree_with_symlinks(self):
2064- self.requireFeature(SymlinkFeature)
2065- os.mkdir('a')
2066- a = ControlDir.create_standalone_workingtree('a')
2067- os.mkdir('a/foo')
2068- with open('a/foo/bar', 'wb') as f:
2069- f.write(b'contents')
2070- os.symlink('a/foo/bar', 'a/foo/baz')
2071- a.add(['foo', 'foo/bar', 'foo/baz'])
2072- a.commit('initial commit')
2073- b = ControlDir.create_standalone_workingtree('b')
2074- basis = a.basis_tree()
2075- basis.lock_read()
2076- self.addCleanup(basis.unlock)
2077- build_tree(basis, b)
2078- self.assertIs(os.path.isdir('b/foo'), True)
2079- with open('b/foo/bar', 'rb') as f:
2080- self.assertEqual(f.read(), b"contents")
2081- self.assertEqual(os.readlink('b/foo/baz'), 'a/foo/bar')
2082-
2083- def test_build_with_references(self):
2084- tree = self.make_branch_and_tree('source',
2085- format='development-subtree')
2086- subtree = self.make_branch_and_tree('source/subtree',
2087- format='development-subtree')
2088- tree.add_reference(subtree)
2089- tree.commit('a revision')
2090- tree.branch.create_checkout('target')
2091- self.assertPathExists('target')
2092- self.assertPathExists('target/subtree')
2093-
2094- def test_file_conflict_handling(self):
2095- """Ensure that when building trees, conflict handling is done"""
2096- source = self.make_branch_and_tree('source')
2097- target = self.make_branch_and_tree('target')
2098- self.build_tree(['source/file', 'target/file'])
2099- source.add('file', b'new-file')
2100- source.commit('added file')
2101- build_tree(source.basis_tree(), target)
2102- self.assertEqual(
2103- [DuplicateEntry('Moved existing file to', 'file.moved',
2104- 'file', None, 'new-file')],
2105- target.conflicts())
2106- target2 = self.make_branch_and_tree('target2')
2107- with open('target2/file', 'wb') as target_file, \
2108- open('source/file', 'rb') as source_file:
2109- target_file.write(source_file.read())
2110- build_tree(source.basis_tree(), target2)
2111- self.assertEqual([], target2.conflicts())
2112-
2113- def test_symlink_conflict_handling(self):
2114- """Ensure that when building trees, conflict handling is done"""
2115- self.requireFeature(SymlinkFeature)
2116- source = self.make_branch_and_tree('source')
2117- os.symlink('foo', 'source/symlink')
2118- source.add('symlink', b'new-symlink')
2119- source.commit('added file')
2120- target = self.make_branch_and_tree('target')
2121- os.symlink('bar', 'target/symlink')
2122- build_tree(source.basis_tree(), target)
2123- self.assertEqual(
2124- [DuplicateEntry('Moved existing file to', 'symlink.moved',
2125- 'symlink', None, 'new-symlink')],
2126- target.conflicts())
2127- target = self.make_branch_and_tree('target2')
2128- os.symlink('foo', 'target2/symlink')
2129- build_tree(source.basis_tree(), target)
2130- self.assertEqual([], target.conflicts())
2131-
2132- def test_directory_conflict_handling(self):
2133- """Ensure that when building trees, conflict handling is done"""
2134- source = self.make_branch_and_tree('source')
2135- target = self.make_branch_and_tree('target')
2136- self.build_tree(['source/dir1/', 'source/dir1/file', 'target/dir1/'])
2137- source.add(['dir1', 'dir1/file'], [b'new-dir1', b'new-file'])
2138- source.commit('added file')
2139- build_tree(source.basis_tree(), target)
2140- self.assertEqual([], target.conflicts())
2141- self.assertPathExists('target/dir1/file')
2142-
2143- # Ensure contents are merged
2144- target = self.make_branch_and_tree('target2')
2145- self.build_tree(['target2/dir1/', 'target2/dir1/file2'])
2146- build_tree(source.basis_tree(), target)
2147- self.assertEqual([], target.conflicts())
2148- self.assertPathExists('target2/dir1/file2')
2149- self.assertPathExists('target2/dir1/file')
2150-
2151- # Ensure new contents are suppressed for existing branches
2152- target = self.make_branch_and_tree('target3')
2153- self.make_branch('target3/dir1')
2154- self.build_tree(['target3/dir1/file2'])
2155- build_tree(source.basis_tree(), target)
2156- self.assertPathDoesNotExist('target3/dir1/file')
2157- self.assertPathExists('target3/dir1/file2')
2158- self.assertPathExists('target3/dir1.diverted/file')
2159- self.assertEqual(
2160- [DuplicateEntry('Diverted to', 'dir1.diverted',
2161- 'dir1', 'new-dir1', None)],
2162- target.conflicts())
2163-
2164- target = self.make_branch_and_tree('target4')
2165- self.build_tree(['target4/dir1/'])
2166- self.make_branch('target4/dir1/file')
2167- build_tree(source.basis_tree(), target)
2168- self.assertPathExists('target4/dir1/file')
2169- self.assertEqual('directory', file_kind('target4/dir1/file'))
2170- self.assertPathExists('target4/dir1/file.diverted')
2171- self.assertEqual(
2172- [DuplicateEntry('Diverted to', 'dir1/file.diverted',
2173- 'dir1/file', 'new-file', None)],
2174- target.conflicts())
2175-
2176- def test_mixed_conflict_handling(self):
2177- """Ensure that when building trees, conflict handling is done"""
2178- source = self.make_branch_and_tree('source')
2179- target = self.make_branch_and_tree('target')
2180- self.build_tree(['source/name', 'target/name/'])
2181- source.add('name', b'new-name')
2182- source.commit('added file')
2183- build_tree(source.basis_tree(), target)
2184- self.assertEqual(
2185- [DuplicateEntry('Moved existing file to',
2186- 'name.moved', 'name', None, 'new-name')],
2187- target.conflicts())
2188-
2189- def test_raises_in_populated(self):
2190- source = self.make_branch_and_tree('source')
2191- self.build_tree(['source/name'])
2192- source.add('name')
2193- source.commit('added name')
2194- target = self.make_branch_and_tree('target')
2195- self.build_tree(['target/name'])
2196- target.add('name')
2197- self.assertRaises(errors.WorkingTreeAlreadyPopulated,
2198- build_tree, source.basis_tree(), target)
2199-
2200- def test_build_tree_rename_count(self):
2201- source = self.make_branch_and_tree('source')
2202- self.build_tree(['source/file1', 'source/dir1/'])
2203- source.add(['file1', 'dir1'])
2204- source.commit('add1')
2205- target1 = self.make_branch_and_tree('target1')
2206- transform_result = build_tree(source.basis_tree(), target1)
2207- self.assertEqual(2, transform_result.rename_count)
2208-
2209- self.build_tree(['source/dir1/file2'])
2210- source.add(['dir1/file2'])
2211- source.commit('add3')
2212- target2 = self.make_branch_and_tree('target2')
2213- transform_result = build_tree(source.basis_tree(), target2)
2214- # children of non-root directories should not be renamed
2215- self.assertEqual(2, transform_result.rename_count)
2216-
2217- def create_ab_tree(self):
2218- """Create a committed test tree with two files"""
2219- source = self.make_branch_and_tree('source')
2220- self.build_tree_contents([('source/file1', b'A')])
2221- self.build_tree_contents([('source/file2', b'B')])
2222- source.add(['file1', 'file2'], [b'file1-id', b'file2-id'])
2223- source.commit('commit files')
2224- source.lock_write()
2225- self.addCleanup(source.unlock)
2226- return source
2227-
2228- def test_build_tree_accelerator_tree(self):
2229- source = self.create_ab_tree()
2230- self.build_tree_contents([('source/file2', b'C')])
2231- calls = []
2232- real_source_get_file = source.get_file
2233-
2234- def get_file(path):
2235- calls.append(path)
2236- return real_source_get_file(path)
2237- source.get_file = get_file
2238- target = self.make_branch_and_tree('target')
2239- revision_tree = source.basis_tree()
2240- revision_tree.lock_read()
2241- self.addCleanup(revision_tree.unlock)
2242- build_tree(revision_tree, target, source)
2243- self.assertEqual(['file1'], calls)
2244- target.lock_read()
2245- self.addCleanup(target.unlock)
2246- self.assertEqual([], list(target.iter_changes(revision_tree)))
2247-
2248- def test_build_tree_accelerator_tree_observes_sha1(self):
2249- source = self.create_ab_tree()
2250- sha1 = osutils.sha_string(b'A')
2251- target = self.make_branch_and_tree('target')
2252- target.lock_write()
2253- self.addCleanup(target.unlock)
2254- state = target.current_dirstate()
2255- state._cutoff_time = time.time() + 60
2256- build_tree(source.basis_tree(), target, source)
2257- entry = state._get_entry(0, path_utf8=b'file1')
2258- self.assertEqual(sha1, entry[1][0][1])
2259-
2260- def test_build_tree_accelerator_tree_missing_file(self):
2261- source = self.create_ab_tree()
2262- os.unlink('source/file1')
2263- source.remove(['file2'])
2264- target = self.make_branch_and_tree('target')
2265- revision_tree = source.basis_tree()
2266- revision_tree.lock_read()
2267- self.addCleanup(revision_tree.unlock)
2268- build_tree(revision_tree, target, source)
2269- target.lock_read()
2270- self.addCleanup(target.unlock)
2271- self.assertEqual([], list(target.iter_changes(revision_tree)))
2272-
2273- def test_build_tree_accelerator_wrong_kind(self):
2274- self.requireFeature(SymlinkFeature)
2275- source = self.make_branch_and_tree('source')
2276- self.build_tree_contents([('source/file1', b'')])
2277- self.build_tree_contents([('source/file2', b'')])
2278- source.add(['file1', 'file2'], [b'file1-id', b'file2-id'])
2279- source.commit('commit files')
2280- os.unlink('source/file2')
2281- self.build_tree_contents([('source/file2/', b'C')])
2282- os.unlink('source/file1')
2283- os.symlink('file2', 'source/file1')
2284- calls = []
2285- real_source_get_file = source.get_file
2286-
2287- def get_file(path):
2288- calls.append(path)
2289- return real_source_get_file(path)
2290- source.get_file = get_file
2291- target = self.make_branch_and_tree('target')
2292- revision_tree = source.basis_tree()
2293- revision_tree.lock_read()
2294- self.addCleanup(revision_tree.unlock)
2295- build_tree(revision_tree, target, source)
2296- self.assertEqual([], calls)
2297- target.lock_read()
2298- self.addCleanup(target.unlock)
2299- self.assertEqual([], list(target.iter_changes(revision_tree)))
2300-
2301- def test_build_tree_hardlink(self):
2302- self.requireFeature(HardlinkFeature)
2303- source = self.create_ab_tree()
2304- target = self.make_branch_and_tree('target')
2305- revision_tree = source.basis_tree()
2306- revision_tree.lock_read()
2307- self.addCleanup(revision_tree.unlock)
2308- build_tree(revision_tree, target, source, hardlink=True)
2309- target.lock_read()
2310- self.addCleanup(target.unlock)
2311- self.assertEqual([], list(target.iter_changes(revision_tree)))
2312- source_stat = os.stat('source/file1')
2313- target_stat = os.stat('target/file1')
2314- self.assertEqual(source_stat, target_stat)
2315-
2316- # Explicitly disallowing hardlinks should prevent them.
2317- target2 = self.make_branch_and_tree('target2')
2318- build_tree(revision_tree, target2, source, hardlink=False)
2319- target2.lock_read()
2320- self.addCleanup(target2.unlock)
2321- self.assertEqual([], list(target2.iter_changes(revision_tree)))
2322- source_stat = os.stat('source/file1')
2323- target2_stat = os.stat('target2/file1')
2324- self.assertNotEqual(source_stat, target2_stat)
2325-
2326- def test_build_tree_accelerator_tree_moved(self):
2327- source = self.make_branch_and_tree('source')
2328- self.build_tree_contents([('source/file1', b'A')])
2329- source.add(['file1'], [b'file1-id'])
2330- source.commit('commit files')
2331- source.rename_one('file1', 'file2')
2332- source.lock_read()
2333- self.addCleanup(source.unlock)
2334- target = self.make_branch_and_tree('target')
2335- revision_tree = source.basis_tree()
2336- revision_tree.lock_read()
2337- self.addCleanup(revision_tree.unlock)
2338- build_tree(revision_tree, target, source)
2339- target.lock_read()
2340- self.addCleanup(target.unlock)
2341- self.assertEqual([], list(target.iter_changes(revision_tree)))
2342-
2343- def test_build_tree_hardlinks_preserve_execute(self):
2344- self.requireFeature(HardlinkFeature)
2345- source = self.create_ab_tree()
2346- tt = source.transform()
2347- trans_id = tt.trans_id_tree_path('file1')
2348- tt.set_executability(True, trans_id)
2349- tt.apply()
2350- self.assertTrue(source.is_executable('file1'))
2351- target = self.make_branch_and_tree('target')
2352- revision_tree = source.basis_tree()
2353- revision_tree.lock_read()
2354- self.addCleanup(revision_tree.unlock)
2355- build_tree(revision_tree, target, source, hardlink=True)
2356- target.lock_read()
2357- self.addCleanup(target.unlock)
2358- self.assertEqual([], list(target.iter_changes(revision_tree)))
2359- self.assertTrue(source.is_executable('file1'))
2360-
2361- def install_rot13_content_filter(self, pattern):
2362- # We could use
2363- # self.addCleanup(filters._reset_registry, filters._reset_registry())
2364- # below, but that looks a bit... hard to read even if it's exactly
2365- # the same thing.
2366- original_registry = filters._reset_registry()
2367-
2368- def restore_registry():
2369- filters._reset_registry(original_registry)
2370- self.addCleanup(restore_registry)
2371-
2372- def rot13(chunks, context=None):
2373- return [
2374- codecs.encode(chunk.decode('ascii'), 'rot13').encode('ascii')
2375- for chunk in chunks]
2376- rot13filter = filters.ContentFilter(rot13, rot13)
2377- filters.filter_stacks_registry.register(
2378- 'rot13', {'yes': [rot13filter]}.get)
2379- os.mkdir(self.test_home_dir + '/.bazaar')
2380- rules_filename = self.test_home_dir + '/.bazaar/rules'
2381- with open(rules_filename, 'wb') as f:
2382- f.write(b'[name %s]\nrot13=yes\n' % (pattern,))
2383-
2384- def uninstall_rules():
2385- os.remove(rules_filename)
2386- rules.reset_rules()
2387- self.addCleanup(uninstall_rules)
2388- rules.reset_rules()
2389-
2390- def test_build_tree_content_filtered_files_are_not_hardlinked(self):
2391- """build_tree will not hardlink files that have content filtering rules
2392- applied to them (but will still hardlink other files from the same tree
2393- if it can).
2394- """
2395- self.requireFeature(HardlinkFeature)
2396- self.install_rot13_content_filter(b'file1')
2397- source = self.create_ab_tree()
2398- target = self.make_branch_and_tree('target')
2399- revision_tree = source.basis_tree()
2400- revision_tree.lock_read()
2401- self.addCleanup(revision_tree.unlock)
2402- build_tree(revision_tree, target, source, hardlink=True)
2403- target.lock_read()
2404- self.addCleanup(target.unlock)
2405- self.assertEqual([], list(target.iter_changes(revision_tree)))
2406- source_stat = os.stat('source/file1')
2407- target_stat = os.stat('target/file1')
2408- self.assertNotEqual(source_stat, target_stat)
2409- source_stat = os.stat('source/file2')
2410- target_stat = os.stat('target/file2')
2411- self.assertEqualStat(source_stat, target_stat)
2412-
2413- def test_case_insensitive_build_tree_inventory(self):
2414- if (features.CaseInsensitiveFilesystemFeature.available()
2415- or features.CaseInsCasePresFilenameFeature.available()):
2416- raise tests.UnavailableFeature('Fully case sensitive filesystem')
2417- source = self.make_branch_and_tree('source')
2418- self.build_tree(['source/file', 'source/FILE'])
2419- source.add(['file', 'FILE'], [b'lower-id', b'upper-id'])
2420- source.commit('added files')
2421- # Don't try this at home, kids!
2422- # Force the tree to report that it is case insensitive
2423- target = self.make_branch_and_tree('target')
2424- target.case_sensitive = False
2425- build_tree(source.basis_tree(), target, source, delta_from_tree=True)
2426- self.assertEqual('file.moved', target.id2path(b'lower-id'))
2427- self.assertEqual('FILE', target.id2path(b'upper-id'))
2428-
2429- def test_build_tree_observes_sha(self):
2430- source = self.make_branch_and_tree('source')
2431- self.build_tree(['source/file1', 'source/dir/', 'source/dir/file2'])
2432- source.add(['file1', 'dir', 'dir/file2'],
2433- [b'file1-id', b'dir-id', b'file2-id'])
2434- source.commit('new files')
2435- target = self.make_branch_and_tree('target')
2436- target.lock_write()
2437- self.addCleanup(target.unlock)
2438- # We make use of the fact that DirState caches its cutoff time. So we
2439- # set the 'safe' time to one minute in the future.
2440- state = target.current_dirstate()
2441- state._cutoff_time = time.time() + 60
2442- build_tree(source.basis_tree(), target)
2443- entry1_sha = osutils.sha_file_by_name('source/file1')
2444- entry2_sha = osutils.sha_file_by_name('source/dir/file2')
2445- # entry[1] is the state information, entry[1][0] is the state of the
2446- # working tree, entry[1][0][1] is the sha value for the current working
2447- # tree
2448- entry1 = state._get_entry(0, path_utf8=b'file1')
2449- self.assertEqual(entry1_sha, entry1[1][0][1])
2450- # The 'size' field must also be set.
2451- self.assertEqual(25, entry1[1][0][2])
2452- entry1_state = entry1[1][0]
2453- entry2 = state._get_entry(0, path_utf8=b'dir/file2')
2454- self.assertEqual(entry2_sha, entry2[1][0][1])
2455- self.assertEqual(29, entry2[1][0][2])
2456- entry2_state = entry2[1][0]
2457- # Now, make sure that we don't have to re-read the content. The
2458- # packed_stat should match exactly.
2459- self.assertEqual(entry1_sha, target.get_file_sha1('file1'))
2460- self.assertEqual(entry2_sha, target.get_file_sha1('dir/file2'))
2461- self.assertEqual(entry1_state, entry1[1][0])
2462- self.assertEqual(entry2_state, entry2[1][0])
2463-
2464-
2465 class TestCommitTransform(tests.TestCaseWithTransport):
2466
2467 def get_branch(self):
2468
2469=== modified file 'breezy/transform.py'
2470--- breezy/transform.py 2020-08-22 22:46:24 +0000
2471+++ breezy/transform.py 2020-11-19 18:29:10 +0000
2472@@ -639,200 +639,6 @@
2473 return [(self.get_path(t), t) for t in trans_ids]
2474
2475
2476-def build_tree(tree, wt, accelerator_tree=None, hardlink=False,
2477- delta_from_tree=False):
2478- """Create working tree for a branch, using a TreeTransform.
2479-
2480- This function should be used on empty trees, having a tree root at most.
2481- (see merge and revert functionality for working with existing trees)
2482-
2483- Existing files are handled like so:
2484-
2485- - Existing bzrdirs take precedence over creating new items. They are
2486- created as '%s.diverted' % name.
2487- - Otherwise, if the content on disk matches the content we are building,
2488- it is silently replaced.
2489- - Otherwise, conflict resolution will move the old file to 'oldname.moved'.
2490-
2491- :param tree: The tree to convert wt into a copy of
2492- :param wt: The working tree that files will be placed into
2493- :param accelerator_tree: A tree which can be used for retrieving file
2494- contents more quickly than tree itself, i.e. a workingtree. tree
2495- will be used for cases where accelerator_tree's content is different.
2496- :param hardlink: If true, hard-link files to accelerator_tree, where
2497- possible. accelerator_tree must implement abspath, i.e. be a
2498- working tree.
2499- :param delta_from_tree: If true, build_tree may use the input Tree to
2500- generate the inventory delta.
2501- """
2502- with contextlib.ExitStack() as exit_stack:
2503- exit_stack.enter_context(wt.lock_tree_write())
2504- exit_stack.enter_context(tree.lock_read())
2505- if accelerator_tree is not None:
2506- exit_stack.enter_context(accelerator_tree.lock_read())
2507- return _build_tree(tree, wt, accelerator_tree, hardlink,
2508- delta_from_tree)
2509-
2510-
2511-def _build_tree(tree, wt, accelerator_tree, hardlink, delta_from_tree):
2512- """See build_tree."""
2513- for num, _unused in enumerate(wt.all_versioned_paths()):
2514- if num > 0: # more than just a root
2515- raise errors.WorkingTreeAlreadyPopulated(base=wt.basedir)
2516- file_trans_id = {}
2517- top_pb = ui.ui_factory.nested_progress_bar()
2518- pp = ProgressPhase("Build phase", 2, top_pb)
2519- if tree.path2id('') is not None:
2520- # This is kind of a hack: we should be altering the root
2521- # as part of the regular tree shape diff logic.
2522- # The conditional test here is to avoid doing an
2523- # expensive operation (flush) every time the root id
2524- # is set within the tree, nor setting the root and thus
2525- # marking the tree as dirty, because we use two different
2526- # idioms here: tree interfaces and inventory interfaces.
2527- if wt.path2id('') != tree.path2id(''):
2528- wt.set_root_id(tree.path2id(''))
2529- wt.flush()
2530- tt = wt.transform()
2531- divert = set()
2532- try:
2533- pp.next_phase()
2534- file_trans_id[find_previous_path(wt, tree, '')] = tt.trans_id_tree_path('')
2535- with ui.ui_factory.nested_progress_bar() as pb:
2536- deferred_contents = []
2537- num = 0
2538- total = len(tree.all_versioned_paths())
2539- if delta_from_tree:
2540- precomputed_delta = []
2541- else:
2542- precomputed_delta = None
2543- # Check if tree inventory has content. If so, we populate
2544- # existing_files with the directory content. If there are no
2545- # entries we skip populating existing_files as its not used.
2546- # This improves performance and unncessary work on large
2547- # directory trees. (#501307)
2548- if total > 0:
2549- existing_files = set()
2550- for dir, files in wt.walkdirs():
2551- existing_files.update(f[0] for f in files)
2552- for num, (tree_path, entry) in \
2553- enumerate(tree.iter_entries_by_dir()):
2554- pb.update(gettext("Building tree"), num
2555- - len(deferred_contents), total)
2556- if entry.parent_id is None:
2557- continue
2558- reparent = False
2559- file_id = entry.file_id
2560- if delta_from_tree:
2561- precomputed_delta.append((None, tree_path, file_id, entry))
2562- if tree_path in existing_files:
2563- target_path = wt.abspath(tree_path)
2564- kind = file_kind(target_path)
2565- if kind == "directory":
2566- try:
2567- controldir.ControlDir.open(target_path)
2568- except errors.NotBranchError:
2569- pass
2570- else:
2571- divert.add(tree_path)
2572- if (tree_path not in divert
2573- and _content_match(
2574- tree, entry, tree_path, kind, target_path)):
2575- tt.delete_contents(tt.trans_id_tree_path(tree_path))
2576- if kind == 'directory':
2577- reparent = True
2578- parent_id = file_trans_id[osutils.dirname(tree_path)]
2579- if entry.kind == 'file':
2580- # We *almost* replicate new_by_entry, so that we can defer
2581- # getting the file text, and get them all at once.
2582- trans_id = tt.create_path(entry.name, parent_id)
2583- file_trans_id[tree_path] = trans_id
2584- tt.version_file(trans_id, file_id=file_id)
2585- executable = tree.is_executable(tree_path)
2586- if executable:
2587- tt.set_executability(executable, trans_id)
2588- trans_data = (trans_id, tree_path, entry.text_sha1)
2589- deferred_contents.append((tree_path, trans_data))
2590- else:
2591- file_trans_id[tree_path] = new_by_entry(
2592- tree_path, tt, entry, parent_id, tree)
2593- if reparent:
2594- new_trans_id = file_trans_id[tree_path]
2595- old_parent = tt.trans_id_tree_path(tree_path)
2596- _reparent_children(tt, old_parent, new_trans_id)
2597- offset = num + 1 - len(deferred_contents)
2598- _create_files(tt, tree, deferred_contents, pb, offset,
2599- accelerator_tree, hardlink)
2600- pp.next_phase()
2601- divert_trans = set(file_trans_id[f] for f in divert)
2602-
2603- def resolver(t, c):
2604- return resolve_checkout(t, c, divert_trans)
2605- raw_conflicts = resolve_conflicts(tt, pass_func=resolver)
2606- if len(raw_conflicts) > 0:
2607- precomputed_delta = None
2608- conflicts = tt.cook_conflicts(raw_conflicts)
2609- for conflict in conflicts:
2610- trace.warning(str(conflict))
2611- try:
2612- wt.add_conflicts(conflicts)
2613- except errors.UnsupportedOperation:
2614- pass
2615- result = tt.apply(no_conflicts=True,
2616- precomputed_delta=precomputed_delta)
2617- finally:
2618- tt.finalize()
2619- top_pb.finished()
2620- return result
2621-
2622-
2623-def _create_files(tt, tree, desired_files, pb, offset, accelerator_tree,
2624- hardlink):
2625- total = len(desired_files) + offset
2626- wt = tt._tree
2627- if accelerator_tree is None:
2628- new_desired_files = desired_files
2629- else:
2630- iter = accelerator_tree.iter_changes(tree, include_unchanged=True)
2631- unchanged = [
2632- change.path for change in iter
2633- if not (change.changed_content or change.executable[0] != change.executable[1])]
2634- if accelerator_tree.supports_content_filtering():
2635- unchanged = [(tp, ap) for (tp, ap) in unchanged
2636- if not next(accelerator_tree.iter_search_rules([ap]))]
2637- unchanged = dict(unchanged)
2638- new_desired_files = []
2639- count = 0
2640- for unused_tree_path, (trans_id, tree_path, text_sha1) in desired_files:
2641- accelerator_path = unchanged.get(tree_path)
2642- if accelerator_path is None:
2643- new_desired_files.append((tree_path,
2644- (trans_id, tree_path, text_sha1)))
2645- continue
2646- pb.update(gettext('Adding file contents'), count + offset, total)
2647- if hardlink:
2648- tt.create_hardlink(accelerator_tree.abspath(accelerator_path),
2649- trans_id)
2650- else:
2651- with accelerator_tree.get_file(accelerator_path) as f:
2652- chunks = osutils.file_iterator(f)
2653- if wt.supports_content_filtering():
2654- filters = wt._content_filter_stack(tree_path)
2655- chunks = filtered_output_bytes(chunks, filters,
2656- ContentFilterContext(tree_path, tree))
2657- tt.create_file(chunks, trans_id, sha1=text_sha1)
2658- count += 1
2659- offset += count
2660- for count, ((trans_id, tree_path, text_sha1), contents) in enumerate(
2661- tree.iter_files_bytes(new_desired_files)):
2662- if wt.supports_content_filtering():
2663- filters = wt._content_filter_stack(tree_path)
2664- contents = filtered_output_bytes(contents, filters,
2665- ContentFilterContext(tree_path, tree))
2666- tt.create_file(contents, trans_id, sha1=text_sha1)
2667- pb.update(gettext('Adding file contents'), count + offset, total)
2668-
2669-
2670 def _reparent_children(tt, old_parent, new_parent):
2671 for child in tt.iter_tree_children(old_parent):
2672 tt.adjust_path(tt.final_name(child), new_parent, child)
2673@@ -845,52 +651,6 @@
2674 return by_parent[old_parent]
2675
2676
2677-def _content_match(tree, entry, tree_path, kind, target_path):
2678- if entry.kind != kind:
2679- return False
2680- if entry.kind == "directory":
2681- return True
2682- if entry.kind == "file":
2683- with open(target_path, 'rb') as f1, \
2684- tree.get_file(tree_path) as f2:
2685- if osutils.compare_files(f1, f2):
2686- return True
2687- elif entry.kind == "symlink":
2688- if tree.get_symlink_target(tree_path) == os.readlink(target_path):
2689- return True
2690- return False
2691-
2692-
2693-def resolve_checkout(tt, conflicts, divert):
2694- new_conflicts = set()
2695- for c_type, conflict in ((c[0], c) for c in conflicts):
2696- # Anything but a 'duplicate' would indicate programmer error
2697- if c_type != 'duplicate':
2698- raise AssertionError(c_type)
2699- # Now figure out which is new and which is old
2700- if tt.new_contents(conflict[1]):
2701- new_file = conflict[1]
2702- old_file = conflict[2]
2703- else:
2704- new_file = conflict[2]
2705- old_file = conflict[1]
2706-
2707- # We should only get here if the conflict wasn't completely
2708- # resolved
2709- final_parent = tt.final_parent(old_file)
2710- if new_file in divert:
2711- new_name = tt.final_name(old_file) + '.diverted'
2712- tt.adjust_path(new_name, final_parent, new_file)
2713- new_conflicts.add((c_type, 'Diverted to',
2714- new_file, old_file))
2715- else:
2716- new_name = tt.final_name(old_file) + '.moved'
2717- tt.adjust_path(new_name, final_parent, old_file)
2718- new_conflicts.add((c_type, 'Moved existing file to',
2719- old_file, new_file))
2720- return new_conflicts
2721-
2722-
2723 def new_by_entry(path, tt, entry, parent_id, tree):
2724 """Create a new file according to its inventory entry"""
2725 name = entry.name
2726@@ -953,38 +713,13 @@
2727 tt.set_executability(entry.executable, trans_id)
2728
2729
2730-def revert(working_tree, target_tree, filenames, backups=False,
2731- pb=None, change_reporter=None):
2732- """Revert a working tree's contents to those of a target tree."""
2733- pb = ui.ui_factory.nested_progress_bar()
2734- try:
2735- with target_tree.lock_read(), working_tree.transform(pb) as tt:
2736- pp = ProgressPhase("Revert phase", 3, pb)
2737- conflicts, merge_modified = _prepare_revert_transform(
2738- working_tree, target_tree, tt, filenames, backups, pp)
2739- if change_reporter:
2740- from . import delta
2741- change_reporter = delta._ChangeReporter(
2742- unversioned_filter=working_tree.is_ignored)
2743- delta.report_changes(tt.iter_changes(), change_reporter)
2744- for conflict in conflicts:
2745- trace.warning(str(conflict))
2746- pp.next_phase()
2747- tt.apply()
2748- if working_tree.supports_merge_modified():
2749- working_tree.set_merge_modified(merge_modified)
2750- finally:
2751- pb.clear()
2752- return conflicts
2753-
2754-
2755-def _prepare_revert_transform(working_tree, target_tree, tt, filenames,
2756+def _prepare_revert_transform(es, working_tree, target_tree, tt, filenames,
2757 backups, pp, basis_tree=None,
2758 merge_modified=None):
2759 with ui.ui_factory.nested_progress_bar() as child_pb:
2760 if merge_modified is None:
2761 merge_modified = working_tree.merge_modified()
2762- merge_modified = _alter_files(working_tree, target_tree, tt,
2763+ merge_modified = _alter_files(es, working_tree, target_tree, tt,
2764 child_pb, filenames, backups,
2765 merge_modified, basis_tree)
2766 with ui.ui_factory.nested_progress_bar() as child_pb:
2767@@ -994,10 +729,34 @@
2768 return conflicts, merge_modified
2769
2770
2771-def _alter_files(working_tree, target_tree, tt, pb, specific_files,
2772+def revert(working_tree, target_tree, filenames, backups=False,
2773+ pb=None, change_reporter=None, merge_modified=None, basis_tree=None):
2774+ """Revert a working tree's contents to those of a target tree."""
2775+ with contextlib.ExitStack() as es:
2776+ pb = es.enter_context(ui.ui_factory.nested_progress_bar())
2777+ es.enter_context(target_tree.lock_read())
2778+ tt = es.enter_context(working_tree.transform(pb))
2779+ pp = ProgressPhase("Revert phase", 3, pb)
2780+ conflicts, merge_modified = _prepare_revert_transform(
2781+ es, working_tree, target_tree, tt, filenames, backups, pp)
2782+ if change_reporter:
2783+ from . import delta
2784+ change_reporter = delta._ChangeReporter(
2785+ unversioned_filter=working_tree.is_ignored)
2786+ delta.report_changes(tt.iter_changes(), change_reporter)
2787+ for conflict in conflicts:
2788+ trace.warning(str(conflict))
2789+ pp.next_phase()
2790+ tt.apply()
2791+ if working_tree.supports_merge_modified():
2792+ working_tree.set_merge_modified(merge_modified)
2793+ return conflicts
2794+
2795+
2796+def _alter_files(es, working_tree, target_tree, tt, pb, specific_files,
2797 backups, merge_modified, basis_tree=None):
2798 if basis_tree is not None:
2799- basis_tree.lock_read()
2800+ es.enter_context(basis_tree.lock_read())
2801 # We ask the working_tree for its changes relative to the target, rather
2802 # than the target changes relative to the working tree. Because WT4 has an
2803 # optimizer to compare itself to a target, but no optimizer for the
2804@@ -1008,123 +767,122 @@
2805 skip_root = True
2806 else:
2807 skip_root = False
2808- try:
2809- deferred_files = []
2810- for id_num, change in enumerate(change_list):
2811- target_path, wt_path = change.path
2812- target_versioned, wt_versioned = change.versioned
2813- target_parent = change.parent_id[0]
2814- target_name, wt_name = change.name
2815- target_kind, wt_kind = change.kind
2816- target_executable, wt_executable = change.executable
2817- if skip_root and wt_path == '':
2818- continue
2819- trans_id = tt.trans_id_file_id(change.file_id)
2820- mode_id = None
2821- if change.changed_content:
2822- keep_content = False
2823- if wt_kind == 'file' and (backups or target_kind is None):
2824- wt_sha1 = working_tree.get_file_sha1(wt_path)
2825- if merge_modified.get(wt_path) != wt_sha1:
2826- # acquire the basis tree lazily to prevent the
2827- # expense of accessing it when it's not needed ?
2828- # (Guessing, RBC, 200702)
2829- if basis_tree is None:
2830- basis_tree = working_tree.basis_tree()
2831- basis_tree.lock_read()
2832- basis_inter = InterTree.get(basis_tree, working_tree)
2833- basis_path = basis_inter.find_source_path(wt_path)
2834- if basis_path is None:
2835- if target_kind is None and not target_versioned:
2836- keep_content = True
2837- else:
2838- if wt_sha1 != basis_tree.get_file_sha1(basis_path):
2839- keep_content = True
2840- if wt_kind is not None:
2841- if not keep_content:
2842- tt.delete_contents(trans_id)
2843- elif target_kind is not None:
2844- parent_trans_id = tt.trans_id_tree_path(osutils.dirname(wt_path))
2845- backup_name = tt._available_backup_name(
2846- wt_name, parent_trans_id)
2847- tt.adjust_path(backup_name, parent_trans_id, trans_id)
2848- new_trans_id = tt.create_path(wt_name, parent_trans_id)
2849- if wt_versioned and target_versioned:
2850- tt.unversion_file(trans_id)
2851- tt.version_file(
2852- new_trans_id, file_id=getattr(change, 'file_id', None))
2853- # New contents should have the same unix perms as old
2854- # contents
2855- mode_id = trans_id
2856- trans_id = new_trans_id
2857- if target_kind in ('directory', 'tree-reference'):
2858- tt.create_directory(trans_id)
2859- if target_kind == 'tree-reference':
2860- revision = target_tree.get_reference_revision(
2861- target_path)
2862- tt.set_tree_reference(revision, trans_id)
2863- elif target_kind == 'symlink':
2864- tt.create_symlink(target_tree.get_symlink_target(
2865- target_path), trans_id)
2866- elif target_kind == 'file':
2867- deferred_files.append(
2868- (target_path, (trans_id, mode_id, target_path)))
2869+ deferred_files = []
2870+ for id_num, change in enumerate(change_list):
2871+ target_path, wt_path = change.path
2872+ target_versioned, wt_versioned = change.versioned
2873+ target_parent = change.parent_id[0]
2874+ target_name, wt_name = change.name
2875+ target_kind, wt_kind = change.kind
2876+ target_executable, wt_executable = change.executable
2877+ if skip_root and wt_path == '':
2878+ continue
2879+ mode_id = None
2880+ if wt_path is not None:
2881+ trans_id = tt.trans_id_tree_path(wt_path)
2882+ else:
2883+ trans_id = tt.assign_id()
2884+ if change.changed_content:
2885+ keep_content = False
2886+ if wt_kind == 'file' and (backups or target_kind is None):
2887+ wt_sha1 = working_tree.get_file_sha1(wt_path)
2888+ if merge_modified.get(wt_path) != wt_sha1:
2889+ # acquire the basis tree lazily to prevent the
2890+ # expense of accessing it when it's not needed ?
2891+ # (Guessing, RBC, 200702)
2892 if basis_tree is None:
2893 basis_tree = working_tree.basis_tree()
2894- basis_tree.lock_read()
2895- new_sha1 = target_tree.get_file_sha1(target_path)
2896- basis_inter = InterTree.get(basis_tree, target_tree)
2897- basis_path = basis_inter.find_source_path(target_path)
2898- if (basis_path is not None and
2899- new_sha1 == basis_tree.get_file_sha1(basis_path)):
2900- # If the new contents of the file match what is in basis,
2901- # then there is no need to store in merge_modified.
2902- if basis_path in merge_modified:
2903- del merge_modified[basis_path]
2904+ es.enter_context(basis_tree.lock_read())
2905+ basis_inter = InterTree.get(basis_tree, working_tree)
2906+ basis_path = basis_inter.find_source_path(wt_path)
2907+ if basis_path is None:
2908+ if target_kind is None and not target_versioned:
2909+ keep_content = True
2910 else:
2911- merge_modified[target_path] = new_sha1
2912-
2913- # preserve the execute bit when backing up
2914- if keep_content and wt_executable == target_executable:
2915- tt.set_executability(target_executable, trans_id)
2916+ if wt_sha1 != basis_tree.get_file_sha1(basis_path):
2917+ keep_content = True
2918+ if wt_kind is not None:
2919+ if not keep_content:
2920+ tt.delete_contents(trans_id)
2921 elif target_kind is not None:
2922- raise AssertionError(target_kind)
2923- if not wt_versioned and target_versioned:
2924- tt.version_file(
2925- trans_id, file_id=getattr(change, 'file_id', None))
2926- if wt_versioned and not target_versioned:
2927- tt.unversion_file(trans_id)
2928- if (target_name is not None
2929- and (wt_name != target_name or change.is_reparented())):
2930- if target_path == '':
2931- parent_trans = ROOT_PARENT
2932- else:
2933- parent_trans = tt.trans_id_file_id(target_parent)
2934- if wt_path == '' and wt_versioned:
2935- tt.adjust_root_path(target_name, parent_trans)
2936- else:
2937- tt.adjust_path(target_name, parent_trans, trans_id)
2938- if wt_executable != target_executable and target_kind == "file":
2939- tt.set_executability(target_executable, trans_id)
2940- if working_tree.supports_content_filtering():
2941- for (trans_id, mode_id, target_path), bytes in (
2942- target_tree.iter_files_bytes(deferred_files)):
2943- # We're reverting a tree to the target tree so using the
2944- # target tree to find the file path seems the best choice
2945- # here IMO - Ian C 27/Oct/2009
2946- filters = working_tree._content_filter_stack(target_path)
2947- bytes = filtered_output_bytes(
2948- bytes, filters,
2949- ContentFilterContext(target_path, working_tree))
2950- tt.create_file(bytes, trans_id, mode_id)
2951- else:
2952- for (trans_id, mode_id, target_path), bytes in target_tree.iter_files_bytes(
2953- deferred_files):
2954- tt.create_file(bytes, trans_id, mode_id)
2955- tt.fixup_new_roots()
2956- finally:
2957- if basis_tree is not None:
2958- basis_tree.unlock()
2959+ parent_trans_id = tt.trans_id_tree_path(osutils.dirname(wt_path))
2960+ backup_name = tt._available_backup_name(
2961+ wt_name, parent_trans_id)
2962+ tt.adjust_path(backup_name, parent_trans_id, trans_id)
2963+ new_trans_id = tt.create_path(wt_name, parent_trans_id)
2964+ if wt_versioned and target_versioned:
2965+ tt.unversion_file(trans_id)
2966+ tt.version_file(
2967+ new_trans_id, file_id=getattr(change, 'file_id', None))
2968+ # New contents should have the same unix perms as old
2969+ # contents
2970+ mode_id = trans_id
2971+ trans_id = new_trans_id
2972+ if target_kind in ('directory', 'tree-reference'):
2973+ tt.create_directory(trans_id)
2974+ if target_kind == 'tree-reference':
2975+ revision = target_tree.get_reference_revision(
2976+ target_path)
2977+ tt.set_tree_reference(revision, trans_id)
2978+ elif target_kind == 'symlink':
2979+ tt.create_symlink(target_tree.get_symlink_target(
2980+ target_path), trans_id)
2981+ elif target_kind == 'file':
2982+ deferred_files.append(
2983+ (target_path, (trans_id, mode_id, target_path)))
2984+ if basis_tree is None:
2985+ basis_tree = working_tree.basis_tree()
2986+ es.enter_context(basis_tree.lock_read())
2987+ new_sha1 = target_tree.get_file_sha1(target_path)
2988+ basis_inter = InterTree.get(basis_tree, target_tree)
2989+ basis_path = basis_inter.find_source_path(target_path)
2990+ if (basis_path is not None and
2991+ new_sha1 == basis_tree.get_file_sha1(basis_path)):
2992+ # If the new contents of the file match what is in basis,
2993+ # then there is no need to store in merge_modified.
2994+ if basis_path in merge_modified:
2995+ del merge_modified[basis_path]
2996+ else:
2997+ merge_modified[target_path] = new_sha1
2998+
2999+ # preserve the execute bit when backing up
3000+ if keep_content and wt_executable == target_executable:
3001+ tt.set_executability(target_executable, trans_id)
3002+ elif target_kind is not None:
3003+ raise AssertionError(target_kind)
3004+ if not wt_versioned and target_versioned:
3005+ tt.version_file(
3006+ trans_id, file_id=getattr(change, 'file_id', None))
3007+ if wt_versioned and not target_versioned:
3008+ tt.unversion_file(trans_id)
3009+ if (target_name is not None
3010+ and (wt_name != target_name or change.is_reparented())):
3011+ if target_path == '':
3012+ parent_trans = ROOT_PARENT
3013+ else:
3014+ parent_trans = tt.trans_id_file_id(target_parent)
3015+ if wt_path == '' and wt_versioned:
3016+ tt.adjust_root_path(target_name, parent_trans)
3017+ else:
3018+ tt.adjust_path(target_name, parent_trans, trans_id)
3019+ if wt_executable != target_executable and target_kind == "file":
3020+ tt.set_executability(target_executable, trans_id)
3021+ if working_tree.supports_content_filtering():
3022+ for (trans_id, mode_id, target_path), bytes in (
3023+ target_tree.iter_files_bytes(deferred_files)):
3024+ # We're reverting a tree to the target tree so using the
3025+ # target tree to find the file path seems the best choice
3026+ # here IMO - Ian C 27/Oct/2009
3027+ filters = working_tree._content_filter_stack(target_path)
3028+ bytes = filtered_output_bytes(
3029+ bytes, filters,
3030+ ContentFilterContext(target_path, working_tree))
3031+ tt.create_file(bytes, trans_id, mode_id)
3032+ else:
3033+ for (trans_id, mode_id, target_path), bytes in target_tree.iter_files_bytes(
3034+ deferred_files):
3035+ tt.create_file(bytes, trans_id, mode_id)
3036+ tt.fixup_new_roots()
3037 return merge_modified
3038
3039
3040@@ -1155,9 +913,17 @@
3041 existing_file, new_file = trans_id, last_trans_id
3042 else:
3043 existing_file, new_file = last_trans_id, trans_id
3044- new_name = tt.final_name(existing_file) + '.moved'
3045- tt.adjust_path(new_name, final_parent, existing_file)
3046- yield (c_type, 'Moved existing file to', existing_file, new_file)
3047+ if (not tt._tree.has_versioned_directories() and
3048+ tt.final_kind(trans_id) == 'directory' and
3049+ tt.final_kind(last_trans_id) == 'directory'):
3050+ _reparent_transform_children(tt, existing_file, new_file)
3051+ tt.delete_contents(existing_file)
3052+ tt.unversion_file(existing_file)
3053+ tt.cancel_creation(existing_file)
3054+ else:
3055+ new_name = tt.final_name(existing_file) + '.moved'
3056+ tt.adjust_path(new_name, final_parent, existing_file)
3057+ yield (c_type, 'Moved existing file to', existing_file, new_file)
3058
3059
3060 def resolve_parent_loop(tt, path_tree, c_type, cur):
3061
3062=== modified file 'breezy/workingtree.py'
3063--- breezy/workingtree.py 2020-08-22 22:46:24 +0000
3064+++ breezy/workingtree.py 2020-11-19 18:29:10 +0000
3065@@ -1075,10 +1075,8 @@
3066 """
3067 raise NotImplementedError(self.unlock)
3068
3069- _marker = object()
3070-
3071 def update(self, change_reporter=None, possible_transports=None,
3072- revision=None, old_tip=_marker, show_base=False):
3073+ revision=None, old_tip=None, show_base=False):
3074 """Update a working tree along its branch.
3075
3076 This will update the branch if its bound too, which means we have
3077@@ -1110,105 +1108,7 @@
3078 returned (old tip of the branch or None). _marker is used
3079 otherwise.
3080 """
3081- if self.branch.get_bound_location() is not None:
3082- self.lock_write()
3083- update_branch = (old_tip is self._marker)
3084- else:
3085- self.lock_tree_write()
3086- update_branch = False
3087- try:
3088- if update_branch:
3089- old_tip = self.branch.update(possible_transports)
3090- else:
3091- if old_tip is self._marker:
3092- old_tip = None
3093- return self._update_tree(old_tip, change_reporter, revision, show_base)
3094- finally:
3095- self.unlock()
3096-
3097- def _update_tree(self, old_tip=None, change_reporter=None, revision=None,
3098- show_base=False):
3099- """Update a tree to the master branch.
3100-
3101- :param old_tip: if supplied, the previous tip revision the branch,
3102- before it was changed to the master branch's tip.
3103- """
3104- # here if old_tip is not None, it is the old tip of the branch before
3105- # it was updated from the master branch. This should become a pending
3106- # merge in the working tree to preserve the user existing work. we
3107- # cant set that until we update the working trees last revision to be
3108- # one from the new branch, because it will just get absorbed by the
3109- # parent de-duplication logic.
3110- #
3111- # We MUST save it even if an error occurs, because otherwise the users
3112- # local work is unreferenced and will appear to have been lost.
3113- #
3114- with self.lock_tree_write():
3115- nb_conflicts = 0
3116- try:
3117- last_rev = self.get_parent_ids()[0]
3118- except IndexError:
3119- last_rev = _mod_revision.NULL_REVISION
3120- if revision is None:
3121- revision = self.branch.last_revision()
3122-
3123- old_tip = old_tip or _mod_revision.NULL_REVISION
3124-
3125- if not _mod_revision.is_null(old_tip) and old_tip != last_rev:
3126- # the branch we are bound to was updated
3127- # merge those changes in first
3128- base_tree = self.basis_tree()
3129- other_tree = self.branch.repository.revision_tree(old_tip)
3130- nb_conflicts = merge.merge_inner(self.branch, other_tree,
3131- base_tree, this_tree=self,
3132- change_reporter=change_reporter,
3133- show_base=show_base)
3134- if nb_conflicts:
3135- self.add_parent_tree((old_tip, other_tree))
3136- note(gettext('Rerun update after fixing the conflicts.'))
3137- return nb_conflicts
3138-
3139- if last_rev != _mod_revision.ensure_null(revision):
3140- # the working tree is up to date with the branch
3141- # we can merge the specified revision from master
3142- to_tree = self.branch.repository.revision_tree(revision)
3143- to_root_id = to_tree.path2id('')
3144-
3145- basis = self.basis_tree()
3146- with basis.lock_read():
3147- if (basis.path2id('') is None or basis.path2id('') != to_root_id):
3148- self.set_root_id(to_root_id)
3149- self.flush()
3150-
3151- # determine the branch point
3152- graph = self.branch.repository.get_graph()
3153- base_rev_id = graph.find_unique_lca(self.branch.last_revision(),
3154- last_rev)
3155- base_tree = self.branch.repository.revision_tree(base_rev_id)
3156-
3157- nb_conflicts = merge.merge_inner(self.branch, to_tree, base_tree,
3158- this_tree=self,
3159- change_reporter=change_reporter,
3160- show_base=show_base)
3161- self.set_last_revision(revision)
3162- # TODO - dedup parents list with things merged by pull ?
3163- # reuse the tree we've updated to to set the basis:
3164- parent_trees = [(revision, to_tree)]
3165- merges = self.get_parent_ids()[1:]
3166- # Ideally we ask the tree for the trees here, that way the working
3167- # tree can decide whether to give us the entire tree or give us a
3168- # lazy initialised tree. dirstate for instance will have the trees
3169- # in ram already, whereas a last-revision + basis-inventory tree
3170- # will not, but also does not need them when setting parents.
3171- for parent in merges:
3172- parent_trees.append(
3173- (parent, self.branch.repository.revision_tree(parent)))
3174- if not _mod_revision.is_null(old_tip):
3175- parent_trees.append(
3176- (old_tip, self.branch.repository.revision_tree(old_tip)))
3177- self.set_parent_trees(parent_trees)
3178- last_rev = parent_trees[0][0]
3179- return nb_conflicts
3180+ raise NotImplementedError(self.update)
3181
3182 def set_conflicts(self, arg):
3183 raise errors.UnsupportedOperation(self.set_conflicts, self)

Subscribers

People subscribed via source and target branches