Merge lp:~jelmer/brz/misc-foreign3 into lp:~jelmer/brz/foreign
- misc-foreign3
- Merge into foreign
Proposed by
Jelmer Vernooij
Status: | Merged |
---|---|
Merged at revision: | 6849 |
Proposed branch: | lp:~jelmer/brz/misc-foreign3 |
Merge into: | lp:~jelmer/brz/foreign |
Diff against target: |
1419 lines (+418/-372) (has conflicts) 19 files modified
breezy/bzr/bzrdir.py (+25/-112) breezy/bzr/remote.py (+2/-2) breezy/bzr/workingtree.py (+124/-0) breezy/controldir.py (+96/-0) breezy/log.py (+12/-12) breezy/tests/per_branch/test_push.py (+13/-16) breezy/tests/per_branch/test_tags.py (+13/-11) breezy/tests/per_controldir/test_controldir.py (+2/-0) breezy/tests/per_intertree/test_compare.py (+14/-13) breezy/tests/per_repository/test_fetch.py (+10/-11) breezy/tests/per_repository/test_repository.py (+26/-5) breezy/tests/per_tree/test_annotate_iter.py (+2/-0) breezy/tests/per_workingtree/test_add.py (+2/-0) breezy/tests/per_workingtree/test_annotate_iter.py (+57/-57) breezy/tests/per_workingtree/test_get_file_mtime.py (+1/-1) breezy/tests/per_workingtree/test_set_root_id.py (+4/-0) breezy/tests/per_workingtree/test_walkdirs.py (+4/-2) breezy/tests/per_workingtree/test_workingtree.py (+6/-5) breezy/workingtree.py (+5/-125) Text conflict in breezy/bzr/bzrdir.py Text conflict in breezy/tests/per_repository/test_repository.py |
To merge this branch: | bzr merge lp:~jelmer/brz/misc-foreign3 |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Martin Packman (community) | Approve | ||
Jelmer Vernooij | Pending | ||
Review via email: mp+334443@code.launchpad.net |
Commit message
Description of the change
Avoid specifying revision_id/file_id in a few more cases.
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'breezy/bzr/bzrdir.py' | |||
2 | --- breezy/bzr/bzrdir.py 2017-11-20 22:56:39 +0000 | |||
3 | +++ breezy/bzr/bzrdir.py 2017-11-29 12:08:31 +0000 | |||
4 | @@ -305,9 +305,9 @@ | |||
5 | 305 | return policy | 305 | return policy |
6 | 306 | else: | 306 | else: |
7 | 307 | try: | 307 | try: |
11 | 308 | return UseExistingRepository(self.open_repository(), | 308 | return UseExistingRepository( |
12 | 309 | stack_on, stack_on_pwd, | 309 | self.open_repository(), stack_on, stack_on_pwd, |
13 | 310 | require_stacking=require_stacking) | 310 | require_stacking=require_stacking) |
14 | 311 | except errors.NoRepositoryPresent: | 311 | except errors.NoRepositoryPresent: |
15 | 312 | pass | 312 | pass |
16 | 313 | return CreateRepository(self, stack_on, stack_on_pwd, | 313 | return CreateRepository(self, stack_on, stack_on_pwd, |
17 | @@ -1737,14 +1737,22 @@ | |||
18 | 1737 | except errors.NoRepositoryPresent: | 1737 | except errors.NoRepositoryPresent: |
19 | 1738 | pass | 1738 | pass |
20 | 1739 | else: | 1739 | else: |
22 | 1740 | if not isinstance(repo._format, self.target_format.repository_format.__class__): | 1740 | repo_fmt = self.target_format.repository_format |
23 | 1741 | if not isinstance(repo._format, repo_fmt.__class__): | ||
24 | 1741 | from ..repository import CopyConverter | 1742 | from ..repository import CopyConverter |
25 | 1742 | ui.ui_factory.note(gettext('starting repository conversion')) | 1743 | ui.ui_factory.note(gettext('starting repository conversion')) |
26 | 1744 | <<<<<<< TREE | ||
27 | 1743 | if not (self.target_format. | 1745 | if not (self.target_format. |
28 | 1744 | repository_format.supports_overriding_transport): | 1746 | repository_format.supports_overriding_transport): |
29 | 1745 | raise AssertionError( | 1747 | raise AssertionError( |
30 | 1746 | "Repository in metadir does not support " | 1748 | "Repository in metadir does not support " |
31 | 1747 | "overriding transport") | 1749 | "overriding transport") |
32 | 1750 | ======= | ||
33 | 1751 | if not repo_fmt.supports_overriding_transport: | ||
34 | 1752 | raise AssertionError( | ||
35 | 1753 | "Repository in metadir does not support " | ||
36 | 1754 | "overriding transport") | ||
37 | 1755 | >>>>>>> MERGE-SOURCE | ||
38 | 1748 | converter = CopyConverter(self.target_format.repository_format) | 1756 | converter = CopyConverter(self.target_format.repository_format) |
39 | 1749 | converter.convert(repo, pb) | 1757 | converter.convert(repo, pb) |
40 | 1750 | for branch in self.controldir.list_branches(): | 1758 | for branch in self.controldir.list_branches(): |
41 | @@ -1833,138 +1841,43 @@ | |||
42 | 1833 | return BzrDir.open_from_transport(to_convert.root_transport) | 1841 | return BzrDir.open_from_transport(to_convert.root_transport) |
43 | 1834 | 1842 | ||
44 | 1835 | 1843 | ||
141 | 1836 | class RepositoryAcquisitionPolicy(object): | 1844 | class CreateRepository(controldir.RepositoryAcquisitionPolicy): |
46 | 1837 | """Abstract base class for repository acquisition policies. | ||
47 | 1838 | |||
48 | 1839 | A repository acquisition policy decides how a BzrDir acquires a repository | ||
49 | 1840 | for a branch that is being created. The most basic policy decision is | ||
50 | 1841 | whether to create a new repository or use an existing one. | ||
51 | 1842 | """ | ||
52 | 1843 | def __init__(self, stack_on, stack_on_pwd, require_stacking): | ||
53 | 1844 | """Constructor. | ||
54 | 1845 | |||
55 | 1846 | :param stack_on: A location to stack on | ||
56 | 1847 | :param stack_on_pwd: If stack_on is relative, the location it is | ||
57 | 1848 | relative to. | ||
58 | 1849 | :param require_stacking: If True, it is a failure to not stack. | ||
59 | 1850 | """ | ||
60 | 1851 | self._stack_on = stack_on | ||
61 | 1852 | self._stack_on_pwd = stack_on_pwd | ||
62 | 1853 | self._require_stacking = require_stacking | ||
63 | 1854 | |||
64 | 1855 | def configure_branch(self, branch): | ||
65 | 1856 | """Apply any configuration data from this policy to the branch. | ||
66 | 1857 | |||
67 | 1858 | Default implementation sets repository stacking. | ||
68 | 1859 | """ | ||
69 | 1860 | if self._stack_on is None: | ||
70 | 1861 | return | ||
71 | 1862 | if self._stack_on_pwd is None: | ||
72 | 1863 | stack_on = self._stack_on | ||
73 | 1864 | else: | ||
74 | 1865 | try: | ||
75 | 1866 | stack_on = urlutils.rebase_url(self._stack_on, | ||
76 | 1867 | self._stack_on_pwd, | ||
77 | 1868 | branch.user_url) | ||
78 | 1869 | except urlutils.InvalidRebaseURLs: | ||
79 | 1870 | stack_on = self._get_full_stack_on() | ||
80 | 1871 | try: | ||
81 | 1872 | branch.set_stacked_on_url(stack_on) | ||
82 | 1873 | except (_mod_branch.UnstackableBranchFormat, | ||
83 | 1874 | errors.UnstackableRepositoryFormat): | ||
84 | 1875 | if self._require_stacking: | ||
85 | 1876 | raise | ||
86 | 1877 | |||
87 | 1878 | def requires_stacking(self): | ||
88 | 1879 | """Return True if this policy requires stacking.""" | ||
89 | 1880 | return self._stack_on is not None and self._require_stacking | ||
90 | 1881 | |||
91 | 1882 | def _get_full_stack_on(self): | ||
92 | 1883 | """Get a fully-qualified URL for the stack_on location.""" | ||
93 | 1884 | if self._stack_on is None: | ||
94 | 1885 | return None | ||
95 | 1886 | if self._stack_on_pwd is None: | ||
96 | 1887 | return self._stack_on | ||
97 | 1888 | else: | ||
98 | 1889 | return urlutils.join(self._stack_on_pwd, self._stack_on) | ||
99 | 1890 | |||
100 | 1891 | def _add_fallback(self, repository, possible_transports=None): | ||
101 | 1892 | """Add a fallback to the supplied repository, if stacking is set.""" | ||
102 | 1893 | stack_on = self._get_full_stack_on() | ||
103 | 1894 | if stack_on is None: | ||
104 | 1895 | return | ||
105 | 1896 | try: | ||
106 | 1897 | stacked_dir = BzrDir.open(stack_on, | ||
107 | 1898 | possible_transports=possible_transports) | ||
108 | 1899 | except errors.JailBreak: | ||
109 | 1900 | # We keep the stacking details, but we are in the server code so | ||
110 | 1901 | # actually stacking is not needed. | ||
111 | 1902 | return | ||
112 | 1903 | try: | ||
113 | 1904 | stacked_repo = stacked_dir.open_branch().repository | ||
114 | 1905 | except errors.NotBranchError: | ||
115 | 1906 | stacked_repo = stacked_dir.open_repository() | ||
116 | 1907 | try: | ||
117 | 1908 | repository.add_fallback_repository(stacked_repo) | ||
118 | 1909 | except errors.UnstackableRepositoryFormat: | ||
119 | 1910 | if self._require_stacking: | ||
120 | 1911 | raise | ||
121 | 1912 | else: | ||
122 | 1913 | self._require_stacking = True | ||
123 | 1914 | |||
124 | 1915 | def acquire_repository(self, make_working_trees=None, shared=False, | ||
125 | 1916 | possible_transports=None): | ||
126 | 1917 | """Acquire a repository for this bzrdir. | ||
127 | 1918 | |||
128 | 1919 | Implementations may create a new repository or use a pre-existing | ||
129 | 1920 | repository. | ||
130 | 1921 | |||
131 | 1922 | :param make_working_trees: If creating a repository, set | ||
132 | 1923 | make_working_trees to this value (if non-None) | ||
133 | 1924 | :param shared: If creating a repository, make it shared if True | ||
134 | 1925 | :return: A repository, is_new_flag (True if the repository was | ||
135 | 1926 | created). | ||
136 | 1927 | """ | ||
137 | 1928 | raise NotImplementedError(RepositoryAcquisitionPolicy.acquire_repository) | ||
138 | 1929 | |||
139 | 1930 | |||
140 | 1931 | class CreateRepository(RepositoryAcquisitionPolicy): | ||
142 | 1932 | """A policy of creating a new repository""" | 1845 | """A policy of creating a new repository""" |
143 | 1933 | 1846 | ||
145 | 1934 | def __init__(self, bzrdir, stack_on=None, stack_on_pwd=None, | 1847 | def __init__(self, controldir, stack_on=None, stack_on_pwd=None, |
146 | 1935 | require_stacking=False): | 1848 | require_stacking=False): |
147 | 1936 | """Constructor. | 1849 | """Constructor. |
148 | 1937 | 1850 | ||
150 | 1938 | :param bzrdir: The bzrdir to create the repository on. | 1851 | :param controldir: The controldir to create the repository on. |
151 | 1939 | :param stack_on: A location to stack on | 1852 | :param stack_on: A location to stack on |
152 | 1940 | :param stack_on_pwd: If stack_on is relative, the location it is | 1853 | :param stack_on_pwd: If stack_on is relative, the location it is |
153 | 1941 | relative to. | 1854 | relative to. |
154 | 1942 | """ | 1855 | """ |
158 | 1943 | RepositoryAcquisitionPolicy.__init__(self, stack_on, stack_on_pwd, | 1856 | super(CreateRepository, self).__init__( |
159 | 1944 | require_stacking) | 1857 | stack_on, stack_on_pwd, require_stacking) |
160 | 1945 | self._bzrdir = bzrdir | 1858 | self._controldir = controldir |
161 | 1946 | 1859 | ||
162 | 1947 | def acquire_repository(self, make_working_trees=None, shared=False, | 1860 | def acquire_repository(self, make_working_trees=None, shared=False, |
163 | 1948 | possible_transports=None): | 1861 | possible_transports=None): |
164 | 1949 | """Implementation of RepositoryAcquisitionPolicy.acquire_repository | 1862 | """Implementation of RepositoryAcquisitionPolicy.acquire_repository |
165 | 1950 | 1863 | ||
167 | 1951 | Creates the desired repository in the bzrdir we already have. | 1864 | Creates the desired repository in the controldir we already have. |
168 | 1952 | """ | 1865 | """ |
169 | 1953 | if possible_transports is None: | 1866 | if possible_transports is None: |
170 | 1954 | possible_transports = [] | 1867 | possible_transports = [] |
171 | 1955 | else: | 1868 | else: |
172 | 1956 | possible_transports = list(possible_transports) | 1869 | possible_transports = list(possible_transports) |
174 | 1957 | possible_transports.append(self._bzrdir.root_transport) | 1870 | possible_transports.append(self._controldir.root_transport) |
175 | 1958 | stack_on = self._get_full_stack_on() | 1871 | stack_on = self._get_full_stack_on() |
176 | 1959 | if stack_on: | 1872 | if stack_on: |
178 | 1960 | format = self._bzrdir._format | 1873 | format = self._controldir._format |
179 | 1961 | format.require_stacking(stack_on=stack_on, | 1874 | format.require_stacking(stack_on=stack_on, |
180 | 1962 | possible_transports=possible_transports) | 1875 | possible_transports=possible_transports) |
181 | 1963 | if not self._require_stacking: | 1876 | if not self._require_stacking: |
182 | 1964 | # We have picked up automatic stacking somewhere. | 1877 | # We have picked up automatic stacking somewhere. |
183 | 1965 | note(gettext('Using default stacking branch {0} at {1}').format( | 1878 | note(gettext('Using default stacking branch {0} at {1}').format( |
184 | 1966 | self._stack_on, self._stack_on_pwd)) | 1879 | self._stack_on, self._stack_on_pwd)) |
186 | 1967 | repository = self._bzrdir.create_repository(shared=shared) | 1880 | repository = self._controldir.create_repository(shared=shared) |
187 | 1968 | self._add_fallback(repository, | 1881 | self._add_fallback(repository, |
188 | 1969 | possible_transports=possible_transports) | 1882 | possible_transports=possible_transports) |
189 | 1970 | if make_working_trees is not None: | 1883 | if make_working_trees is not None: |
190 | @@ -1972,7 +1885,7 @@ | |||
191 | 1972 | return repository, True | 1885 | return repository, True |
192 | 1973 | 1886 | ||
193 | 1974 | 1887 | ||
195 | 1975 | class UseExistingRepository(RepositoryAcquisitionPolicy): | 1888 | class UseExistingRepository(controldir.RepositoryAcquisitionPolicy): |
196 | 1976 | """A policy of reusing an existing repository""" | 1889 | """A policy of reusing an existing repository""" |
197 | 1977 | 1890 | ||
198 | 1978 | def __init__(self, repository, stack_on=None, stack_on_pwd=None, | 1891 | def __init__(self, repository, stack_on=None, stack_on_pwd=None, |
199 | @@ -1984,8 +1897,8 @@ | |||
200 | 1984 | :param stack_on_pwd: If stack_on is relative, the location it is | 1897 | :param stack_on_pwd: If stack_on is relative, the location it is |
201 | 1985 | relative to. | 1898 | relative to. |
202 | 1986 | """ | 1899 | """ |
205 | 1987 | RepositoryAcquisitionPolicy.__init__(self, stack_on, stack_on_pwd, | 1900 | super(UseExistingRepository, self).__init__( |
206 | 1988 | require_stacking) | 1901 | stack_on, stack_on_pwd, require_stacking) |
207 | 1989 | self._repository = repository | 1902 | self._repository = repository |
208 | 1990 | 1903 | ||
209 | 1991 | def acquire_repository(self, make_working_trees=None, shared=False, | 1904 | def acquire_repository(self, make_working_trees=None, shared=False, |
210 | 1992 | 1905 | ||
211 | === modified file 'breezy/bzr/remote.py' | |||
212 | --- breezy/bzr/remote.py 2017-11-20 22:51:10 +0000 | |||
213 | +++ breezy/bzr/remote.py 2017-11-29 12:08:31 +0000 | |||
214 | @@ -308,8 +308,8 @@ | |||
215 | 308 | remote_repo.dont_leave_lock_in_place() | 308 | remote_repo.dont_leave_lock_in_place() |
216 | 309 | else: | 309 | else: |
217 | 310 | remote_repo.lock_write() | 310 | remote_repo.lock_write() |
220 | 311 | policy = _mod_bzrdir.UseExistingRepository(remote_repo, final_stack, | 311 | policy = _mod_bzrdir.UseExistingRepository(remote_repo, |
221 | 312 | final_stack_pwd, require_stacking) | 312 | final_stack, final_stack_pwd, require_stacking) |
222 | 313 | policy.acquire_repository() | 313 | policy.acquire_repository() |
223 | 314 | else: | 314 | else: |
224 | 315 | remote_repo = None | 315 | remote_repo = None |
225 | 316 | 316 | ||
226 | === modified file 'breezy/bzr/workingtree.py' | |||
227 | --- breezy/bzr/workingtree.py 2017-11-19 18:10:24 +0000 | |||
228 | +++ breezy/bzr/workingtree.py 2017-11-29 12:08:31 +0000 | |||
229 | @@ -32,9 +32,12 @@ | |||
230 | 32 | 32 | ||
231 | 33 | from __future__ import absolute_import | 33 | from __future__ import absolute_import |
232 | 34 | 34 | ||
233 | 35 | from bisect import bisect_left | ||
234 | 35 | import breezy | 36 | import breezy |
235 | 36 | import collections | 37 | import collections |
236 | 37 | import errno | 38 | import errno |
237 | 39 | import itertools | ||
238 | 40 | import operator | ||
239 | 38 | import os | 41 | import os |
240 | 39 | import stat | 42 | import stat |
241 | 40 | import sys | 43 | import sys |
242 | @@ -1501,6 +1504,127 @@ | |||
243 | 1501 | subp = osutils.pathjoin(path, subf) | 1504 | subp = osutils.pathjoin(path, subf) |
244 | 1502 | yield subp | 1505 | yield subp |
245 | 1503 | 1506 | ||
246 | 1507 | def walkdirs(self, prefix=""): | ||
247 | 1508 | """Walk the directories of this tree. | ||
248 | 1509 | |||
249 | 1510 | returns a generator which yields items in the form: | ||
250 | 1511 | ((curren_directory_path, fileid), | ||
251 | 1512 | [(file1_path, file1_name, file1_kind, (lstat), file1_id, | ||
252 | 1513 | file1_kind), ... ]) | ||
253 | 1514 | |||
254 | 1515 | This API returns a generator, which is only valid during the current | ||
255 | 1516 | tree transaction - within a single lock_read or lock_write duration. | ||
256 | 1517 | |||
257 | 1518 | If the tree is not locked, it may cause an error to be raised, | ||
258 | 1519 | depending on the tree implementation. | ||
259 | 1520 | """ | ||
260 | 1521 | disk_top = self.abspath(prefix) | ||
261 | 1522 | if disk_top.endswith('/'): | ||
262 | 1523 | disk_top = disk_top[:-1] | ||
263 | 1524 | top_strip_len = len(disk_top) + 1 | ||
264 | 1525 | inventory_iterator = self._walkdirs(prefix) | ||
265 | 1526 | disk_iterator = osutils.walkdirs(disk_top, prefix) | ||
266 | 1527 | try: | ||
267 | 1528 | current_disk = next(disk_iterator) | ||
268 | 1529 | disk_finished = False | ||
269 | 1530 | except OSError as e: | ||
270 | 1531 | if not (e.errno == errno.ENOENT or | ||
271 | 1532 | (sys.platform == 'win32' and e.errno == ERROR_PATH_NOT_FOUND)): | ||
272 | 1533 | raise | ||
273 | 1534 | current_disk = None | ||
274 | 1535 | disk_finished = True | ||
275 | 1536 | try: | ||
276 | 1537 | current_inv = next(inventory_iterator) | ||
277 | 1538 | inv_finished = False | ||
278 | 1539 | except StopIteration: | ||
279 | 1540 | current_inv = None | ||
280 | 1541 | inv_finished = True | ||
281 | 1542 | while not inv_finished or not disk_finished: | ||
282 | 1543 | if current_disk: | ||
283 | 1544 | ((cur_disk_dir_relpath, cur_disk_dir_path_from_top), | ||
284 | 1545 | cur_disk_dir_content) = current_disk | ||
285 | 1546 | else: | ||
286 | 1547 | ((cur_disk_dir_relpath, cur_disk_dir_path_from_top), | ||
287 | 1548 | cur_disk_dir_content) = ((None, None), None) | ||
288 | 1549 | if not disk_finished: | ||
289 | 1550 | # strip out .bzr dirs | ||
290 | 1551 | if (cur_disk_dir_path_from_top[top_strip_len:] == '' and | ||
291 | 1552 | len(cur_disk_dir_content) > 0): | ||
292 | 1553 | # osutils.walkdirs can be made nicer - | ||
293 | 1554 | # yield the path-from-prefix rather than the pathjoined | ||
294 | 1555 | # value. | ||
295 | 1556 | bzrdir_loc = bisect_left(cur_disk_dir_content, | ||
296 | 1557 | ('.bzr', '.bzr')) | ||
297 | 1558 | if (bzrdir_loc < len(cur_disk_dir_content) | ||
298 | 1559 | and self.controldir.is_control_filename( | ||
299 | 1560 | cur_disk_dir_content[bzrdir_loc][0])): | ||
300 | 1561 | # we don't yield the contents of, or, .bzr itself. | ||
301 | 1562 | del cur_disk_dir_content[bzrdir_loc] | ||
302 | 1563 | if inv_finished: | ||
303 | 1564 | # everything is unknown | ||
304 | 1565 | direction = 1 | ||
305 | 1566 | elif disk_finished: | ||
306 | 1567 | # everything is missing | ||
307 | 1568 | direction = -1 | ||
308 | 1569 | else: | ||
309 | 1570 | direction = cmp(current_inv[0][0], cur_disk_dir_relpath) | ||
310 | 1571 | if direction > 0: | ||
311 | 1572 | # disk is before inventory - unknown | ||
312 | 1573 | dirblock = [(relpath, basename, kind, stat, None, None) for | ||
313 | 1574 | relpath, basename, kind, stat, top_path in | ||
314 | 1575 | cur_disk_dir_content] | ||
315 | 1576 | yield (cur_disk_dir_relpath, None), dirblock | ||
316 | 1577 | try: | ||
317 | 1578 | current_disk = next(disk_iterator) | ||
318 | 1579 | except StopIteration: | ||
319 | 1580 | disk_finished = True | ||
320 | 1581 | elif direction < 0: | ||
321 | 1582 | # inventory is before disk - missing. | ||
322 | 1583 | dirblock = [(relpath, basename, 'unknown', None, fileid, kind) | ||
323 | 1584 | for relpath, basename, dkind, stat, fileid, kind in | ||
324 | 1585 | current_inv[1]] | ||
325 | 1586 | yield (current_inv[0][0], current_inv[0][1]), dirblock | ||
326 | 1587 | try: | ||
327 | 1588 | current_inv = next(inventory_iterator) | ||
328 | 1589 | except StopIteration: | ||
329 | 1590 | inv_finished = True | ||
330 | 1591 | else: | ||
331 | 1592 | # versioned present directory | ||
332 | 1593 | # merge the inventory and disk data together | ||
333 | 1594 | dirblock = [] | ||
334 | 1595 | for relpath, subiterator in itertools.groupby(sorted( | ||
335 | 1596 | current_inv[1] + cur_disk_dir_content, | ||
336 | 1597 | key=operator.itemgetter(0)), operator.itemgetter(1)): | ||
337 | 1598 | path_elements = list(subiterator) | ||
338 | 1599 | if len(path_elements) == 2: | ||
339 | 1600 | inv_row, disk_row = path_elements | ||
340 | 1601 | # versioned, present file | ||
341 | 1602 | dirblock.append((inv_row[0], | ||
342 | 1603 | inv_row[1], disk_row[2], | ||
343 | 1604 | disk_row[3], inv_row[4], | ||
344 | 1605 | inv_row[5])) | ||
345 | 1606 | elif len(path_elements[0]) == 5: | ||
346 | 1607 | # unknown disk file | ||
347 | 1608 | dirblock.append((path_elements[0][0], | ||
348 | 1609 | path_elements[0][1], path_elements[0][2], | ||
349 | 1610 | path_elements[0][3], None, None)) | ||
350 | 1611 | elif len(path_elements[0]) == 6: | ||
351 | 1612 | # versioned, absent file. | ||
352 | 1613 | dirblock.append((path_elements[0][0], | ||
353 | 1614 | path_elements[0][1], 'unknown', None, | ||
354 | 1615 | path_elements[0][4], path_elements[0][5])) | ||
355 | 1616 | else: | ||
356 | 1617 | raise NotImplementedError('unreachable code') | ||
357 | 1618 | yield current_inv[0], dirblock | ||
358 | 1619 | try: | ||
359 | 1620 | current_inv = next(inventory_iterator) | ||
360 | 1621 | except StopIteration: | ||
361 | 1622 | inv_finished = True | ||
362 | 1623 | try: | ||
363 | 1624 | current_disk = next(disk_iterator) | ||
364 | 1625 | except StopIteration: | ||
365 | 1626 | disk_finished = True | ||
366 | 1627 | |||
367 | 1504 | def _walkdirs(self, prefix=""): | 1628 | def _walkdirs(self, prefix=""): |
368 | 1505 | """Walk the directories of this tree. | 1629 | """Walk the directories of this tree. |
369 | 1506 | 1630 | ||
370 | 1507 | 1631 | ||
371 | === modified file 'breezy/controldir.py' | |||
372 | --- breezy/controldir.py 2017-07-30 21:23:44 +0000 | |||
373 | +++ breezy/controldir.py 2017-11-29 12:08:31 +0000 | |||
374 | @@ -29,6 +29,7 @@ | |||
375 | 29 | import textwrap | 29 | import textwrap |
376 | 30 | 30 | ||
377 | 31 | from breezy import ( | 31 | from breezy import ( |
378 | 32 | branch as _mod_branch, | ||
379 | 32 | hooks, | 33 | hooks, |
380 | 33 | revision as _mod_revision, | 34 | revision as _mod_revision, |
381 | 34 | transport as _mod_transport, | 35 | transport as _mod_transport, |
382 | @@ -1454,6 +1455,101 @@ | |||
383 | 1454 | return filename == '.bzr' | 1455 | return filename == '.bzr' |
384 | 1455 | 1456 | ||
385 | 1456 | 1457 | ||
386 | 1458 | class RepositoryAcquisitionPolicy(object): | ||
387 | 1459 | """Abstract base class for repository acquisition policies. | ||
388 | 1460 | |||
389 | 1461 | A repository acquisition policy decides how a ControlDir acquires a repository | ||
390 | 1462 | for a branch that is being created. The most basic policy decision is | ||
391 | 1463 | whether to create a new repository or use an existing one. | ||
392 | 1464 | """ | ||
393 | 1465 | def __init__(self, stack_on, stack_on_pwd, require_stacking): | ||
394 | 1466 | """Constructor. | ||
395 | 1467 | |||
396 | 1468 | :param stack_on: A location to stack on | ||
397 | 1469 | :param stack_on_pwd: If stack_on is relative, the location it is | ||
398 | 1470 | relative to. | ||
399 | 1471 | :param require_stacking: If True, it is a failure to not stack. | ||
400 | 1472 | """ | ||
401 | 1473 | self._stack_on = stack_on | ||
402 | 1474 | self._stack_on_pwd = stack_on_pwd | ||
403 | 1475 | self._require_stacking = require_stacking | ||
404 | 1476 | |||
405 | 1477 | def configure_branch(self, branch): | ||
406 | 1478 | """Apply any configuration data from this policy to the branch. | ||
407 | 1479 | |||
408 | 1480 | Default implementation sets repository stacking. | ||
409 | 1481 | """ | ||
410 | 1482 | if self._stack_on is None: | ||
411 | 1483 | return | ||
412 | 1484 | if self._stack_on_pwd is None: | ||
413 | 1485 | stack_on = self._stack_on | ||
414 | 1486 | else: | ||
415 | 1487 | try: | ||
416 | 1488 | stack_on = urlutils.rebase_url(self._stack_on, | ||
417 | 1489 | self._stack_on_pwd, | ||
418 | 1490 | branch.user_url) | ||
419 | 1491 | except urlutils.InvalidRebaseURLs: | ||
420 | 1492 | stack_on = self._get_full_stack_on() | ||
421 | 1493 | try: | ||
422 | 1494 | branch.set_stacked_on_url(stack_on) | ||
423 | 1495 | except (_mod_branch.UnstackableBranchFormat, | ||
424 | 1496 | errors.UnstackableRepositoryFormat): | ||
425 | 1497 | if self._require_stacking: | ||
426 | 1498 | raise | ||
427 | 1499 | |||
428 | 1500 | def requires_stacking(self): | ||
429 | 1501 | """Return True if this policy requires stacking.""" | ||
430 | 1502 | return self._stack_on is not None and self._require_stacking | ||
431 | 1503 | |||
432 | 1504 | def _get_full_stack_on(self): | ||
433 | 1505 | """Get a fully-qualified URL for the stack_on location.""" | ||
434 | 1506 | if self._stack_on is None: | ||
435 | 1507 | return None | ||
436 | 1508 | if self._stack_on_pwd is None: | ||
437 | 1509 | return self._stack_on | ||
438 | 1510 | else: | ||
439 | 1511 | return urlutils.join(self._stack_on_pwd, self._stack_on) | ||
440 | 1512 | |||
441 | 1513 | def _add_fallback(self, repository, possible_transports=None): | ||
442 | 1514 | """Add a fallback to the supplied repository, if stacking is set.""" | ||
443 | 1515 | stack_on = self._get_full_stack_on() | ||
444 | 1516 | if stack_on is None: | ||
445 | 1517 | return | ||
446 | 1518 | try: | ||
447 | 1519 | stacked_dir = ControlDir.open( | ||
448 | 1520 | stack_on, possible_transports=possible_transports) | ||
449 | 1521 | except errors.JailBreak: | ||
450 | 1522 | # We keep the stacking details, but we are in the server code so | ||
451 | 1523 | # actually stacking is not needed. | ||
452 | 1524 | return | ||
453 | 1525 | try: | ||
454 | 1526 | stacked_repo = stacked_dir.open_branch().repository | ||
455 | 1527 | except errors.NotBranchError: | ||
456 | 1528 | stacked_repo = stacked_dir.open_repository() | ||
457 | 1529 | try: | ||
458 | 1530 | repository.add_fallback_repository(stacked_repo) | ||
459 | 1531 | except errors.UnstackableRepositoryFormat: | ||
460 | 1532 | if self._require_stacking: | ||
461 | 1533 | raise | ||
462 | 1534 | else: | ||
463 | 1535 | self._require_stacking = True | ||
464 | 1536 | |||
465 | 1537 | def acquire_repository(self, make_working_trees=None, shared=False, | ||
466 | 1538 | possible_transports=None): | ||
467 | 1539 | """Acquire a repository for this controldir. | ||
468 | 1540 | |||
469 | 1541 | Implementations may create a new repository or use a pre-existing | ||
470 | 1542 | repository. | ||
471 | 1543 | |||
472 | 1544 | :param make_working_trees: If creating a repository, set | ||
473 | 1545 | make_working_trees to this value (if non-None) | ||
474 | 1546 | :param shared: If creating a repository, make it shared if True | ||
475 | 1547 | :return: A repository, is_new_flag (True if the repository was | ||
476 | 1548 | created). | ||
477 | 1549 | """ | ||
478 | 1550 | raise NotImplementedError(RepositoryAcquisitionPolicy.acquire_repository) | ||
479 | 1551 | |||
480 | 1552 | |||
481 | 1457 | # Please register new formats after old formats so that formats | 1553 | # Please register new formats after old formats so that formats |
482 | 1458 | # appear in chronological order and format descriptions can build | 1554 | # appear in chronological order and format descriptions can build |
483 | 1459 | # on previous ones. | 1555 | # on previous ones. |
484 | 1460 | 1556 | ||
485 | === modified file 'breezy/log.py' | |||
486 | --- breezy/log.py 2017-11-12 20:07:32 +0000 | |||
487 | +++ breezy/log.py 2017-11-29 12:08:31 +0000 | |||
488 | @@ -103,38 +103,38 @@ | |||
489 | 103 | TODO: Perhaps some way to limit this to only particular revisions, | 103 | TODO: Perhaps some way to limit this to only particular revisions, |
490 | 104 | or to traverse a non-mainline set of revisions? | 104 | or to traverse a non-mainline set of revisions? |
491 | 105 | """ | 105 | """ |
493 | 106 | last_ie = None | 106 | last_verifier = None |
494 | 107 | last_path = None | 107 | last_path = None |
495 | 108 | revno = 1 | 108 | revno = 1 |
496 | 109 | graph = branch.repository.get_graph() | 109 | graph = branch.repository.get_graph() |
497 | 110 | history = list(graph.iter_lefthand_ancestry(branch.last_revision(), | 110 | history = list(graph.iter_lefthand_ancestry(branch.last_revision(), |
498 | 111 | [_mod_revision.NULL_REVISION])) | 111 | [_mod_revision.NULL_REVISION])) |
499 | 112 | for revision_id in reversed(history): | 112 | for revision_id in reversed(history): |
504 | 113 | this_inv = branch.repository.get_inventory(revision_id) | 113 | this_tree = branch.repository.revision_tree(revision_id) |
505 | 114 | if this_inv.has_id(file_id): | 114 | try: |
506 | 115 | this_ie = this_inv[file_id] | 115 | this_path = this_tree.id2path(file_id) |
507 | 116 | this_path = this_inv.id2path(file_id) | 116 | except errors.NoSuchId: |
508 | 117 | this_verifier = this_path = None | ||
509 | 117 | else: | 118 | else: |
511 | 118 | this_ie = this_path = None | 119 | this_verifier = this_tree.get_file_verifier(this_path, file_id) |
512 | 119 | 120 | ||
513 | 120 | # now we know how it was last time, and how it is in this revision. | 121 | # now we know how it was last time, and how it is in this revision. |
514 | 121 | # are those two states effectively the same or not? | 122 | # are those two states effectively the same or not? |
515 | 122 | 123 | ||
517 | 123 | if not this_ie and not last_ie: | 124 | if not this_verifier and not last_verifier: |
518 | 124 | # not present in either | 125 | # not present in either |
519 | 125 | pass | 126 | pass |
521 | 126 | elif this_ie and not last_ie: | 127 | elif this_verifier and not last_verifier: |
522 | 127 | yield revno, revision_id, "added " + this_path | 128 | yield revno, revision_id, "added " + this_path |
524 | 128 | elif not this_ie and last_ie: | 129 | elif not this_verifier and last_verifier: |
525 | 129 | # deleted here | 130 | # deleted here |
526 | 130 | yield revno, revision_id, "deleted " + last_path | 131 | yield revno, revision_id, "deleted " + last_path |
527 | 131 | elif this_path != last_path: | 132 | elif this_path != last_path: |
528 | 132 | yield revno, revision_id, ("renamed %s => %s" % (last_path, this_path)) | 133 | yield revno, revision_id, ("renamed %s => %s" % (last_path, this_path)) |
531 | 133 | elif (this_ie.text_size != last_ie.text_size | 134 | elif (this_verifier != last_verifier): |
530 | 134 | or this_ie.text_sha1 != last_ie.text_sha1): | ||
532 | 135 | yield revno, revision_id, "modified " + this_path | 135 | yield revno, revision_id, "modified " + this_path |
533 | 136 | 136 | ||
535 | 137 | last_ie = this_ie | 137 | last_verifier = this_verifier |
536 | 138 | last_path = this_path | 138 | last_path = this_path |
537 | 139 | revno += 1 | 139 | revno += 1 |
538 | 140 | 140 | ||
539 | 141 | 141 | ||
540 | === modified file 'breezy/tests/per_branch/test_push.py' | |||
541 | --- breezy/tests/per_branch/test_push.py 2017-11-16 00:39:04 +0000 | |||
542 | +++ breezy/tests/per_branch/test_push.py 2017-11-29 12:08:31 +0000 | |||
543 | @@ -237,11 +237,10 @@ | |||
544 | 237 | except errors.UninitializableFormat: | 237 | except errors.UninitializableFormat: |
545 | 238 | raise tests.TestNotApplicable('cannot initialize this format') | 238 | raise tests.TestNotApplicable('cannot initialize this format') |
546 | 239 | source.start_series() | 239 | source.start_series() |
552 | 240 | source.build_snapshot(None, [ | 240 | revid_a = source.build_snapshot(None, [ |
553 | 241 | ('add', ('', 'root-id', 'directory', None))], | 241 | ('add', ('', 'root-id', 'directory', None))]) |
554 | 242 | revision_id='A') | 242 | revid_b = source.build_snapshot([revid_a], []) |
555 | 243 | source.build_snapshot(['A'], [], revision_id='B') | 243 | revid_c = source.build_snapshot([revid_a], []) |
551 | 244 | source.build_snapshot(['A'], [], revision_id='C') | ||
556 | 245 | source.finish_series() | 244 | source.finish_series() |
557 | 246 | b = source.get_branch() | 245 | b = source.get_branch() |
558 | 247 | # Note: We can't read lock the source branch. Some formats take a write | 246 | # Note: We can't read lock the source branch. Some formats take a write |
559 | @@ -251,9 +250,9 @@ | |||
560 | 251 | # This means 'push the source branch into this dir' | 250 | # This means 'push the source branch into this dir' |
561 | 252 | bzrdir.push_branch(b) | 251 | bzrdir.push_branch(b) |
562 | 253 | self.addCleanup(repo.lock_read().unlock) | 252 | self.addCleanup(repo.lock_read().unlock) |
564 | 254 | # We should have pushed 'C', but not 'B', since it isn't in the | 253 | # We should have pushed revid_c, but not revid_b, since it isn't in the |
565 | 255 | # ancestry | 254 | # ancestry |
567 | 256 | self.assertEqual(['A', 'C'], sorted(repo.all_revision_ids())) | 255 | self.assertEqual([revid_a, revid_c], sorted(repo.all_revision_ids())) |
568 | 257 | 256 | ||
569 | 258 | def test_push_with_default_stacking_does_not_create_broken_branch(self): | 257 | def test_push_with_default_stacking_does_not_create_broken_branch(self): |
570 | 259 | """Pushing a new standalone branch works even when there's a default | 258 | """Pushing a new standalone branch works even when there's a default |
571 | @@ -278,24 +277,22 @@ | |||
572 | 278 | repo = self.make_repository('repo', shared=True, format='1.6') | 277 | repo = self.make_repository('repo', shared=True, format='1.6') |
573 | 279 | builder = self.make_branch_builder('repo/local') | 278 | builder = self.make_branch_builder('repo/local') |
574 | 280 | builder.start_series() | 279 | builder.start_series() |
576 | 281 | builder.build_snapshot(None, [ | 280 | revid1 = builder.build_snapshot(None, [ |
577 | 282 | ('add', ('', 'root-id', 'directory', '')), | 281 | ('add', ('', 'root-id', 'directory', '')), |
584 | 283 | ('add', ('filename', 'f-id', 'file', 'content\n'))], | 282 | ('add', ('filename', 'f-id', 'file', 'content\n'))]) |
585 | 284 | revision_id='rev-1',) | 283 | revid2 = builder.build_snapshot([revid1], []) |
586 | 285 | builder.build_snapshot(['rev-1'], [], revision_id='rev-2') | 284 | revid3 = builder.build_snapshot([revid2], |
587 | 286 | builder.build_snapshot(['rev-2'], | 285 | [('modify', ('f-id', 'new-content\n'))]) |
582 | 287 | [('modify', ('f-id', 'new-content\n'))], | ||
583 | 288 | revision_id='rev-3') | ||
588 | 289 | builder.finish_series() | 286 | builder.finish_series() |
589 | 290 | trunk = builder.get_branch() | 287 | trunk = builder.get_branch() |
590 | 291 | # Sprout rev-1 to "trunk", so that we can stack on it. | 288 | # Sprout rev-1 to "trunk", so that we can stack on it. |
592 | 292 | trunk.controldir.sprout(self.get_url('trunk'), revision_id='rev-1') | 289 | trunk.controldir.sprout(self.get_url('trunk'), revision_id=revid1) |
593 | 293 | # Set a default stacking policy so that new branches will automatically | 290 | # Set a default stacking policy so that new branches will automatically |
594 | 294 | # stack on trunk. | 291 | # stack on trunk. |
595 | 295 | self.make_controldir('.').get_config().set_default_stack_on('trunk') | 292 | self.make_controldir('.').get_config().set_default_stack_on('trunk') |
596 | 296 | # Push rev-2 to a new branch "remote". It will be stacked on "trunk". | 293 | # Push rev-2 to a new branch "remote". It will be stacked on "trunk". |
597 | 297 | output = BytesIO() | 294 | output = BytesIO() |
599 | 298 | push._show_push_branch(trunk, 'rev-2', self.get_url('remote'), output) | 295 | push._show_push_branch(trunk, revid2, self.get_url('remote'), output) |
600 | 299 | # Push rev-3 onto "remote". If "remote" not stacked and is missing the | 296 | # Push rev-3 onto "remote". If "remote" not stacked and is missing the |
601 | 300 | # fulltext record for f-id @ rev-1, then this will fail. | 297 | # fulltext record for f-id @ rev-1, then this will fail. |
602 | 301 | remote_branch = branch.Branch.open(self.get_url('remote')) | 298 | remote_branch = branch.Branch.open(self.get_url('remote')) |
603 | 302 | 299 | ||
604 | === modified file 'breezy/tests/per_branch/test_tags.py' | |||
605 | --- breezy/tests/per_branch/test_tags.py 2017-11-21 20:09:04 +0000 | |||
606 | +++ breezy/tests/per_branch/test_tags.py 2017-11-29 12:08:31 +0000 | |||
607 | @@ -116,29 +116,31 @@ | |||
608 | 116 | self.fail("didn't get expected exception") | 116 | self.fail("didn't get expected exception") |
609 | 117 | 117 | ||
610 | 118 | def test_merge_tags(self): | 118 | def test_merge_tags(self): |
613 | 119 | b1 = self.make_branch_with_revisions('b1', ['revid', 'revid-1']) | 119 | b1, [revid, revid1] = self.make_branch_with_revision_tuple('b1', 2) |
614 | 120 | b2 = self.make_branch_with_revisions('b2', ['revid', 'revid-2']) | 120 | w2 = b1.controldir.sprout('b2', revision_id=revid).open_workingtree() |
615 | 121 | revid2 = w2.commit('revision 2') | ||
616 | 122 | b2 = w2.branch | ||
617 | 121 | # if there are tags in the source and not the destination, then they | 123 | # if there are tags in the source and not the destination, then they |
618 | 122 | # just go across | 124 | # just go across |
620 | 123 | b1.tags.set_tag('tagname', 'revid') | 125 | b1.tags.set_tag('tagname', revid) |
621 | 124 | b1.tags.merge_to(b2.tags) | 126 | b1.tags.merge_to(b2.tags) |
623 | 125 | self.assertEqual(b2.tags.lookup_tag('tagname'), 'revid') | 127 | self.assertEqual(b2.tags.lookup_tag('tagname'), revid) |
624 | 126 | # if a tag is in the destination and not in the source, it is not | 128 | # if a tag is in the destination and not in the source, it is not |
625 | 127 | # removed when we merge them | 129 | # removed when we merge them |
627 | 128 | b2.tags.set_tag('in-destination', 'revid') | 130 | b2.tags.set_tag('in-destination', revid) |
628 | 129 | updates, conflicts = b1.tags.merge_to(b2.tags) | 131 | updates, conflicts = b1.tags.merge_to(b2.tags) |
629 | 130 | self.assertEqual(list(conflicts), []) | 132 | self.assertEqual(list(conflicts), []) |
630 | 131 | self.assertEqual(updates, {}) | 133 | self.assertEqual(updates, {}) |
632 | 132 | self.assertEqual(b2.tags.lookup_tag('in-destination'), 'revid') | 134 | self.assertEqual(b2.tags.lookup_tag('in-destination'), revid) |
633 | 133 | # if there's a conflicting tag, it's reported -- the command line | 135 | # if there's a conflicting tag, it's reported -- the command line |
634 | 134 | # interface will say "these tags couldn't be copied" | 136 | # interface will say "these tags couldn't be copied" |
637 | 135 | b1.tags.set_tag('conflicts', 'revid-1') | 137 | b1.tags.set_tag('conflicts', revid1) |
638 | 136 | b2.tags.set_tag('conflicts', 'revid-2') | 138 | b2.tags.set_tag('conflicts', revid2) |
639 | 137 | updates, conflicts = b1.tags.merge_to(b2.tags) | 139 | updates, conflicts = b1.tags.merge_to(b2.tags) |
641 | 138 | self.assertEqual(list(conflicts), [('conflicts', 'revid-1', 'revid-2')]) | 140 | self.assertEqual(list(conflicts), [('conflicts', revid1, revid2)]) |
642 | 139 | # and it keeps the same value | 141 | # and it keeps the same value |
643 | 140 | self.assertEqual(updates, {}) | 142 | self.assertEqual(updates, {}) |
645 | 141 | self.assertEqual(b2.tags.lookup_tag('conflicts'), 'revid-2') | 143 | self.assertEqual(b2.tags.lookup_tag('conflicts'), revid2) |
646 | 142 | 144 | ||
647 | 143 | def test_unicode_tag(self): | 145 | def test_unicode_tag(self): |
648 | 144 | tag_name = u'\u3070' | 146 | tag_name = u'\u3070' |
649 | @@ -238,7 +240,7 @@ | |||
650 | 238 | 240 | ||
651 | 239 | def test_merge_to_invalides_cache(self): | 241 | def test_merge_to_invalides_cache(self): |
652 | 240 | b1, revids = self.make_write_locked_branch_with_one_tag() | 242 | b1, revids = self.make_write_locked_branch_with_one_tag() |
654 | 241 | b2 = self.make_branch_with_revisions('b2', [revids[1], revids[0]]) | 243 | b2 = b1.controldir.sprout('b2').open_branch() |
655 | 242 | b2.tags.set_tag('two', revids[1]) | 244 | b2.tags.set_tag('two', revids[1]) |
656 | 243 | b2.tags.merge_to(b1.tags) | 245 | b2.tags.merge_to(b1.tags) |
657 | 244 | self.assertEqual( | 246 | self.assertEqual( |
658 | 245 | 247 | ||
659 | === modified file 'breezy/tests/per_controldir/test_controldir.py' | |||
660 | --- breezy/tests/per_controldir/test_controldir.py 2017-08-10 01:21:20 +0000 | |||
661 | +++ breezy/tests/per_controldir/test_controldir.py 2017-11-29 12:08:31 +0000 | |||
662 | @@ -314,6 +314,8 @@ | |||
663 | 314 | tree.add('foo') | 314 | tree.add('foo') |
664 | 315 | rev1 = tree.commit('revision 1') | 315 | rev1 = tree.commit('revision 1') |
665 | 316 | tree_repo = tree.branch.repository | 316 | tree_repo = tree.branch.repository |
666 | 317 | if not tree_repo._format.supports_revision_signatures: | ||
667 | 318 | self.skipTest('repository format does not support signing') | ||
668 | 317 | tree_repo.lock_write() | 319 | tree_repo.lock_write() |
669 | 318 | tree_repo.start_write_group() | 320 | tree_repo.start_write_group() |
670 | 319 | tree_repo.sign_revision(rev1, gpg.LoopbackGPGStrategy(None)) | 321 | tree_repo.sign_revision(rev1, gpg.LoopbackGPGStrategy(None)) |
671 | 320 | 322 | ||
672 | === modified file 'breezy/tests/per_intertree/test_compare.py' | |||
673 | --- breezy/tests/per_intertree/test_compare.py 2017-11-12 20:44:54 +0000 | |||
674 | +++ breezy/tests/per_intertree/test_compare.py 2017-11-29 12:08:31 +0000 | |||
675 | @@ -130,9 +130,9 @@ | |||
676 | 130 | d = self.intertree_class(tree1, tree2).compare() | 130 | d = self.intertree_class(tree1, tree2).compare() |
677 | 131 | self.assertEqual([], d.added) | 131 | self.assertEqual([], d.added) |
678 | 132 | self.assertEqual([], d.modified) | 132 | self.assertEqual([], d.modified) |
682 | 133 | self.assertEqual([('a', 'a-id', 'file'), | 133 | self.assertEqual([('a', tree1.path2id('a'), 'file'), |
683 | 134 | ('b', 'b-id', 'directory'), | 134 | ('b', tree1.path2id('b'), 'directory'), |
684 | 135 | ('b/c', 'c-id', 'file'), | 135 | ('b/c', tree1.path2id('b/c'), 'file'), |
685 | 136 | ], d.removed) | 136 | ], d.removed) |
686 | 137 | self.assertEqual([], d.renamed) | 137 | self.assertEqual([], d.renamed) |
687 | 138 | self.assertEqual([], d.unchanged) | 138 | self.assertEqual([], d.unchanged) |
688 | @@ -146,7 +146,7 @@ | |||
689 | 146 | tree1, tree2 = self.mutable_trees_to_test_trees(self, tree1, tree2) | 146 | tree1, tree2 = self.mutable_trees_to_test_trees(self, tree1, tree2) |
690 | 147 | d = self.intertree_class(tree1, tree2).compare() | 147 | d = self.intertree_class(tree1, tree2).compare() |
691 | 148 | self.assertEqual([], d.added) | 148 | self.assertEqual([], d.added) |
693 | 149 | self.assertEqual([('a', 'a-id', 'file', True, False)], d.modified) | 149 | self.assertEqual([('a', tree1.path2id('a'), 'file', True, False)], d.modified) |
694 | 150 | self.assertEqual([], d.removed) | 150 | self.assertEqual([], d.removed) |
695 | 151 | self.assertEqual([], d.renamed) | 151 | self.assertEqual([], d.renamed) |
696 | 152 | self.assertEqual([], d.unchanged) | 152 | self.assertEqual([], d.unchanged) |
697 | @@ -160,7 +160,7 @@ | |||
698 | 160 | tree1, tree2 = self.mutable_trees_to_test_trees(self, tree1, tree2) | 160 | tree1, tree2 = self.mutable_trees_to_test_trees(self, tree1, tree2) |
699 | 161 | d = self.intertree_class(tree1, tree2).compare() | 161 | d = self.intertree_class(tree1, tree2).compare() |
700 | 162 | self.assertEqual([], d.added) | 162 | self.assertEqual([], d.added) |
702 | 163 | self.assertEqual([('b/c', 'c-id', 'file', False, True)], d.modified) | 163 | self.assertEqual([('b/c', tree1.path2id('b/c'), 'file', False, True)], d.modified) |
703 | 164 | self.assertEqual([], d.removed) | 164 | self.assertEqual([], d.removed) |
704 | 165 | self.assertEqual([], d.renamed) | 165 | self.assertEqual([], d.renamed) |
705 | 166 | self.assertEqual([], d.unchanged) | 166 | self.assertEqual([], d.unchanged) |
706 | @@ -176,7 +176,7 @@ | |||
707 | 176 | self.assertEqual([], d.added) | 176 | self.assertEqual([], d.added) |
708 | 177 | self.assertEqual([], d.modified) | 177 | self.assertEqual([], d.modified) |
709 | 178 | self.assertEqual([], d.removed) | 178 | self.assertEqual([], d.removed) |
711 | 179 | self.assertEqual([('a', 'd', 'a-id', 'file', False, False)], d.renamed) | 179 | self.assertEqual([('a', 'd', tree1.path2id('a'), 'file', False, False)], d.renamed) |
712 | 180 | self.assertEqual([], d.unchanged) | 180 | self.assertEqual([], d.unchanged) |
713 | 181 | 181 | ||
714 | 182 | def test_file_rename_and_modification(self): | 182 | def test_file_rename_and_modification(self): |
715 | @@ -190,7 +190,7 @@ | |||
716 | 190 | self.assertEqual([], d.added) | 190 | self.assertEqual([], d.added) |
717 | 191 | self.assertEqual([], d.modified) | 191 | self.assertEqual([], d.modified) |
718 | 192 | self.assertEqual([], d.removed) | 192 | self.assertEqual([], d.removed) |
720 | 193 | self.assertEqual([('a', 'd', 'a-id', 'file', True, False)], d.renamed) | 193 | self.assertEqual([('a', 'd', tree1.path2id('a'), 'file', True, False)], d.renamed) |
721 | 194 | self.assertEqual([], d.unchanged) | 194 | self.assertEqual([], d.unchanged) |
722 | 195 | 195 | ||
723 | 196 | def test_file_rename_and_meta_modification(self): | 196 | def test_file_rename_and_meta_modification(self): |
724 | @@ -204,7 +204,7 @@ | |||
725 | 204 | self.assertEqual([], d.added) | 204 | self.assertEqual([], d.added) |
726 | 205 | self.assertEqual([], d.modified) | 205 | self.assertEqual([], d.modified) |
727 | 206 | self.assertEqual([], d.removed) | 206 | self.assertEqual([], d.removed) |
729 | 207 | self.assertEqual([('b/c', 'e', 'c-id', 'file', False, True)], d.renamed) | 207 | self.assertEqual([('b/c', 'e', tree1.path2id('b/c'), 'file', False, True)], d.renamed) |
730 | 208 | self.assertEqual([], d.unchanged) | 208 | self.assertEqual([], d.unchanged) |
731 | 209 | 209 | ||
732 | 210 | def test_empty_to_abc_content_a_only(self): | 210 | def test_empty_to_abc_content_a_only(self): |
733 | @@ -215,7 +215,7 @@ | |||
734 | 215 | tree2 = self.get_tree_no_parents_abc_content(tree2) | 215 | tree2 = self.get_tree_no_parents_abc_content(tree2) |
735 | 216 | tree1, tree2 = self.mutable_trees_to_test_trees(self, tree1, tree2) | 216 | tree1, tree2 = self.mutable_trees_to_test_trees(self, tree1, tree2) |
736 | 217 | d = self.intertree_class(tree1, tree2).compare(specific_files=['a']) | 217 | d = self.intertree_class(tree1, tree2).compare(specific_files=['a']) |
738 | 218 | self.assertEqual([('a', 'a-id', 'file')], d.added) | 218 | self.assertEqual([('a', tree2.path2id('a'), 'file')], d.added) |
739 | 219 | self.assertEqual([], d.modified) | 219 | self.assertEqual([], d.modified) |
740 | 220 | self.assertEqual([], d.removed) | 220 | self.assertEqual([], d.removed) |
741 | 221 | self.assertEqual([], d.renamed) | 221 | self.assertEqual([], d.renamed) |
742 | @@ -230,8 +230,9 @@ | |||
743 | 230 | d = self.intertree_class(tree1, tree2).compare( | 230 | d = self.intertree_class(tree1, tree2).compare( |
744 | 231 | specific_files=['a', 'b/c']) | 231 | specific_files=['a', 'b/c']) |
745 | 232 | self.assertEqual( | 232 | self.assertEqual( |
748 | 233 | [('a', 'a-id', 'file'), (u'b', 'b-id', 'directory'), | 233 | [('a', tree2.path2id('a'), 'file'), |
749 | 234 | ('b/c', 'c-id', 'file')], | 234 | (u'b', tree2.path2id('b'), 'directory'), |
750 | 235 | ('b/c', tree2.path2id('b/c'), 'file')], | ||
751 | 235 | d.added) | 236 | d.added) |
752 | 236 | self.assertEqual([], d.modified) | 237 | self.assertEqual([], d.modified) |
753 | 237 | self.assertEqual([], d.removed) | 238 | self.assertEqual([], d.removed) |
754 | @@ -765,7 +766,7 @@ | |||
755 | 765 | tree2 = self.get_tree_no_parents_abc_content_4(tree2) | 766 | tree2 = self.get_tree_no_parents_abc_content_4(tree2) |
756 | 766 | tree1, tree2 = self.mutable_trees_to_test_trees(self, tree1, tree2) | 767 | tree1, tree2 = self.mutable_trees_to_test_trees(self, tree1, tree2) |
757 | 767 | root_id = tree1.path2id('') | 768 | root_id = tree1.path2id('') |
759 | 768 | self.assertEqual([('a-id', ('a', 'd'), False, (True, True), | 769 | self.assertEqual([(tree1.path2id('a'), ('a', 'd'), False, (True, True), |
760 | 769 | (root_id, root_id), ('a', 'd'), ('file', 'file'), | 770 | (root_id, root_id), ('a', 'd'), ('file', 'file'), |
761 | 770 | (False, False))], | 771 | (False, False))], |
762 | 771 | self.do_iter_changes(tree1, tree2)) | 772 | self.do_iter_changes(tree1, tree2)) |
763 | @@ -861,7 +862,7 @@ | |||
764 | 861 | # d is new, d/e is b-id renamed, d/e/a is a-id renamed | 862 | # d is new, d/e is b-id renamed, d/e/a is a-id renamed |
765 | 862 | root_id = tree1.path2id('') | 863 | root_id = tree1.path2id('') |
766 | 863 | self.assertEqualIterChanges( | 864 | self.assertEqualIterChanges( |
768 | 864 | [self.renamed(tree1, tree2, 'b-id', False), | 865 | [self.renamed(tree1, tree2, tree1.path2id('b'), False), |
769 | 865 | self.added(tree2, 'd-id'), | 866 | self.added(tree2, 'd-id'), |
770 | 866 | self.renamed(tree1, tree2, 'a-id', False)], | 867 | self.renamed(tree1, tree2, 'a-id', False)], |
771 | 867 | self.do_iter_changes(tree1, tree2, specific_files=['d/e/a'])) | 868 | self.do_iter_changes(tree1, tree2, specific_files=['d/e/a'])) |
772 | 868 | 869 | ||
773 | === modified file 'breezy/tests/per_repository/test_fetch.py' | |||
774 | --- breezy/tests/per_repository/test_fetch.py 2017-11-19 19:00:31 +0000 | |||
775 | +++ breezy/tests/per_repository/test_fetch.py 2017-11-29 12:08:31 +0000 | |||
776 | @@ -320,27 +320,26 @@ | |||
777 | 320 | def make_simple_branch_with_ghost(self): | 320 | def make_simple_branch_with_ghost(self): |
778 | 321 | builder = self.make_branch_builder('source') | 321 | builder = self.make_branch_builder('source') |
779 | 322 | builder.start_series() | 322 | builder.start_series() |
781 | 323 | builder.build_snapshot(None, [ | 323 | a_revid = builder.build_snapshot(None, [ |
782 | 324 | ('add', ('', 'root-id', 'directory', None)), | 324 | ('add', ('', 'root-id', 'directory', None)), |
786 | 325 | ('add', ('file', 'file-id', 'file', 'content\n'))], | 325 | ('add', ('file', 'file-id', 'file', 'content\n'))]) |
787 | 326 | revision_id='A-id') | 326 | b_revid = builder.build_snapshot([a_revid, 'ghost-id'], []) |
785 | 327 | builder.build_snapshot(['A-id', 'ghost-id'], [], revision_id='B-id') | ||
788 | 328 | builder.finish_series() | 327 | builder.finish_series() |
789 | 329 | source_b = builder.get_branch() | 328 | source_b = builder.get_branch() |
790 | 330 | source_b.lock_read() | 329 | source_b.lock_read() |
791 | 331 | self.addCleanup(source_b.unlock) | 330 | self.addCleanup(source_b.unlock) |
793 | 332 | return source_b | 331 | return source_b, b_revid |
794 | 333 | 332 | ||
795 | 334 | def test_fetch_with_ghost(self): | 333 | def test_fetch_with_ghost(self): |
797 | 335 | source_b = self.make_simple_branch_with_ghost() | 334 | source_b, b_revid = self.make_simple_branch_with_ghost() |
798 | 336 | target = self.make_repository('target') | 335 | target = self.make_repository('target') |
799 | 337 | target.lock_write() | 336 | target.lock_write() |
800 | 338 | self.addCleanup(target.unlock) | 337 | self.addCleanup(target.unlock) |
802 | 339 | target.fetch(source_b.repository, revision_id='B-id') | 338 | target.fetch(source_b.repository, revision_id=b_revid) |
803 | 340 | 339 | ||
804 | 341 | def test_fetch_into_smart_with_ghost(self): | 340 | def test_fetch_into_smart_with_ghost(self): |
805 | 342 | trans = self.make_smart_server('target') | 341 | trans = self.make_smart_server('target') |
807 | 343 | source_b = self.make_simple_branch_with_ghost() | 342 | source_b, b_revid = self.make_simple_branch_with_ghost() |
808 | 344 | if not source_b.controldir._format.supports_transport(trans): | 343 | if not source_b.controldir._format.supports_transport(trans): |
809 | 345 | raise TestNotApplicable("format does not support transport") | 344 | raise TestNotApplicable("format does not support transport") |
810 | 346 | target = self.make_repository('target') | 345 | target = self.make_repository('target') |
811 | @@ -349,7 +348,7 @@ | |||
812 | 349 | target.lock_write() | 348 | target.lock_write() |
813 | 350 | self.addCleanup(target.unlock) | 349 | self.addCleanup(target.unlock) |
814 | 351 | try: | 350 | try: |
816 | 352 | target.fetch(source_b.repository, revision_id='B-id') | 351 | target.fetch(source_b.repository, revision_id=b_revid) |
817 | 353 | except errors.TokenLockingNotSupported: | 352 | except errors.TokenLockingNotSupported: |
818 | 354 | # The code inside fetch() that tries to lock and then fails, also | 353 | # The code inside fetch() that tries to lock and then fails, also |
819 | 355 | # causes weird problems with 'lock_not_held' later on... | 354 | # causes weird problems with 'lock_not_held' later on... |
820 | @@ -359,7 +358,7 @@ | |||
821 | 359 | 358 | ||
822 | 360 | def test_fetch_from_smart_with_ghost(self): | 359 | def test_fetch_from_smart_with_ghost(self): |
823 | 361 | trans = self.make_smart_server('source') | 360 | trans = self.make_smart_server('source') |
825 | 362 | source_b = self.make_simple_branch_with_ghost() | 361 | source_b, b_revid = self.make_simple_branch_with_ghost() |
826 | 363 | if not source_b.controldir._format.supports_transport(trans): | 362 | if not source_b.controldir._format.supports_transport(trans): |
827 | 364 | raise TestNotApplicable("format does not support transport") | 363 | raise TestNotApplicable("format does not support transport") |
828 | 365 | target = self.make_repository('target') | 364 | target = self.make_repository('target') |
829 | @@ -369,5 +368,5 @@ | |||
830 | 369 | source = repository.Repository.open(trans.base) | 368 | source = repository.Repository.open(trans.base) |
831 | 370 | source.lock_read() | 369 | source.lock_read() |
832 | 371 | self.addCleanup(source.unlock) | 370 | self.addCleanup(source.unlock) |
834 | 372 | target.fetch(source, revision_id='B-id') | 371 | target.fetch(source, revision_id=b_revid) |
835 | 373 | 372 | ||
836 | 374 | 373 | ||
837 | === modified file 'breezy/tests/per_repository/test_repository.py' | |||
838 | --- breezy/tests/per_repository/test_repository.py 2017-11-21 00:38:51 +0000 | |||
839 | +++ breezy/tests/per_repository/test_repository.py 2017-11-29 12:08:31 +0000 | |||
840 | @@ -69,7 +69,7 @@ | |||
841 | 69 | def assertFormatAttribute(self, attribute, allowed_values): | 69 | def assertFormatAttribute(self, attribute, allowed_values): |
842 | 70 | """Assert that the format has an attribute 'attribute'.""" | 70 | """Assert that the format has an attribute 'attribute'.""" |
843 | 71 | repo = self.make_repository('repo') | 71 | repo = self.make_repository('repo') |
845 | 72 | self.assertSubset([getattr(repo._format, attribute)], allowed_values) | 72 | self.assertIn(getattr(repo._format, attribute), allowed_values) |
846 | 73 | 73 | ||
847 | 74 | def test_attribute_fast_deltas(self): | 74 | def test_attribute_fast_deltas(self): |
848 | 75 | """Test the format.fast_deltas attribute.""" | 75 | """Test the format.fast_deltas attribute.""" |
849 | @@ -118,6 +118,7 @@ | |||
850 | 118 | self.assertFormatAttribute('supports_setting_revision_ids', | 118 | self.assertFormatAttribute('supports_setting_revision_ids', |
851 | 119 | (True, False)) | 119 | (True, False)) |
852 | 120 | 120 | ||
853 | 121 | <<<<<<< TREE | ||
854 | 121 | def test_attribute_format_supports_storing_branch_nick(self): | 122 | def test_attribute_format_supports_storing_branch_nick(self): |
855 | 122 | self.assertFormatAttribute('supports_storing_branch_nick', | 123 | self.assertFormatAttribute('supports_storing_branch_nick', |
856 | 123 | (True, False)) | 124 | (True, False)) |
857 | @@ -137,13 +138,34 @@ | |||
858 | 137 | self.assertRaises(TypeError, repo._format.open, | 138 | self.assertRaises(TypeError, repo._format.open, |
859 | 138 | repo.controldir, _override_transport=backup_transport) | 139 | repo.controldir, _override_transport=backup_transport) |
860 | 139 | 140 | ||
861 | 141 | ======= | ||
862 | 142 | def test_attribute_format_supports_storing_branch_nick(self): | ||
863 | 143 | self.assertFormatAttribute('supports_storing_branch_nick', | ||
864 | 144 | (True, False)) | ||
865 | 145 | |||
866 | 146 | def test_attribute_format_supports_overriding_transport(self): | ||
867 | 147 | repo = self.make_repository('repo') | ||
868 | 148 | self.assertIn(repo._format.supports_overriding_transport, (True, False)) | ||
869 | 149 | |||
870 | 150 | repo.control_transport.copy_tree('.', '../repository.backup') | ||
871 | 151 | backup_transport = repo.control_transport.clone('../repository.backup') | ||
872 | 152 | if repo._format.supports_overriding_transport: | ||
873 | 153 | backup = repo._format.open( | ||
874 | 154 | repo.controldir, | ||
875 | 155 | _override_transport=backup_transport) | ||
876 | 156 | self.assertIs(backup_transport, backup.control_transport) | ||
877 | 157 | else: | ||
878 | 158 | self.assertRaises(TypeError, repo._format.open, | ||
879 | 159 | repo.controldir, _override_transport=backup_transport) | ||
880 | 160 | |||
881 | 161 | >>>>>>> MERGE-SOURCE | ||
882 | 140 | def test_format_is_deprecated(self): | 162 | def test_format_is_deprecated(self): |
883 | 141 | repo = self.make_repository('repo') | 163 | repo = self.make_repository('repo') |
885 | 142 | self.assertSubset([repo._format.is_deprecated()], (True, False)) | 164 | self.assertIn(repo._format.is_deprecated(), (True, False)) |
886 | 143 | 165 | ||
887 | 144 | def test_format_is_supported(self): | 166 | def test_format_is_supported(self): |
888 | 145 | repo = self.make_repository('repo') | 167 | repo = self.make_repository('repo') |
890 | 146 | self.assertSubset([repo._format.is_supported()], (True, False)) | 168 | self.assertIn(repo._format.is_supported(), (True, False)) |
891 | 147 | 169 | ||
892 | 148 | def test_clone_to_default_format(self): | 170 | def test_clone_to_default_format(self): |
893 | 149 | #TODO: Test that cloning a repository preserves all the information | 171 | #TODO: Test that cloning a repository preserves all the information |
894 | @@ -387,8 +409,7 @@ | |||
895 | 387 | 409 | ||
896 | 388 | def test_format_supports_external_lookups(self): | 410 | def test_format_supports_external_lookups(self): |
897 | 389 | repo = self.make_repository('.') | 411 | repo = self.make_repository('.') |
900 | 390 | self.assertSubset( | 412 | self.assertIn(repo._format.supports_external_lookups, (True, False)) |
899 | 391 | [repo._format.supports_external_lookups], (True, False)) | ||
901 | 392 | 413 | ||
902 | 393 | def assertMessageRoundtrips(self, message): | 414 | def assertMessageRoundtrips(self, message): |
903 | 394 | """Assert that message roundtrips to a repository and back intact.""" | 415 | """Assert that message roundtrips to a repository and back intact.""" |
904 | 395 | 416 | ||
905 | === modified file 'breezy/tests/per_tree/test_annotate_iter.py' | |||
906 | --- breezy/tests/per_tree/test_annotate_iter.py 2017-11-12 13:09:58 +0000 | |||
907 | +++ breezy/tests/per_tree/test_annotate_iter.py 2017-11-29 12:08:31 +0000 | |||
908 | @@ -32,6 +32,8 @@ | |||
909 | 32 | 32 | ||
910 | 33 | def get_tree_with_ghost(self): | 33 | def get_tree_with_ghost(self): |
911 | 34 | tree = self.make_branch_and_tree('tree') | 34 | tree = self.make_branch_and_tree('tree') |
912 | 35 | if not tree.branch.repository._format.supports_ghosts: | ||
913 | 36 | self.skipTest('repository format does not support ghosts') | ||
914 | 35 | self.build_tree_contents([('tree/one', 'first\ncontent\n')]) | 37 | self.build_tree_contents([('tree/one', 'first\ncontent\n')]) |
915 | 36 | tree.add(['one']) | 38 | tree.add(['one']) |
916 | 37 | rev_1 = tree.commit('one') | 39 | rev_1 = tree.commit('one') |
917 | 38 | 40 | ||
918 | === modified file 'breezy/tests/per_workingtree/test_add.py' | |||
919 | --- breezy/tests/per_workingtree/test_add.py 2017-11-19 18:57:33 +0000 | |||
920 | +++ breezy/tests/per_workingtree/test_add.py 2017-11-29 12:08:31 +0000 | |||
921 | @@ -58,6 +58,8 @@ | |||
922 | 58 | def test_add_old_id(self): | 58 | def test_add_old_id(self): |
923 | 59 | """We can add an old id, as long as it doesn't exist now.""" | 59 | """We can add an old id, as long as it doesn't exist now.""" |
924 | 60 | tree = self.make_branch_and_tree('.') | 60 | tree = self.make_branch_and_tree('.') |
925 | 61 | if not tree.supports_setting_file_ids(): | ||
926 | 62 | self.skipTest("tree does not support setting file ids") | ||
927 | 61 | self.build_tree(['a', 'b']) | 63 | self.build_tree(['a', 'b']) |
928 | 62 | tree.add(['a']) | 64 | tree.add(['a']) |
929 | 63 | file_id = tree.path2id('a') | 65 | file_id = tree.path2id('a') |
930 | 64 | 66 | ||
931 | === modified file 'breezy/tests/per_workingtree/test_annotate_iter.py' | |||
932 | --- breezy/tests/per_workingtree/test_annotate_iter.py 2017-11-19 19:00:31 +0000 | |||
933 | +++ breezy/tests/per_workingtree/test_annotate_iter.py 2017-11-29 12:08:31 +0000 | |||
934 | @@ -23,159 +23,159 @@ | |||
935 | 23 | 23 | ||
936 | 24 | def make_single_rev_tree(self): | 24 | def make_single_rev_tree(self): |
937 | 25 | builder = self.make_branch_builder('branch') | 25 | builder = self.make_branch_builder('branch') |
939 | 26 | builder.build_snapshot(None, [ | 26 | revid = builder.build_snapshot(None, [ |
940 | 27 | ('add', ('', 'TREE_ROOT', 'directory', None)), | 27 | ('add', ('', 'TREE_ROOT', 'directory', None)), |
941 | 28 | ('add', ('file', 'file-id', 'file', 'initial content\n')), | 28 | ('add', ('file', 'file-id', 'file', 'initial content\n')), |
943 | 29 | ], revision_id='rev-1') | 29 | ]) |
944 | 30 | b = builder.get_branch() | 30 | b = builder.get_branch() |
945 | 31 | tree = b.create_checkout('tree', lightweight=True) | 31 | tree = b.create_checkout('tree', lightweight=True) |
946 | 32 | tree.lock_read() | 32 | tree.lock_read() |
947 | 33 | self.addCleanup(tree.unlock) | 33 | self.addCleanup(tree.unlock) |
949 | 34 | return tree | 34 | return tree, revid |
950 | 35 | 35 | ||
951 | 36 | def test_annotate_same_as_parent(self): | 36 | def test_annotate_same_as_parent(self): |
953 | 37 | tree = self.make_single_rev_tree() | 37 | tree, revid = self.make_single_rev_tree() |
954 | 38 | annotations = tree.annotate_iter('file') | 38 | annotations = tree.annotate_iter('file') |
956 | 39 | self.assertEqual([('rev-1', 'initial content\n')], | 39 | self.assertEqual([(revid, 'initial content\n')], |
957 | 40 | annotations) | 40 | annotations) |
958 | 41 | 41 | ||
959 | 42 | def test_annotate_mod_from_parent(self): | 42 | def test_annotate_mod_from_parent(self): |
961 | 43 | tree = self.make_single_rev_tree() | 43 | tree, revid = self.make_single_rev_tree() |
962 | 44 | self.build_tree_contents([('tree/file', | 44 | self.build_tree_contents([('tree/file', |
963 | 45 | 'initial content\nnew content\n')]) | 45 | 'initial content\nnew content\n')]) |
964 | 46 | annotations = tree.annotate_iter('file') | 46 | annotations = tree.annotate_iter('file') |
966 | 47 | self.assertEqual([('rev-1', 'initial content\n'), | 47 | self.assertEqual([(revid, 'initial content\n'), |
967 | 48 | ('current:', 'new content\n'), | 48 | ('current:', 'new content\n'), |
968 | 49 | ], annotations) | 49 | ], annotations) |
969 | 50 | 50 | ||
970 | 51 | def test_annotate_merge_parents(self): | 51 | def test_annotate_merge_parents(self): |
971 | 52 | builder = self.make_branch_builder('branch') | 52 | builder = self.make_branch_builder('branch') |
972 | 53 | builder.start_series() | 53 | builder.start_series() |
974 | 54 | builder.build_snapshot(None, [ | 54 | revid1 = builder.build_snapshot(None, [ |
975 | 55 | ('add', ('', 'TREE_ROOT', 'directory', None)), | 55 | ('add', ('', 'TREE_ROOT', 'directory', None)), |
976 | 56 | ('add', ('file', 'file-id', 'file', 'initial content\n')), | 56 | ('add', ('file', 'file-id', 'file', 'initial content\n')), |
979 | 57 | ], revision_id='rev-1') | 57 | ]) |
980 | 58 | builder.build_snapshot(['rev-1'], [ | 58 | revid2 = builder.build_snapshot([revid1], [ |
981 | 59 | ('modify', ('file-id', 'initial content\ncontent in 2\n')), | 59 | ('modify', ('file-id', 'initial content\ncontent in 2\n')), |
984 | 60 | ], revision_id='rev-2') | 60 | ]) |
985 | 61 | builder.build_snapshot(['rev-1'], [ | 61 | revid3 = builder.build_snapshot([revid1], [ |
986 | 62 | ('modify', ('file-id', 'initial content\ncontent in 3\n')), | 62 | ('modify', ('file-id', 'initial content\ncontent in 3\n')), |
988 | 63 | ], revision_id='rev-3') | 63 | ]) |
989 | 64 | builder.finish_series() | 64 | builder.finish_series() |
990 | 65 | b = builder.get_branch() | 65 | b = builder.get_branch() |
992 | 66 | tree = b.create_checkout('tree', revision_id='rev-2', lightweight=True) | 66 | tree = b.create_checkout('tree', revision_id=revid2, lightweight=True) |
993 | 67 | tree.lock_write() | 67 | tree.lock_write() |
994 | 68 | self.addCleanup(tree.unlock) | 68 | self.addCleanup(tree.unlock) |
996 | 69 | tree.set_parent_ids(['rev-2', 'rev-3']) | 69 | tree.set_parent_ids([revid2, revid3]) |
997 | 70 | self.build_tree_contents([('tree/file', | 70 | self.build_tree_contents([('tree/file', |
998 | 71 | 'initial content\ncontent in 2\n' | 71 | 'initial content\ncontent in 2\n' |
999 | 72 | 'content in 3\nnew content\n')]) | 72 | 'content in 3\nnew content\n')]) |
1000 | 73 | annotations = tree.annotate_iter('file') | 73 | annotations = tree.annotate_iter('file') |
1004 | 74 | self.assertEqual([('rev-1', 'initial content\n'), | 74 | self.assertEqual([(revid1, 'initial content\n'), |
1005 | 75 | ('rev-2', 'content in 2\n'), | 75 | (revid2, 'content in 2\n'), |
1006 | 76 | ('rev-3', 'content in 3\n'), | 76 | (revid3, 'content in 3\n'), |
1007 | 77 | ('current:', 'new content\n'), | 77 | ('current:', 'new content\n'), |
1008 | 78 | ], annotations) | 78 | ], annotations) |
1009 | 79 | 79 | ||
1010 | 80 | def test_annotate_merge_parent_no_file(self): | 80 | def test_annotate_merge_parent_no_file(self): |
1011 | 81 | builder = self.make_branch_builder('branch') | 81 | builder = self.make_branch_builder('branch') |
1012 | 82 | builder.start_series() | 82 | builder.start_series() |
1014 | 83 | builder.build_snapshot(None, [ | 83 | revid1 = builder.build_snapshot(None, [ |
1015 | 84 | ('add', ('', 'TREE_ROOT', 'directory', None)), | 84 | ('add', ('', 'TREE_ROOT', 'directory', None)), |
1018 | 85 | ], revision_id='rev-1') | 85 | ]) |
1019 | 86 | builder.build_snapshot(['rev-1'], [ | 86 | revid2 = builder.build_snapshot([revid1], [ |
1020 | 87 | ('add', ('file', 'file-id', 'file', 'initial content\n')), | 87 | ('add', ('file', 'file-id', 'file', 'initial content\n')), |
1023 | 88 | ], revision_id='rev-2') | 88 | ]) |
1024 | 89 | builder.build_snapshot(['rev-1'], [], revision_id='rev-3') | 89 | revid3 = builder.build_snapshot([revid1], []) |
1025 | 90 | builder.finish_series() | 90 | builder.finish_series() |
1026 | 91 | b = builder.get_branch() | 91 | b = builder.get_branch() |
1028 | 92 | tree = b.create_checkout('tree', revision_id='rev-2', lightweight=True) | 92 | tree = b.create_checkout('tree', revision_id=revid2, lightweight=True) |
1029 | 93 | tree.lock_write() | 93 | tree.lock_write() |
1030 | 94 | self.addCleanup(tree.unlock) | 94 | self.addCleanup(tree.unlock) |
1032 | 95 | tree.set_parent_ids(['rev-2', 'rev-3']) | 95 | tree.set_parent_ids([revid2, revid3]) |
1033 | 96 | self.build_tree_contents([('tree/file', | 96 | self.build_tree_contents([('tree/file', |
1034 | 97 | 'initial content\nnew content\n')]) | 97 | 'initial content\nnew content\n')]) |
1035 | 98 | annotations = tree.annotate_iter('file') | 98 | annotations = tree.annotate_iter('file') |
1037 | 99 | self.assertEqual([('rev-2', 'initial content\n'), | 99 | self.assertEqual([(revid2, 'initial content\n'), |
1038 | 100 | ('current:', 'new content\n'), | 100 | ('current:', 'new content\n'), |
1039 | 101 | ], annotations) | 101 | ], annotations) |
1040 | 102 | 102 | ||
1041 | 103 | def test_annotate_merge_parent_was_directory(self): | 103 | def test_annotate_merge_parent_was_directory(self): |
1042 | 104 | builder = self.make_branch_builder('branch') | 104 | builder = self.make_branch_builder('branch') |
1043 | 105 | builder.start_series() | 105 | builder.start_series() |
1045 | 106 | builder.build_snapshot(None, [ | 106 | revid1 = builder.build_snapshot(None, [ |
1046 | 107 | ('add', ('', 'TREE_ROOT', 'directory', None)), | 107 | ('add', ('', 'TREE_ROOT', 'directory', None)), |
1049 | 108 | ], revision_id='rev-1') | 108 | ]) |
1050 | 109 | builder.build_snapshot(['rev-1'], [ | 109 | revid2 = builder.build_snapshot([revid1], [ |
1051 | 110 | ('add', ('file', 'file-id', 'file', 'initial content\n')), | 110 | ('add', ('file', 'file-id', 'file', 'initial content\n')), |
1054 | 111 | ], revision_id='rev-2') | 111 | ]) |
1055 | 112 | builder.build_snapshot(['rev-1'], [ | 112 | revid3 = builder.build_snapshot([revid1], [ |
1056 | 113 | ('add', ('a_dir', 'file-id', 'directory', None)), | 113 | ('add', ('a_dir', 'file-id', 'directory', None)), |
1058 | 114 | ], revision_id='rev-3') | 114 | ]) |
1059 | 115 | builder.finish_series() | 115 | builder.finish_series() |
1060 | 116 | b = builder.get_branch() | 116 | b = builder.get_branch() |
1062 | 117 | tree = b.create_checkout('tree', revision_id='rev-2', lightweight=True) | 117 | tree = b.create_checkout('tree', revision_id=revid2, lightweight=True) |
1063 | 118 | tree.lock_write() | 118 | tree.lock_write() |
1064 | 119 | self.addCleanup(tree.unlock) | 119 | self.addCleanup(tree.unlock) |
1066 | 120 | tree.set_parent_ids(['rev-2', 'rev-3']) | 120 | tree.set_parent_ids([revid2, revid3]) |
1067 | 121 | self.build_tree_contents([('tree/file', | 121 | self.build_tree_contents([('tree/file', |
1068 | 122 | 'initial content\nnew content\n')]) | 122 | 'initial content\nnew content\n')]) |
1069 | 123 | annotations = tree.annotate_iter('file') | 123 | annotations = tree.annotate_iter('file') |
1071 | 124 | self.assertEqual([('rev-2', 'initial content\n'), | 124 | self.assertEqual([(revid2, 'initial content\n'), |
1072 | 125 | ('current:', 'new content\n'), | 125 | ('current:', 'new content\n'), |
1073 | 126 | ], annotations) | 126 | ], annotations) |
1074 | 127 | 127 | ||
1075 | 128 | def test_annotate_same_as_merge_parent(self): | 128 | def test_annotate_same_as_merge_parent(self): |
1076 | 129 | builder = self.make_branch_builder('branch') | 129 | builder = self.make_branch_builder('branch') |
1077 | 130 | builder.start_series() | 130 | builder.start_series() |
1079 | 131 | builder.build_snapshot(None, [ | 131 | revid1 = builder.build_snapshot(None, [ |
1080 | 132 | ('add', ('', 'TREE_ROOT', 'directory', None)), | 132 | ('add', ('', 'TREE_ROOT', 'directory', None)), |
1081 | 133 | ('add', ('file', 'file-id', 'file', 'initial content\n')), | 133 | ('add', ('file', 'file-id', 'file', 'initial content\n')), |
1086 | 134 | ], revision_id='rev-1') | 134 | ]) |
1087 | 135 | builder.build_snapshot(['rev-1'], [ | 135 | revid2 = builder.build_snapshot([revid1], [ |
1088 | 136 | ], revision_id='rev-2') | 136 | ]) |
1089 | 137 | builder.build_snapshot(['rev-1'], [ | 137 | revid3 = builder.build_snapshot([revid1], [ |
1090 | 138 | ('modify', ('file-id', 'initial content\ncontent in 3\n')), | 138 | ('modify', ('file-id', 'initial content\ncontent in 3\n')), |
1092 | 139 | ], revision_id='rev-3') | 139 | ]) |
1093 | 140 | builder.finish_series() | 140 | builder.finish_series() |
1094 | 141 | b = builder.get_branch() | 141 | b = builder.get_branch() |
1096 | 142 | tree = b.create_checkout('tree', revision_id='rev-2', lightweight=True) | 142 | tree = b.create_checkout('tree', revision_id=revid2, lightweight=True) |
1097 | 143 | tree.lock_write() | 143 | tree.lock_write() |
1098 | 144 | self.addCleanup(tree.unlock) | 144 | self.addCleanup(tree.unlock) |
1100 | 145 | tree.set_parent_ids(['rev-2', 'rev-3']) | 145 | tree.set_parent_ids([revid2, revid3]) |
1101 | 146 | self.build_tree_contents([('tree/file', | 146 | self.build_tree_contents([('tree/file', |
1102 | 147 | 'initial content\ncontent in 3\n')]) | 147 | 'initial content\ncontent in 3\n')]) |
1103 | 148 | annotations = tree.annotate_iter('file') | 148 | annotations = tree.annotate_iter('file') |
1106 | 149 | self.assertEqual([('rev-1', 'initial content\n'), | 149 | self.assertEqual([(revid1, 'initial content\n'), |
1107 | 150 | ('rev-3', 'content in 3\n'), | 150 | (revid3, 'content in 3\n'), |
1108 | 151 | ], annotations) | 151 | ], annotations) |
1109 | 152 | 152 | ||
1110 | 153 | def test_annotate_same_as_merge_parent_supersedes(self): | 153 | def test_annotate_same_as_merge_parent_supersedes(self): |
1111 | 154 | builder = self.make_branch_builder('branch') | 154 | builder = self.make_branch_builder('branch') |
1112 | 155 | builder.start_series() | 155 | builder.start_series() |
1114 | 156 | builder.build_snapshot(None, [ | 156 | revid1 = builder.build_snapshot(None, [ |
1115 | 157 | ('add', ('', 'TREE_ROOT', 'directory', None)), | 157 | ('add', ('', 'TREE_ROOT', 'directory', None)), |
1116 | 158 | ('add', ('file', 'file-id', 'file', 'initial content\n')), | 158 | ('add', ('file', 'file-id', 'file', 'initial content\n')), |
1119 | 159 | ], revision_id='rev-1') | 159 | ]) |
1120 | 160 | builder.build_snapshot(['rev-1'], [ | 160 | revid2 = builder.build_snapshot([revid1], [ |
1121 | 161 | ('modify', ('file-id', 'initial content\nnew content\n')), | 161 | ('modify', ('file-id', 'initial content\nnew content\n')), |
1124 | 162 | ], revision_id='rev-2') | 162 | ]) |
1125 | 163 | builder.build_snapshot(['rev-2'], [ | 163 | revid3 = builder.build_snapshot([revid2], [ |
1126 | 164 | ('modify', ('file-id', 'initial content\ncontent in 3\n')), | 164 | ('modify', ('file-id', 'initial content\ncontent in 3\n')), |
1129 | 165 | ], revision_id='rev-3') | 165 | ]) |
1130 | 166 | builder.build_snapshot(['rev-3'], [ | 166 | revid4 = builder.build_snapshot([revid3], [ |
1131 | 167 | ('modify', ('file-id', 'initial content\nnew content\n')), | 167 | ('modify', ('file-id', 'initial content\nnew content\n')), |
1133 | 168 | ], revision_id='rev-4') | 168 | ]) |
1134 | 169 | # In this case, the content locally is the same as content in basis | 169 | # In this case, the content locally is the same as content in basis |
1135 | 170 | # tree, but the merge revision states that *it* should win | 170 | # tree, but the merge revision states that *it* should win |
1136 | 171 | builder.finish_series() | 171 | builder.finish_series() |
1137 | 172 | b = builder.get_branch() | 172 | b = builder.get_branch() |
1139 | 173 | tree = b.create_checkout('tree', revision_id='rev-2', lightweight=True) | 173 | tree = b.create_checkout('tree', revision_id=revid2, lightweight=True) |
1140 | 174 | tree.lock_write() | 174 | tree.lock_write() |
1141 | 175 | self.addCleanup(tree.unlock) | 175 | self.addCleanup(tree.unlock) |
1143 | 176 | tree.set_parent_ids(['rev-2', 'rev-4']) | 176 | tree.set_parent_ids([revid2, revid4]) |
1144 | 177 | annotations = tree.annotate_iter('file') | 177 | annotations = tree.annotate_iter('file') |
1147 | 178 | self.assertEqual([('rev-1', 'initial content\n'), | 178 | self.assertEqual([(revid1, 'initial content\n'), |
1148 | 179 | ('rev-4', 'new content\n'), | 179 | (revid4, 'new content\n'), |
1149 | 180 | ], annotations) | 180 | ], annotations) |
1150 | 181 | 181 | ||
1151 | 182 | 182 | ||
1152 | === modified file 'breezy/tests/per_workingtree/test_get_file_mtime.py' | |||
1153 | --- breezy/tests/per_workingtree/test_get_file_mtime.py 2017-11-12 20:44:54 +0000 | |||
1154 | +++ breezy/tests/per_workingtree/test_get_file_mtime.py 2017-11-29 12:08:31 +0000 | |||
1155 | @@ -57,7 +57,7 @@ | |||
1156 | 57 | one_id = tree.path2id('one') | 57 | one_id = tree.path2id('one') |
1157 | 58 | 58 | ||
1158 | 59 | st = os.lstat('tree/one') | 59 | st = os.lstat('tree/one') |
1160 | 60 | tree.commit('one', rev_id='rev-1') | 60 | tree.commit('one') |
1161 | 61 | 61 | ||
1162 | 62 | tree.lock_read() | 62 | tree.lock_read() |
1163 | 63 | try: | 63 | try: |
1164 | 64 | 64 | ||
1165 | === modified file 'breezy/tests/per_workingtree/test_set_root_id.py' | |||
1166 | --- breezy/tests/per_workingtree/test_set_root_id.py 2017-06-10 00:17:06 +0000 | |||
1167 | +++ breezy/tests/per_workingtree/test_set_root_id.py 2017-11-29 12:08:31 +0000 | |||
1168 | @@ -32,6 +32,8 @@ | |||
1169 | 32 | # deliberately tests concurrent access that isn't possible on windows. | 32 | # deliberately tests concurrent access that isn't possible on windows. |
1170 | 33 | self.thisFailsStrictLockCheck() | 33 | self.thisFailsStrictLockCheck() |
1171 | 34 | tree = self.make_branch_and_tree('a-tree') | 34 | tree = self.make_branch_and_tree('a-tree') |
1172 | 35 | if not tree.supports_setting_file_ids(): | ||
1173 | 36 | self.skipTest('format does not support setting file ids') | ||
1174 | 35 | # setting the root id allows it to be read via get_root_id. | 37 | # setting the root id allows it to be read via get_root_id. |
1175 | 36 | root_id = u'\xe5n-id'.encode('utf8') | 38 | root_id = u'\xe5n-id'.encode('utf8') |
1176 | 37 | tree.lock_write() | 39 | tree.lock_write() |
1177 | @@ -57,6 +59,8 @@ | |||
1178 | 57 | 59 | ||
1179 | 58 | def test_set_root_id(self): | 60 | def test_set_root_id(self): |
1180 | 59 | tree = self.make_branch_and_tree('.') | 61 | tree = self.make_branch_and_tree('.') |
1181 | 62 | if not tree.supports_setting_file_ids(): | ||
1182 | 63 | self.skipTest('format does not support setting file ids') | ||
1183 | 60 | tree.lock_write() | 64 | tree.lock_write() |
1184 | 61 | self.addCleanup(tree.unlock) | 65 | self.addCleanup(tree.unlock) |
1185 | 62 | orig_root_id = tree.get_root_id() | 66 | orig_root_id = tree.get_root_id() |
1186 | 63 | 67 | ||
1187 | === modified file 'breezy/tests/per_workingtree/test_walkdirs.py' | |||
1188 | --- breezy/tests/per_workingtree/test_walkdirs.py 2017-11-14 01:20:44 +0000 | |||
1189 | +++ breezy/tests/per_workingtree/test_walkdirs.py 2017-11-29 12:08:31 +0000 | |||
1190 | @@ -84,7 +84,6 @@ | |||
1191 | 84 | def add_dirblock(path, kind): | 84 | def add_dirblock(path, kind): |
1192 | 85 | dirblock = DirBlock(tree, path) | 85 | dirblock = DirBlock(tree, path) |
1193 | 86 | if file_status != self.unknown: | 86 | if file_status != self.unknown: |
1194 | 87 | dirblock.id = 'a ' + str(path).replace('/', '-') + '-id' | ||
1195 | 88 | dirblock.inventory_kind = kind | 87 | dirblock.inventory_kind = kind |
1196 | 89 | if file_status != self.missing: | 88 | if file_status != self.missing: |
1197 | 90 | dirblock.disk_kind = kind | 89 | dirblock.disk_kind = kind |
1198 | @@ -97,7 +96,10 @@ | |||
1199 | 97 | add_dirblock(paths[3], 'directory') | 96 | add_dirblock(paths[3], 'directory') |
1200 | 98 | 97 | ||
1201 | 99 | if file_status != self.unknown: | 98 | if file_status != self.unknown: |
1203 | 100 | tree.add(paths, [db.id for db in dirblocks]) | 99 | tree.add(paths) |
1204 | 100 | for dirblock in dirblocks: | ||
1205 | 101 | if file_status != self.unknown: | ||
1206 | 102 | dirblock.id = tree.path2id(dirblock.relpath) | ||
1207 | 101 | 103 | ||
1208 | 102 | if file_status == self.missing: | 104 | if file_status == self.missing: |
1209 | 103 | # now make the files be missing | 105 | # now make the files be missing |
1210 | 104 | 106 | ||
1211 | === modified file 'breezy/tests/per_workingtree/test_workingtree.py' | |||
1212 | --- breezy/tests/per_workingtree/test_workingtree.py 2017-11-21 20:09:04 +0000 | |||
1213 | +++ breezy/tests/per_workingtree/test_workingtree.py 2017-11-29 12:08:31 +0000 | |||
1214 | @@ -409,17 +409,17 @@ | |||
1215 | 409 | wt = self.make_branch_and_tree('source') | 409 | wt = self.make_branch_and_tree('source') |
1216 | 410 | self.build_tree(['added', 'deleted', 'notadded'], | 410 | self.build_tree(['added', 'deleted', 'notadded'], |
1217 | 411 | transport=wt.controldir.transport.clone('..')) | 411 | transport=wt.controldir.transport.clone('..')) |
1219 | 412 | wt.add('deleted', 'deleted') | 412 | wt.add('deleted') |
1220 | 413 | wt.commit('add deleted') | 413 | wt.commit('add deleted') |
1221 | 414 | wt.remove('deleted') | 414 | wt.remove('deleted') |
1223 | 415 | wt.add('added', 'added') | 415 | wt.add('added') |
1224 | 416 | cloned_dir = wt.controldir.clone('target') | 416 | cloned_dir = wt.controldir.clone('target') |
1225 | 417 | cloned = cloned_dir.open_workingtree() | 417 | cloned = cloned_dir.open_workingtree() |
1226 | 418 | cloned_transport = cloned.controldir.transport.clone('..') | 418 | cloned_transport = cloned.controldir.transport.clone('..') |
1227 | 419 | self.assertFalse(cloned_transport.has('deleted')) | 419 | self.assertFalse(cloned_transport.has('deleted')) |
1228 | 420 | self.assertTrue(cloned_transport.has('added')) | 420 | self.assertTrue(cloned_transport.has('added')) |
1229 | 421 | self.assertFalse(cloned_transport.has('notadded')) | 421 | self.assertFalse(cloned_transport.has('notadded')) |
1231 | 422 | self.assertEqual('added', cloned.path2id('added')) | 422 | self.assertIsNot(None, cloned.path2id('added')) |
1232 | 423 | self.assertEqual(None, cloned.path2id('deleted')) | 423 | self.assertEqual(None, cloned.path2id('deleted')) |
1233 | 424 | self.assertEqual(None, cloned.path2id('notadded')) | 424 | self.assertEqual(None, cloned.path2id('notadded')) |
1234 | 425 | 425 | ||
1235 | @@ -799,12 +799,13 @@ | |||
1236 | 799 | self.build_tree(['foo.pyc']) | 799 | self.build_tree(['foo.pyc']) |
1237 | 800 | # ensure that foo.pyc is ignored | 800 | # ensure that foo.pyc is ignored |
1238 | 801 | self.build_tree_contents([('.bzrignore', 'foo.pyc')]) | 801 | self.build_tree_contents([('.bzrignore', 'foo.pyc')]) |
1240 | 802 | tree.add('foo.pyc', 'anid') | 802 | tree.add('foo.pyc') |
1241 | 803 | anid = tree.path2id('foo.pyc') | ||
1242 | 803 | tree.lock_read() | 804 | tree.lock_read() |
1243 | 804 | files = sorted(list(tree.list_files())) | 805 | files = sorted(list(tree.list_files())) |
1244 | 805 | tree.unlock() | 806 | tree.unlock() |
1245 | 806 | self.assertEqual((u'.bzrignore', '?', 'file', None), files[0][:-1]) | 807 | self.assertEqual((u'.bzrignore', '?', 'file', None), files[0][:-1]) |
1247 | 807 | self.assertEqual((u'foo.pyc', 'V', 'file', 'anid'), files[1][:-1]) | 808 | self.assertEqual((u'foo.pyc', 'V', 'file', anid), files[1][:-1]) |
1248 | 808 | self.assertEqual(2, len(files)) | 809 | self.assertEqual(2, len(files)) |
1249 | 809 | 810 | ||
1250 | 810 | def test_non_normalized_add_accessible(self): | 811 | def test_non_normalized_add_accessible(self): |
1251 | 811 | 812 | ||
1252 | === modified file 'breezy/workingtree.py' | |||
1253 | --- breezy/workingtree.py 2017-11-17 03:06:50 +0000 | |||
1254 | +++ breezy/workingtree.py 2017-11-29 12:08:31 +0000 | |||
1255 | @@ -38,9 +38,6 @@ | |||
1256 | 38 | 38 | ||
1257 | 39 | from .lazy_import import lazy_import | 39 | from .lazy_import import lazy_import |
1258 | 40 | lazy_import(globals(), """ | 40 | lazy_import(globals(), """ |
1259 | 41 | from bisect import bisect_left | ||
1260 | 42 | import itertools | ||
1261 | 43 | import operator | ||
1262 | 44 | import stat | 41 | import stat |
1263 | 45 | 42 | ||
1264 | 46 | from breezy import ( | 43 | from breezy import ( |
1265 | @@ -393,7 +390,7 @@ | |||
1266 | 393 | except errors.NoSuchRevision: | 390 | except errors.NoSuchRevision: |
1267 | 394 | pass | 391 | pass |
1268 | 395 | # No cached copy available, retrieve from the repository. | 392 | # No cached copy available, retrieve from the repository. |
1270 | 396 | # FIXME? RBC 20060403 should we cache the inventory locally | 393 | # FIXME? RBC 20060403 should we cache the tree locally |
1271 | 397 | # at this point ? | 394 | # at this point ? |
1272 | 398 | try: | 395 | try: |
1273 | 399 | return self.branch.repository.revision_tree(revision_id) | 396 | return self.branch.repository.revision_tree(revision_id) |
1274 | @@ -762,7 +759,7 @@ | |||
1275 | 762 | because of a merge. | 759 | because of a merge. |
1276 | 763 | 760 | ||
1277 | 764 | This returns a map of file_id->sha1, containing only files which are | 761 | This returns a map of file_id->sha1, containing only files which are |
1279 | 765 | still in the working inventory and have that text hash. | 762 | still in the working tree and have that text hash. |
1280 | 766 | """ | 763 | """ |
1281 | 767 | raise NotImplementedError(self.merge_modified) | 764 | raise NotImplementedError(self.merge_modified) |
1282 | 768 | 765 | ||
1283 | @@ -1155,8 +1152,8 @@ | |||
1284 | 1155 | def revision_tree(self, revision_id): | 1152 | def revision_tree(self, revision_id): |
1285 | 1156 | """See Tree.revision_tree. | 1153 | """See Tree.revision_tree. |
1286 | 1157 | 1154 | ||
1289 | 1158 | WorkingTree can supply revision_trees for the basis revision only | 1155 | For trees that can be obtained from the working tree, this |
1290 | 1159 | because there is only one cached inventory in the bzr directory. | 1156 | will do so. For other trees, it will fall back to the repository. |
1291 | 1160 | """ | 1157 | """ |
1292 | 1161 | raise NotImplementedError(self.revision_tree) | 1158 | raise NotImplementedError(self.revision_tree) |
1293 | 1162 | 1159 | ||
1294 | @@ -1350,124 +1347,7 @@ | |||
1295 | 1350 | If the tree is not locked, it may cause an error to be raised, | 1347 | If the tree is not locked, it may cause an error to be raised, |
1296 | 1351 | depending on the tree implementation. | 1348 | depending on the tree implementation. |
1297 | 1352 | """ | 1349 | """ |
1416 | 1353 | disk_top = self.abspath(prefix) | 1350 | raise NotImplementedError(self.walkdirs) |
1299 | 1354 | if disk_top.endswith('/'): | ||
1300 | 1355 | disk_top = disk_top[:-1] | ||
1301 | 1356 | top_strip_len = len(disk_top) + 1 | ||
1302 | 1357 | inventory_iterator = self._walkdirs(prefix) | ||
1303 | 1358 | disk_iterator = osutils.walkdirs(disk_top, prefix) | ||
1304 | 1359 | try: | ||
1305 | 1360 | current_disk = next(disk_iterator) | ||
1306 | 1361 | disk_finished = False | ||
1307 | 1362 | except OSError as e: | ||
1308 | 1363 | if not (e.errno == errno.ENOENT or | ||
1309 | 1364 | (sys.platform == 'win32' and e.errno == ERROR_PATH_NOT_FOUND)): | ||
1310 | 1365 | raise | ||
1311 | 1366 | current_disk = None | ||
1312 | 1367 | disk_finished = True | ||
1313 | 1368 | try: | ||
1314 | 1369 | current_inv = next(inventory_iterator) | ||
1315 | 1370 | inv_finished = False | ||
1316 | 1371 | except StopIteration: | ||
1317 | 1372 | current_inv = None | ||
1318 | 1373 | inv_finished = True | ||
1319 | 1374 | while not inv_finished or not disk_finished: | ||
1320 | 1375 | if current_disk: | ||
1321 | 1376 | ((cur_disk_dir_relpath, cur_disk_dir_path_from_top), | ||
1322 | 1377 | cur_disk_dir_content) = current_disk | ||
1323 | 1378 | else: | ||
1324 | 1379 | ((cur_disk_dir_relpath, cur_disk_dir_path_from_top), | ||
1325 | 1380 | cur_disk_dir_content) = ((None, None), None) | ||
1326 | 1381 | if not disk_finished: | ||
1327 | 1382 | # strip out .bzr dirs | ||
1328 | 1383 | if (cur_disk_dir_path_from_top[top_strip_len:] == '' and | ||
1329 | 1384 | len(cur_disk_dir_content) > 0): | ||
1330 | 1385 | # osutils.walkdirs can be made nicer - | ||
1331 | 1386 | # yield the path-from-prefix rather than the pathjoined | ||
1332 | 1387 | # value. | ||
1333 | 1388 | bzrdir_loc = bisect_left(cur_disk_dir_content, | ||
1334 | 1389 | ('.bzr', '.bzr')) | ||
1335 | 1390 | if (bzrdir_loc < len(cur_disk_dir_content) | ||
1336 | 1391 | and self.controldir.is_control_filename( | ||
1337 | 1392 | cur_disk_dir_content[bzrdir_loc][0])): | ||
1338 | 1393 | # we dont yield the contents of, or, .bzr itself. | ||
1339 | 1394 | del cur_disk_dir_content[bzrdir_loc] | ||
1340 | 1395 | if inv_finished: | ||
1341 | 1396 | # everything is unknown | ||
1342 | 1397 | direction = 1 | ||
1343 | 1398 | elif disk_finished: | ||
1344 | 1399 | # everything is missing | ||
1345 | 1400 | direction = -1 | ||
1346 | 1401 | else: | ||
1347 | 1402 | direction = cmp(current_inv[0][0], cur_disk_dir_relpath) | ||
1348 | 1403 | if direction > 0: | ||
1349 | 1404 | # disk is before inventory - unknown | ||
1350 | 1405 | dirblock = [(relpath, basename, kind, stat, None, None) for | ||
1351 | 1406 | relpath, basename, kind, stat, top_path in | ||
1352 | 1407 | cur_disk_dir_content] | ||
1353 | 1408 | yield (cur_disk_dir_relpath, None), dirblock | ||
1354 | 1409 | try: | ||
1355 | 1410 | current_disk = next(disk_iterator) | ||
1356 | 1411 | except StopIteration: | ||
1357 | 1412 | disk_finished = True | ||
1358 | 1413 | elif direction < 0: | ||
1359 | 1414 | # inventory is before disk - missing. | ||
1360 | 1415 | dirblock = [(relpath, basename, 'unknown', None, fileid, kind) | ||
1361 | 1416 | for relpath, basename, dkind, stat, fileid, kind in | ||
1362 | 1417 | current_inv[1]] | ||
1363 | 1418 | yield (current_inv[0][0], current_inv[0][1]), dirblock | ||
1364 | 1419 | try: | ||
1365 | 1420 | current_inv = next(inventory_iterator) | ||
1366 | 1421 | except StopIteration: | ||
1367 | 1422 | inv_finished = True | ||
1368 | 1423 | else: | ||
1369 | 1424 | # versioned present directory | ||
1370 | 1425 | # merge the inventory and disk data together | ||
1371 | 1426 | dirblock = [] | ||
1372 | 1427 | for relpath, subiterator in itertools.groupby(sorted( | ||
1373 | 1428 | current_inv[1] + cur_disk_dir_content, | ||
1374 | 1429 | key=operator.itemgetter(0)), operator.itemgetter(1)): | ||
1375 | 1430 | path_elements = list(subiterator) | ||
1376 | 1431 | if len(path_elements) == 2: | ||
1377 | 1432 | inv_row, disk_row = path_elements | ||
1378 | 1433 | # versioned, present file | ||
1379 | 1434 | dirblock.append((inv_row[0], | ||
1380 | 1435 | inv_row[1], disk_row[2], | ||
1381 | 1436 | disk_row[3], inv_row[4], | ||
1382 | 1437 | inv_row[5])) | ||
1383 | 1438 | elif len(path_elements[0]) == 5: | ||
1384 | 1439 | # unknown disk file | ||
1385 | 1440 | dirblock.append((path_elements[0][0], | ||
1386 | 1441 | path_elements[0][1], path_elements[0][2], | ||
1387 | 1442 | path_elements[0][3], None, None)) | ||
1388 | 1443 | elif len(path_elements[0]) == 6: | ||
1389 | 1444 | # versioned, absent file. | ||
1390 | 1445 | dirblock.append((path_elements[0][0], | ||
1391 | 1446 | path_elements[0][1], 'unknown', None, | ||
1392 | 1447 | path_elements[0][4], path_elements[0][5])) | ||
1393 | 1448 | else: | ||
1394 | 1449 | raise NotImplementedError('unreachable code') | ||
1395 | 1450 | yield current_inv[0], dirblock | ||
1396 | 1451 | try: | ||
1397 | 1452 | current_inv = next(inventory_iterator) | ||
1398 | 1453 | except StopIteration: | ||
1399 | 1454 | inv_finished = True | ||
1400 | 1455 | try: | ||
1401 | 1456 | current_disk = next(disk_iterator) | ||
1402 | 1457 | except StopIteration: | ||
1403 | 1458 | disk_finished = True | ||
1404 | 1459 | |||
1405 | 1460 | def _walkdirs(self, prefix=""): | ||
1406 | 1461 | """Walk the directories of this tree. | ||
1407 | 1462 | |||
1408 | 1463 | :param prefix: is used as the directrory to start with. | ||
1409 | 1464 | :returns: a generator which yields items in the form:: | ||
1410 | 1465 | |||
1411 | 1466 | ((curren_directory_path, fileid), | ||
1412 | 1467 | [(file1_path, file1_name, file1_kind, None, file1_id, | ||
1413 | 1468 | file1_kind), ... ]) | ||
1414 | 1469 | """ | ||
1415 | 1470 | raise NotImplementedError(self._walkdirs) | ||
1417 | 1471 | 1351 | ||
1418 | 1472 | def auto_resolve(self): | 1352 | def auto_resolve(self): |
1419 | 1473 | """Automatically resolve text conflicts according to contents. | 1353 | """Automatically resolve text conflicts according to contents. |
There is a stack of commits (and some conflicts) in the branch here.
Looking only at the last rev, the changes seem good.