Status: | Superseded |
---|---|
Proposed branch: | lp:~jelmer/brz/depth |
Merge into: | lp:brz |
Diff against target: |
900 lines (+214/-111) 18 files modified
breezy/branch.py (+8/-4) breezy/bzr/bzrdir.py (+7/-3) breezy/controldir.py (+2/-1) breezy/errors.py (+8/-0) breezy/git/branch.py (+11/-11) breezy/git/dir.py (+3/-2) breezy/git/interrepo.py (+48/-37) breezy/git/remote.py (+13/-11) breezy/git/repository.py (+1/-0) breezy/git/tests/test_blackbox.py (+1/-0) breezy/git/transportgit.py (+40/-35) breezy/plugins/weave_fmt/bzrdir.py (+3/-1) breezy/repository.py (+4/-0) breezy/tests/per_controldir/test_controldir.py (+17/-2) breezy/tests/per_interbranch/test_fetch.py (+41/-1) breezy/tests/per_repository/test_repository.py (+3/-0) breezy/tests/test_foreign.py (+3/-2) setup.py (+1/-1) |
To merge this branch: | bzr merge lp:~jelmer/brz/depth |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Martin Packman | Approve | ||
Review via email: mp+362957@code.launchpad.net |
This proposal has been superseded by a proposal from 2019-02-14.
Commit message
Description of the change
Add a depth argument to ControlDir.sprout() and Repository.fetch().
To post a comment you must log in.
lp:~jelmer/brz/depth
updated
- 7154. By Jelmer Vernooij
-
Fix tests.
- 7155. By Jelmer Vernooij
-
Merge lp:brz/3.1.
- 7156. By Jelmer Vernooij
-
Merge lp:brz/3.2.
- 7157. By Jelmer Vernooij
-
Merge trunk.
- 7158. By Jelmer Vernooij
-
Merge lp:brz/3.3
Unmerged revisions
- 7158. By Jelmer Vernooij
-
Merge lp:brz/3.3
- 7157. By Jelmer Vernooij
-
Merge trunk.
- 7156. By Jelmer Vernooij
-
Merge lp:brz/3.2.
- 7155. By Jelmer Vernooij
-
Merge lp:brz/3.1.
- 7154. By Jelmer Vernooij
-
Fix tests.
- 7153. By Jelmer Vernooij
-
add depth argument to ControlDir.sprout.
- 7152. By Jelmer Vernooij
-
Enable depth fetching for Git repositories.
- 7151. By Jelmer Vernooij
-
For the moment, Dulwich doesn't support depth fetching locally.
- 7150. By Jelmer Vernooij
-
Add Repository.support_fetch_depth.
- 7149. By Jelmer Vernooij
-
Merge dulwich-compat.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'breezy/branch.py' | |||
2 | --- breezy/branch.py 2018-11-30 12:39:04 +0000 | |||
3 | +++ breezy/branch.py 2019-02-11 01:23:45 +0000 | |||
4 | @@ -680,18 +680,19 @@ | |||
5 | 680 | """Get the tree_path and branch_location for a tree reference.""" | 680 | """Get the tree_path and branch_location for a tree reference.""" |
6 | 681 | raise errors.UnsupportedOperation(self.get_reference_info, self) | 681 | raise errors.UnsupportedOperation(self.get_reference_info, self) |
7 | 682 | 682 | ||
9 | 683 | def fetch(self, from_branch, last_revision=None, limit=None): | 683 | def fetch(self, from_branch, last_revision=None, limit=None, depth=None): |
10 | 684 | """Copy revisions from from_branch into this branch. | 684 | """Copy revisions from from_branch into this branch. |
11 | 685 | 685 | ||
12 | 686 | :param from_branch: Where to copy from. | 686 | :param from_branch: Where to copy from. |
13 | 687 | :param last_revision: What revision to stop at (None for at the end | 687 | :param last_revision: What revision to stop at (None for at the end |
14 | 688 | of the branch. | 688 | of the branch. |
15 | 689 | :param limit: Optional rough limit of revisions to fetch | 689 | :param limit: Optional rough limit of revisions to fetch |
16 | 690 | :param depth: Revision depth | ||
17 | 690 | :return: None | 691 | :return: None |
18 | 691 | """ | 692 | """ |
19 | 692 | with self.lock_write(): | 693 | with self.lock_write(): |
20 | 693 | return InterBranch.get(from_branch, self).fetch( | 694 | return InterBranch.get(from_branch, self).fetch( |
22 | 694 | last_revision, limit=limit) | 695 | last_revision, limit=limit, depth=depth) |
23 | 695 | 696 | ||
24 | 696 | def get_bound_location(self): | 697 | def get_bound_location(self): |
25 | 697 | """Return the URL of the branch we are bound to. | 698 | """Return the URL of the branch we are bound to. |
26 | @@ -2098,11 +2099,12 @@ | |||
27 | 2098 | """ | 2099 | """ |
28 | 2099 | raise NotImplementedError(self.copy_content_into) | 2100 | raise NotImplementedError(self.copy_content_into) |
29 | 2100 | 2101 | ||
31 | 2101 | def fetch(self, stop_revision=None, limit=None): | 2102 | def fetch(self, stop_revision=None, limit=None, depth=None): |
32 | 2102 | """Fetch revisions. | 2103 | """Fetch revisions. |
33 | 2103 | 2104 | ||
34 | 2104 | :param stop_revision: Last revision to fetch | 2105 | :param stop_revision: Last revision to fetch |
35 | 2105 | :param limit: Optional rough limit of revisions to fetch | 2106 | :param limit: Optional rough limit of revisions to fetch |
36 | 2107 | :param depth: Optional revision depth | ||
37 | 2106 | """ | 2108 | """ |
38 | 2107 | raise NotImplementedError(self.fetch) | 2109 | raise NotImplementedError(self.fetch) |
39 | 2108 | 2110 | ||
40 | @@ -2154,9 +2156,11 @@ | |||
41 | 2154 | if self.source._push_should_merge_tags(): | 2156 | if self.source._push_should_merge_tags(): |
42 | 2155 | self.source.tags.merge_to(self.target.tags) | 2157 | self.source.tags.merge_to(self.target.tags) |
43 | 2156 | 2158 | ||
45 | 2157 | def fetch(self, stop_revision=None, limit=None): | 2159 | def fetch(self, stop_revision=None, limit=None, depth=None): |
46 | 2158 | if self.target.base == self.source.base: | 2160 | if self.target.base == self.source.base: |
47 | 2159 | return (0, []) | 2161 | return (0, []) |
48 | 2162 | if depth is not None: | ||
49 | 2163 | raise errors.FetchDepthUnsupported(self) | ||
50 | 2160 | with self.source.lock_read(), self.target.lock_write(): | 2164 | with self.source.lock_read(), self.target.lock_write(): |
51 | 2161 | fetch_spec_factory = fetch.FetchSpecFactory() | 2165 | fetch_spec_factory = fetch.FetchSpecFactory() |
52 | 2162 | fetch_spec_factory.source_branch = self.source | 2166 | fetch_spec_factory.source_branch = self.source |
53 | 2163 | 2167 | ||
54 | === modified file 'breezy/bzr/bzrdir.py' | |||
55 | --- breezy/bzr/bzrdir.py 2018-11-18 19:48:57 +0000 | |||
56 | +++ breezy/bzr/bzrdir.py 2019-02-11 01:23:45 +0000 | |||
57 | @@ -350,7 +350,7 @@ | |||
58 | 350 | recurse='down', possible_transports=None, | 350 | recurse='down', possible_transports=None, |
59 | 351 | accelerator_tree=None, hardlink=False, stacked=False, | 351 | accelerator_tree=None, hardlink=False, stacked=False, |
60 | 352 | source_branch=None, create_tree_if_local=True, | 352 | source_branch=None, create_tree_if_local=True, |
62 | 353 | lossy=False): | 353 | depth=None): |
63 | 354 | """Create a copy of this controldir prepared for use as a new line of | 354 | """Create a copy of this controldir prepared for use as a new line of |
64 | 355 | development. | 355 | development. |
65 | 356 | 356 | ||
66 | @@ -374,6 +374,7 @@ | |||
67 | 374 | location of this control directory. | 374 | location of this control directory. |
68 | 375 | :param create_tree_if_local: If true, a working-tree will be created | 375 | :param create_tree_if_local: If true, a working-tree will be created |
69 | 376 | when working locally. | 376 | when working locally. |
70 | 377 | :param depth: Optional fetch depth | ||
71 | 377 | :return: The created control directory | 378 | :return: The created control directory |
72 | 378 | """ | 379 | """ |
73 | 379 | operation = cleanup.OperationWithCleanups(self._sprout) | 380 | operation = cleanup.OperationWithCleanups(self._sprout) |
74 | @@ -382,12 +383,15 @@ | |||
75 | 382 | recurse=recurse, possible_transports=possible_transports, | 383 | recurse=recurse, possible_transports=possible_transports, |
76 | 383 | accelerator_tree=accelerator_tree, hardlink=hardlink, | 384 | accelerator_tree=accelerator_tree, hardlink=hardlink, |
77 | 384 | stacked=stacked, source_branch=source_branch, | 385 | stacked=stacked, source_branch=source_branch, |
79 | 385 | create_tree_if_local=create_tree_if_local) | 386 | create_tree_if_local=create_tree_if_local, depth=depth) |
80 | 386 | 387 | ||
81 | 387 | def _sprout(self, op, url, revision_id=None, force_new_repo=False, | 388 | def _sprout(self, op, url, revision_id=None, force_new_repo=False, |
82 | 388 | recurse='down', possible_transports=None, | 389 | recurse='down', possible_transports=None, |
83 | 389 | accelerator_tree=None, hardlink=False, stacked=False, | 390 | accelerator_tree=None, hardlink=False, stacked=False, |
85 | 390 | source_branch=None, create_tree_if_local=True, lossy=False): | 391 | source_branch=None, create_tree_if_local=True, |
86 | 392 | depth=None): | ||
87 | 393 | if depth is not None: | ||
88 | 394 | raise errors.FetchDepthUnsupported(self) | ||
89 | 391 | add_cleanup = op.add_cleanup | 395 | add_cleanup = op.add_cleanup |
90 | 392 | fetch_spec_factory = fetch.FetchSpecFactory() | 396 | fetch_spec_factory = fetch.FetchSpecFactory() |
91 | 393 | if revision_id is not None: | 397 | if revision_id is not None: |
92 | 394 | 398 | ||
93 | === modified file 'breezy/controldir.py' | |||
94 | --- breezy/controldir.py 2018-11-12 01:41:38 +0000 | |||
95 | +++ breezy/controldir.py 2019-02-11 01:23:45 +0000 | |||
96 | @@ -373,7 +373,7 @@ | |||
97 | 373 | recurse='down', possible_transports=None, | 373 | recurse='down', possible_transports=None, |
98 | 374 | accelerator_tree=None, hardlink=False, stacked=False, | 374 | accelerator_tree=None, hardlink=False, stacked=False, |
99 | 375 | source_branch=None, create_tree_if_local=True, | 375 | source_branch=None, create_tree_if_local=True, |
101 | 376 | lossy=False): | 376 | depth=None): |
102 | 377 | """Create a copy of this controldir prepared for use as a new line of | 377 | """Create a copy of this controldir prepared for use as a new line of |
103 | 378 | development. | 378 | development. |
104 | 379 | 379 | ||
105 | @@ -396,6 +396,7 @@ | |||
106 | 396 | location of this control directory. | 396 | location of this control directory. |
107 | 397 | :param create_tree_if_local: If true, a working-tree will be created | 397 | :param create_tree_if_local: If true, a working-tree will be created |
108 | 398 | when working locally. | 398 | when working locally. |
109 | 399 | :param depth: Possible fetch depth | ||
110 | 399 | """ | 400 | """ |
111 | 400 | raise NotImplementedError(self.sprout) | 401 | raise NotImplementedError(self.sprout) |
112 | 401 | 402 | ||
113 | 402 | 403 | ||
114 | === modified file 'breezy/errors.py' | |||
115 | --- breezy/errors.py 2018-11-11 04:08:32 +0000 | |||
116 | +++ breezy/errors.py 2019-02-11 01:23:45 +0000 | |||
117 | @@ -1851,6 +1851,14 @@ | |||
118 | 1851 | self.tname = type(method_self).__name__ | 1851 | self.tname = type(method_self).__name__ |
119 | 1852 | 1852 | ||
120 | 1853 | 1853 | ||
121 | 1854 | class FetchDepthUnsupported(UnsupportedOperation): | ||
122 | 1855 | |||
123 | 1856 | fmt = ("InterBranch %(interbranch)r does not support fetching depths.") | ||
124 | 1857 | |||
125 | 1858 | def __init__(self, interbranch): | ||
126 | 1859 | BzrError.__init__(self, interbranch=interbranch) | ||
127 | 1860 | |||
128 | 1861 | |||
129 | 1854 | class FetchLimitUnsupported(UnsupportedOperation): | 1862 | class FetchLimitUnsupported(UnsupportedOperation): |
130 | 1855 | 1863 | ||
131 | 1856 | fmt = ("InterBranch %(interbranch)r does not support fetching limits.") | 1864 | fmt = ("InterBranch %(interbranch)r does not support fetching limits.") |
132 | 1857 | 1865 | ||
133 | === modified file 'breezy/git/branch.py' | |||
134 | --- breezy/git/branch.py 2019-01-19 17:13:53 +0000 | |||
135 | +++ breezy/git/branch.py 2019-02-11 01:23:45 +0000 | |||
136 | @@ -656,9 +656,9 @@ | |||
137 | 656 | def break_lock(self): | 656 | def break_lock(self): |
138 | 657 | self.repository._git.refs.unlock_ref(self.ref) | 657 | self.repository._git.refs.unlock_ref(self.ref) |
139 | 658 | 658 | ||
141 | 659 | def fetch(self, from_branch, last_revision=None, limit=None): | 659 | def fetch(self, from_branch, last_revision=None, limit=None, depth=None): |
142 | 660 | return branch.InterBranch.get(from_branch, self).fetch( | 660 | return branch.InterBranch.get(from_branch, self).fetch( |
144 | 661 | stop_revision=last_revision, limit=limit) | 661 | stop_revision=last_revision, limit=limit, depth=depth) |
145 | 662 | 662 | ||
146 | 663 | def _gen_revision_history(self): | 663 | def _gen_revision_history(self): |
147 | 664 | if self.head is None: | 664 | if self.head is None: |
148 | @@ -885,10 +885,10 @@ | |||
149 | 885 | return False | 885 | return False |
150 | 886 | return True | 886 | return True |
151 | 887 | 887 | ||
154 | 888 | def fetch(self, stop_revision=None, fetch_tags=None, limit=None): | 888 | def fetch(self, stop_revision=None, fetch_tags=None, limit=None, depth=None): |
155 | 889 | self.fetch_objects(stop_revision, fetch_tags=fetch_tags, limit=limit) | 889 | self.fetch_objects(stop_revision, fetch_tags=fetch_tags, limit=limit, depth=depth) |
156 | 890 | 890 | ||
158 | 891 | def fetch_objects(self, stop_revision, fetch_tags, limit=None): | 891 | def fetch_objects(self, stop_revision, fetch_tags, limit=None, depth=None): |
159 | 892 | interrepo = self._get_interrepo(self.source, self.target) | 892 | interrepo = self._get_interrepo(self.source, self.target) |
160 | 893 | if fetch_tags is None: | 893 | if fetch_tags is None: |
161 | 894 | c = self.source.get_config_stack() | 894 | c = self.source.get_config_stack() |
162 | @@ -909,7 +909,7 @@ | |||
163 | 909 | [self._last_revid], include_tags=fetch_tags) | 909 | [self._last_revid], include_tags=fetch_tags) |
164 | 910 | return real(heads) | 910 | return real(heads) |
165 | 911 | pack_hint, head, refs = interrepo.fetch_objects( | 911 | pack_hint, head, refs = interrepo.fetch_objects( |
167 | 912 | determine_wants, self.source.mapping, limit=limit) | 912 | determine_wants, self.source.mapping, limit=limit, depth=depth) |
168 | 913 | if (pack_hint is not None and | 913 | if (pack_hint is not None and |
169 | 914 | self.target.repository._format.pack_compresses): | 914 | self.target.repository._format.pack_compresses): |
170 | 915 | self.target.repository.pack(hint=pack_hint) | 915 | self.target.repository.pack(hint=pack_hint) |
171 | @@ -1034,7 +1034,7 @@ | |||
172 | 1034 | class InterGitBranch(branch.GenericInterBranch): | 1034 | class InterGitBranch(branch.GenericInterBranch): |
173 | 1035 | """InterBranch implementation that pulls between Git branches.""" | 1035 | """InterBranch implementation that pulls between Git branches.""" |
174 | 1036 | 1036 | ||
176 | 1037 | def fetch(self, stop_revision=None, fetch_tags=None, limit=None): | 1037 | def fetch(self, stop_revision=None, fetch_tags=None, limit=None, depth=None): |
177 | 1038 | raise NotImplementedError(self.fetch) | 1038 | raise NotImplementedError(self.fetch) |
178 | 1039 | 1039 | ||
179 | 1040 | 1040 | ||
180 | @@ -1101,14 +1101,14 @@ | |||
181 | 1101 | return (isinstance(source, GitBranch) and | 1101 | return (isinstance(source, GitBranch) and |
182 | 1102 | isinstance(target, LocalGitBranch)) | 1102 | isinstance(target, LocalGitBranch)) |
183 | 1103 | 1103 | ||
185 | 1104 | def fetch(self, stop_revision=None, fetch_tags=None, limit=None): | 1104 | def fetch(self, stop_revision=None, fetch_tags=None, limit=None, depth=None): |
186 | 1105 | interrepo = _mod_repository.InterRepository.get(self.source.repository, | 1105 | interrepo = _mod_repository.InterRepository.get(self.source.repository, |
187 | 1106 | self.target.repository) | 1106 | self.target.repository) |
188 | 1107 | if stop_revision is None: | 1107 | if stop_revision is None: |
189 | 1108 | stop_revision = self.source.last_revision() | 1108 | stop_revision = self.source.last_revision() |
190 | 1109 | determine_wants = interrepo.get_determine_wants_revids( | 1109 | determine_wants = interrepo.get_determine_wants_revids( |
191 | 1110 | [stop_revision], include_tags=fetch_tags) | 1110 | [stop_revision], include_tags=fetch_tags) |
193 | 1111 | interrepo.fetch_objects(determine_wants, limit=limit) | 1111 | interrepo.fetch_objects(determine_wants, limit=limit, depth=depth) |
194 | 1112 | 1112 | ||
195 | 1113 | def _basic_push(self, overwrite=False, stop_revision=None): | 1113 | def _basic_push(self, overwrite=False, stop_revision=None): |
196 | 1114 | if overwrite is True: | 1114 | if overwrite is True: |
197 | @@ -1298,7 +1298,7 @@ | |||
198 | 1298 | return ret | 1298 | return ret |
199 | 1299 | 1299 | ||
200 | 1300 | def fetch(self, stop_revision=None, fetch_tags=None, lossy=False, | 1300 | def fetch(self, stop_revision=None, fetch_tags=None, lossy=False, |
202 | 1301 | limit=None): | 1301 | limit=None, depth=None): |
203 | 1302 | if stop_revision is None: | 1302 | if stop_revision is None: |
204 | 1303 | stop_revision = self.source.last_revision() | 1303 | stop_revision = self.source.last_revision() |
205 | 1304 | ret = [] | 1304 | ret = [] |
206 | @@ -1307,7 +1307,7 @@ | |||
207 | 1307 | ret.append((None, v)) | 1307 | ret.append((None, v)) |
208 | 1308 | ret.append((None, stop_revision)) | 1308 | ret.append((None, stop_revision)) |
209 | 1309 | try: | 1309 | try: |
211 | 1310 | self.interrepo.fetch_objects(ret, lossy=lossy, limit=limit) | 1310 | self.interrepo.fetch_objects(ret, lossy=lossy, limit=limit, depth=depth) |
212 | 1311 | except NoPushSupport: | 1311 | except NoPushSupport: |
213 | 1312 | raise errors.NoRoundtrippingSupport(self.source, self.target) | 1312 | raise errors.NoRoundtrippingSupport(self.source, self.target) |
214 | 1313 | 1313 | ||
215 | 1314 | 1314 | ||
216 | === modified file 'breezy/git/dir.py' | |||
217 | --- breezy/git/dir.py 2018-11-16 11:37:47 +0000 | |||
218 | +++ breezy/git/dir.py 2019-02-11 01:23:45 +0000 | |||
219 | @@ -149,7 +149,7 @@ | |||
220 | 149 | def sprout(self, url, revision_id=None, force_new_repo=False, | 149 | def sprout(self, url, revision_id=None, force_new_repo=False, |
221 | 150 | recurse='down', possible_transports=None, | 150 | recurse='down', possible_transports=None, |
222 | 151 | accelerator_tree=None, hardlink=False, stacked=False, | 151 | accelerator_tree=None, hardlink=False, stacked=False, |
224 | 152 | source_branch=None, create_tree_if_local=True): | 152 | source_branch=None, create_tree_if_local=True, depth=None): |
225 | 153 | from ..repository import InterRepository | 153 | from ..repository import InterRepository |
226 | 154 | from ..transport.local import LocalTransport | 154 | from ..transport.local import LocalTransport |
227 | 155 | from ..transport import get_transport | 155 | from ..transport import get_transport |
228 | @@ -178,7 +178,8 @@ | |||
229 | 178 | else: | 178 | else: |
230 | 179 | determine_wants = interrepo.determine_wants_all | 179 | determine_wants = interrepo.determine_wants_all |
231 | 180 | interrepo.fetch_objects(determine_wants=determine_wants, | 180 | interrepo.fetch_objects(determine_wants=determine_wants, |
233 | 181 | mapping=source_branch.mapping) | 181 | mapping=source_branch.mapping, |
234 | 182 | depth=depth) | ||
235 | 182 | result_branch = source_branch.sprout( | 183 | result_branch = source_branch.sprout( |
236 | 183 | result, revision_id=revision_id, repository=result_repo) | 184 | result, revision_id=revision_id, repository=result_repo) |
237 | 184 | if (create_tree_if_local and | 185 | if (create_tree_if_local and |
238 | 185 | 186 | ||
239 | === modified file 'breezy/git/interrepo.py' | |||
240 | --- breezy/git/interrepo.py 2018-11-16 23:15:15 +0000 | |||
241 | +++ breezy/git/interrepo.py 2019-02-11 01:23:45 +0000 | |||
242 | @@ -39,6 +39,7 @@ | |||
243 | 39 | 39 | ||
244 | 40 | from ..errors import ( | 40 | from ..errors import ( |
245 | 41 | DivergedBranches, | 41 | DivergedBranches, |
246 | 42 | FetchDepthUnsupported, | ||
247 | 42 | FetchLimitUnsupported, | 43 | FetchLimitUnsupported, |
248 | 43 | InvalidRevisionId, | 44 | InvalidRevisionId, |
249 | 44 | LossyPushToSameVCS, | 45 | LossyPushToSameVCS, |
250 | @@ -53,6 +54,7 @@ | |||
251 | 53 | ) | 54 | ) |
252 | 54 | from ..sixish import ( | 55 | from ..sixish import ( |
253 | 55 | viewitems, | 56 | viewitems, |
254 | 57 | viewkeys, | ||
255 | 56 | viewvalues, | 58 | viewvalues, |
256 | 57 | ) | 59 | ) |
257 | 58 | from .. import ( | 60 | from .. import ( |
258 | @@ -124,9 +126,9 @@ | |||
259 | 124 | """ | 126 | """ |
260 | 125 | raise NotImplementedError(self.fetch_refs) | 127 | raise NotImplementedError(self.fetch_refs) |
261 | 126 | 128 | ||
265 | 127 | def search_missing_revision_ids(self, | 129 | def search_missing_revision_ids( |
266 | 128 | find_ghosts=True, revision_ids=None, | 130 | self, find_ghosts=True, revision_ids=None, if_present_ids=None, |
267 | 129 | if_present_ids=None, limit=None): | 131 | limit=None, depth=None): |
268 | 130 | if limit is not None: | 132 | if limit is not None: |
269 | 131 | raise FetchLimitUnsupported(self) | 133 | raise FetchLimitUnsupported(self) |
270 | 132 | git_shas = [] | 134 | git_shas = [] |
271 | @@ -190,7 +192,7 @@ | |||
272 | 190 | return False | 192 | return False |
273 | 191 | return self._commit_needs_fetching(sha_id) | 193 | return self._commit_needs_fetching(sha_id) |
274 | 192 | 194 | ||
276 | 193 | def missing_revisions(self, stop_revisions): | 195 | def _missing_revisions(self, stop_revisions, depth=None): |
277 | 194 | """Find the revisions that are missing from the target repository. | 196 | """Find the revisions that are missing from the target repository. |
278 | 195 | 197 | ||
279 | 196 | :param stop_revisions: Revisions to check for (tuples with | 198 | :param stop_revisions: Revisions to check for (tuples with |
280 | @@ -204,32 +206,35 @@ | |||
281 | 204 | for (sha1, revid) in stop_revisions: | 206 | for (sha1, revid) in stop_revisions: |
282 | 205 | if sha1 is not None and revid is not None: | 207 | if sha1 is not None and revid is not None: |
283 | 206 | revid_sha_map[revid] = sha1 | 208 | revid_sha_map[revid] = sha1 |
285 | 207 | stop_revids.append(revid) | 209 | stop_revids.append((revid, 1)) |
286 | 208 | elif sha1 is not None: | 210 | elif sha1 is not None: |
287 | 209 | if self._commit_needs_fetching(sha1): | 211 | if self._commit_needs_fetching(sha1): |
288 | 210 | for (kind, (revid, tree_sha, verifiers)) in self.source_store.lookup_git_sha(sha1): | 212 | for (kind, (revid, tree_sha, verifiers)) in self.source_store.lookup_git_sha(sha1): |
289 | 211 | revid_sha_map[revid] = sha1 | 213 | revid_sha_map[revid] = sha1 |
291 | 212 | stop_revids.append(revid) | 214 | stop_revids.append((revid, 1)) |
292 | 213 | else: | 215 | else: |
293 | 214 | if revid is None: | 216 | if revid is None: |
294 | 215 | raise AssertionError | 217 | raise AssertionError |
296 | 216 | stop_revids.append(revid) | 218 | stop_revids.append((revid, 1)) |
297 | 217 | missing = set() | 219 | missing = set() |
298 | 218 | graph = self.source.get_graph() | 220 | graph = self.source.get_graph() |
299 | 219 | pb = ui.ui_factory.nested_progress_bar() | 221 | pb = ui.ui_factory.nested_progress_bar() |
300 | 220 | try: | 222 | try: |
301 | 221 | while stop_revids: | 223 | while stop_revids: |
304 | 222 | new_stop_revids = [] | 224 | new_stop_revids = {} |
305 | 223 | for revid in stop_revids: | 225 | for revid, revid_depth in stop_revids: |
306 | 224 | sha1 = revid_sha_map.get(revid) | 226 | sha1 = revid_sha_map.get(revid) |
307 | 225 | if (revid not in missing and | 227 | if (revid not in missing and |
308 | 226 | self._revision_needs_fetching(sha1, revid)): | 228 | self._revision_needs_fetching(sha1, revid)): |
309 | 227 | missing.add(revid) | 229 | missing.add(revid) |
311 | 228 | new_stop_revids.append(revid) | 230 | if depth is None or revid_depth < depth: |
312 | 231 | new_stop_revids[revid] = revid_depth | ||
313 | 229 | stop_revids = set() | 232 | stop_revids = set() |
317 | 230 | parent_map = graph.get_parent_map(new_stop_revids) | 233 | parent_map = graph.get_parent_map(viewkeys(new_stop_revids)) |
318 | 231 | for parent_revids in viewvalues(parent_map): | 234 | for revid, parent_revids in viewvalues(parent_map): |
319 | 232 | stop_revids.update(parent_revids) | 235 | stop_revids.update( |
320 | 236 | [(parent_revid, new_stop_revids[revid] + 1) | ||
321 | 237 | for parent_revid in parent_revids]) | ||
322 | 233 | pb.update("determining revisions to fetch", len(missing)) | 238 | pb.update("determining revisions to fetch", len(missing)) |
323 | 234 | finally: | 239 | finally: |
324 | 235 | pb.finished() | 240 | pb.finished() |
325 | @@ -292,7 +297,7 @@ | |||
326 | 292 | result_refs[name] = (gitid, revid if not lossy else self.mapping.revision_id_foreign_to_bzr(gitid)) | 297 | result_refs[name] = (gitid, revid if not lossy else self.mapping.revision_id_foreign_to_bzr(gitid)) |
327 | 293 | return revidmap, old_refs, result_refs | 298 | return revidmap, old_refs, result_refs |
328 | 294 | 299 | ||
330 | 295 | def fetch_objects(self, revs, lossy, limit=None): | 300 | def fetch_objects(self, revs, lossy, limit=None, depth=None): |
331 | 296 | if not lossy and not self.mapping.roundtripping: | 301 | if not lossy and not self.mapping.roundtripping: |
332 | 297 | for git_sha, bzr_revid in revs: | 302 | for git_sha, bzr_revid in revs: |
333 | 298 | if (bzr_revid is not None and | 303 | if (bzr_revid is not None and |
334 | @@ -300,7 +305,7 @@ | |||
335 | 300 | raise NoPushSupport(self.source, self.target, self.mapping, | 305 | raise NoPushSupport(self.source, self.target, self.mapping, |
336 | 301 | bzr_revid) | 306 | bzr_revid) |
337 | 302 | with self.source_store.lock_read(): | 307 | with self.source_store.lock_read(): |
339 | 303 | todo = list(self.missing_revisions(revs))[:limit] | 308 | todo = list(self._missing_revisions(revs, depth=depth))[:limit] |
340 | 304 | revidmap = {} | 309 | revidmap = {} |
341 | 305 | pb = ui.ui_factory.nested_progress_bar() | 310 | pb = ui.ui_factory.nested_progress_bar() |
342 | 306 | try: | 311 | try: |
343 | @@ -325,7 +330,7 @@ | |||
344 | 325 | pb.finished() | 330 | pb.finished() |
345 | 326 | 331 | ||
346 | 327 | def fetch(self, revision_id=None, pb=None, find_ghosts=False, | 332 | def fetch(self, revision_id=None, pb=None, find_ghosts=False, |
348 | 328 | fetch_spec=None, mapped_refs=None): | 333 | fetch_spec=None, mapped_refs=None, depth=None): |
349 | 329 | if mapped_refs is not None: | 334 | if mapped_refs is not None: |
350 | 330 | stop_revisions = mapped_refs | 335 | stop_revisions = mapped_refs |
351 | 331 | elif revision_id is not None: | 336 | elif revision_id is not None: |
352 | @@ -342,7 +347,7 @@ | |||
353 | 342 | for revid in self.source.all_revision_ids()] | 347 | for revid in self.source.all_revision_ids()] |
354 | 343 | self._warn_slow() | 348 | self._warn_slow() |
355 | 344 | try: | 349 | try: |
357 | 345 | self.fetch_objects(stop_revisions, lossy=False) | 350 | self.fetch_objects(stop_revisions, lossy=False, depth=depth) |
358 | 346 | except NoPushSupport: | 351 | except NoPushSupport: |
359 | 347 | raise NoRoundtrippingSupport(self.source, self.target) | 352 | raise NoRoundtrippingSupport(self.source, self.target) |
360 | 348 | 353 | ||
361 | @@ -501,7 +506,8 @@ | |||
362 | 501 | 'Fetching from Git to Bazaar repository. ' | 506 | 'Fetching from Git to Bazaar repository. ' |
363 | 502 | 'For better performance, fetch into a Git repository.') | 507 | 'For better performance, fetch into a Git repository.') |
364 | 503 | 508 | ||
366 | 504 | def fetch_objects(self, determine_wants, mapping, limit=None, lossy=False): | 509 | def fetch_objects(self, determine_wants, mapping, limit=None, lossy=False, |
367 | 510 | depth=None): | ||
368 | 505 | """Fetch objects from a remote server. | 511 | """Fetch objects from a remote server. |
369 | 506 | 512 | ||
370 | 507 | :param determine_wants: determine_wants callback | 513 | :param determine_wants: determine_wants callback |
371 | @@ -522,7 +528,8 @@ | |||
372 | 522 | return self.get_determine_wants_heads(wants, include_tags=include_tags) | 528 | return self.get_determine_wants_heads(wants, include_tags=include_tags) |
373 | 523 | 529 | ||
374 | 524 | def fetch(self, revision_id=None, find_ghosts=False, | 530 | def fetch(self, revision_id=None, find_ghosts=False, |
376 | 525 | mapping=None, fetch_spec=None, include_tags=False): | 531 | mapping=None, fetch_spec=None, include_tags=False, |
377 | 532 | depth=None): | ||
378 | 526 | if mapping is None: | 533 | if mapping is None: |
379 | 527 | mapping = self.source.get_mapping() | 534 | mapping = self.source.get_mapping() |
380 | 528 | if revision_id is not None: | 535 | if revision_id is not None: |
381 | @@ -543,8 +550,8 @@ | |||
382 | 543 | else: | 550 | else: |
383 | 544 | determine_wants = self.determine_wants_all | 551 | determine_wants = self.determine_wants_all |
384 | 545 | 552 | ||
387 | 546 | (pack_hint, _, remote_refs) = self.fetch_objects(determine_wants, | 553 | (pack_hint, _, remote_refs) = self.fetch_objects( |
388 | 547 | mapping) | 554 | determine_wants, mapping, depth=depth) |
389 | 548 | if pack_hint is not None and self.target._format.pack_compresses: | 555 | if pack_hint is not None and self.target._format.pack_compresses: |
390 | 549 | self.target.pack(hint=pack_hint) | 556 | self.target.pack(hint=pack_hint) |
391 | 550 | return remote_refs | 557 | return remote_refs |
392 | @@ -563,7 +570,8 @@ | |||
393 | 563 | all_parents.update(values) | 570 | all_parents.update(values) |
394 | 564 | return set(all_revs) - all_parents | 571 | return set(all_revs) - all_parents |
395 | 565 | 572 | ||
397 | 566 | def fetch_objects(self, determine_wants, mapping, limit=None, lossy=False): | 573 | def fetch_objects(self, determine_wants, mapping, limit=None, lossy=False, |
398 | 574 | depth=None): | ||
399 | 567 | """See `InterGitNonGitRepository`.""" | 575 | """See `InterGitNonGitRepository`.""" |
400 | 568 | self._warn_slow() | 576 | self._warn_slow() |
401 | 569 | store = get_object_store(self.target, mapping) | 577 | store = get_object_store(self.target, mapping) |
402 | @@ -577,7 +585,8 @@ | |||
403 | 577 | pb = ui.ui_factory.nested_progress_bar() | 585 | pb = ui.ui_factory.nested_progress_bar() |
404 | 578 | try: | 586 | try: |
405 | 579 | objects_iter = self.source.fetch_objects( | 587 | objects_iter = self.source.fetch_objects( |
407 | 580 | wants_recorder, graph_walker, store.get_raw) | 588 | wants_recorder, graph_walker, store.get_raw, |
408 | 589 | depth=depth) | ||
409 | 581 | trace.mutter("Importing %d new revisions", | 590 | trace.mutter("Importing %d new revisions", |
410 | 582 | len(wants_recorder.wants)) | 591 | len(wants_recorder.wants)) |
411 | 583 | (pack_hint, last_rev) = import_git_objects( | 592 | (pack_hint, last_rev) = import_git_objects( |
412 | @@ -605,22 +614,22 @@ | |||
413 | 605 | """InterRepository that copies revisions from a local Git into a non-Git | 614 | """InterRepository that copies revisions from a local Git into a non-Git |
414 | 606 | repository.""" | 615 | repository.""" |
415 | 607 | 616 | ||
417 | 608 | def fetch_objects(self, determine_wants, mapping, limit=None, lossy=False): | 617 | def fetch_objects(self, determine_wants, mapping, limit=None, lossy=False, |
418 | 618 | depth=None): | ||
419 | 609 | """See `InterGitNonGitRepository`.""" | 619 | """See `InterGitNonGitRepository`.""" |
420 | 620 | if depth is not None: | ||
421 | 621 | raise FetchDepthUnsupported(self) | ||
422 | 610 | self._warn_slow() | 622 | self._warn_slow() |
423 | 611 | remote_refs = self.source.controldir.get_refs_container().as_dict() | 623 | remote_refs = self.source.controldir.get_refs_container().as_dict() |
424 | 612 | wants = determine_wants(remote_refs) | 624 | wants = determine_wants(remote_refs) |
425 | 613 | pb = ui.ui_factory.nested_progress_bar() | 625 | pb = ui.ui_factory.nested_progress_bar() |
426 | 614 | target_git_object_retriever = get_object_store(self.target, mapping) | 626 | target_git_object_retriever = get_object_store(self.target, mapping) |
427 | 615 | try: | 627 | try: |
430 | 616 | target_git_object_retriever.lock_write() | 628 | with target_git_object_retriever.lock_write(): |
429 | 617 | try: | ||
431 | 618 | (pack_hint, last_rev) = import_git_objects( | 629 | (pack_hint, last_rev) = import_git_objects( |
432 | 619 | self.target, mapping, self.source._git.object_store, | 630 | self.target, mapping, self.source._git.object_store, |
433 | 620 | target_git_object_retriever, wants, pb, limit) | 631 | target_git_object_retriever, wants, pb, limit) |
434 | 621 | return (pack_hint, last_rev, remote_refs) | 632 | return (pack_hint, last_rev, remote_refs) |
435 | 622 | finally: | ||
436 | 623 | target_git_object_retriever.unlock() | ||
437 | 624 | finally: | 633 | finally: |
438 | 625 | pb.finished() | 634 | pb.finished() |
439 | 626 | 635 | ||
440 | @@ -660,7 +669,7 @@ | |||
441 | 660 | return None, old_refs, new_refs | 669 | return None, old_refs, new_refs |
442 | 661 | 670 | ||
443 | 662 | def fetch_objects(self, determine_wants, mapping=None, limit=None, | 671 | def fetch_objects(self, determine_wants, mapping=None, limit=None, |
445 | 663 | lossy=False): | 672 | lossy=False, depth=None): |
446 | 664 | raise NotImplementedError(self.fetch_objects) | 673 | raise NotImplementedError(self.fetch_objects) |
447 | 665 | 674 | ||
448 | 666 | def _target_has_shas(self, shas): | 675 | def _target_has_shas(self, shas): |
449 | @@ -669,7 +678,7 @@ | |||
450 | 669 | 678 | ||
451 | 670 | def fetch(self, revision_id=None, find_ghosts=False, | 679 | def fetch(self, revision_id=None, find_ghosts=False, |
452 | 671 | mapping=None, fetch_spec=None, branches=None, limit=None, | 680 | mapping=None, fetch_spec=None, branches=None, limit=None, |
454 | 672 | include_tags=False): | 681 | include_tags=False, depth=None): |
455 | 673 | if mapping is None: | 682 | if mapping is None: |
456 | 674 | mapping = self.source.get_mapping() | 683 | mapping = self.source.get_mapping() |
457 | 675 | if revision_id is not None: | 684 | if revision_id is not None: |
458 | @@ -698,7 +707,7 @@ | |||
459 | 698 | determine_wants = self.get_determine_wants_revids( | 707 | determine_wants = self.get_determine_wants_revids( |
460 | 699 | args, include_tags=include_tags) | 708 | args, include_tags=include_tags) |
461 | 700 | wants_recorder = DetermineWantsRecorder(determine_wants) | 709 | wants_recorder = DetermineWantsRecorder(determine_wants) |
463 | 701 | self.fetch_objects(wants_recorder, mapping, limit=limit) | 710 | self.fetch_objects(wants_recorder, mapping, limit=limit, depth=depth) |
464 | 702 | return wants_recorder.remote_refs | 711 | return wants_recorder.remote_refs |
465 | 703 | 712 | ||
466 | 704 | def get_determine_wants_revids(self, revids, include_tags=False): | 713 | def get_determine_wants_revids(self, revids, include_tags=False): |
467 | @@ -720,7 +729,7 @@ | |||
468 | 720 | class InterLocalGitLocalGitRepository(InterGitGitRepository): | 729 | class InterLocalGitLocalGitRepository(InterGitGitRepository): |
469 | 721 | 730 | ||
470 | 722 | def fetch_objects(self, determine_wants, mapping=None, limit=None, | 731 | def fetch_objects(self, determine_wants, mapping=None, limit=None, |
472 | 723 | lossy=False): | 732 | lossy=False, depth=None): |
473 | 724 | if lossy: | 733 | if lossy: |
474 | 725 | raise LossyPushToSameVCS(self.source, self.target) | 734 | raise LossyPushToSameVCS(self.source, self.target) |
475 | 726 | if limit is not None: | 735 | if limit is not None: |
476 | @@ -731,7 +740,7 @@ | |||
477 | 731 | try: | 740 | try: |
478 | 732 | refs = self.source._git.fetch( | 741 | refs = self.source._git.fetch( |
479 | 733 | self.target._git, determine_wants, | 742 | self.target._git, determine_wants, |
481 | 734 | progress=progress) | 743 | progress=progress, depth=depth) |
482 | 735 | finally: | 744 | finally: |
483 | 736 | pb.finished() | 745 | pb.finished() |
484 | 737 | return (None, None, refs) | 746 | return (None, None, refs) |
485 | @@ -746,7 +755,7 @@ | |||
486 | 746 | class InterRemoteGitLocalGitRepository(InterGitGitRepository): | 755 | class InterRemoteGitLocalGitRepository(InterGitGitRepository): |
487 | 747 | 756 | ||
488 | 748 | def fetch_objects(self, determine_wants, mapping=None, limit=None, | 757 | def fetch_objects(self, determine_wants, mapping=None, limit=None, |
490 | 749 | lossy=False): | 758 | lossy=False, depth=None): |
491 | 750 | if lossy: | 759 | if lossy: |
492 | 751 | raise LossyPushToSameVCS(self.source, self.target) | 760 | raise LossyPushToSameVCS(self.source, self.target) |
493 | 752 | if limit is not None: | 761 | if limit is not None: |
494 | @@ -768,10 +777,12 @@ | |||
495 | 768 | else: | 777 | else: |
496 | 769 | f, commit, abort = self.target._git.object_store.add_pack() | 778 | f, commit, abort = self.target._git.object_store.add_pack() |
497 | 770 | try: | 779 | try: |
500 | 771 | refs = self.source.controldir.fetch_pack( | 780 | fetch_result = self.source.controldir.fetch_pack( |
501 | 772 | determine_wants, graphwalker, f.write) | 781 | determine_wants, graphwalker, f.write, depth=depth) |
502 | 773 | commit() | 782 | commit() |
504 | 774 | return (None, None, refs) | 783 | self.target._git.update_shallow( |
505 | 784 | fetch_result.new_shallow, fetch_result.new_unshallow) | ||
506 | 785 | return (None, None, fetch_result.refs) | ||
507 | 775 | except BaseException: | 786 | except BaseException: |
508 | 776 | abort() | 787 | abort() |
509 | 777 | raise | 788 | raise |
510 | 778 | 789 | ||
511 | === modified file 'breezy/git/remote.py' | |||
512 | --- breezy/git/remote.py 2019-02-01 16:56:56 +0000 | |||
513 | +++ breezy/git/remote.py 2019-02-11 01:23:45 +0000 | |||
514 | @@ -416,8 +416,8 @@ | |||
515 | 416 | if pb is not None: | 416 | if pb is not None: |
516 | 417 | pb.finished() | 417 | pb.finished() |
517 | 418 | 418 | ||
520 | 419 | def fetch_pack(self, determine_wants, graph_walker, pack_data, | 419 | def fetch_pack(self, determine_wants, graph_walker, pack_data, progress=None, |
521 | 420 | progress=None): | 420 | depth=None): |
522 | 421 | if progress is None: | 421 | if progress is None: |
523 | 422 | pb = ui.ui_factory.nested_progress_bar() | 422 | pb = ui.ui_factory.nested_progress_bar() |
524 | 423 | progress = DefaultProgressReporter(pb).progress | 423 | progress = DefaultProgressReporter(pb).progress |
525 | @@ -426,7 +426,7 @@ | |||
526 | 426 | try: | 426 | try: |
527 | 427 | result = self._client.fetch_pack( | 427 | result = self._client.fetch_pack( |
528 | 428 | self._client_path, determine_wants, graph_walker, pack_data, | 428 | self._client_path, determine_wants, graph_walker, pack_data, |
530 | 429 | progress) | 429 | progress, depth=depth) |
531 | 430 | if result.refs is None: | 430 | if result.refs is None: |
532 | 431 | result.refs = {} | 431 | result.refs = {} |
533 | 432 | self._refs = remote_refs_dict_to_container( | 432 | self._refs = remote_refs_dict_to_container( |
534 | @@ -539,9 +539,10 @@ | |||
535 | 539 | def get_refs_container(self): | 539 | def get_refs_container(self): |
536 | 540 | if self._refs is not None: | 540 | if self._refs is not None: |
537 | 541 | return self._refs | 541 | return self._refs |
541 | 542 | result = self.fetch_pack(lambda x: None, None, | 542 | result = self.fetch_pack( |
542 | 543 | lambda x: None, | 543 | lambda x: None, None, |
543 | 544 | lambda x: trace.mutter("git: %s" % x)) | 544 | lambda x: None, lambda x: trace.mutter("git: %s" % x), |
544 | 545 | depth=None) | ||
545 | 545 | self._refs = remote_refs_dict_to_container( | 546 | self._refs = remote_refs_dict_to_container( |
546 | 546 | result.refs, result.symrefs) | 547 | result.refs, result.symrefs) |
547 | 547 | return self._refs | 548 | return self._refs |
548 | @@ -841,19 +842,20 @@ | |||
549 | 841 | return self.controldir.archive(*args, **kwargs) | 842 | return self.controldir.archive(*args, **kwargs) |
550 | 842 | 843 | ||
551 | 843 | def fetch_pack(self, determine_wants, graph_walker, pack_data, | 844 | def fetch_pack(self, determine_wants, graph_walker, pack_data, |
553 | 844 | progress=None): | 845 | progress=None, depth=None): |
554 | 845 | return self.controldir.fetch_pack( | 846 | return self.controldir.fetch_pack( |
556 | 846 | determine_wants, graph_walker, pack_data, progress) | 847 | determine_wants, graph_walker, pack_data, progress, depth=depth) |
557 | 847 | 848 | ||
558 | 848 | def send_pack(self, get_changed_refs, generate_pack_data): | 849 | def send_pack(self, get_changed_refs, generate_pack_data): |
559 | 849 | return self.controldir.send_pack(get_changed_refs, generate_pack_data) | 850 | return self.controldir.send_pack(get_changed_refs, generate_pack_data) |
560 | 850 | 851 | ||
561 | 851 | def fetch_objects(self, determine_wants, graph_walker, resolve_ext_ref, | 852 | def fetch_objects(self, determine_wants, graph_walker, resolve_ext_ref, |
563 | 852 | progress=None): | 853 | progress=None, depth=None): |
564 | 853 | fd, path = tempfile.mkstemp(suffix=".pack") | 854 | fd, path = tempfile.mkstemp(suffix=".pack") |
565 | 854 | try: | 855 | try: |
568 | 855 | self.fetch_pack(determine_wants, graph_walker, | 856 | self.fetch_pack( |
569 | 856 | lambda x: os.write(fd, x), progress) | 857 | determine_wants, graph_walker, |
570 | 858 | lambda x: os.write(fd, x), progress, depth=depth) | ||
571 | 857 | finally: | 859 | finally: |
572 | 858 | os.close(fd) | 860 | os.close(fd) |
573 | 859 | if os.path.getsize(path) == 0: | 861 | if os.path.getsize(path) == 0: |
574 | 860 | 862 | ||
575 | === modified file 'breezy/git/repository.py' | |||
576 | --- breezy/git/repository.py 2018-11-30 12:39:04 +0000 | |||
577 | +++ breezy/git/repository.py 2019-02-11 01:23:45 +0000 | |||
578 | @@ -131,6 +131,7 @@ | |||
579 | 131 | _serializer = None | 131 | _serializer = None |
580 | 132 | vcs = foreign_vcs_git | 132 | vcs = foreign_vcs_git |
581 | 133 | chk_bytes = None | 133 | chk_bytes = None |
582 | 134 | supports_fetch_depth = True | ||
583 | 134 | 135 | ||
584 | 135 | def __init__(self, gitdir): | 136 | def __init__(self, gitdir): |
585 | 136 | self._transport = gitdir.root_transport | 137 | self._transport = gitdir.root_transport |
586 | 137 | 138 | ||
587 | === modified file 'breezy/git/tests/test_blackbox.py' | |||
588 | --- breezy/git/tests/test_blackbox.py 2019-01-02 18:49:15 +0000 | |||
589 | +++ breezy/git/tests/test_blackbox.py 2019-02-11 01:23:45 +0000 | |||
590 | @@ -308,6 +308,7 @@ | |||
591 | 308 | self.repo.stage("foo") | 308 | self.repo.stage("foo") |
592 | 309 | self.repo.do_commit( | 309 | self.repo.do_commit( |
593 | 310 | b"message", committer=b"Somebody <user@example.com>", | 310 | b"message", committer=b"Somebody <user@example.com>", |
594 | 311 | author=b"Somebody <user@example.com>", | ||
595 | 311 | commit_timestamp=1526330165, commit_timezone=0, | 312 | commit_timestamp=1526330165, commit_timezone=0, |
596 | 312 | author_timestamp=1526330165, author_timezone=0, | 313 | author_timestamp=1526330165, author_timezone=0, |
597 | 313 | merge_heads=[b'aa' * 20]) | 314 | merge_heads=[b'aa' * 20]) |
598 | 314 | 315 | ||
599 | === modified file 'breezy/git/transportgit.py' | |||
600 | --- breezy/git/transportgit.py 2018-11-11 14:23:06 +0000 | |||
601 | +++ breezy/git/transportgit.py 2019-02-11 01:23:45 +0000 | |||
602 | @@ -36,6 +36,7 @@ | |||
603 | 36 | from dulwich.object_store import ( | 36 | from dulwich.object_store import ( |
604 | 37 | PackBasedObjectStore, | 37 | PackBasedObjectStore, |
605 | 38 | PACKDIR, | 38 | PACKDIR, |
606 | 39 | read_packs_file, | ||
607 | 39 | ) | 40 | ) |
608 | 40 | from dulwich.pack import ( | 41 | from dulwich.pack import ( |
609 | 41 | MemoryPackIndex, | 42 | MemoryPackIndex, |
610 | @@ -587,16 +588,41 @@ | |||
611 | 587 | ret.append(l) | 588 | ret.append(l) |
612 | 588 | return ret | 589 | return ret |
613 | 589 | 590 | ||
614 | 590 | @property | ||
615 | 591 | def packs(self): | ||
616 | 592 | # FIXME: Never invalidates. | ||
617 | 593 | if not self._pack_cache: | ||
618 | 594 | self._update_pack_cache() | ||
619 | 595 | return self._pack_cache.values() | ||
620 | 596 | |||
621 | 597 | def _update_pack_cache(self): | 591 | def _update_pack_cache(self): |
624 | 598 | for pack in self._load_packs(): | 592 | pack_files = set() |
625 | 599 | self._pack_cache[pack._basename] = pack | 593 | pack_dir_contents = self._pack_names() |
626 | 594 | for name in pack_dir_contents: | ||
627 | 595 | if name.startswith("pack-") and name.endswith(".pack"): | ||
628 | 596 | # verify that idx exists first (otherwise the pack was not yet | ||
629 | 597 | # fully written) | ||
630 | 598 | idx_name = os.path.splitext(name)[0] + ".idx" | ||
631 | 599 | if idx_name in pack_dir_contents: | ||
632 | 600 | pack_files.add(os.path.splitext(name)[0]) | ||
633 | 601 | |||
634 | 602 | new_packs = [] | ||
635 | 603 | for basename in pack_files: | ||
636 | 604 | pack_name = basename + ".pack" | ||
637 | 605 | if basename not in self._pack_cache: | ||
638 | 606 | try: | ||
639 | 607 | size = self.pack_transport.stat(pack_name).st_size | ||
640 | 608 | except TransportNotPossible: | ||
641 | 609 | f = self.pack_transport.get(pack_name) | ||
642 | 610 | pd = PackData(pack_name, f) | ||
643 | 611 | else: | ||
644 | 612 | pd = PackData( | ||
645 | 613 | pack_name, self.pack_transport.get(pack_name), | ||
646 | 614 | size=size) | ||
647 | 615 | idxname = basename + ".idx" | ||
648 | 616 | idx = load_pack_index_file( | ||
649 | 617 | idxname, self.pack_transport.get(idxname)) | ||
650 | 618 | pack = Pack.from_objects(pd, idx) | ||
651 | 619 | pack._basename = basename | ||
652 | 620 | self._pack_cache[basename] = pack | ||
653 | 621 | new_packs.append(pack) | ||
654 | 622 | # Remove disappeared pack files | ||
655 | 623 | for f in set(self._pack_cache) - pack_files: | ||
656 | 624 | self._pack_cache.pop(f).close() | ||
657 | 625 | return new_packs | ||
658 | 600 | 626 | ||
659 | 601 | def _pack_names(self): | 627 | def _pack_names(self): |
660 | 602 | try: | 628 | try: |
661 | @@ -608,9 +634,6 @@ | |||
662 | 608 | # Hmm, warn about running 'git update-server-info' ? | 634 | # Hmm, warn about running 'git update-server-info' ? |
663 | 609 | return iter([]) | 635 | return iter([]) |
664 | 610 | else: | 636 | else: |
665 | 611 | # TODO(jelmer): Move to top-level after dulwich | ||
666 | 612 | # 0.19.7 is released. | ||
667 | 613 | from dulwich.object_store import read_packs_file | ||
668 | 614 | with f: | 637 | with f: |
669 | 615 | return read_packs_file(f) | 638 | return read_packs_file(f) |
670 | 616 | except NoSuchFile: | 639 | except NoSuchFile: |
671 | @@ -619,26 +642,10 @@ | |||
672 | 619 | def _remove_pack(self, pack): | 642 | def _remove_pack(self, pack): |
673 | 620 | self.pack_transport.delete(os.path.basename(pack.index.path)) | 643 | self.pack_transport.delete(os.path.basename(pack.index.path)) |
674 | 621 | self.pack_transport.delete(pack.data.filename) | 644 | self.pack_transport.delete(pack.data.filename) |
695 | 622 | 645 | try: | |
696 | 623 | def _load_packs(self): | 646 | del self._pack_cache[os.path.basename(pack._basename)] |
697 | 624 | ret = [] | 647 | except KeyError: |
698 | 625 | for name in self._pack_names(): | 648 | pass |
679 | 626 | if name.startswith("pack-") and name.endswith(".pack"): | ||
680 | 627 | try: | ||
681 | 628 | size = self.pack_transport.stat(name).st_size | ||
682 | 629 | except TransportNotPossible: | ||
683 | 630 | f = self.pack_transport.get(name) | ||
684 | 631 | pd = PackData(name, f) | ||
685 | 632 | else: | ||
686 | 633 | pd = PackData(name, self.pack_transport.get(name), | ||
687 | 634 | size=size) | ||
688 | 635 | idxname = name.replace(".pack", ".idx") | ||
689 | 636 | idx = load_pack_index_file( | ||
690 | 637 | idxname, self.pack_transport.get(idxname)) | ||
691 | 638 | pack = Pack.from_objects(pd, idx) | ||
692 | 639 | pack._basename = idxname[:-4] | ||
693 | 640 | ret.append(pack) | ||
694 | 641 | return ret | ||
699 | 642 | 649 | ||
700 | 643 | def _iter_loose_objects(self): | 650 | def _iter_loose_objects(self): |
701 | 644 | for base in self.transport.list_dir('.'): | 651 | for base in self.transport.list_dir('.'): |
702 | @@ -702,7 +709,7 @@ | |||
703 | 702 | idx = load_pack_index_file(basename + ".idx", idxfile) | 709 | idx = load_pack_index_file(basename + ".idx", idxfile) |
704 | 703 | final_pack = Pack.from_objects(p, idx) | 710 | final_pack = Pack.from_objects(p, idx) |
705 | 704 | final_pack._basename = basename | 711 | final_pack._basename = basename |
707 | 705 | self._add_known_pack(basename, final_pack) | 712 | self._add_cached_pack(basename, final_pack) |
708 | 706 | return final_pack | 713 | return final_pack |
709 | 707 | 714 | ||
710 | 708 | def move_in_thin_pack(self, f): | 715 | def move_in_thin_pack(self, f): |
711 | @@ -735,8 +742,6 @@ | |||
712 | 735 | write_pack_index_v2(idxfile, entries, data_sum) | 742 | write_pack_index_v2(idxfile, entries, data_sum) |
713 | 736 | finally: | 743 | finally: |
714 | 737 | idxfile.close() | 744 | idxfile.close() |
715 | 738 | # TODO(jelmer): Just add new pack to the cache | ||
716 | 739 | self._flush_pack_cache() | ||
717 | 740 | 745 | ||
718 | 741 | def add_pack(self): | 746 | def add_pack(self): |
719 | 742 | """Add a new pack to this object store. | 747 | """Add a new pack to this object store. |
720 | 743 | 748 | ||
721 | === modified file 'breezy/plugins/weave_fmt/bzrdir.py' | |||
722 | --- breezy/plugins/weave_fmt/bzrdir.py 2018-11-11 04:08:32 +0000 | |||
723 | +++ breezy/plugins/weave_fmt/bzrdir.py 2019-02-11 01:23:45 +0000 | |||
724 | @@ -895,8 +895,10 @@ | |||
725 | 895 | def sprout(self, url, revision_id=None, force_new_repo=False, | 895 | def sprout(self, url, revision_id=None, force_new_repo=False, |
726 | 896 | possible_transports=None, accelerator_tree=None, | 896 | possible_transports=None, accelerator_tree=None, |
727 | 897 | hardlink=False, stacked=False, create_tree_if_local=True, | 897 | hardlink=False, stacked=False, create_tree_if_local=True, |
729 | 898 | source_branch=None): | 898 | source_branch=None, depth=None): |
730 | 899 | """See ControlDir.sprout().""" | 899 | """See ControlDir.sprout().""" |
731 | 900 | if depth is not None: | ||
732 | 901 | raise errors.FetchDepthUnsupported(self) | ||
733 | 900 | if source_branch is not None: | 902 | if source_branch is not None: |
734 | 901 | my_branch = self.open_branch() | 903 | my_branch = self.open_branch() |
735 | 902 | if source_branch.base != my_branch.base: | 904 | if source_branch.base != my_branch.base: |
736 | 903 | 905 | ||
737 | === modified file 'breezy/repository.py' | |||
738 | --- breezy/repository.py 2019-02-01 16:56:56 +0000 | |||
739 | +++ breezy/repository.py 2019-02-11 01:23:45 +0000 | |||
740 | @@ -260,6 +260,10 @@ | |||
741 | 260 | # items in the tree, or just bulk fetching/pushing of data? | 260 | # items in the tree, or just bulk fetching/pushing of data? |
742 | 261 | supports_random_access = True | 261 | supports_random_access = True |
743 | 262 | 262 | ||
744 | 263 | # Does this repository implementation support fetching with | ||
745 | 264 | # a certain graph depth? | ||
746 | 265 | supports_fetch_depth = False | ||
747 | 266 | |||
748 | 263 | def abort_write_group(self, suppress_errors=False): | 267 | def abort_write_group(self, suppress_errors=False): |
749 | 264 | """Commit the contents accrued within the current write group. | 268 | """Commit the contents accrued within the current write group. |
750 | 265 | 269 | ||
751 | 266 | 270 | ||
752 | === modified file 'breezy/tests/per_controldir/test_controldir.py' | |||
753 | --- breezy/tests/per_controldir/test_controldir.py 2018-11-11 04:08:32 +0000 | |||
754 | +++ breezy/tests/per_controldir/test_controldir.py 2019-02-11 01:23:45 +0000 | |||
755 | @@ -86,7 +86,7 @@ | |||
756 | 86 | 86 | ||
757 | 87 | def sproutOrSkip(self, from_bzrdir, to_url, revision_id=None, | 87 | def sproutOrSkip(self, from_bzrdir, to_url, revision_id=None, |
758 | 88 | force_new_repo=False, accelerator_tree=None, | 88 | force_new_repo=False, accelerator_tree=None, |
760 | 89 | create_tree_if_local=True): | 89 | create_tree_if_local=True, depth=None): |
761 | 90 | """Sprout from_bzrdir into to_url, or raise TestSkipped. | 90 | """Sprout from_bzrdir into to_url, or raise TestSkipped. |
762 | 91 | 91 | ||
763 | 92 | A simple wrapper for from_bzrdir.sprout that translates NotLocalUrl into | 92 | A simple wrapper for from_bzrdir.sprout that translates NotLocalUrl into |
764 | @@ -99,7 +99,8 @@ | |||
765 | 99 | force_new_repo=force_new_repo, | 99 | force_new_repo=force_new_repo, |
766 | 100 | possible_transports=[to_transport], | 100 | possible_transports=[to_transport], |
767 | 101 | accelerator_tree=accelerator_tree, | 101 | accelerator_tree=accelerator_tree, |
769 | 102 | create_tree_if_local=create_tree_if_local) | 102 | create_tree_if_local=create_tree_if_local, |
770 | 103 | depth=depth) | ||
771 | 103 | return target | 104 | return target |
772 | 104 | 105 | ||
773 | 105 | def test_uninitializable(self): | 106 | def test_uninitializable(self): |
774 | @@ -1050,6 +1051,20 @@ | |||
775 | 1050 | self.addCleanup(repo.lock_read().unlock) | 1051 | self.addCleanup(repo.lock_read().unlock) |
776 | 1051 | self.assertEqual(None, repo.get_parent_map([rev1]).get(rev1)) | 1052 | self.assertEqual(None, repo.get_parent_map([rev1]).get(rev1)) |
777 | 1052 | 1053 | ||
778 | 1054 | def test_sprout_with_depth(self): | ||
779 | 1055 | tree = self.make_branch_and_tree('source') | ||
780 | 1056 | self.build_tree(['source/foo']) | ||
781 | 1057 | tree.add('foo') | ||
782 | 1058 | tree.commit('revision 1') | ||
783 | 1059 | rev2 = tree.commit('revision 2', allow_pointless=True) | ||
784 | 1060 | dir = tree.controldir | ||
785 | 1061 | try: | ||
786 | 1062 | target = self.sproutOrSkip(dir, self.get_url('target'), depth=1) | ||
787 | 1063 | except errors.FetchDepthUnsupported: | ||
788 | 1064 | self.assertFalse(tree.branch.repository.supports_fetch_depth) | ||
789 | 1065 | else: | ||
790 | 1066 | self.assertEqual({rev2}, target.open_repository().all_revision_ids()) | ||
791 | 1067 | |||
792 | 1053 | def test_format_initialize_find_open(self): | 1068 | def test_format_initialize_find_open(self): |
793 | 1054 | # loopback test to check the current format initializes to itself. | 1069 | # loopback test to check the current format initializes to itself. |
794 | 1055 | if not self.bzrdir_format.is_initializable(): | 1070 | if not self.bzrdir_format.is_initializable(): |
795 | 1056 | 1071 | ||
796 | === modified file 'breezy/tests/per_interbranch/test_fetch.py' | |||
797 | --- breezy/tests/per_interbranch/test_fetch.py 2018-11-11 04:08:32 +0000 | |||
798 | +++ breezy/tests/per_interbranch/test_fetch.py 2019-02-11 01:23:45 +0000 | |||
799 | @@ -16,7 +16,11 @@ | |||
800 | 16 | 16 | ||
801 | 17 | """Tests for InterBranch.fetch.""" | 17 | """Tests for InterBranch.fetch.""" |
802 | 18 | 18 | ||
804 | 19 | from breezy.errors import FetchLimitUnsupported, NoRoundtrippingSupport | 19 | from breezy.errors import ( |
805 | 20 | FetchDepthUnsupported, | ||
806 | 21 | FetchLimitUnsupported, | ||
807 | 22 | NoRoundtrippingSupport, | ||
808 | 23 | ) | ||
809 | 20 | from breezy.revision import NULL_REVISION | 24 | from breezy.revision import NULL_REVISION |
810 | 21 | from breezy.tests import TestNotApplicable | 25 | from breezy.tests import TestNotApplicable |
811 | 22 | from breezy.tests.per_interbranch import ( | 26 | from breezy.tests.per_interbranch import ( |
812 | @@ -113,3 +117,39 @@ | |||
813 | 113 | self.assertEqual( | 117 | self.assertEqual( |
814 | 114 | {rev1, rev2}, | 118 | {rev1, rev2}, |
815 | 115 | b2.repository.has_revisions([rev1, rev2, rev3])) | 119 | b2.repository.has_revisions([rev1, rev2, rev3])) |
816 | 120 | |||
817 | 121 | def test_fetch_revisions_depth(self): | ||
818 | 122 | """Test fetch-revision operation.""" | ||
819 | 123 | builder = self.make_branch_builder( | ||
820 | 124 | 'b1', format=self.branch_format_from._matchingcontroldir) | ||
821 | 125 | builder.start_series() | ||
822 | 126 | rev1 = builder.build_commit() | ||
823 | 127 | rev2 = builder.build_commit() | ||
824 | 128 | rev3 = builder.build_commit() | ||
825 | 129 | builder.finish_series() | ||
826 | 130 | b1 = builder.get_branch() | ||
827 | 131 | b2 = self.make_to_branch('b2') | ||
828 | 132 | try: | ||
829 | 133 | if b2.repository.supports_fetch_depth: | ||
830 | 134 | b2.fetch(b1, depth=1) | ||
831 | 135 | else: | ||
832 | 136 | self.assertRaises(FetchDepthUnsupported, b2.fetch, b1, depth=1) | ||
833 | 137 | raise TestNotApplicable( | ||
834 | 138 | 'interbranch does not support fetch depths') | ||
835 | 139 | except NoRoundtrippingSupport: | ||
836 | 140 | raise TestNotApplicable( | ||
837 | 141 | 'lossless cross-vcs fetch %r to %r not supported' % | ||
838 | 142 | (b1, b2)) | ||
839 | 143 | |||
840 | 144 | self.assertEqual( | ||
841 | 145 | {rev3}, | ||
842 | 146 | b2.repository.has_revisions([rev1, rev2, rev3])) | ||
843 | 147 | |||
844 | 148 | # fetch does not update the last revision | ||
845 | 149 | self.assertEqual(NULL_REVISION, b2.last_revision()) | ||
846 | 150 | |||
847 | 151 | # Incrementally fetch one more | ||
848 | 152 | b2.fetch(b1, depth=2) | ||
849 | 153 | |||
850 | 154 | self.assertEqual( | ||
851 | 155 | {rev2, rev3}, b2.repository.has_revisions([rev1, rev2, rev3])) | ||
852 | 116 | 156 | ||
853 | === modified file 'breezy/tests/per_repository/test_repository.py' | |||
854 | --- breezy/tests/per_repository/test_repository.py 2019-02-01 16:56:56 +0000 | |||
855 | +++ breezy/tests/per_repository/test_repository.py 2019-02-11 01:23:45 +0000 | |||
856 | @@ -125,6 +125,9 @@ | |||
857 | 125 | def test_attribute_format_supports_random_access(self): | 125 | def test_attribute_format_supports_random_access(self): |
858 | 126 | self.assertRepositoryAttribute('supports_random_access', (True, False)) | 126 | self.assertRepositoryAttribute('supports_random_access', (True, False)) |
859 | 127 | 127 | ||
860 | 128 | def test_attribute_format_supports_fetch_depth(self): | ||
861 | 129 | self.assertRepositoryAttribute('supports_fetch_depth', (True, False)) | ||
862 | 130 | |||
863 | 128 | def test_attribute_format_supports_setting_revision_ids(self): | 131 | def test_attribute_format_supports_setting_revision_ids(self): |
864 | 129 | self.assertFormatAttribute('supports_setting_revision_ids', | 132 | self.assertFormatAttribute('supports_setting_revision_ids', |
865 | 130 | (True, False)) | 133 | (True, False)) |
866 | 131 | 134 | ||
867 | === modified file 'breezy/tests/test_foreign.py' | |||
868 | --- breezy/tests/test_foreign.py 2018-11-25 20:44:56 +0000 | |||
869 | +++ breezy/tests/test_foreign.py 2019-02-11 01:23:45 +0000 | |||
870 | @@ -339,14 +339,15 @@ | |||
871 | 339 | def sprout(self, url, revision_id=None, force_new_repo=False, | 339 | def sprout(self, url, revision_id=None, force_new_repo=False, |
872 | 340 | recurse='down', possible_transports=None, | 340 | recurse='down', possible_transports=None, |
873 | 341 | accelerator_tree=None, hardlink=False, stacked=False, | 341 | accelerator_tree=None, hardlink=False, stacked=False, |
875 | 342 | source_branch=None): | 342 | source_branch=None, depth=None): |
876 | 343 | # dirstate doesn't cope with accelerator_trees well | 343 | # dirstate doesn't cope with accelerator_trees well |
877 | 344 | # that have a different control dir | 344 | # that have a different control dir |
878 | 345 | return super(DummyForeignVcsDir, self).sprout( | 345 | return super(DummyForeignVcsDir, self).sprout( |
879 | 346 | url=url, | 346 | url=url, |
880 | 347 | revision_id=revision_id, force_new_repo=force_new_repo, | 347 | revision_id=revision_id, force_new_repo=force_new_repo, |
881 | 348 | recurse=recurse, possible_transports=possible_transports, | 348 | recurse=recurse, possible_transports=possible_transports, |
883 | 349 | hardlink=hardlink, stacked=stacked, source_branch=source_branch) | 349 | hardlink=hardlink, stacked=stacked, source_branch=source_branch, |
884 | 350 | depth=depth) | ||
885 | 350 | 351 | ||
886 | 351 | 352 | ||
887 | 352 | def register_dummy_foreign_for_test(testcase): | 353 | def register_dummy_foreign_for_test(testcase): |
888 | 353 | 354 | ||
889 | === modified file 'setup.py' | |||
890 | --- setup.py 2019-01-24 00:23:01 +0000 | |||
891 | +++ setup.py 2019-02-11 01:23:45 +0000 | |||
892 | @@ -60,7 +60,7 @@ | |||
893 | 60 | # Technically, Breezy works without these two dependencies too. But there's | 60 | # Technically, Breezy works without these two dependencies too. But there's |
894 | 61 | # no way to enable them by default and let users opt out. | 61 | # no way to enable them by default and let users opt out. |
895 | 62 | 'fastimport>=0.9.8', | 62 | 'fastimport>=0.9.8', |
897 | 63 | 'dulwich>=0.19.1', | 63 | 'dulwich>=0.19.11', |
898 | 64 | ], | 64 | ], |
899 | 65 | 'extras_require': { | 65 | 'extras_require': { |
900 | 66 | 'fastimport': [], | 66 | 'fastimport': [], |
Thanks!