Merge lp:~jelmer/brz/depth into lp:brz

Proposed by Jelmer Vernooij
Status: Superseded
Proposed branch: lp:~jelmer/brz/depth
Merge into: lp:brz
Diff against target: 900 lines (+214/-111)
18 files modified
breezy/branch.py (+8/-4)
breezy/bzr/bzrdir.py (+7/-3)
breezy/controldir.py (+2/-1)
breezy/errors.py (+8/-0)
breezy/git/branch.py (+11/-11)
breezy/git/dir.py (+3/-2)
breezy/git/interrepo.py (+48/-37)
breezy/git/remote.py (+13/-11)
breezy/git/repository.py (+1/-0)
breezy/git/tests/test_blackbox.py (+1/-0)
breezy/git/transportgit.py (+40/-35)
breezy/plugins/weave_fmt/bzrdir.py (+3/-1)
breezy/repository.py (+4/-0)
breezy/tests/per_controldir/test_controldir.py (+17/-2)
breezy/tests/per_interbranch/test_fetch.py (+41/-1)
breezy/tests/per_repository/test_repository.py (+3/-0)
breezy/tests/test_foreign.py (+3/-2)
setup.py (+1/-1)
To merge this branch: bzr merge lp:~jelmer/brz/depth
Reviewer          Review Type    Date Requested    Status
Martin Packman                                     Approve
Review via email: mp+362957@code.launchpad.net

This proposal has been superseded by a proposal from 2019-02-14.

Description of the change

Add a depth argument to ControlDir.sprout() and Repository.fetch().
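
For reference, a minimal sketch of how a caller could use the new argument if the branch lands as proposed; the URL and target path below are hypothetical, and per the diff only formats that set supports_fetch_depth (the Git formats) accept a non-None depth, while the other formats raise FetchDepthUnsupported:

    from breezy import errors
    from breezy.controldir import ControlDir

    # Hypothetical source URL and target path, for illustration only.
    source = ControlDir.open('https://github.com/example/project.git')
    try:
        # depth=1 asks for just the tip revision(s), counted from the
        # branch head, much like `git clone --depth 1`.
        shallow = source.sprout('project-shallow', depth=1)
    except errors.FetchDepthUnsupported:
        # Formats that do not set supports_fetch_depth (the Bazaar
        # formats in this proposal) reject a non-None depth.
        shallow = source.sprout('project-shallow')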

Revision history for this message
Martin Packman (gz) wrote:

Thanks!

review: Approve
lp:~jelmer/brz/depth updated
7154. By Jelmer Vernooij

Fix tests.

7155. By Jelmer Vernooij

Merge lp:brz/3.1.

7156. By Jelmer Vernooij

Merge lp:brz/3.2.

7157. By Jelmer Vernooij

Merge trunk.

7158. By Jelmer Vernooij

Merge lp:brz/3.3

Unmerged revisions

7158. By Jelmer Vernooij

Merge lp:brz/3.3

7157. By Jelmer Vernooij

Merge trunk.

7156. By Jelmer Vernooij

Merge lp:brz/3.2.

7155. By Jelmer Vernooij

Merge lp:brz/3.1.

7154. By Jelmer Vernooij

Fix tests.

7153. By Jelmer Vernooij

add depth argument to ControlDir.sprout.

7152. By Jelmer Vernooij

Enable depth fetching for Git repositories.

7151. By Jelmer Vernooij

For the moment, Dulwich doesn't support depth fetching locally.

7150. By Jelmer Vernooij

Add Repository.support_fetch_depth.

7149. By Jelmer Vernooij

Merge dulwich-compat.

Preview Diff

1=== modified file 'breezy/branch.py'
2--- breezy/branch.py 2018-11-30 12:39:04 +0000
3+++ breezy/branch.py 2019-02-11 01:23:45 +0000
4@@ -680,18 +680,19 @@
5 """Get the tree_path and branch_location for a tree reference."""
6 raise errors.UnsupportedOperation(self.get_reference_info, self)
7
8- def fetch(self, from_branch, last_revision=None, limit=None):
9+ def fetch(self, from_branch, last_revision=None, limit=None, depth=None):
10 """Copy revisions from from_branch into this branch.
11
12 :param from_branch: Where to copy from.
13 :param last_revision: What revision to stop at (None for at the end
14 of the branch.
15 :param limit: Optional rough limit of revisions to fetch
16+ :param depth: Revision depth
17 :return: None
18 """
19 with self.lock_write():
20 return InterBranch.get(from_branch, self).fetch(
21- last_revision, limit=limit)
22+ last_revision, limit=limit, depth=depth)
23
24 def get_bound_location(self):
25 """Return the URL of the branch we are bound to.
26@@ -2098,11 +2099,12 @@
27 """
28 raise NotImplementedError(self.copy_content_into)
29
30- def fetch(self, stop_revision=None, limit=None):
31+ def fetch(self, stop_revision=None, limit=None, depth=None):
32 """Fetch revisions.
33
34 :param stop_revision: Last revision to fetch
35 :param limit: Optional rough limit of revisions to fetch
36+ :param depth: Optional revision depth
37 """
38 raise NotImplementedError(self.fetch)
39
40@@ -2154,9 +2156,11 @@
41 if self.source._push_should_merge_tags():
42 self.source.tags.merge_to(self.target.tags)
43
44- def fetch(self, stop_revision=None, limit=None):
45+ def fetch(self, stop_revision=None, limit=None, depth=None):
46 if self.target.base == self.source.base:
47 return (0, [])
48+ if depth is not None:
49+ raise errors.FetchDepthUnsupported(self)
50 with self.source.lock_read(), self.target.lock_write():
51 fetch_spec_factory = fetch.FetchSpecFactory()
52 fetch_spec_factory.source_branch = self.source
53
54=== modified file 'breezy/bzr/bzrdir.py'
55--- breezy/bzr/bzrdir.py 2018-11-18 19:48:57 +0000
56+++ breezy/bzr/bzrdir.py 2019-02-11 01:23:45 +0000
57@@ -350,7 +350,7 @@
58 recurse='down', possible_transports=None,
59 accelerator_tree=None, hardlink=False, stacked=False,
60 source_branch=None, create_tree_if_local=True,
61- lossy=False):
62+ depth=None):
63 """Create a copy of this controldir prepared for use as a new line of
64 development.
65
66@@ -374,6 +374,7 @@
67 location of this control directory.
68 :param create_tree_if_local: If true, a working-tree will be created
69 when working locally.
70+ :param depth: Optional fetch depth
71 :return: The created control directory
72 """
73 operation = cleanup.OperationWithCleanups(self._sprout)
74@@ -382,12 +383,15 @@
75 recurse=recurse, possible_transports=possible_transports,
76 accelerator_tree=accelerator_tree, hardlink=hardlink,
77 stacked=stacked, source_branch=source_branch,
78- create_tree_if_local=create_tree_if_local)
79+ create_tree_if_local=create_tree_if_local, depth=depth)
80
81 def _sprout(self, op, url, revision_id=None, force_new_repo=False,
82 recurse='down', possible_transports=None,
83 accelerator_tree=None, hardlink=False, stacked=False,
84- source_branch=None, create_tree_if_local=True, lossy=False):
85+ source_branch=None, create_tree_if_local=True,
86+ depth=None):
87+ if depth is not None:
88+ raise errors.FetchDepthUnsupported(self)
89 add_cleanup = op.add_cleanup
90 fetch_spec_factory = fetch.FetchSpecFactory()
91 if revision_id is not None:
92
93=== modified file 'breezy/controldir.py'
94--- breezy/controldir.py 2018-11-12 01:41:38 +0000
95+++ breezy/controldir.py 2019-02-11 01:23:45 +0000
96@@ -373,7 +373,7 @@
97 recurse='down', possible_transports=None,
98 accelerator_tree=None, hardlink=False, stacked=False,
99 source_branch=None, create_tree_if_local=True,
100- lossy=False):
101+ depth=None):
102 """Create a copy of this controldir prepared for use as a new line of
103 development.
104
105@@ -396,6 +396,7 @@
106 location of this control directory.
107 :param create_tree_if_local: If true, a working-tree will be created
108 when working locally.
109+ :param depth: Possible fetch depth
110 """
111 raise NotImplementedError(self.sprout)
112
113
114=== modified file 'breezy/errors.py'
115--- breezy/errors.py 2018-11-11 04:08:32 +0000
116+++ breezy/errors.py 2019-02-11 01:23:45 +0000
117@@ -1851,6 +1851,14 @@
118 self.tname = type(method_self).__name__
119
120
121+class FetchDepthUnsupported(UnsupportedOperation):
122+
123+ fmt = ("InterBranch %(interbranch)r does not support fetching depths.")
124+
125+ def __init__(self, interbranch):
126+ BzrError.__init__(self, interbranch=interbranch)
127+
128+
129 class FetchLimitUnsupported(UnsupportedOperation):
130
131 fmt = ("InterBranch %(interbranch)r does not support fetching limits.")
132
133=== modified file 'breezy/git/branch.py'
134--- breezy/git/branch.py 2019-01-19 17:13:53 +0000
135+++ breezy/git/branch.py 2019-02-11 01:23:45 +0000
136@@ -656,9 +656,9 @@
137 def break_lock(self):
138 self.repository._git.refs.unlock_ref(self.ref)
139
140- def fetch(self, from_branch, last_revision=None, limit=None):
141+ def fetch(self, from_branch, last_revision=None, limit=None, depth=None):
142 return branch.InterBranch.get(from_branch, self).fetch(
143- stop_revision=last_revision, limit=limit)
144+ stop_revision=last_revision, limit=limit, depth=depth)
145
146 def _gen_revision_history(self):
147 if self.head is None:
148@@ -885,10 +885,10 @@
149 return False
150 return True
151
152- def fetch(self, stop_revision=None, fetch_tags=None, limit=None):
153- self.fetch_objects(stop_revision, fetch_tags=fetch_tags, limit=limit)
154+ def fetch(self, stop_revision=None, fetch_tags=None, limit=None, depth=None):
155+ self.fetch_objects(stop_revision, fetch_tags=fetch_tags, limit=limit, depth=depth)
156
157- def fetch_objects(self, stop_revision, fetch_tags, limit=None):
158+ def fetch_objects(self, stop_revision, fetch_tags, limit=None, depth=None):
159 interrepo = self._get_interrepo(self.source, self.target)
160 if fetch_tags is None:
161 c = self.source.get_config_stack()
162@@ -909,7 +909,7 @@
163 [self._last_revid], include_tags=fetch_tags)
164 return real(heads)
165 pack_hint, head, refs = interrepo.fetch_objects(
166- determine_wants, self.source.mapping, limit=limit)
167+ determine_wants, self.source.mapping, limit=limit, depth=depth)
168 if (pack_hint is not None and
169 self.target.repository._format.pack_compresses):
170 self.target.repository.pack(hint=pack_hint)
171@@ -1034,7 +1034,7 @@
172 class InterGitBranch(branch.GenericInterBranch):
173 """InterBranch implementation that pulls between Git branches."""
174
175- def fetch(self, stop_revision=None, fetch_tags=None, limit=None):
176+ def fetch(self, stop_revision=None, fetch_tags=None, limit=None, depth=None):
177 raise NotImplementedError(self.fetch)
178
179
180@@ -1101,14 +1101,14 @@
181 return (isinstance(source, GitBranch) and
182 isinstance(target, LocalGitBranch))
183
184- def fetch(self, stop_revision=None, fetch_tags=None, limit=None):
185+ def fetch(self, stop_revision=None, fetch_tags=None, limit=None, depth=None):
186 interrepo = _mod_repository.InterRepository.get(self.source.repository,
187 self.target.repository)
188 if stop_revision is None:
189 stop_revision = self.source.last_revision()
190 determine_wants = interrepo.get_determine_wants_revids(
191 [stop_revision], include_tags=fetch_tags)
192- interrepo.fetch_objects(determine_wants, limit=limit)
193+ interrepo.fetch_objects(determine_wants, limit=limit, depth=depth)
194
195 def _basic_push(self, overwrite=False, stop_revision=None):
196 if overwrite is True:
197@@ -1298,7 +1298,7 @@
198 return ret
199
200 def fetch(self, stop_revision=None, fetch_tags=None, lossy=False,
201- limit=None):
202+ limit=None, depth=None):
203 if stop_revision is None:
204 stop_revision = self.source.last_revision()
205 ret = []
206@@ -1307,7 +1307,7 @@
207 ret.append((None, v))
208 ret.append((None, stop_revision))
209 try:
210- self.interrepo.fetch_objects(ret, lossy=lossy, limit=limit)
211+ self.interrepo.fetch_objects(ret, lossy=lossy, limit=limit, depth=depth)
212 except NoPushSupport:
213 raise errors.NoRoundtrippingSupport(self.source, self.target)
214
215
216=== modified file 'breezy/git/dir.py'
217--- breezy/git/dir.py 2018-11-16 11:37:47 +0000
218+++ breezy/git/dir.py 2019-02-11 01:23:45 +0000
219@@ -149,7 +149,7 @@
220 def sprout(self, url, revision_id=None, force_new_repo=False,
221 recurse='down', possible_transports=None,
222 accelerator_tree=None, hardlink=False, stacked=False,
223- source_branch=None, create_tree_if_local=True):
224+ source_branch=None, create_tree_if_local=True, depth=None):
225 from ..repository import InterRepository
226 from ..transport.local import LocalTransport
227 from ..transport import get_transport
228@@ -178,7 +178,8 @@
229 else:
230 determine_wants = interrepo.determine_wants_all
231 interrepo.fetch_objects(determine_wants=determine_wants,
232- mapping=source_branch.mapping)
233+ mapping=source_branch.mapping,
234+ depth=depth)
235 result_branch = source_branch.sprout(
236 result, revision_id=revision_id, repository=result_repo)
237 if (create_tree_if_local and
238
239=== modified file 'breezy/git/interrepo.py'
240--- breezy/git/interrepo.py 2018-11-16 23:15:15 +0000
241+++ breezy/git/interrepo.py 2019-02-11 01:23:45 +0000
242@@ -39,6 +39,7 @@
243
244 from ..errors import (
245 DivergedBranches,
246+ FetchDepthUnsupported,
247 FetchLimitUnsupported,
248 InvalidRevisionId,
249 LossyPushToSameVCS,
250@@ -53,6 +54,7 @@
251 )
252 from ..sixish import (
253 viewitems,
254+ viewkeys,
255 viewvalues,
256 )
257 from .. import (
258@@ -124,9 +126,9 @@
259 """
260 raise NotImplementedError(self.fetch_refs)
261
262- def search_missing_revision_ids(self,
263- find_ghosts=True, revision_ids=None,
264- if_present_ids=None, limit=None):
265+ def search_missing_revision_ids(
266+ self, find_ghosts=True, revision_ids=None, if_present_ids=None,
267+ limit=None, depth=None):
268 if limit is not None:
269 raise FetchLimitUnsupported(self)
270 git_shas = []
271@@ -190,7 +192,7 @@
272 return False
273 return self._commit_needs_fetching(sha_id)
274
275- def missing_revisions(self, stop_revisions):
276+ def _missing_revisions(self, stop_revisions, depth=None):
277 """Find the revisions that are missing from the target repository.
278
279 :param stop_revisions: Revisions to check for (tuples with
280@@ -204,32 +206,35 @@
281 for (sha1, revid) in stop_revisions:
282 if sha1 is not None and revid is not None:
283 revid_sha_map[revid] = sha1
284- stop_revids.append(revid)
285+ stop_revids.append((revid, 1))
286 elif sha1 is not None:
287 if self._commit_needs_fetching(sha1):
288 for (kind, (revid, tree_sha, verifiers)) in self.source_store.lookup_git_sha(sha1):
289 revid_sha_map[revid] = sha1
290- stop_revids.append(revid)
291+ stop_revids.append((revid, 1))
292 else:
293 if revid is None:
294 raise AssertionError
295- stop_revids.append(revid)
296+ stop_revids.append((revid, 1))
297 missing = set()
298 graph = self.source.get_graph()
299 pb = ui.ui_factory.nested_progress_bar()
300 try:
301 while stop_revids:
302- new_stop_revids = []
303- for revid in stop_revids:
304+ new_stop_revids = {}
305+ for revid, revid_depth in stop_revids:
306 sha1 = revid_sha_map.get(revid)
307 if (revid not in missing and
308 self._revision_needs_fetching(sha1, revid)):
309 missing.add(revid)
310- new_stop_revids.append(revid)
311+ if depth is None or revid_depth < depth:
312+ new_stop_revids[revid] = revid_depth
313 stop_revids = set()
314- parent_map = graph.get_parent_map(new_stop_revids)
315- for parent_revids in viewvalues(parent_map):
316- stop_revids.update(parent_revids)
317+ parent_map = graph.get_parent_map(viewkeys(new_stop_revids))
318+ for revid, parent_revids in viewitems(parent_map):
319+ stop_revids.update(
320+ [(parent_revid, new_stop_revids[revid] + 1)
321+ for parent_revid in parent_revids])
322 pb.update("determining revisions to fetch", len(missing))
323 finally:
324 pb.finished()
325@@ -292,7 +297,7 @@
326 result_refs[name] = (gitid, revid if not lossy else self.mapping.revision_id_foreign_to_bzr(gitid))
327 return revidmap, old_refs, result_refs
328
329- def fetch_objects(self, revs, lossy, limit=None):
330+ def fetch_objects(self, revs, lossy, limit=None, depth=None):
331 if not lossy and not self.mapping.roundtripping:
332 for git_sha, bzr_revid in revs:
333 if (bzr_revid is not None and
334@@ -300,7 +305,7 @@
335 raise NoPushSupport(self.source, self.target, self.mapping,
336 bzr_revid)
337 with self.source_store.lock_read():
338- todo = list(self.missing_revisions(revs))[:limit]
339+ todo = list(self._missing_revisions(revs, depth=depth))[:limit]
340 revidmap = {}
341 pb = ui.ui_factory.nested_progress_bar()
342 try:
343@@ -325,7 +330,7 @@
344 pb.finished()
345
346 def fetch(self, revision_id=None, pb=None, find_ghosts=False,
347- fetch_spec=None, mapped_refs=None):
348+ fetch_spec=None, mapped_refs=None, depth=None):
349 if mapped_refs is not None:
350 stop_revisions = mapped_refs
351 elif revision_id is not None:
352@@ -342,7 +347,7 @@
353 for revid in self.source.all_revision_ids()]
354 self._warn_slow()
355 try:
356- self.fetch_objects(stop_revisions, lossy=False)
357+ self.fetch_objects(stop_revisions, lossy=False, depth=depth)
358 except NoPushSupport:
359 raise NoRoundtrippingSupport(self.source, self.target)
360
361@@ -501,7 +506,8 @@
362 'Fetching from Git to Bazaar repository. '
363 'For better performance, fetch into a Git repository.')
364
365- def fetch_objects(self, determine_wants, mapping, limit=None, lossy=False):
366+ def fetch_objects(self, determine_wants, mapping, limit=None, lossy=False,
367+ depth=None):
368 """Fetch objects from a remote server.
369
370 :param determine_wants: determine_wants callback
371@@ -522,7 +528,8 @@
372 return self.get_determine_wants_heads(wants, include_tags=include_tags)
373
374 def fetch(self, revision_id=None, find_ghosts=False,
375- mapping=None, fetch_spec=None, include_tags=False):
376+ mapping=None, fetch_spec=None, include_tags=False,
377+ depth=None):
378 if mapping is None:
379 mapping = self.source.get_mapping()
380 if revision_id is not None:
381@@ -543,8 +550,8 @@
382 else:
383 determine_wants = self.determine_wants_all
384
385- (pack_hint, _, remote_refs) = self.fetch_objects(determine_wants,
386- mapping)
387+ (pack_hint, _, remote_refs) = self.fetch_objects(
388+ determine_wants, mapping, depth=depth)
389 if pack_hint is not None and self.target._format.pack_compresses:
390 self.target.pack(hint=pack_hint)
391 return remote_refs
392@@ -563,7 +570,8 @@
393 all_parents.update(values)
394 return set(all_revs) - all_parents
395
396- def fetch_objects(self, determine_wants, mapping, limit=None, lossy=False):
397+ def fetch_objects(self, determine_wants, mapping, limit=None, lossy=False,
398+ depth=None):
399 """See `InterGitNonGitRepository`."""
400 self._warn_slow()
401 store = get_object_store(self.target, mapping)
402@@ -577,7 +585,8 @@
403 pb = ui.ui_factory.nested_progress_bar()
404 try:
405 objects_iter = self.source.fetch_objects(
406- wants_recorder, graph_walker, store.get_raw)
407+ wants_recorder, graph_walker, store.get_raw,
408+ depth=depth)
409 trace.mutter("Importing %d new revisions",
410 len(wants_recorder.wants))
411 (pack_hint, last_rev) = import_git_objects(
412@@ -605,22 +614,22 @@
413 """InterRepository that copies revisions from a local Git into a non-Git
414 repository."""
415
416- def fetch_objects(self, determine_wants, mapping, limit=None, lossy=False):
417+ def fetch_objects(self, determine_wants, mapping, limit=None, lossy=False,
418+ depth=None):
419 """See `InterGitNonGitRepository`."""
420+ if depth is not None:
421+ raise FetchDepthUnsupported(self)
422 self._warn_slow()
423 remote_refs = self.source.controldir.get_refs_container().as_dict()
424 wants = determine_wants(remote_refs)
425 pb = ui.ui_factory.nested_progress_bar()
426 target_git_object_retriever = get_object_store(self.target, mapping)
427 try:
428- target_git_object_retriever.lock_write()
429- try:
430+ with target_git_object_retriever.lock_write():
431 (pack_hint, last_rev) = import_git_objects(
432 self.target, mapping, self.source._git.object_store,
433 target_git_object_retriever, wants, pb, limit)
434 return (pack_hint, last_rev, remote_refs)
435- finally:
436- target_git_object_retriever.unlock()
437 finally:
438 pb.finished()
439
440@@ -660,7 +669,7 @@
441 return None, old_refs, new_refs
442
443 def fetch_objects(self, determine_wants, mapping=None, limit=None,
444- lossy=False):
445+ lossy=False, depth=None):
446 raise NotImplementedError(self.fetch_objects)
447
448 def _target_has_shas(self, shas):
449@@ -669,7 +678,7 @@
450
451 def fetch(self, revision_id=None, find_ghosts=False,
452 mapping=None, fetch_spec=None, branches=None, limit=None,
453- include_tags=False):
454+ include_tags=False, depth=None):
455 if mapping is None:
456 mapping = self.source.get_mapping()
457 if revision_id is not None:
458@@ -698,7 +707,7 @@
459 determine_wants = self.get_determine_wants_revids(
460 args, include_tags=include_tags)
461 wants_recorder = DetermineWantsRecorder(determine_wants)
462- self.fetch_objects(wants_recorder, mapping, limit=limit)
463+ self.fetch_objects(wants_recorder, mapping, limit=limit, depth=depth)
464 return wants_recorder.remote_refs
465
466 def get_determine_wants_revids(self, revids, include_tags=False):
467@@ -720,7 +729,7 @@
468 class InterLocalGitLocalGitRepository(InterGitGitRepository):
469
470 def fetch_objects(self, determine_wants, mapping=None, limit=None,
471- lossy=False):
472+ lossy=False, depth=None):
473 if lossy:
474 raise LossyPushToSameVCS(self.source, self.target)
475 if limit is not None:
476@@ -731,7 +740,7 @@
477 try:
478 refs = self.source._git.fetch(
479 self.target._git, determine_wants,
480- progress=progress)
481+ progress=progress, depth=depth)
482 finally:
483 pb.finished()
484 return (None, None, refs)
485@@ -746,7 +755,7 @@
486 class InterRemoteGitLocalGitRepository(InterGitGitRepository):
487
488 def fetch_objects(self, determine_wants, mapping=None, limit=None,
489- lossy=False):
490+ lossy=False, depth=None):
491 if lossy:
492 raise LossyPushToSameVCS(self.source, self.target)
493 if limit is not None:
494@@ -768,10 +777,12 @@
495 else:
496 f, commit, abort = self.target._git.object_store.add_pack()
497 try:
498- refs = self.source.controldir.fetch_pack(
499- determine_wants, graphwalker, f.write)
500+ fetch_result = self.source.controldir.fetch_pack(
501+ determine_wants, graphwalker, f.write, depth=depth)
502 commit()
503- return (None, None, refs)
504+ self.target._git.update_shallow(
505+ fetch_result.new_shallow, fetch_result.new_unshallow)
506+ return (None, None, fetch_result.refs)
507 except BaseException:
508 abort()
509 raise
510
511=== modified file 'breezy/git/remote.py'
512--- breezy/git/remote.py 2019-02-01 16:56:56 +0000
513+++ breezy/git/remote.py 2019-02-11 01:23:45 +0000
514@@ -416,8 +416,8 @@
515 if pb is not None:
516 pb.finished()
517
518- def fetch_pack(self, determine_wants, graph_walker, pack_data,
519- progress=None):
520+ def fetch_pack(self, determine_wants, graph_walker, pack_data, progress=None,
521+ depth=None):
522 if progress is None:
523 pb = ui.ui_factory.nested_progress_bar()
524 progress = DefaultProgressReporter(pb).progress
525@@ -426,7 +426,7 @@
526 try:
527 result = self._client.fetch_pack(
528 self._client_path, determine_wants, graph_walker, pack_data,
529- progress)
530+ progress, depth=depth)
531 if result.refs is None:
532 result.refs = {}
533 self._refs = remote_refs_dict_to_container(
534@@ -539,9 +539,10 @@
535 def get_refs_container(self):
536 if self._refs is not None:
537 return self._refs
538- result = self.fetch_pack(lambda x: None, None,
539- lambda x: None,
540- lambda x: trace.mutter("git: %s" % x))
541+ result = self.fetch_pack(
542+ lambda x: None, None,
543+ lambda x: None, lambda x: trace.mutter("git: %s" % x),
544+ depth=None)
545 self._refs = remote_refs_dict_to_container(
546 result.refs, result.symrefs)
547 return self._refs
548@@ -841,19 +842,20 @@
549 return self.controldir.archive(*args, **kwargs)
550
551 def fetch_pack(self, determine_wants, graph_walker, pack_data,
552- progress=None):
553+ progress=None, depth=None):
554 return self.controldir.fetch_pack(
555- determine_wants, graph_walker, pack_data, progress)
556+ determine_wants, graph_walker, pack_data, progress, depth=depth)
557
558 def send_pack(self, get_changed_refs, generate_pack_data):
559 return self.controldir.send_pack(get_changed_refs, generate_pack_data)
560
561 def fetch_objects(self, determine_wants, graph_walker, resolve_ext_ref,
562- progress=None):
563+ progress=None, depth=None):
564 fd, path = tempfile.mkstemp(suffix=".pack")
565 try:
566- self.fetch_pack(determine_wants, graph_walker,
567- lambda x: os.write(fd, x), progress)
568+ self.fetch_pack(
569+ determine_wants, graph_walker,
570+ lambda x: os.write(fd, x), progress, depth=depth)
571 finally:
572 os.close(fd)
573 if os.path.getsize(path) == 0:
574
575=== modified file 'breezy/git/repository.py'
576--- breezy/git/repository.py 2018-11-30 12:39:04 +0000
577+++ breezy/git/repository.py 2019-02-11 01:23:45 +0000
578@@ -131,6 +131,7 @@
579 _serializer = None
580 vcs = foreign_vcs_git
581 chk_bytes = None
582+ supports_fetch_depth = True
583
584 def __init__(self, gitdir):
585 self._transport = gitdir.root_transport
586
587=== modified file 'breezy/git/tests/test_blackbox.py'
588--- breezy/git/tests/test_blackbox.py 2019-01-02 18:49:15 +0000
589+++ breezy/git/tests/test_blackbox.py 2019-02-11 01:23:45 +0000
590@@ -308,6 +308,7 @@
591 self.repo.stage("foo")
592 self.repo.do_commit(
593 b"message", committer=b"Somebody <user@example.com>",
594+ author=b"Somebody <user@example.com>",
595 commit_timestamp=1526330165, commit_timezone=0,
596 author_timestamp=1526330165, author_timezone=0,
597 merge_heads=[b'aa' * 20])
598
599=== modified file 'breezy/git/transportgit.py'
600--- breezy/git/transportgit.py 2018-11-11 14:23:06 +0000
601+++ breezy/git/transportgit.py 2019-02-11 01:23:45 +0000
602@@ -36,6 +36,7 @@
603 from dulwich.object_store import (
604 PackBasedObjectStore,
605 PACKDIR,
606+ read_packs_file,
607 )
608 from dulwich.pack import (
609 MemoryPackIndex,
610@@ -587,16 +588,41 @@
611 ret.append(l)
612 return ret
613
614- @property
615- def packs(self):
616- # FIXME: Never invalidates.
617- if not self._pack_cache:
618- self._update_pack_cache()
619- return self._pack_cache.values()
620-
621 def _update_pack_cache(self):
622- for pack in self._load_packs():
623- self._pack_cache[pack._basename] = pack
624+ pack_files = set()
625+ pack_dir_contents = self._pack_names()
626+ for name in pack_dir_contents:
627+ if name.startswith("pack-") and name.endswith(".pack"):
628+ # verify that idx exists first (otherwise the pack was not yet
629+ # fully written)
630+ idx_name = os.path.splitext(name)[0] + ".idx"
631+ if idx_name in pack_dir_contents:
632+ pack_files.add(os.path.splitext(name)[0])
633+
634+ new_packs = []
635+ for basename in pack_files:
636+ pack_name = basename + ".pack"
637+ if basename not in self._pack_cache:
638+ try:
639+ size = self.pack_transport.stat(pack_name).st_size
640+ except TransportNotPossible:
641+ f = self.pack_transport.get(pack_name)
642+ pd = PackData(pack_name, f)
643+ else:
644+ pd = PackData(
645+ pack_name, self.pack_transport.get(pack_name),
646+ size=size)
647+ idxname = basename + ".idx"
648+ idx = load_pack_index_file(
649+ idxname, self.pack_transport.get(idxname))
650+ pack = Pack.from_objects(pd, idx)
651+ pack._basename = basename
652+ self._pack_cache[basename] = pack
653+ new_packs.append(pack)
654+ # Remove disappeared pack files
655+ for f in set(self._pack_cache) - pack_files:
656+ self._pack_cache.pop(f).close()
657+ return new_packs
658
659 def _pack_names(self):
660 try:
661@@ -608,9 +634,6 @@
662 # Hmm, warn about running 'git update-server-info' ?
663 return iter([])
664 else:
665- # TODO(jelmer): Move to top-level after dulwich
666- # 0.19.7 is released.
667- from dulwich.object_store import read_packs_file
668 with f:
669 return read_packs_file(f)
670 except NoSuchFile:
671@@ -619,26 +642,10 @@
672 def _remove_pack(self, pack):
673 self.pack_transport.delete(os.path.basename(pack.index.path))
674 self.pack_transport.delete(pack.data.filename)
675-
676- def _load_packs(self):
677- ret = []
678- for name in self._pack_names():
679- if name.startswith("pack-") and name.endswith(".pack"):
680- try:
681- size = self.pack_transport.stat(name).st_size
682- except TransportNotPossible:
683- f = self.pack_transport.get(name)
684- pd = PackData(name, f)
685- else:
686- pd = PackData(name, self.pack_transport.get(name),
687- size=size)
688- idxname = name.replace(".pack", ".idx")
689- idx = load_pack_index_file(
690- idxname, self.pack_transport.get(idxname))
691- pack = Pack.from_objects(pd, idx)
692- pack._basename = idxname[:-4]
693- ret.append(pack)
694- return ret
695+ try:
696+ del self._pack_cache[os.path.basename(pack._basename)]
697+ except KeyError:
698+ pass
699
700 def _iter_loose_objects(self):
701 for base in self.transport.list_dir('.'):
702@@ -702,7 +709,7 @@
703 idx = load_pack_index_file(basename + ".idx", idxfile)
704 final_pack = Pack.from_objects(p, idx)
705 final_pack._basename = basename
706- self._add_known_pack(basename, final_pack)
707+ self._add_cached_pack(basename, final_pack)
708 return final_pack
709
710 def move_in_thin_pack(self, f):
711@@ -735,8 +742,6 @@
712 write_pack_index_v2(idxfile, entries, data_sum)
713 finally:
714 idxfile.close()
715- # TODO(jelmer): Just add new pack to the cache
716- self._flush_pack_cache()
717
718 def add_pack(self):
719 """Add a new pack to this object store.
720
721=== modified file 'breezy/plugins/weave_fmt/bzrdir.py'
722--- breezy/plugins/weave_fmt/bzrdir.py 2018-11-11 04:08:32 +0000
723+++ breezy/plugins/weave_fmt/bzrdir.py 2019-02-11 01:23:45 +0000
724@@ -895,8 +895,10 @@
725 def sprout(self, url, revision_id=None, force_new_repo=False,
726 possible_transports=None, accelerator_tree=None,
727 hardlink=False, stacked=False, create_tree_if_local=True,
728- source_branch=None):
729+ source_branch=None, depth=None):
730 """See ControlDir.sprout()."""
731+ if depth is not None:
732+ raise errors.FetchDepthUnsupported(self)
733 if source_branch is not None:
734 my_branch = self.open_branch()
735 if source_branch.base != my_branch.base:
736
737=== modified file 'breezy/repository.py'
738--- breezy/repository.py 2019-02-01 16:56:56 +0000
739+++ breezy/repository.py 2019-02-11 01:23:45 +0000
740@@ -260,6 +260,10 @@
741 # items in the tree, or just bulk fetching/pushing of data?
742 supports_random_access = True
743
744+ # Does this repository implementation support fetching with
745+ # a certain graph depth?
746+ supports_fetch_depth = False
747+
748 def abort_write_group(self, suppress_errors=False):
749 """Commit the contents accrued within the current write group.
750
751
752=== modified file 'breezy/tests/per_controldir/test_controldir.py'
753--- breezy/tests/per_controldir/test_controldir.py 2018-11-11 04:08:32 +0000
754+++ breezy/tests/per_controldir/test_controldir.py 2019-02-11 01:23:45 +0000
755@@ -86,7 +86,7 @@
756
757 def sproutOrSkip(self, from_bzrdir, to_url, revision_id=None,
758 force_new_repo=False, accelerator_tree=None,
759- create_tree_if_local=True):
760+ create_tree_if_local=True, depth=None):
761 """Sprout from_bzrdir into to_url, or raise TestSkipped.
762
763 A simple wrapper for from_bzrdir.sprout that translates NotLocalUrl into
764@@ -99,7 +99,8 @@
765 force_new_repo=force_new_repo,
766 possible_transports=[to_transport],
767 accelerator_tree=accelerator_tree,
768- create_tree_if_local=create_tree_if_local)
769+ create_tree_if_local=create_tree_if_local,
770+ depth=depth)
771 return target
772
773 def test_uninitializable(self):
774@@ -1050,6 +1051,20 @@
775 self.addCleanup(repo.lock_read().unlock)
776 self.assertEqual(None, repo.get_parent_map([rev1]).get(rev1))
777
778+ def test_sprout_with_depth(self):
779+ tree = self.make_branch_and_tree('source')
780+ self.build_tree(['source/foo'])
781+ tree.add('foo')
782+ tree.commit('revision 1')
783+ rev2 = tree.commit('revision 2', allow_pointless=True)
784+ dir = tree.controldir
785+ try:
786+ target = self.sproutOrSkip(dir, self.get_url('target'), depth=1)
787+ except errors.FetchDepthUnsupported:
788+ self.assertFalse(tree.branch.repository.supports_fetch_depth)
789+ else:
790+ self.assertEqual({rev2}, target.open_repository().all_revision_ids())
791+
792 def test_format_initialize_find_open(self):
793 # loopback test to check the current format initializes to itself.
794 if not self.bzrdir_format.is_initializable():
795
796=== modified file 'breezy/tests/per_interbranch/test_fetch.py'
797--- breezy/tests/per_interbranch/test_fetch.py 2018-11-11 04:08:32 +0000
798+++ breezy/tests/per_interbranch/test_fetch.py 2019-02-11 01:23:45 +0000
799@@ -16,7 +16,11 @@
800
801 """Tests for InterBranch.fetch."""
802
803-from breezy.errors import FetchLimitUnsupported, NoRoundtrippingSupport
804+from breezy.errors import (
805+ FetchDepthUnsupported,
806+ FetchLimitUnsupported,
807+ NoRoundtrippingSupport,
808+ )
809 from breezy.revision import NULL_REVISION
810 from breezy.tests import TestNotApplicable
811 from breezy.tests.per_interbranch import (
812@@ -113,3 +117,39 @@
813 self.assertEqual(
814 {rev1, rev2},
815 b2.repository.has_revisions([rev1, rev2, rev3]))
816+
817+ def test_fetch_revisions_depth(self):
818+ """Test fetch-revision operation."""
819+ builder = self.make_branch_builder(
820+ 'b1', format=self.branch_format_from._matchingcontroldir)
821+ builder.start_series()
822+ rev1 = builder.build_commit()
823+ rev2 = builder.build_commit()
824+ rev3 = builder.build_commit()
825+ builder.finish_series()
826+ b1 = builder.get_branch()
827+ b2 = self.make_to_branch('b2')
828+ try:
829+ if b2.repository.supports_fetch_depth:
830+ b2.fetch(b1, depth=1)
831+ else:
832+ self.assertRaises(FetchDepthUnsupported, b2.fetch, b1, depth=1)
833+ raise TestNotApplicable(
834+ 'interbranch does not support fetch depths')
835+ except NoRoundtrippingSupport:
836+ raise TestNotApplicable(
837+ 'lossless cross-vcs fetch %r to %r not supported' %
838+ (b1, b2))
839+
840+ self.assertEqual(
841+ {rev3},
842+ b2.repository.has_revisions([rev1, rev2, rev3]))
843+
844+ # fetch does not update the last revision
845+ self.assertEqual(NULL_REVISION, b2.last_revision())
846+
847+ # Incrementally fetch one more
848+ b2.fetch(b1, depth=2)
849+
850+ self.assertEqual(
851+ {rev2, rev3}, b2.repository.has_revisions([rev1, rev2, rev3]))
852
853=== modified file 'breezy/tests/per_repository/test_repository.py'
854--- breezy/tests/per_repository/test_repository.py 2019-02-01 16:56:56 +0000
855+++ breezy/tests/per_repository/test_repository.py 2019-02-11 01:23:45 +0000
856@@ -125,6 +125,9 @@
857 def test_attribute_format_supports_random_access(self):
858 self.assertRepositoryAttribute('supports_random_access', (True, False))
859
860+ def test_attribute_format_supports_fetch_depth(self):
861+ self.assertRepositoryAttribute('supports_fetch_depth', (True, False))
862+
863 def test_attribute_format_supports_setting_revision_ids(self):
864 self.assertFormatAttribute('supports_setting_revision_ids',
865 (True, False))
866
867=== modified file 'breezy/tests/test_foreign.py'
868--- breezy/tests/test_foreign.py 2018-11-25 20:44:56 +0000
869+++ breezy/tests/test_foreign.py 2019-02-11 01:23:45 +0000
870@@ -339,14 +339,15 @@
871 def sprout(self, url, revision_id=None, force_new_repo=False,
872 recurse='down', possible_transports=None,
873 accelerator_tree=None, hardlink=False, stacked=False,
874- source_branch=None):
875+ source_branch=None, depth=None):
876 # dirstate doesn't cope with accelerator_trees well
877 # that have a different control dir
878 return super(DummyForeignVcsDir, self).sprout(
879 url=url,
880 revision_id=revision_id, force_new_repo=force_new_repo,
881 recurse=recurse, possible_transports=possible_transports,
882- hardlink=hardlink, stacked=stacked, source_branch=source_branch)
883+ hardlink=hardlink, stacked=stacked, source_branch=source_branch,
884+ depth=depth)
885
886
887 def register_dummy_foreign_for_test(testcase):
888
889=== modified file 'setup.py'
890--- setup.py 2019-01-24 00:23:01 +0000
891+++ setup.py 2019-02-11 01:23:45 +0000
892@@ -60,7 +60,7 @@
893 # Technically, Breezy works without these two dependencies too. But there's
894 # no way to enable them by default and let users opt out.
895 'fastimport>=0.9.8',
896- 'dulwich>=0.19.1',
897+ 'dulwich>=0.19.11',
898 ],
899 'extras_require': {
900 'fastimport': [],
