Merge lp:~cjwatson/launchpad/git-ref-tolerate-slow-commit-info into lp:launchpad

Proposed by Colin Watson
Status: Merged
Merged at revision: 18847
Proposed branch: lp:~cjwatson/launchpad/git-ref-tolerate-slow-commit-info
Merge into: lp:launchpad
Diff against target: 286 lines (+101/-22)
5 files modified
lib/lp/code/browser/gitref.py (+3/-1)
lib/lp/code/browser/tests/test_gitref.py (+45/-4)
lib/lp/code/interfaces/gitref.py (+6/-2)
lib/lp/code/model/gitref.py (+42/-15)
lib/lp/code/templates/git-macros.pt (+5/-0)
To merge this branch: bzr merge lp:~cjwatson/launchpad/git-ref-tolerate-slow-commit-info
Reviewer Review Type Date Requested Status
William Grant code Approve
Review via email: mp+359827@code.launchpad.net

Commit message

Tolerate backend timeouts while fetching commit information for GitRef:+index.

To post a comment you must log in.
Revision history for this message
William Grant (wgrant) :
review: Approve (code)
Revision history for this message
Colin Watson (cjwatson) :

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
=== modified file 'lib/lp/code/browser/gitref.py'
--- lib/lp/code/browser/gitref.py 2018-08-30 14:54:37 +0000
+++ lib/lp/code/browser/gitref.py 2018-12-20 11:14:34 +0000
@@ -49,6 +49,7 @@
 from lp.code.interfaces.gitrepository import IGitRepositorySet
 from lp.services.helpers import english_list
 from lp.services.propertycache import cachedproperty
+from lp.services.scripts import log
 from lp.services.webapp import (
     canonical_url,
     ContextMenu,
@@ -180,7 +181,8 @@
     @cachedproperty
     def commit_infos(self):
         return self.context.getLatestCommits(
-            extended_details=True, user=self.user)
+            extended_details=True, user=self.user, handle_timeout=True,
+            logger=log)

     @property
     def recipes_link(self):

=== modified file 'lib/lp/code/browser/tests/test_gitref.py'
--- lib/lp/code/browser/tests/test_gitref.py 2018-09-27 13:50:06 +0000
+++ lib/lp/code/browser/tests/test_gitref.py 2018-12-20 11:14:34 +0000
@@ -11,10 +11,14 @@
 import hashlib
 import re

+from fixtures import FakeLogger
 import pytz
 import soupmatchers
 from storm.store import Store
-from testtools.matchers import Equals
+from testtools.matchers import (
+    Equals,
+    Not,
+    )
 from zope.component import getUtility
 from zope.security.proxy import removeSecurityProxy

@@ -23,6 +27,7 @@
 from lp.code.tests.helpers import GitHostingFixture
 from lp.services.beautifulsoup import BeautifulSoup
 from lp.services.job.runner import JobRunner
+from lp.services.timeout import TimeoutError
 from lp.services.utils import seconds_since_epoch
 from lp.services.webapp.publisher import canonical_url
 from lp.testing import (
@@ -82,6 +87,14 @@
         canonical_url(ref))


+class MissingCommitsNote(soupmatchers.Tag):
+
+    def __init__(self):
+        super(MissingCommitsNote, self).__init__(
+            "missing commits note", "div",
+            text="Some recent commit information could not be fetched.")
+
+
 class TestGitRefView(BrowserTestCase):

     layer = LaunchpadFunctionalLayer
@@ -186,11 +199,12 @@
         self.hosting_fixture.getLog.result = list(reversed(log))
         self.scanRef(ref, log[-1])
         view = create_initialized_view(ref, "+index")
+        contents = view()
         expected_texts = list(reversed([
             "%.7s...\nby\n%s\non 2015-01-%02d" % (
                 log[i]["sha1"], log[i]["author"]["name"], i + 1)
             for i in range(5)]))
-        details = find_tags_by_class(view(), "commit-details")
+        details = find_tags_by_class(contents, "commit-details")
         self.assertEqual(
             expected_texts, [extract_text(detail) for detail in details])
         expected_urls = list(reversed([
@@ -199,6 +213,8 @@
             for i in range(5)]))
         self.assertEqual(
             expected_urls, [detail.a["href"] for detail in details])
+        self.assertThat(
+            contents, Not(soupmatchers.HTMLContains(MissingCommitsNote())))

     def test_recent_commits_with_merge(self):
         [ref] = self.factory.makeGitRefs(paths=["refs/heads/branch"])
@@ -211,7 +227,8 @@
         self.scanRef(mp.merge_source, merged_tip)
         mp.markAsMerged(merged_revision_id=log[0]["sha1"])
         view = create_initialized_view(ref, "+index")
-        soup = BeautifulSoup(view())
+        contents = view()
+        soup = BeautifulSoup(contents)
         details = soup.findAll(
             attrs={"class": re.compile(r"commit-details|commit-comment")})
         expected_texts = list(reversed([
@@ -225,6 +242,8 @@
         self.assertEqual(
             [canonical_url(mp), canonical_url(mp.merge_source)],
             [link["href"] for link in details[5].findAll("a")])
+        self.assertThat(
+            contents, Not(soupmatchers.HTMLContains(MissingCommitsNote())))

     def test_recent_commits_with_merge_from_deleted_ref(self):
         [ref] = self.factory.makeGitRefs(paths=["refs/heads/branch"])
@@ -238,7 +257,8 @@
         mp.markAsMerged(merged_revision_id=log[0]["sha1"])
         mp.source_git_repository.removeRefs([mp.source_git_path])
         view = create_initialized_view(ref, "+index")
-        soup = BeautifulSoup(view())
+        contents = view()
+        soup = BeautifulSoup(contents)
         details = soup.findAll(
             attrs={"class": re.compile(r"commit-details|commit-comment")})
         expected_texts = list(reversed([
@@ -252,6 +272,8 @@
         self.assertEqual(
             [canonical_url(mp)],
             [link["href"] for link in details[5].findAll("a")])
+        self.assertThat(
+            contents, Not(soupmatchers.HTMLContains(MissingCommitsNote())))

     def _test_all_commits_link(self, branch_name, encoded_branch_name=None):
         if encoded_branch_name is None:
@@ -312,3 +334,22 @@
         with StormStatementRecorder() as recorder:
             view.landing_targets
         self.assertThat(recorder, HasQueryCount(Equals(12)))
+
+    def test_timeout(self):
+        # The page renders even if fetching commits times out.
+        self.useFixture(FakeLogger())
+        [ref] = self.factory.makeGitRefs()
+        log = self.makeCommitLog()
+        self.scanRef(ref, log[-1])
+        self.hosting_fixture.getLog.failure = TimeoutError
+        view = create_initialized_view(ref, "+index")
+        contents = view()
+        soup = BeautifulSoup(contents)
+        details = soup.findAll(
+            attrs={"class": re.compile(r"commit-details|commit-comment")})
+        expected_text = "%.7s...\nby\n%s\non 2015-01-%02d" % (
+            log[-1]["sha1"], log[-1]["author"]["name"], len(log))
+        self.assertEqual(
+            [expected_text], [extract_text(detail) for detail in details])
+        self.assertThat(
+            contents, soupmatchers.HTMLContains(MissingCommitsNote()))

=== modified file 'lib/lp/code/interfaces/gitref.py'
--- lib/lp/code/interfaces/gitref.py 2018-10-25 17:52:43 +0000
+++ lib/lp/code/interfaces/gitref.py 2018-12-20 11:14:34 +0000
@@ -364,7 +364,8 @@
             "yet been scanned.")

     def getCommits(start, limit=None, stop=None, union_repository=None,
-                   start_date=None, end_date=None, logger=None):
+                   start_date=None, end_date=None, handle_timeout=False,
+                   logger=None):
         """Get commit information from this reference.

         :param start: The commit to start listing from.
@@ -374,11 +375,14 @@
             this repository as well (particularly useful with `stop`).
         :param start_date: If not None, ignore commits before this date.
         :param end_date: If not None, ignore commits after this date.
+        :param handle_timeout: If True and the backend request times out,
+            synthesise commit information from what we have in the database.
         :param logger: An optional logger.
         :return: An iterable of commit information dicts.
         """

-    def getLatestCommits(quantity=10):
+    def getLatestCommits(quantity=10, extended_details=False, user=None,
+                         handle_timeout=False, logger=None):
         """Return a specific number of the latest commits in this ref."""

     has_commits = Attribute("Whether this reference has any commits.")

=== modified file 'lib/lp/code/model/gitref.py'
--- lib/lp/code/model/gitref.py 2018-12-20 10:03:26 +0000
+++ lib/lp/code/model/gitref.py 2018-12-20 11:14:34 +0000
@@ -83,7 +83,11 @@
 from lp.services.database.stormbase import StormBase
 from lp.services.features import getFeatureFlag
 from lp.services.memcache.interfaces import IMemcacheClient
-from lp.services.timeout import urlfetch
+from lp.services.timeout import (
+    reduced_timeout,
+    TimeoutError,
+    urlfetch,
+    )
 from lp.services.utils import seconds_since_epoch
 from lp.services.webapp.interfaces import ILaunchBag

@@ -336,9 +340,10 @@
             try:
                 log = json.loads(cached_log)
             except Exception:
-                logger.exception(
-                    "Cannot load cached log information for %s:%s; "
-                    "deleting" % (path, start))
+                if logger is not None:
+                    logger.exception(
+                        "Cannot load cached log information for %s:%s; "
+                        "deleting" % (path, start))
                 memcache_client.delete(memcache_key)
         if log is None:
             if enable_hosting:
@@ -367,19 +372,38 @@
         return log

     def getCommits(self, start, limit=None, stop=None, union_repository=None,
-                   start_date=None, end_date=None, logger=None):
+                   start_date=None, end_date=None, handle_timeout=False,
+                   logger=None):
         # Circular import.
         from lp.code.model.gitrepository import parse_git_commits

-        log = self._getLog(
-            start, limit=limit, stop=stop, union_repository=union_repository,
-            logger=logger)
-        parsed_commits = parse_git_commits(log)
+        with reduced_timeout(1.0 if handle_timeout else 0.0):
+            try:
+                log = self._getLog(
+                    start, limit=limit, stop=stop,
+                    union_repository=union_repository, logger=logger)
+                commit_oids = [
+                    commit["sha1"] for commit in log if "sha1" in commit]
+                parsed_commits = parse_git_commits(log)
+            except TimeoutError:
+                if not handle_timeout:
+                    raise
+                if logger is not None:
+                    logger.exception(
+                        "Timeout fetching commits for %s" % self.identity)
+                # Synthesise a response based on what we have in the database.
+                commit_oids = [self.commit_sha1]
+                tip = {"sha1": self.commit_sha1, "synthetic": True}
+                optional_fields = (
+                    "author", "author_date", "committer", "committer_date",
+                    "commit_message")
+                for field in optional_fields:
+                    if getattr(self, field) is not None:
+                        tip[field] = getattr(self, field)
+                parsed_commits = {self.commit_sha1: tip}
         commits = []
-        for commit in log:
-            if "sha1" not in commit:
-                continue
-            parsed_commit = parsed_commits[commit["sha1"]]
+        for commit_oid in commit_oids:
+            parsed_commit = parsed_commits[commit_oid]
             author_date = parsed_commit.get("author_date")
             if start_date is not None:
                 if author_date is None or author_date < start_date:
@@ -390,8 +414,11 @@
                 commits.append(parsed_commit)
         return commits

-    def getLatestCommits(self, quantity=10, extended_details=False, user=None):
-        commits = self.getCommits(self.commit_sha1, limit=quantity)
+    def getLatestCommits(self, quantity=10, extended_details=False, user=None,
+                         handle_timeout=False, logger=None):
+        commits = self.getCommits(
+            self.commit_sha1, limit=quantity, handle_timeout=handle_timeout,
+            logger=logger)
         if extended_details:
             # XXX cjwatson 2016-05-09: Add support for linked bugtasks once
             # those are supported for Git.

=== modified file 'lib/lp/code/templates/git-macros.pt'
--- lib/lp/code/templates/git-macros.pt 2018-08-24 16:52:27 +0000
+++ lib/lp/code/templates/git-macros.pt 2018-12-20 11:14:34 +0000
@@ -135,6 +135,11 @@
     </tal:diff-expander>
   </tal:ajax-revision-diffs>
   </dl>
+  <tal:synthetic
+      condition="python: any(commit_info.get('synthetic')
+                             for commit_info in commit_infos)">
+    <div>Some recent commit information could not be fetched.</div>
+  </tal:synthetic>

</metal:ref-commits>
139</metal:ref-commits>144</metal:ref-commits>
140145