Merge lp:~cjwatson/launchpad/git-ref-tolerate-slow-commit-info into lp:launchpad
- git-ref-tolerate-slow-commit-info
- Merge into devel
Proposed by
Colin Watson
Status: Merged
Merged at revision: 18847
Proposed branch: lp:~cjwatson/launchpad/git-ref-tolerate-slow-commit-info
Merge into: lp:launchpad
Diff against target: 286 lines (+101/-22), 5 files modified:
  lib/lp/code/browser/gitref.py (+3/-1)
  lib/lp/code/browser/tests/test_gitref.py (+45/-4)
  lib/lp/code/interfaces/gitref.py (+6/-2)
  lib/lp/code/model/gitref.py (+42/-15)
  lib/lp/code/templates/git-macros.pt (+5/-0)
To merge this branch: bzr merge lp:~cjwatson/launchpad/git-ref-tolerate-slow-commit-info
Related bugs: (none)
Reviewer: William Grant — Review type: code — Status: Approve
Review via email: mp+359827@code.launchpad.net
Commit message
Tolerate backend timeouts while fetching commit information for GitRef:+index.
Description of the change
To post a comment you must log in.
Revision history for this message
William Grant (wgrant):
review:
Approve
(code)
Revision history for this message
Colin Watson (cjwatson):
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'lib/lp/code/browser/gitref.py' | |||
2 | --- lib/lp/code/browser/gitref.py 2018-08-30 14:54:37 +0000 | |||
3 | +++ lib/lp/code/browser/gitref.py 2018-12-20 11:14:34 +0000 | |||
4 | @@ -49,6 +49,7 @@ | |||
5 | 49 | from lp.code.interfaces.gitrepository import IGitRepositorySet | 49 | from lp.code.interfaces.gitrepository import IGitRepositorySet |
6 | 50 | from lp.services.helpers import english_list | 50 | from lp.services.helpers import english_list |
7 | 51 | from lp.services.propertycache import cachedproperty | 51 | from lp.services.propertycache import cachedproperty |
8 | 52 | from lp.services.scripts import log | ||
9 | 52 | from lp.services.webapp import ( | 53 | from lp.services.webapp import ( |
10 | 53 | canonical_url, | 54 | canonical_url, |
11 | 54 | ContextMenu, | 55 | ContextMenu, |
12 | @@ -180,7 +181,8 @@ | |||
13 | 180 | @cachedproperty | 181 | @cachedproperty |
14 | 181 | def commit_infos(self): | 182 | def commit_infos(self): |
15 | 182 | return self.context.getLatestCommits( | 183 | return self.context.getLatestCommits( |
17 | 183 | extended_details=True, user=self.user) | 184 | extended_details=True, user=self.user, handle_timeout=True, |
18 | 185 | logger=log) | ||
19 | 184 | 186 | ||
20 | 185 | @property | 187 | @property |
21 | 186 | def recipes_link(self): | 188 | def recipes_link(self): |
22 | 187 | 189 | ||
23 | === modified file 'lib/lp/code/browser/tests/test_gitref.py' | |||
24 | --- lib/lp/code/browser/tests/test_gitref.py 2018-09-27 13:50:06 +0000 | |||
25 | +++ lib/lp/code/browser/tests/test_gitref.py 2018-12-20 11:14:34 +0000 | |||
26 | @@ -11,10 +11,14 @@ | |||
27 | 11 | import hashlib | 11 | import hashlib |
28 | 12 | import re | 12 | import re |
29 | 13 | 13 | ||
30 | 14 | from fixtures import FakeLogger | ||
31 | 14 | import pytz | 15 | import pytz |
32 | 15 | import soupmatchers | 16 | import soupmatchers |
33 | 16 | from storm.store import Store | 17 | from storm.store import Store |
35 | 17 | from testtools.matchers import Equals | 18 | from testtools.matchers import ( |
36 | 19 | Equals, | ||
37 | 20 | Not, | ||
38 | 21 | ) | ||
39 | 18 | from zope.component import getUtility | 22 | from zope.component import getUtility |
40 | 19 | from zope.security.proxy import removeSecurityProxy | 23 | from zope.security.proxy import removeSecurityProxy |
41 | 20 | 24 | ||
42 | @@ -23,6 +27,7 @@ | |||
43 | 23 | from lp.code.tests.helpers import GitHostingFixture | 27 | from lp.code.tests.helpers import GitHostingFixture |
44 | 24 | from lp.services.beautifulsoup import BeautifulSoup | 28 | from lp.services.beautifulsoup import BeautifulSoup |
45 | 25 | from lp.services.job.runner import JobRunner | 29 | from lp.services.job.runner import JobRunner |
46 | 30 | from lp.services.timeout import TimeoutError | ||
47 | 26 | from lp.services.utils import seconds_since_epoch | 31 | from lp.services.utils import seconds_since_epoch |
48 | 27 | from lp.services.webapp.publisher import canonical_url | 32 | from lp.services.webapp.publisher import canonical_url |
49 | 28 | from lp.testing import ( | 33 | from lp.testing import ( |
50 | @@ -82,6 +87,14 @@ | |||
51 | 82 | canonical_url(ref)) | 87 | canonical_url(ref)) |
52 | 83 | 88 | ||
53 | 84 | 89 | ||
54 | 90 | class MissingCommitsNote(soupmatchers.Tag): | ||
55 | 91 | |||
56 | 92 | def __init__(self): | ||
57 | 93 | super(MissingCommitsNote, self).__init__( | ||
58 | 94 | "missing commits note", "div", | ||
59 | 95 | text="Some recent commit information could not be fetched.") | ||
60 | 96 | |||
61 | 97 | |||
62 | 85 | class TestGitRefView(BrowserTestCase): | 98 | class TestGitRefView(BrowserTestCase): |
63 | 86 | 99 | ||
64 | 87 | layer = LaunchpadFunctionalLayer | 100 | layer = LaunchpadFunctionalLayer |
65 | @@ -186,11 +199,12 @@ | |||
66 | 186 | self.hosting_fixture.getLog.result = list(reversed(log)) | 199 | self.hosting_fixture.getLog.result = list(reversed(log)) |
67 | 187 | self.scanRef(ref, log[-1]) | 200 | self.scanRef(ref, log[-1]) |
68 | 188 | view = create_initialized_view(ref, "+index") | 201 | view = create_initialized_view(ref, "+index") |
69 | 202 | contents = view() | ||
70 | 189 | expected_texts = list(reversed([ | 203 | expected_texts = list(reversed([ |
71 | 190 | "%.7s...\nby\n%s\non 2015-01-%02d" % ( | 204 | "%.7s...\nby\n%s\non 2015-01-%02d" % ( |
72 | 191 | log[i]["sha1"], log[i]["author"]["name"], i + 1) | 205 | log[i]["sha1"], log[i]["author"]["name"], i + 1) |
73 | 192 | for i in range(5)])) | 206 | for i in range(5)])) |
75 | 193 | details = find_tags_by_class(view(), "commit-details") | 207 | details = find_tags_by_class(contents, "commit-details") |
76 | 194 | self.assertEqual( | 208 | self.assertEqual( |
77 | 195 | expected_texts, [extract_text(detail) for detail in details]) | 209 | expected_texts, [extract_text(detail) for detail in details]) |
78 | 196 | expected_urls = list(reversed([ | 210 | expected_urls = list(reversed([ |
79 | @@ -199,6 +213,8 @@ | |||
80 | 199 | for i in range(5)])) | 213 | for i in range(5)])) |
81 | 200 | self.assertEqual( | 214 | self.assertEqual( |
82 | 201 | expected_urls, [detail.a["href"] for detail in details]) | 215 | expected_urls, [detail.a["href"] for detail in details]) |
83 | 216 | self.assertThat( | ||
84 | 217 | contents, Not(soupmatchers.HTMLContains(MissingCommitsNote()))) | ||
85 | 202 | 218 | ||
86 | 203 | def test_recent_commits_with_merge(self): | 219 | def test_recent_commits_with_merge(self): |
87 | 204 | [ref] = self.factory.makeGitRefs(paths=["refs/heads/branch"]) | 220 | [ref] = self.factory.makeGitRefs(paths=["refs/heads/branch"]) |
88 | @@ -211,7 +227,8 @@ | |||
89 | 211 | self.scanRef(mp.merge_source, merged_tip) | 227 | self.scanRef(mp.merge_source, merged_tip) |
90 | 212 | mp.markAsMerged(merged_revision_id=log[0]["sha1"]) | 228 | mp.markAsMerged(merged_revision_id=log[0]["sha1"]) |
91 | 213 | view = create_initialized_view(ref, "+index") | 229 | view = create_initialized_view(ref, "+index") |
93 | 214 | soup = BeautifulSoup(view()) | 230 | contents = view() |
94 | 231 | soup = BeautifulSoup(contents) | ||
95 | 215 | details = soup.findAll( | 232 | details = soup.findAll( |
96 | 216 | attrs={"class": re.compile(r"commit-details|commit-comment")}) | 233 | attrs={"class": re.compile(r"commit-details|commit-comment")}) |
97 | 217 | expected_texts = list(reversed([ | 234 | expected_texts = list(reversed([ |
98 | @@ -225,6 +242,8 @@ | |||
99 | 225 | self.assertEqual( | 242 | self.assertEqual( |
100 | 226 | [canonical_url(mp), canonical_url(mp.merge_source)], | 243 | [canonical_url(mp), canonical_url(mp.merge_source)], |
101 | 227 | [link["href"] for link in details[5].findAll("a")]) | 244 | [link["href"] for link in details[5].findAll("a")]) |
102 | 245 | self.assertThat( | ||
103 | 246 | contents, Not(soupmatchers.HTMLContains(MissingCommitsNote()))) | ||
104 | 228 | 247 | ||
105 | 229 | def test_recent_commits_with_merge_from_deleted_ref(self): | 248 | def test_recent_commits_with_merge_from_deleted_ref(self): |
106 | 230 | [ref] = self.factory.makeGitRefs(paths=["refs/heads/branch"]) | 249 | [ref] = self.factory.makeGitRefs(paths=["refs/heads/branch"]) |
107 | @@ -238,7 +257,8 @@ | |||
108 | 238 | mp.markAsMerged(merged_revision_id=log[0]["sha1"]) | 257 | mp.markAsMerged(merged_revision_id=log[0]["sha1"]) |
109 | 239 | mp.source_git_repository.removeRefs([mp.source_git_path]) | 258 | mp.source_git_repository.removeRefs([mp.source_git_path]) |
110 | 240 | view = create_initialized_view(ref, "+index") | 259 | view = create_initialized_view(ref, "+index") |
112 | 241 | soup = BeautifulSoup(view()) | 260 | contents = view() |
113 | 261 | soup = BeautifulSoup(contents) | ||
114 | 242 | details = soup.findAll( | 262 | details = soup.findAll( |
115 | 243 | attrs={"class": re.compile(r"commit-details|commit-comment")}) | 263 | attrs={"class": re.compile(r"commit-details|commit-comment")}) |
116 | 244 | expected_texts = list(reversed([ | 264 | expected_texts = list(reversed([ |
117 | @@ -252,6 +272,8 @@ | |||
118 | 252 | self.assertEqual( | 272 | self.assertEqual( |
119 | 253 | [canonical_url(mp)], | 273 | [canonical_url(mp)], |
120 | 254 | [link["href"] for link in details[5].findAll("a")]) | 274 | [link["href"] for link in details[5].findAll("a")]) |
121 | 275 | self.assertThat( | ||
122 | 276 | contents, Not(soupmatchers.HTMLContains(MissingCommitsNote()))) | ||
123 | 255 | 277 | ||
124 | 256 | def _test_all_commits_link(self, branch_name, encoded_branch_name=None): | 278 | def _test_all_commits_link(self, branch_name, encoded_branch_name=None): |
125 | 257 | if encoded_branch_name is None: | 279 | if encoded_branch_name is None: |
126 | @@ -312,3 +334,22 @@ | |||
127 | 312 | with StormStatementRecorder() as recorder: | 334 | with StormStatementRecorder() as recorder: |
128 | 313 | view.landing_targets | 335 | view.landing_targets |
129 | 314 | self.assertThat(recorder, HasQueryCount(Equals(12))) | 336 | self.assertThat(recorder, HasQueryCount(Equals(12))) |
130 | 337 | |||
131 | 338 | def test_timeout(self): | ||
132 | 339 | # The page renders even if fetching commits times out. | ||
133 | 340 | self.useFixture(FakeLogger()) | ||
134 | 341 | [ref] = self.factory.makeGitRefs() | ||
135 | 342 | log = self.makeCommitLog() | ||
136 | 343 | self.scanRef(ref, log[-1]) | ||
137 | 344 | self.hosting_fixture.getLog.failure = TimeoutError | ||
138 | 345 | view = create_initialized_view(ref, "+index") | ||
139 | 346 | contents = view() | ||
140 | 347 | soup = BeautifulSoup(contents) | ||
141 | 348 | details = soup.findAll( | ||
142 | 349 | attrs={"class": re.compile(r"commit-details|commit-comment")}) | ||
143 | 350 | expected_text = "%.7s...\nby\n%s\non 2015-01-%02d" % ( | ||
144 | 351 | log[-1]["sha1"], log[-1]["author"]["name"], len(log)) | ||
145 | 352 | self.assertEqual( | ||
146 | 353 | [expected_text], [extract_text(detail) for detail in details]) | ||
147 | 354 | self.assertThat( | ||
148 | 355 | contents, soupmatchers.HTMLContains(MissingCommitsNote())) | ||
149 | 315 | 356 | ||
150 | === modified file 'lib/lp/code/interfaces/gitref.py' | |||
151 | --- lib/lp/code/interfaces/gitref.py 2018-10-25 17:52:43 +0000 | |||
152 | +++ lib/lp/code/interfaces/gitref.py 2018-12-20 11:14:34 +0000 | |||
153 | @@ -364,7 +364,8 @@ | |||
154 | 364 | "yet been scanned.") | 364 | "yet been scanned.") |
155 | 365 | 365 | ||
156 | 366 | def getCommits(start, limit=None, stop=None, union_repository=None, | 366 | def getCommits(start, limit=None, stop=None, union_repository=None, |
158 | 367 | start_date=None, end_date=None, logger=None): | 367 | start_date=None, end_date=None, handle_timeout=False, |
159 | 368 | logger=None): | ||
160 | 368 | """Get commit information from this reference. | 369 | """Get commit information from this reference. |
161 | 369 | 370 | ||
162 | 370 | :param start: The commit to start listing from. | 371 | :param start: The commit to start listing from. |
163 | @@ -374,11 +375,14 @@ | |||
164 | 374 | this repository as well (particularly useful with `stop`). | 375 | this repository as well (particularly useful with `stop`). |
165 | 375 | :param start_date: If not None, ignore commits before this date. | 376 | :param start_date: If not None, ignore commits before this date. |
166 | 376 | :param end_date: If not None, ignore commits after this date. | 377 | :param end_date: If not None, ignore commits after this date. |
167 | 378 | :param handle_timeout: If True and the backend request times out, | ||
168 | 379 | synthesise commit information from what we have in the database. | ||
169 | 377 | :param logger: An optional logger. | 380 | :param logger: An optional logger. |
170 | 378 | :return: An iterable of commit information dicts. | 381 | :return: An iterable of commit information dicts. |
171 | 379 | """ | 382 | """ |
172 | 380 | 383 | ||
174 | 381 | def getLatestCommits(quantity=10): | 384 | def getLatestCommits(quantity=10, extended_details=False, user=None, |
175 | 385 | handle_timeout=False, logger=None): | ||
176 | 382 | """Return a specific number of the latest commits in this ref.""" | 386 | """Return a specific number of the latest commits in this ref.""" |
177 | 383 | 387 | ||
178 | 384 | has_commits = Attribute("Whether this reference has any commits.") | 388 | has_commits = Attribute("Whether this reference has any commits.") |
179 | 385 | 389 | ||
180 | === modified file 'lib/lp/code/model/gitref.py' | |||
181 | --- lib/lp/code/model/gitref.py 2018-12-20 10:03:26 +0000 | |||
182 | +++ lib/lp/code/model/gitref.py 2018-12-20 11:14:34 +0000 | |||
183 | @@ -83,7 +83,11 @@ | |||
184 | 83 | from lp.services.database.stormbase import StormBase | 83 | from lp.services.database.stormbase import StormBase |
185 | 84 | from lp.services.features import getFeatureFlag | 84 | from lp.services.features import getFeatureFlag |
186 | 85 | from lp.services.memcache.interfaces import IMemcacheClient | 85 | from lp.services.memcache.interfaces import IMemcacheClient |
188 | 86 | from lp.services.timeout import urlfetch | 86 | from lp.services.timeout import ( |
189 | 87 | reduced_timeout, | ||
190 | 88 | TimeoutError, | ||
191 | 89 | urlfetch, | ||
192 | 90 | ) | ||
193 | 87 | from lp.services.utils import seconds_since_epoch | 91 | from lp.services.utils import seconds_since_epoch |
194 | 88 | from lp.services.webapp.interfaces import ILaunchBag | 92 | from lp.services.webapp.interfaces import ILaunchBag |
195 | 89 | 93 | ||
196 | @@ -336,9 +340,10 @@ | |||
197 | 336 | try: | 340 | try: |
198 | 337 | log = json.loads(cached_log) | 341 | log = json.loads(cached_log) |
199 | 338 | except Exception: | 342 | except Exception: |
203 | 339 | logger.exception( | 343 | if logger is not None: |
204 | 340 | "Cannot load cached log information for %s:%s; " | 344 | logger.exception( |
205 | 341 | "deleting" % (path, start)) | 345 | "Cannot load cached log information for %s:%s; " |
206 | 346 | "deleting" % (path, start)) | ||
207 | 342 | memcache_client.delete(memcache_key) | 347 | memcache_client.delete(memcache_key) |
208 | 343 | if log is None: | 348 | if log is None: |
209 | 344 | if enable_hosting: | 349 | if enable_hosting: |
210 | @@ -367,19 +372,38 @@ | |||
211 | 367 | return log | 372 | return log |
212 | 368 | 373 | ||
213 | 369 | def getCommits(self, start, limit=None, stop=None, union_repository=None, | 374 | def getCommits(self, start, limit=None, stop=None, union_repository=None, |
215 | 370 | start_date=None, end_date=None, logger=None): | 375 | start_date=None, end_date=None, handle_timeout=False, |
216 | 376 | logger=None): | ||
217 | 371 | # Circular import. | 377 | # Circular import. |
218 | 372 | from lp.code.model.gitrepository import parse_git_commits | 378 | from lp.code.model.gitrepository import parse_git_commits |
219 | 373 | 379 | ||
224 | 374 | log = self._getLog( | 380 | with reduced_timeout(1.0 if handle_timeout else 0.0): |
225 | 375 | start, limit=limit, stop=stop, union_repository=union_repository, | 381 | try: |
226 | 376 | logger=logger) | 382 | log = self._getLog( |
227 | 377 | parsed_commits = parse_git_commits(log) | 383 | start, limit=limit, stop=stop, |
228 | 384 | union_repository=union_repository, logger=logger) | ||
229 | 385 | commit_oids = [ | ||
230 | 386 | commit["sha1"] for commit in log if "sha1" in commit] | ||
231 | 387 | parsed_commits = parse_git_commits(log) | ||
232 | 388 | except TimeoutError: | ||
233 | 389 | if not handle_timeout: | ||
234 | 390 | raise | ||
235 | 391 | if logger is not None: | ||
236 | 392 | logger.exception( | ||
237 | 393 | "Timeout fetching commits for %s" % self.identity) | ||
238 | 394 | # Synthesise a response based on what we have in the database. | ||
239 | 395 | commit_oids = [self.commit_sha1] | ||
240 | 396 | tip = {"sha1": self.commit_sha1, "synthetic": True} | ||
241 | 397 | optional_fields = ( | ||
242 | 398 | "author", "author_date", "committer", "committer_date", | ||
243 | 399 | "commit_message") | ||
244 | 400 | for field in optional_fields: | ||
245 | 401 | if getattr(self, field) is not None: | ||
246 | 402 | tip[field] = getattr(self, field) | ||
247 | 403 | parsed_commits = {self.commit_sha1: tip} | ||
248 | 378 | commits = [] | 404 | commits = [] |
253 | 379 | for commit in log: | 405 | for commit_oid in commit_oids: |
254 | 380 | if "sha1" not in commit: | 406 | parsed_commit = parsed_commits[commit_oid] |
251 | 381 | continue | ||
252 | 382 | parsed_commit = parsed_commits[commit["sha1"]] | ||
255 | 383 | author_date = parsed_commit.get("author_date") | 407 | author_date = parsed_commit.get("author_date") |
256 | 384 | if start_date is not None: | 408 | if start_date is not None: |
257 | 385 | if author_date is None or author_date < start_date: | 409 | if author_date is None or author_date < start_date: |
258 | @@ -390,8 +414,11 @@ | |||
259 | 390 | commits.append(parsed_commit) | 414 | commits.append(parsed_commit) |
260 | 391 | return commits | 415 | return commits |
261 | 392 | 416 | ||
264 | 393 | def getLatestCommits(self, quantity=10, extended_details=False, user=None): | 417 | def getLatestCommits(self, quantity=10, extended_details=False, user=None, |
265 | 394 | commits = self.getCommits(self.commit_sha1, limit=quantity) | 418 | handle_timeout=False, logger=None): |
266 | 419 | commits = self.getCommits( | ||
267 | 420 | self.commit_sha1, limit=quantity, handle_timeout=handle_timeout, | ||
268 | 421 | logger=logger) | ||
269 | 395 | if extended_details: | 422 | if extended_details: |
270 | 396 | # XXX cjwatson 2016-05-09: Add support for linked bugtasks once | 423 | # XXX cjwatson 2016-05-09: Add support for linked bugtasks once |
271 | 397 | # those are supported for Git. | 424 | # those are supported for Git. |
272 | 398 | 425 | ||
273 | === modified file 'lib/lp/code/templates/git-macros.pt' | |||
274 | --- lib/lp/code/templates/git-macros.pt 2018-08-24 16:52:27 +0000 | |||
275 | +++ lib/lp/code/templates/git-macros.pt 2018-12-20 11:14:34 +0000 | |||
276 | @@ -135,6 +135,11 @@ | |||
277 | 135 | </tal:diff-expander> | 135 | </tal:diff-expander> |
278 | 136 | </tal:ajax-revision-diffs> | 136 | </tal:ajax-revision-diffs> |
279 | 137 | </dl> | 137 | </dl> |
280 | 138 | <tal:synthetic | ||
281 | 139 | condition="python: any(commit_info.get('synthetic') | ||
282 | 140 | for commit_info in commit_infos)"> | ||
283 | 141 | <div>Some recent commit information could not be fetched.</div> | ||
284 | 142 | </tal:synthetic> | ||
285 | 138 | 143 | ||
286 | 139 | </metal:ref-commits> | 144 | </metal:ref-commits> |
287 | 140 | 145 |