Merge lp:~cjwatson/launchpad/sourcefileurls-include-meta into lp:launchpad
- sourcefileurls-include-meta
- Merge into devel
Status: | Merged |
---|---|
Merged at revision: | 17861 |
Proposed branch: | lp:~cjwatson/launchpad/sourcefileurls-include-meta |
Merge into: | lp:launchpad |
Diff against target: |
436 lines (+210/-50) 5 files modified
lib/lp/soyuz/browser/tests/test_publishing_webservice.py (+100/-24) lib/lp/soyuz/interfaces/publishing.py (+7/-3) lib/lp/soyuz/model/publishing.py (+19/-6) lib/lp/soyuz/tests/test_publishing_models.py (+72/-14) lib/lp/testing/__init__.py (+12/-3) |
To merge this branch: | bzr merge lp:~cjwatson/launchpad/sourcefileurls-include-meta |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
William Grant | code | Approve | |
Review via email: mp+276928@code.launchpad.net |
Commit message
Add include_meta option to SPPH.sourceFileUrls
Description of the change
Add include_meta option to SPPH.sourceFileUrls
This is mainly to support dgit, which needs to be able to fetch .dsc files from Launchpad and verify their contents without having to fetch a full Sources file (even if one were available for the .dsc in question) and its trust chain.
The include_meta option is rather weird, in that it causes the method in question to return a completely different type, and had I been doing this from scratch I would probably have added a new getSourceFiles method instead and deprecated sourceFileUrls over time. However, BPPH.binaryFileUrls already has an include_meta option with exactly this behaviour, so I followed that existing precedent for consistency.
While I was here, I also added sha256 to the metadata returned by BPPH.binaryFileUrls(include_meta=True).
Preview Diff
1 | === modified file 'lib/lp/soyuz/browser/tests/test_publishing_webservice.py' | |||
2 | --- lib/lp/soyuz/browser/tests/test_publishing_webservice.py 2015-11-26 13:31:45 +0000 | |||
3 | +++ lib/lp/soyuz/browser/tests/test_publishing_webservice.py 2015-12-02 13:21:01 +0000 | |||
4 | @@ -3,20 +3,84 @@ | |||
5 | 3 | 3 | ||
6 | 4 | """Test webservice methods related to the publisher.""" | 4 | """Test webservice methods related to the publisher.""" |
7 | 5 | 5 | ||
9 | 6 | from testtools.matchers import IsInstance | 6 | from functools import partial |
10 | 7 | 7 | ||
12 | 8 | from lp.services.database.sqlbase import flush_database_caches | 8 | from lp.services.librarian.browser import ProxiedLibraryFileAlias |
13 | 9 | from lp.services.webapp.interfaces import OAuthPermission | 9 | from lp.services.webapp.interfaces import OAuthPermission |
14 | 10 | from lp.testing import ( | 10 | from lp.testing import ( |
15 | 11 | api_url, | 11 | api_url, |
16 | 12 | login_person, | ||
17 | 12 | person_logged_in, | 13 | person_logged_in, |
19 | 13 | RequestTimelineCollector, | 14 | record_two_runs, |
20 | 14 | TestCaseWithFactory, | 15 | TestCaseWithFactory, |
21 | 15 | ) | 16 | ) |
22 | 16 | from lp.testing.layers import LaunchpadFunctionalLayer | 17 | from lp.testing.layers import LaunchpadFunctionalLayer |
23 | 18 | from lp.testing.matchers import HasQueryCount | ||
24 | 17 | from lp.testing.pages import webservice_for_person | 19 | from lp.testing.pages import webservice_for_person |
25 | 18 | 20 | ||
26 | 19 | 21 | ||
27 | 22 | class SourcePackagePublishingHistoryWebserviceTests(TestCaseWithFactory): | ||
28 | 23 | |||
29 | 24 | layer = LaunchpadFunctionalLayer | ||
30 | 25 | |||
31 | 26 | def make_spph_for(self, person): | ||
32 | 27 | with person_logged_in(person): | ||
33 | 28 | spr = self.factory.makeSourcePackageRelease() | ||
34 | 29 | self.factory.makeSourcePackageReleaseFile(sourcepackagerelease=spr) | ||
35 | 30 | spph = self.factory.makeSourcePackagePublishingHistory( | ||
36 | 31 | sourcepackagerelease=spr) | ||
37 | 32 | return spph, api_url(spph) | ||
38 | 33 | |||
39 | 34 | def test_sourceFileUrls(self): | ||
40 | 35 | person = self.factory.makePerson() | ||
41 | 36 | webservice = webservice_for_person( | ||
42 | 37 | person, permission=OAuthPermission.READ_PUBLIC) | ||
43 | 38 | spph, url = self.make_spph_for(person) | ||
44 | 39 | |||
45 | 40 | response = webservice.named_get( | ||
46 | 41 | url, 'sourceFileUrls', api_version='devel') | ||
47 | 42 | |||
48 | 43 | self.assertEqual(200, response.status) | ||
49 | 44 | urls = response.jsonBody() | ||
50 | 45 | with person_logged_in(person): | ||
51 | 46 | sprf = spph.sourcepackagerelease.files[0] | ||
52 | 47 | expected_urls = [ | ||
53 | 48 | ProxiedLibraryFileAlias( | ||
54 | 49 | sprf.libraryfile, spph.archive).http_url] | ||
55 | 50 | self.assertEqual(expected_urls, urls) | ||
56 | 51 | |||
57 | 52 | def test_sourceFileUrls_include_meta(self): | ||
58 | 53 | person = self.factory.makePerson() | ||
59 | 54 | webservice = webservice_for_person( | ||
60 | 55 | person, permission=OAuthPermission.READ_PUBLIC) | ||
61 | 56 | spph, url = self.make_spph_for(person) | ||
62 | 57 | |||
63 | 58 | def create_file(): | ||
64 | 59 | self.factory.makeSourcePackageReleaseFile( | ||
65 | 60 | sourcepackagerelease=spph.sourcepackagerelease) | ||
66 | 61 | |||
67 | 62 | def get_urls(): | ||
68 | 63 | return webservice.named_get( | ||
69 | 64 | url, 'sourceFileUrls', include_meta=True, api_version='devel') | ||
70 | 65 | |||
71 | 66 | recorder1, recorder2 = record_two_runs( | ||
72 | 67 | get_urls, create_file, 2, | ||
73 | 68 | login_method=partial(login_person, person), record_request=True) | ||
74 | 69 | self.assertThat(recorder2, HasQueryCount.byEquality(recorder1)) | ||
75 | 70 | |||
76 | 71 | response = get_urls() | ||
77 | 72 | self.assertEqual(200, response.status) | ||
78 | 73 | info = response.jsonBody() | ||
79 | 74 | with person_logged_in(person): | ||
80 | 75 | expected_info = [{ | ||
81 | 76 | "url": ProxiedLibraryFileAlias( | ||
82 | 77 | sprf.libraryfile, spph.archive).http_url, | ||
83 | 78 | "size": sprf.libraryfile.content.filesize, | ||
84 | 79 | "sha256": sprf.libraryfile.content.sha256, | ||
85 | 80 | } for sprf in spph.sourcepackagerelease.files] | ||
86 | 81 | self.assertContentEqual(expected_info, info) | ||
87 | 82 | |||
88 | 83 | |||
89 | 20 | class BinaryPackagePublishingHistoryWebserviceTests(TestCaseWithFactory): | 84 | class BinaryPackagePublishingHistoryWebserviceTests(TestCaseWithFactory): |
90 | 21 | 85 | ||
91 | 22 | layer = LaunchpadFunctionalLayer | 86 | layer = LaunchpadFunctionalLayer |
92 | @@ -33,36 +97,48 @@ | |||
93 | 33 | person = self.factory.makePerson() | 97 | person = self.factory.makePerson() |
94 | 34 | webservice = webservice_for_person( | 98 | webservice = webservice_for_person( |
95 | 35 | person, permission=OAuthPermission.READ_PUBLIC) | 99 | person, permission=OAuthPermission.READ_PUBLIC) |
96 | 100 | bpph, url = self.make_bpph_for(person) | ||
97 | 36 | 101 | ||
98 | 37 | response = webservice.named_get( | 102 | response = webservice.named_get( |
101 | 38 | self.make_bpph_for(person)[1], 'binaryFileUrls', | 103 | url, 'binaryFileUrls', api_version='devel') |
100 | 39 | api_version='devel') | ||
102 | 40 | 104 | ||
103 | 41 | self.assertEqual(200, response.status) | 105 | self.assertEqual(200, response.status) |
104 | 42 | urls = response.jsonBody() | 106 | urls = response.jsonBody() |
107 | 43 | self.assertEqual(1, len(urls)) | 107 | with person_logged_in(person): |
108 | 44 | self.assertTrue(urls[0], IsInstance(unicode)) | 108 | bpf = bpph.binarypackagerelease.files[0] |
109 | 109 | expected_urls = [ | ||
110 | 110 | ProxiedLibraryFileAlias( | ||
111 | 111 | bpf.libraryfile, bpph.archive).http_url] | ||
112 | 112 | self.assertEqual(expected_urls, urls) | ||
113 | 45 | 113 | ||
114 | 46 | def test_binaryFileUrls_include_meta(self): | 114 | def test_binaryFileUrls_include_meta(self): |
115 | 47 | person = self.factory.makePerson() | 115 | person = self.factory.makePerson() |
116 | 48 | webservice = webservice_for_person( | 116 | webservice = webservice_for_person( |
117 | 49 | person, permission=OAuthPermission.READ_PUBLIC) | 117 | person, permission=OAuthPermission.READ_PUBLIC) |
118 | 50 | |||
119 | 51 | bpph, url = self.make_bpph_for(person) | 118 | bpph, url = self.make_bpph_for(person) |
133 | 52 | query_counts = [] | 119 | |
134 | 53 | for i in range(3): | 120 | def create_file(): |
135 | 54 | flush_database_caches() | 121 | self.factory.makeBinaryPackageFile( |
136 | 55 | with RequestTimelineCollector() as collector: | 122 | binarypackagerelease=bpph.binarypackagerelease) |
137 | 56 | response = webservice.named_get( | 123 | |
138 | 57 | url, 'binaryFileUrls', include_meta=True, | 124 | def get_urls(): |
139 | 58 | api_version='devel') | 125 | return webservice.named_get( |
140 | 59 | query_counts.append(collector.count) | 126 | url, 'binaryFileUrls', include_meta=True, api_version='devel') |
141 | 60 | with person_logged_in(person): | 127 | |
142 | 61 | self.factory.makeBinaryPackageFile( | 128 | recorder1, recorder2 = record_two_runs( |
143 | 62 | binarypackagerelease=bpph.binarypackagerelease) | 129 | get_urls, create_file, 2, |
144 | 63 | self.assertEqual(query_counts[0], query_counts[-1]) | 130 | login_method=partial(login_person, person), record_request=True) |
145 | 64 | 131 | self.assertThat(recorder2, HasQueryCount.byEquality(recorder1)) | |
146 | 132 | |||
147 | 133 | response = get_urls() | ||
148 | 65 | self.assertEqual(200, response.status) | 134 | self.assertEqual(200, response.status) |
152 | 66 | urls = response.jsonBody() | 135 | info = response.jsonBody() |
153 | 67 | self.assertEqual(3, len(urls)) | 136 | with person_logged_in(person): |
154 | 68 | self.assertThat(urls[0], IsInstance(dict)) | 137 | expected_info = [{ |
155 | 138 | "url": ProxiedLibraryFileAlias( | ||
156 | 139 | bpf.libraryfile, bpph.archive).http_url, | ||
157 | 140 | "size": bpf.libraryfile.content.filesize, | ||
158 | 141 | "sha1": bpf.libraryfile.content.sha1, | ||
159 | 142 | "sha256": bpf.libraryfile.content.sha256, | ||
160 | 143 | } for bpf in bpph.binarypackagerelease.files] | ||
161 | 144 | self.assertContentEqual(expected_info, info) | ||
162 | 69 | 145 | ||
163 | === modified file 'lib/lp/soyuz/interfaces/publishing.py' | |||
164 | --- lib/lp/soyuz/interfaces/publishing.py 2015-04-08 10:35:22 +0000 | |||
165 | +++ lib/lp/soyuz/interfaces/publishing.py 2015-12-02 13:21:01 +0000 | |||
166 | @@ -578,9 +578,13 @@ | |||
167 | 578 | """ | 578 | """ |
168 | 579 | 579 | ||
169 | 580 | @export_read_operation() | 580 | @export_read_operation() |
171 | 581 | def sourceFileUrls(): | 581 | @operation_parameters( |
172 | 582 | include_meta=Bool(title=_("Include Metadata"), required=False)) | ||
173 | 583 | def sourceFileUrls(include_meta=False): | ||
174 | 582 | """URLs for this source publication's uploaded source files. | 584 | """URLs for this source publication's uploaded source files. |
175 | 583 | 585 | ||
176 | 586 | :param include_meta: Return a list of dicts with keys url, size, and | ||
177 | 587 | sha256 for each URL instead of a simple list. | ||
178 | 584 | :return: A collection of URLs for this source. | 588 | :return: A collection of URLs for this source. |
179 | 585 | """ | 589 | """ |
180 | 586 | 590 | ||
181 | @@ -869,8 +873,8 @@ | |||
182 | 869 | def binaryFileUrls(include_meta=False): | 873 | def binaryFileUrls(include_meta=False): |
183 | 870 | """URLs for this binary publication's binary files. | 874 | """URLs for this binary publication's binary files. |
184 | 871 | 875 | ||
187 | 872 | :param include_meta: Return a list of dicts with keys url, size | 876 | :param include_meta: Return a list of dicts with keys url, size, |
188 | 873 | and sha1 for each url instead of a simple list. | 877 | sha1, and sha256 for each URL instead of a simple list. |
189 | 874 | :return: A collection of URLs for this binary. | 878 | :return: A collection of URLs for this binary. |
190 | 875 | """ | 879 | """ |
191 | 876 | 880 | ||
192 | 877 | 881 | ||
193 | === modified file 'lib/lp/soyuz/model/publishing.py' | |||
194 | --- lib/lp/soyuz/model/publishing.py 2015-07-08 16:05:11 +0000 | |||
195 | +++ lib/lp/soyuz/model/publishing.py 2015-12-02 13:21:01 +0000 | |||
196 | @@ -643,11 +643,23 @@ | |||
197 | 643 | return getUtility( | 643 | return getUtility( |
198 | 644 | IPublishingSet).getBuildStatusSummaryForSourcePublication(self) | 644 | IPublishingSet).getBuildStatusSummaryForSourcePublication(self) |
199 | 645 | 645 | ||
201 | 646 | def sourceFileUrls(self): | 646 | def sourceFileUrls(self, include_meta=False): |
202 | 647 | """See `ISourcePackagePublishingHistory`.""" | 647 | """See `ISourcePackagePublishingHistory`.""" |
203 | 648 | sources = Store.of(self).find( | ||
204 | 649 | (LibraryFileAlias, LibraryFileContent), | ||
205 | 650 | LibraryFileContent.id == LibraryFileAlias.contentID, | ||
206 | 651 | LibraryFileAlias.id == SourcePackageReleaseFile.libraryfileID, | ||
207 | 652 | SourcePackageReleaseFile.sourcepackagerelease == | ||
208 | 653 | SourcePackageRelease.id, | ||
209 | 654 | SourcePackageRelease.id == self.sourcepackagereleaseID) | ||
210 | 648 | source_urls = proxied_urls( | 655 | source_urls = proxied_urls( |
213 | 649 | [file.libraryfile for file in self.sourcepackagerelease.files], | 656 | [source for source, _ in sources], self.archive) |
214 | 650 | self.archive) | 657 | if include_meta: |
215 | 658 | meta = [ | ||
216 | 659 | (content.filesize, content.sha256) for _, content in sources] | ||
217 | 660 | return [ | ||
218 | 661 | dict(url=url, size=size, sha256=sha256) | ||
219 | 662 | for url, (size, sha256) in zip(source_urls, meta)] | ||
220 | 651 | return source_urls | 663 | return source_urls |
221 | 652 | 664 | ||
222 | 653 | def binaryFileUrls(self): | 665 | def binaryFileUrls(self): |
223 | @@ -1064,9 +1076,10 @@ | |||
224 | 1064 | [binary for binary, _ in binaries], self.archive) | 1076 | [binary for binary, _ in binaries], self.archive) |
225 | 1065 | if include_meta: | 1077 | if include_meta: |
226 | 1066 | meta = [ | 1078 | meta = [ |
230 | 1067 | (content.filesize, content.sha1) for _, content in binaries] | 1079 | (content.filesize, content.sha1, content.sha256) |
231 | 1068 | return [dict(url=url, size=size, sha1=sha1) | 1080 | for _, content in binaries] |
232 | 1069 | for url, (size, sha1) in zip(binary_urls, meta)] | 1081 | return [dict(url=url, size=size, sha1=sha1, sha256=sha256) |
233 | 1082 | for url, (size, sha1, sha256) in zip(binary_urls, meta)] | ||
234 | 1070 | return binary_urls | 1083 | return binary_urls |
235 | 1071 | 1084 | ||
236 | 1072 | 1085 | ||
237 | 1073 | 1086 | ||
238 | === modified file 'lib/lp/soyuz/tests/test_publishing_models.py' | |||
239 | --- lib/lp/soyuz/tests/test_publishing_models.py 2015-05-14 13:23:47 +0000 | |||
240 | +++ lib/lp/soyuz/tests/test_publishing_models.py 2015-12-02 13:21:01 +0000 | |||
241 | @@ -8,6 +8,7 @@ | |||
242 | 8 | 8 | ||
243 | 9 | from lp.app.errors import NotFoundError | 9 | from lp.app.errors import NotFoundError |
244 | 10 | from lp.buildmaster.enums import BuildStatus | 10 | from lp.buildmaster.enums import BuildStatus |
245 | 11 | from lp.registry.interfaces.sourcepackage import SourcePackageFileType | ||
246 | 11 | from lp.services.database.constants import UTC_NOW | 12 | from lp.services.database.constants import UTC_NOW |
247 | 12 | from lp.services.librarian.browser import ProxiedLibraryFileAlias | 13 | from lp.services.librarian.browser import ProxiedLibraryFileAlias |
248 | 13 | from lp.services.webapp.publisher import canonical_url | 14 | from lp.services.webapp.publisher import canonical_url |
249 | @@ -154,12 +155,68 @@ | |||
250 | 154 | spph = self.factory.makeSourcePackagePublishingHistory() | 155 | spph = self.factory.makeSourcePackagePublishingHistory() |
251 | 155 | self.assertRaises(NotFoundError, spph.getFileByName, 'not-changelog') | 156 | self.assertRaises(NotFoundError, spph.getFileByName, 'not-changelog') |
252 | 156 | 157 | ||
253 | 158 | def getURLsForSPPH(self, spph, include_meta=False): | ||
254 | 159 | spr = spph.sourcepackagerelease | ||
255 | 160 | archive = spph.archive | ||
256 | 161 | urls = [ProxiedLibraryFileAlias(f.libraryfile, archive).http_url | ||
257 | 162 | for f in spr.files] | ||
258 | 163 | |||
259 | 164 | if include_meta: | ||
260 | 165 | meta = [( | ||
261 | 166 | f.libraryfile.content.filesize, | ||
262 | 167 | f.libraryfile.content.sha256, | ||
263 | 168 | ) for f in spr.files] | ||
264 | 169 | |||
265 | 170 | return [dict(url=url, size=size, sha256=sha256) | ||
266 | 171 | for url, (size, sha256) in zip(urls, meta)] | ||
267 | 172 | return urls | ||
268 | 173 | |||
269 | 174 | def makeSPPH(self, num_files=1): | ||
270 | 175 | archive = self.factory.makeArchive(private=False) | ||
271 | 176 | spr = self.factory.makeSourcePackageRelease(archive=archive) | ||
272 | 177 | filetypes = [ | ||
273 | 178 | SourcePackageFileType.DSC, SourcePackageFileType.ORIG_TARBALL] | ||
274 | 179 | for count in range(num_files): | ||
275 | 180 | self.factory.makeSourcePackageReleaseFile( | ||
276 | 181 | sourcepackagerelease=spr, filetype=filetypes[count % 2]) | ||
277 | 182 | return self.factory.makeSourcePackagePublishingHistory( | ||
278 | 183 | sourcepackagerelease=spr, archive=archive) | ||
279 | 184 | |||
280 | 185 | def test_sourceFileUrls_no_files(self): | ||
281 | 186 | spph = self.makeSPPH(num_files=0) | ||
282 | 187 | |||
283 | 188 | urls = spph.sourceFileUrls() | ||
284 | 189 | |||
285 | 190 | self.assertContentEqual([], urls) | ||
286 | 191 | |||
287 | 192 | def test_sourceFileUrls_one_file(self): | ||
288 | 193 | spph = self.makeSPPH(num_files=1) | ||
289 | 194 | |||
290 | 195 | urls = spph.sourceFileUrls() | ||
291 | 196 | |||
292 | 197 | self.assertContentEqual(self.getURLsForSPPH(spph), urls) | ||
293 | 198 | |||
294 | 199 | def test_sourceFileUrls_two_files(self): | ||
295 | 200 | spph = self.makeSPPH(num_files=2) | ||
296 | 201 | |||
297 | 202 | urls = spph.sourceFileUrls() | ||
298 | 203 | |||
299 | 204 | self.assertContentEqual(self.getURLsForSPPH(spph), urls) | ||
300 | 205 | |||
301 | 206 | def test_sourceFileUrls_include_meta(self): | ||
302 | 207 | spph = self.makeSPPH(num_files=2) | ||
303 | 208 | |||
304 | 209 | urls = spph.sourceFileUrls(include_meta=True) | ||
305 | 210 | |||
306 | 211 | self.assertContentEqual( | ||
307 | 212 | self.getURLsForSPPH(spph, include_meta=True), urls) | ||
308 | 213 | |||
309 | 157 | 214 | ||
310 | 158 | class TestBinaryPackagePublishingHistory(TestCaseWithFactory): | 215 | class TestBinaryPackagePublishingHistory(TestCaseWithFactory): |
311 | 159 | 216 | ||
312 | 160 | layer = LaunchpadFunctionalLayer | 217 | layer = LaunchpadFunctionalLayer |
313 | 161 | 218 | ||
315 | 162 | def get_urls_for_bpph(self, bpph, include_meta=False): | 219 | def getURLsForBPPH(self, bpph, include_meta=False): |
316 | 163 | bpr = bpph.binarypackagerelease | 220 | bpr = bpph.binarypackagerelease |
317 | 164 | archive = bpph.archive | 221 | archive = bpph.archive |
318 | 165 | urls = [ProxiedLibraryFileAlias(f.libraryfile, archive).http_url | 222 | urls = [ProxiedLibraryFileAlias(f.libraryfile, archive).http_url |
319 | @@ -169,13 +226,14 @@ | |||
320 | 169 | meta = [( | 226 | meta = [( |
321 | 170 | f.libraryfile.content.filesize, | 227 | f.libraryfile.content.filesize, |
322 | 171 | f.libraryfile.content.sha1, | 228 | f.libraryfile.content.sha1, |
323 | 229 | f.libraryfile.content.sha256, | ||
324 | 172 | ) for f in bpr.files] | 230 | ) for f in bpr.files] |
325 | 173 | 231 | ||
328 | 174 | return [dict(url=url, size=size, sha1=sha1) | 232 | return [dict(url=url, size=size, sha1=sha1, sha256=sha256) |
329 | 175 | for url, (size, sha1) in zip(urls, meta)] | 233 | for url, (size, sha1, sha256) in zip(urls, meta)] |
330 | 176 | return urls | 234 | return urls |
331 | 177 | 235 | ||
333 | 178 | def make_bpph(self, num_binaries=1): | 236 | def makeBPPH(self, num_binaries=1): |
334 | 179 | archive = self.factory.makeArchive(private=False) | 237 | archive = self.factory.makeArchive(private=False) |
335 | 180 | bpr = self.factory.makeBinaryPackageRelease() | 238 | bpr = self.factory.makeBinaryPackageRelease() |
336 | 181 | filetypes = [BinaryPackageFileType.DEB, BinaryPackageFileType.DDEB] | 239 | filetypes = [BinaryPackageFileType.DEB, BinaryPackageFileType.DDEB] |
337 | @@ -186,40 +244,40 @@ | |||
338 | 186 | binarypackagerelease=bpr, archive=archive) | 244 | binarypackagerelease=bpr, archive=archive) |
339 | 187 | 245 | ||
340 | 188 | def test_binaryFileUrls_no_binaries(self): | 246 | def test_binaryFileUrls_no_binaries(self): |
342 | 189 | bpph = self.make_bpph(num_binaries=0) | 247 | bpph = self.makeBPPH(num_binaries=0) |
343 | 190 | 248 | ||
344 | 191 | urls = bpph.binaryFileUrls() | 249 | urls = bpph.binaryFileUrls() |
345 | 192 | 250 | ||
346 | 193 | self.assertContentEqual([], urls) | 251 | self.assertContentEqual([], urls) |
347 | 194 | 252 | ||
348 | 195 | def test_binaryFileUrls_one_binary(self): | 253 | def test_binaryFileUrls_one_binary(self): |
350 | 196 | bpph = self.make_bpph(num_binaries=1) | 254 | bpph = self.makeBPPH(num_binaries=1) |
351 | 197 | 255 | ||
352 | 198 | urls = bpph.binaryFileUrls() | 256 | urls = bpph.binaryFileUrls() |
353 | 199 | 257 | ||
355 | 200 | self.assertContentEqual(self.get_urls_for_bpph(bpph), urls) | 258 | self.assertContentEqual(self.getURLsForBPPH(bpph), urls) |
356 | 201 | 259 | ||
357 | 202 | def test_binaryFileUrls_two_binaries(self): | 260 | def test_binaryFileUrls_two_binaries(self): |
359 | 203 | bpph = self.make_bpph(num_binaries=2) | 261 | bpph = self.makeBPPH(num_binaries=2) |
360 | 204 | 262 | ||
361 | 205 | urls = bpph.binaryFileUrls() | 263 | urls = bpph.binaryFileUrls() |
362 | 206 | 264 | ||
364 | 207 | self.assertContentEqual(self.get_urls_for_bpph(bpph), urls) | 265 | self.assertContentEqual(self.getURLsForBPPH(bpph), urls) |
365 | 208 | 266 | ||
366 | 209 | def test_binaryFileUrls_include_meta(self): | 267 | def test_binaryFileUrls_include_meta(self): |
368 | 210 | bpph = self.make_bpph(num_binaries=2) | 268 | bpph = self.makeBPPH(num_binaries=2) |
369 | 211 | 269 | ||
370 | 212 | urls = bpph.binaryFileUrls(include_meta=True) | 270 | urls = bpph.binaryFileUrls(include_meta=True) |
371 | 213 | 271 | ||
372 | 214 | self.assertContentEqual( | 272 | self.assertContentEqual( |
374 | 215 | self.get_urls_for_bpph(bpph, include_meta=True), urls) | 273 | self.getURLsForBPPH(bpph, include_meta=True), urls) |
375 | 216 | 274 | ||
376 | 217 | def test_binaryFileUrls_removed(self): | 275 | def test_binaryFileUrls_removed(self): |
377 | 218 | # binaryFileUrls returns URLs even if the files have been removed | 276 | # binaryFileUrls returns URLs even if the files have been removed |
378 | 219 | # from the published archive. | 277 | # from the published archive. |
382 | 220 | bpph = self.make_bpph(num_binaries=2) | 278 | bpph = self.makeBPPH(num_binaries=2) |
383 | 221 | expected_urls = self.get_urls_for_bpph(bpph) | 279 | expected_urls = self.getURLsForBPPH(bpph) |
384 | 222 | expected_urls_meta = self.get_urls_for_bpph(bpph, include_meta=True) | 280 | expected_urls_meta = self.getURLsForBPPH(bpph, include_meta=True) |
385 | 223 | self.assertContentEqual(expected_urls, bpph.binaryFileUrls()) | 281 | self.assertContentEqual(expected_urls, bpph.binaryFileUrls()) |
386 | 224 | self.assertContentEqual( | 282 | self.assertContentEqual( |
387 | 225 | expected_urls_meta, bpph.binaryFileUrls(include_meta=True)) | 283 | expected_urls_meta, bpph.binaryFileUrls(include_meta=True)) |
388 | 226 | 284 | ||
389 | === modified file 'lib/lp/testing/__init__.py' | |||
390 | --- lib/lp/testing/__init__.py 2015-10-04 01:28:19 +0000 | |||
391 | +++ lib/lp/testing/__init__.py 2015-12-02 13:21:01 +0000 | |||
392 | @@ -425,7 +425,8 @@ | |||
393 | 425 | 425 | ||
394 | 426 | 426 | ||
395 | 427 | def record_two_runs(tested_method, item_creator, first_round_number, | 427 | def record_two_runs(tested_method, item_creator, first_round_number, |
397 | 428 | second_round_number=None, login_method=None): | 428 | second_round_number=None, login_method=None, |
398 | 429 | record_request=False): | ||
399 | 429 | """A helper that returns the two storm statement recorders | 430 | """A helper that returns the two storm statement recorders |
400 | 430 | obtained when running tested_method after having run the | 431 | obtained when running tested_method after having run the |
401 | 431 | method {item_creator} {first_round_number} times and then | 432 | method {item_creator} {first_round_number} times and then |
402 | @@ -435,9 +436,17 @@ | |||
403 | 435 | If {login_method} is not None, it is called before each batch of | 436 | If {login_method} is not None, it is called before each batch of |
404 | 436 | {item_creator} calls. | 437 | {item_creator} calls. |
405 | 437 | 438 | ||
406 | 439 | If {record_request} is True, `RequestTimelineCollector` is used to get | ||
407 | 440 | the query counts, so {tested_method} should make a web request. | ||
408 | 441 | Otherwise, `StormStatementRecorder` is used to get the query count. | ||
409 | 442 | |||
410 | 438 | :return: a tuple containing the two recorders obtained by the successive | 443 | :return: a tuple containing the two recorders obtained by the successive |
411 | 439 | runs. | 444 | runs. |
412 | 440 | """ | 445 | """ |
413 | 446 | if record_request: | ||
414 | 447 | recorder_factory = RequestTimelineCollector | ||
415 | 448 | else: | ||
416 | 449 | recorder_factory = StormStatementRecorder | ||
417 | 441 | if login_method is not None: | 450 | if login_method is not None: |
418 | 442 | login_method() | 451 | login_method() |
419 | 443 | for i in range(first_round_number): | 452 | for i in range(first_round_number): |
420 | @@ -449,7 +458,7 @@ | |||
421 | 449 | if queryInteraction() is not None: | 458 | if queryInteraction() is not None: |
422 | 450 | clear_permission_cache() | 459 | clear_permission_cache() |
423 | 451 | getUtility(ILaunchpadCelebrities).clearCache() | 460 | getUtility(ILaunchpadCelebrities).clearCache() |
425 | 452 | with StormStatementRecorder() as recorder1: | 461 | with recorder_factory() as recorder1: |
426 | 453 | tested_method() | 462 | tested_method() |
427 | 454 | # Run {item_creator} {second_round_number} more times. | 463 | # Run {item_creator} {second_round_number} more times. |
428 | 455 | if second_round_number is None: | 464 | if second_round_number is None: |
429 | @@ -463,7 +472,7 @@ | |||
430 | 463 | if queryInteraction() is not None: | 472 | if queryInteraction() is not None: |
431 | 464 | clear_permission_cache() | 473 | clear_permission_cache() |
432 | 465 | getUtility(ILaunchpadCelebrities).clearCache() | 474 | getUtility(ILaunchpadCelebrities).clearCache() |
434 | 466 | with StormStatementRecorder() as recorder2: | 475 | with recorder_factory() as recorder2: |
435 | 467 | tested_method() | 476 | tested_method() |
436 | 468 | return recorder1, recorder2 | 477 | return recorder1, recorder2 |
437 | 469 | 478 |
I'd lean toward consistency wrt. SHA-1, but it doesn't really matter.