Merge lp:~stevenk/launchpad/moar-preload-distroseries-queue into lp:launchpad
- moar-preload-distroseries-queue
- Merge into devel
Proposed by
Steve Kowalik
Status: | Merged | ||||||||
---|---|---|---|---|---|---|---|---|---|
Approved by: | Steve Kowalik | ||||||||
Approved revision: | no longer in the source branch. | ||||||||
Merged at revision: | 16396 | ||||||||
Proposed branch: | lp:~stevenk/launchpad/moar-preload-distroseries-queue | ||||||||
Merge into: | lp:launchpad | ||||||||
Diff against target: |
568 lines (+139/-84) 11 files modified
lib/lp/security.py (+4/-6) lib/lp/services/webapp/adapter.py (+0/-4) lib/lp/soyuz/browser/queue.py (+19/-22) lib/lp/soyuz/browser/tests/test_queue.py (+23/-2) lib/lp/soyuz/configure.zcml (+2/-1) lib/lp/soyuz/model/archive.py (+1/-1) lib/lp/soyuz/model/binarypackagebuild.py (+1/-1) lib/lp/soyuz/model/publishing.py (+12/-19) lib/lp/soyuz/model/queue.py (+60/-21) lib/lp/soyuz/model/sourcepackagerelease.py (+4/-7) lib/lp/soyuz/tests/test_packageupload.py (+13/-0) |
||||||||
To merge this branch: | bzr merge lp:~stevenk/launchpad/moar-preload-distroseries-queue | ||||||||
Related bugs: |
|
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
William Grant | code | Approve | |
Review via email: mp+141581@code.launchpad.net |
Commit message
Teach IPackageUploadSet.getAll to do more preloading.
Description of the change
Force the PackageUpload security adapter to use the cached properties on it, after making sure they are set carefully in add{Source,Build,Custom}.
Do lots of preloading in IPackageUploadSet.getAll.
Write two query count tests: one for the DistroSeries:+queue view, and one for IPackageUploadSet.getAll.
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'lib/lp/security.py' | |||
2 | --- lib/lp/security.py 2012-12-19 22:01:13 +0000 | |||
3 | +++ lib/lp/security.py 2013-01-03 00:18:23 +0000 | |||
4 | @@ -1,8 +1,6 @@ | |||
5 | 1 | # Copyright 2009-2012 Canonical Ltd. This software is licensed under the | 1 | # Copyright 2009-2012 Canonical Ltd. This software is licensed under the |
6 | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). |
7 | 3 | 3 | ||
8 | 4 | # pylint: disable-msg=F0401 | ||
9 | 5 | |||
10 | 6 | """Security policies for using content objects.""" | 4 | """Security policies for using content objects.""" |
11 | 7 | 5 | ||
12 | 8 | __metaclass__ = type | 6 | __metaclass__ = type |
13 | @@ -1836,10 +1834,10 @@ | |||
14 | 1836 | # We cannot use self.obj.sourcepackagerelease, as that causes | 1834 | # We cannot use self.obj.sourcepackagerelease, as that causes |
15 | 1837 | # interference with the property cache if we are called in the | 1835 | # interference with the property cache if we are called in the |
16 | 1838 | # process of adding a source or a build. | 1836 | # process of adding a source or a build. |
21 | 1839 | if not self.obj._sources.is_empty(): | 1837 | if self.obj.sources: |
22 | 1840 | spr = self.obj._sources[0].sourcepackagerelease | 1838 | spr = self.obj.sources[0].sourcepackagerelease |
23 | 1841 | elif not self.obj._builds.is_empty(): | 1839 | elif self.obj.builds: |
24 | 1842 | spr = self.obj._builds[0].build.source_package_release | 1840 | spr = self.obj.builds[0].build.source_package_release |
25 | 1843 | else: | 1841 | else: |
26 | 1844 | spr = None | 1842 | spr = None |
27 | 1845 | if spr is not None: | 1843 | if spr is not None: |
28 | 1846 | 1844 | ||
29 | === modified file 'lib/lp/services/webapp/adapter.py' | |||
30 | --- lib/lp/services/webapp/adapter.py 2012-11-29 18:08:12 +0000 | |||
31 | +++ lib/lp/services/webapp/adapter.py 2013-01-03 00:18:23 +0000 | |||
32 | @@ -1,9 +1,6 @@ | |||
33 | 1 | # Copyright 2009-2011 Canonical Ltd. This software is licensed under the | 1 | # Copyright 2009-2011 Canonical Ltd. This software is licensed under the |
34 | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). |
35 | 3 | 3 | ||
36 | 4 | # We use global in this module. | ||
37 | 5 | # pylint: disable-msg=W0602 | ||
38 | 6 | |||
39 | 7 | __metaclass__ = type | 4 | __metaclass__ = type |
40 | 8 | 5 | ||
41 | 9 | from functools import partial | 6 | from functools import partial |
42 | @@ -442,7 +439,6 @@ | |||
43 | 442 | for using connections from the main thread. | 439 | for using connections from the main thread. |
44 | 443 | """ | 440 | """ |
45 | 444 | # Record the ID of the main thread. | 441 | # Record the ID of the main thread. |
46 | 445 | # pylint: disable-msg=W0603 | ||
47 | 446 | global _main_thread_id | 442 | global _main_thread_id |
48 | 447 | _main_thread_id = thread.get_ident() | 443 | _main_thread_id = thread.get_ident() |
49 | 448 | 444 | ||
50 | 449 | 445 | ||
51 | === modified file 'lib/lp/soyuz/browser/queue.py' | |||
52 | --- lib/lp/soyuz/browser/queue.py 2012-12-12 03:35:48 +0000 | |||
53 | +++ lib/lp/soyuz/browser/queue.py 2013-01-03 00:18:23 +0000 | |||
54 | @@ -10,7 +10,7 @@ | |||
55 | 10 | 'QueueItemsView', | 10 | 'QueueItemsView', |
56 | 11 | ] | 11 | ] |
57 | 12 | 12 | ||
59 | 13 | import operator | 13 | from operator import attrgetter |
60 | 14 | 14 | ||
61 | 15 | from lazr.delegates import delegates | 15 | from lazr.delegates import delegates |
62 | 16 | from zope.component import getUtility | 16 | from zope.component import getUtility |
63 | @@ -20,7 +20,7 @@ | |||
64 | 20 | NotFoundError, | 20 | NotFoundError, |
65 | 21 | UnexpectedFormData, | 21 | UnexpectedFormData, |
66 | 22 | ) | 22 | ) |
68 | 23 | from lp.registry.model.person import Person | 23 | from lp.registry.interfaces.person import IPersonSet |
69 | 24 | from lp.services.database.bulk import ( | 24 | from lp.services.database.bulk import ( |
70 | 25 | load_referencing, | 25 | load_referencing, |
71 | 26 | load_related, | 26 | load_related, |
72 | @@ -55,8 +55,10 @@ | |||
73 | 55 | ) | 55 | ) |
74 | 56 | from lp.soyuz.interfaces.section import ISectionSet | 56 | from lp.soyuz.interfaces.section import ISectionSet |
75 | 57 | from lp.soyuz.model.archive import Archive | 57 | from lp.soyuz.model.archive import Archive |
76 | 58 | from lp.soyuz.model.component import Component | ||
77 | 58 | from lp.soyuz.model.packagecopyjob import PackageCopyJob | 59 | from lp.soyuz.model.packagecopyjob import PackageCopyJob |
78 | 59 | from lp.soyuz.model.queue import PackageUploadSource | 60 | from lp.soyuz.model.queue import PackageUploadSource |
79 | 61 | from lp.soyuz.model.section import Section | ||
80 | 60 | from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease | 62 | from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease |
81 | 61 | 63 | ||
82 | 62 | 64 | ||
83 | @@ -124,8 +126,7 @@ | |||
84 | 124 | build_ids = [binary_file.binarypackagerelease.build.id | 126 | build_ids = [binary_file.binarypackagerelease.build.id |
85 | 125 | for binary_file in binary_files] | 127 | for binary_file in binary_files] |
86 | 126 | upload_set = getUtility(IPackageUploadSet) | 128 | upload_set = getUtility(IPackageUploadSet) |
89 | 127 | package_upload_builds = upload_set.getBuildByBuildIDs( | 129 | package_upload_builds = upload_set.getBuildByBuildIDs(build_ids) |
88 | 128 | build_ids) | ||
90 | 129 | package_upload_builds_dict = {} | 130 | package_upload_builds_dict = {} |
91 | 130 | for package_upload_build in package_upload_builds: | 131 | for package_upload_build in package_upload_builds: |
92 | 131 | package_upload_builds_dict[ | 132 | package_upload_builds_dict[ |
93 | @@ -135,8 +136,8 @@ | |||
94 | 135 | def binary_files_dict(self, package_upload_builds_dict, binary_files): | 136 | def binary_files_dict(self, package_upload_builds_dict, binary_files): |
95 | 136 | """Build a dictionary of lists of binary files keyed by upload ID. | 137 | """Build a dictionary of lists of binary files keyed by upload ID. |
96 | 137 | 138 | ||
99 | 138 | To do this efficiently we need to get all the PacakgeUploadBuild | 139 | To do this efficiently we need to get all the PackageUploadBuild |
100 | 139 | records at once, otherwise the Ibuild.package_upload property | 140 | records at once, otherwise the IBuild.package_upload property |
101 | 140 | causes one query per iteration of the loop. | 141 | causes one query per iteration of the loop. |
102 | 141 | """ | 142 | """ |
103 | 142 | build_upload_files = {} | 143 | build_upload_files = {} |
104 | @@ -208,7 +209,9 @@ | |||
105 | 208 | PackageCopyJob, uploads, ['package_copy_job_id']) | 209 | PackageCopyJob, uploads, ['package_copy_job_id']) |
106 | 209 | load_related(Archive, package_copy_jobs, ['source_archive_id']) | 210 | load_related(Archive, package_copy_jobs, ['source_archive_id']) |
107 | 210 | jobs = load_related(Job, package_copy_jobs, ['job_id']) | 211 | jobs = load_related(Job, package_copy_jobs, ['job_id']) |
109 | 211 | load_related(Person, jobs, ['requester_id']) | 212 | person_ids = map(attrgetter('requester_id'), jobs) |
110 | 213 | list(getUtility(IPersonSet).getPrecachedPersonsFromIDs( | ||
111 | 214 | person_ids, need_validity=True)) | ||
112 | 212 | 215 | ||
113 | 213 | def decoratedQueueBatch(self): | 216 | def decoratedQueueBatch(self): |
114 | 214 | """Return the current batch, converted to decorated objects. | 217 | """Return the current batch, converted to decorated objects. |
115 | @@ -224,20 +227,21 @@ | |||
116 | 224 | 227 | ||
117 | 225 | upload_ids = [upload.id for upload in uploads] | 228 | upload_ids = [upload.id for upload in uploads] |
118 | 226 | binary_file_set = getUtility(IBinaryPackageFileSet) | 229 | binary_file_set = getUtility(IBinaryPackageFileSet) |
120 | 227 | binary_files = binary_file_set.getByPackageUploadIDs(upload_ids) | 230 | binary_files = list(binary_file_set.getByPackageUploadIDs(upload_ids)) |
121 | 228 | binary_file_set.loadLibraryFiles(binary_files) | 231 | binary_file_set.loadLibraryFiles(binary_files) |
122 | 229 | packageuploadsources = load_referencing( | 232 | packageuploadsources = load_referencing( |
123 | 230 | PackageUploadSource, uploads, ['packageuploadID']) | 233 | PackageUploadSource, uploads, ['packageuploadID']) |
124 | 231 | source_file_set = getUtility(ISourcePackageReleaseFileSet) | 234 | source_file_set = getUtility(ISourcePackageReleaseFileSet) |
127 | 232 | source_files = source_file_set.getByPackageUploadIDs(upload_ids) | 235 | source_files = list(source_file_set.getByPackageUploadIDs(upload_ids)) |
126 | 233 | |||
128 | 234 | source_sprs = load_related( | 236 | source_sprs = load_related( |
129 | 235 | SourcePackageRelease, packageuploadsources, | 237 | SourcePackageRelease, packageuploadsources, |
130 | 236 | ['sourcepackagereleaseID']) | 238 | ['sourcepackagereleaseID']) |
131 | 237 | 239 | ||
132 | 240 | load_related(Section, source_sprs, ['sectionID']) | ||
133 | 241 | load_related(Component, source_sprs, ['componentID']) | ||
134 | 242 | |||
135 | 238 | # Get a dictionary of lists of binary files keyed by upload ID. | 243 | # Get a dictionary of lists of binary files keyed by upload ID. |
138 | 239 | package_upload_builds_dict = self.builds_dict( | 244 | package_upload_builds_dict = self.builds_dict(upload_ids, binary_files) |
137 | 240 | upload_ids, binary_files) | ||
139 | 241 | 245 | ||
140 | 242 | build_upload_files, binary_package_names = self.binary_files_dict( | 246 | build_upload_files, binary_package_names = self.binary_files_dict( |
141 | 243 | package_upload_builds_dict, binary_files) | 247 | package_upload_builds_dict, binary_files) |
142 | @@ -461,7 +465,7 @@ | |||
143 | 461 | sorted by their name. | 465 | sorted by their name. |
144 | 462 | """ | 466 | """ |
145 | 463 | return sorted( | 467 | return sorted( |
147 | 464 | self.context.sections, key=operator.attrgetter('name')) | 468 | self.context.sections, key=attrgetter('name')) |
148 | 465 | 469 | ||
149 | 466 | def priorities(self): | 470 | def priorities(self): |
150 | 467 | """An iterable of priorities from PackagePublishingPriority.""" | 471 | """An iterable of priorities from PackagePublishingPriority.""" |
151 | @@ -516,8 +520,6 @@ | |||
152 | 516 | 520 | ||
153 | 517 | if self.contains_source: | 521 | if self.contains_source: |
154 | 518 | self.sourcepackagerelease = self.sources[0].sourcepackagerelease | 522 | self.sourcepackagerelease = self.sources[0].sourcepackagerelease |
155 | 519 | |||
156 | 520 | if self.contains_source: | ||
157 | 521 | self.package_sets = package_sets.get( | 523 | self.package_sets = package_sets.get( |
158 | 522 | self.sourcepackagerelease.sourcepackagenameID, []) | 524 | self.sourcepackagerelease.sourcepackagenameID, []) |
159 | 523 | else: | 525 | else: |
160 | @@ -561,8 +563,7 @@ | |||
161 | 561 | if title is None: | 563 | if title is None: |
162 | 562 | title = alt | 564 | title = alt |
163 | 563 | return structured( | 565 | return structured( |
166 | 564 | '<img alt="[%s]" src="/@@/%s" title="%s" />', | 566 | '<img alt="[%s]" src="/@@/%s" title="%s" />', alt, icon, title) |
165 | 565 | alt, icon, title) | ||
167 | 566 | 567 | ||
168 | 567 | def composeIconList(self): | 568 | def composeIconList(self): |
169 | 568 | """List icons that should be shown for this upload.""" | 569 | """List icons that should be shown for this upload.""" |
170 | @@ -599,9 +600,5 @@ | |||
171 | 599 | icon_string = structured('\n'.join(['%s'] * len(icons)), *icons) | 600 | icon_string = structured('\n'.join(['%s'] * len(icons)), *icons) |
172 | 600 | link = self.composeNameAndChangesLink() | 601 | link = self.composeNameAndChangesLink() |
173 | 601 | return structured( | 602 | return structured( |
179 | 602 | """<div id="%s"> | 603 | """<div id="%s"> %s %s (%s)</div>""", |
175 | 603 | %s | ||
176 | 604 | %s | ||
177 | 605 | (%s) | ||
178 | 606 | </div>""", | ||
180 | 607 | iconlist_id, icon_string, link, self.displayarchs).escapedtext | 604 | iconlist_id, icon_string, link, self.displayarchs).escapedtext |
181 | 608 | 605 | ||
182 | === modified file 'lib/lp/soyuz/browser/tests/test_queue.py' | |||
183 | --- lib/lp/soyuz/browser/tests/test_queue.py 2012-12-12 04:59:52 +0000 | |||
184 | +++ lib/lp/soyuz/browser/tests/test_queue.py 2013-01-03 00:18:23 +0000 | |||
185 | @@ -6,6 +6,8 @@ | |||
186 | 6 | __metaclass__ = type | 6 | __metaclass__ = type |
187 | 7 | 7 | ||
188 | 8 | from lxml import html | 8 | from lxml import html |
189 | 9 | from storm.store import Store | ||
190 | 10 | from testtools.matchers import Equals | ||
191 | 9 | import transaction | 11 | import transaction |
192 | 10 | from zope.component import ( | 12 | from zope.component import ( |
193 | 11 | getUtility, | 13 | getUtility, |
194 | @@ -26,12 +28,14 @@ | |||
195 | 26 | login_person, | 28 | login_person, |
196 | 27 | logout, | 29 | logout, |
197 | 28 | person_logged_in, | 30 | person_logged_in, |
198 | 31 | StormStatementRecorder, | ||
199 | 29 | TestCaseWithFactory, | 32 | TestCaseWithFactory, |
200 | 30 | ) | 33 | ) |
201 | 31 | from lp.testing.layers import ( | 34 | from lp.testing.layers import ( |
202 | 32 | LaunchpadFunctionalLayer, | 35 | LaunchpadFunctionalLayer, |
203 | 33 | LaunchpadZopelessLayer, | 36 | LaunchpadZopelessLayer, |
204 | 34 | ) | 37 | ) |
205 | 38 | from lp.testing.matchers import HasQueryCount | ||
206 | 35 | from lp.testing.sampledata import ADMIN_EMAIL | 39 | from lp.testing.sampledata import ADMIN_EMAIL |
207 | 36 | from lp.testing.views import create_initialized_view | 40 | from lp.testing.views import create_initialized_view |
208 | 37 | 41 | ||
209 | @@ -361,14 +365,31 @@ | |||
210 | 361 | self.assertIn( | 365 | self.assertIn( |
211 | 362 | upload.package_copy_job.job.requester.displayname, html_text) | 366 | upload.package_copy_job.job.requester.displayname, html_text) |
212 | 363 | 367 | ||
213 | 368 | def test_query_count(self): | ||
214 | 369 | login(ADMIN_EMAIL) | ||
215 | 370 | uploads = [] | ||
216 | 371 | distroseries = self.factory.makeDistroSeries() | ||
217 | 372 | for i in range(5): | ||
218 | 373 | uploads.append(self.factory.makeSourcePackageUpload(distroseries)) | ||
219 | 374 | uploads.append(self.factory.makeCustomPackageUpload(distroseries)) | ||
220 | 375 | uploads.append(self.factory.makeCopyJobPackageUpload(distroseries)) | ||
221 | 376 | for i in range(15): | ||
222 | 377 | uploads.append(self.factory.makeBuildPackageUpload(distroseries)) | ||
223 | 378 | queue_admin = self.factory.makeArchiveAdmin(distroseries.main_archive) | ||
224 | 379 | Store.of(uploads[0]).invalidate() | ||
225 | 380 | with person_logged_in(queue_admin): | ||
226 | 381 | with StormStatementRecorder() as recorder: | ||
227 | 382 | view = self.makeView(distroseries, queue_admin) | ||
228 | 383 | view() | ||
229 | 384 | self.assertThat(recorder, HasQueryCount(Equals(52))) | ||
230 | 385 | |||
231 | 364 | 386 | ||
232 | 365 | class TestCompletePackageUpload(TestCaseWithFactory): | 387 | class TestCompletePackageUpload(TestCaseWithFactory): |
233 | 366 | 388 | ||
234 | 367 | layer = LaunchpadZopelessLayer | 389 | layer = LaunchpadZopelessLayer |
235 | 368 | 390 | ||
236 | 369 | def makeCompletePackageUpload(self, upload=None, build_upload_files=None, | 391 | def makeCompletePackageUpload(self, upload=None, build_upload_files=None, |
239 | 370 | source_upload_files=None, | 392 | source_upload_files=None, package_sets=None): |
238 | 371 | package_sets=None): | ||
240 | 372 | if upload is None: | 393 | if upload is None: |
241 | 373 | upload = self.factory.makeSourcePackageUpload() | 394 | upload = self.factory.makeSourcePackageUpload() |
242 | 374 | if build_upload_files is None: | 395 | if build_upload_files is None: |
243 | 375 | 396 | ||
244 | === modified file 'lib/lp/soyuz/configure.zcml' | |||
245 | --- lib/lp/soyuz/configure.zcml 2012-12-10 06:27:12 +0000 | |||
246 | +++ lib/lp/soyuz/configure.zcml 2013-01-03 00:18:23 +0000 | |||
247 | @@ -192,7 +192,8 @@ | |||
248 | 192 | section_name | 192 | section_name |
249 | 193 | components | 193 | components |
250 | 194 | searchable_names | 194 | searchable_names |
252 | 195 | searchable_versions"/> | 195 | searchable_versions |
253 | 196 | changes_file_id"/> | ||
254 | 196 | <require | 197 | <require |
255 | 197 | permission="launchpad.Edit" | 198 | permission="launchpad.Edit" |
256 | 198 | attributes=" | 199 | attributes=" |
257 | 199 | 200 | ||
258 | === modified file 'lib/lp/soyuz/model/archive.py' | |||
259 | --- lib/lp/soyuz/model/archive.py 2012-11-13 13:43:31 +0000 | |||
260 | +++ lib/lp/soyuz/model/archive.py 2013-01-03 00:18:23 +0000 | |||
261 | @@ -1554,7 +1554,7 @@ | |||
262 | 1554 | PackageUploadSource.sourcepackagereleaseID, | 1554 | PackageUploadSource.sourcepackagereleaseID, |
263 | 1555 | PackageUploadSource.packageuploadID == PackageUpload.id, | 1555 | PackageUploadSource.packageuploadID == PackageUpload.id, |
264 | 1556 | PackageUpload.status == PackageUploadStatus.DONE, | 1556 | PackageUpload.status == PackageUploadStatus.DONE, |
266 | 1557 | PackageUpload.changesfileID == LibraryFileAlias.id, | 1557 | PackageUpload.changes_file_id == LibraryFileAlias.id, |
267 | 1558 | ) | 1558 | ) |
268 | 1559 | else: | 1559 | else: |
269 | 1560 | raise NotFoundError(filename) | 1560 | raise NotFoundError(filename) |
270 | 1561 | 1561 | ||
271 | === modified file 'lib/lp/soyuz/model/binarypackagebuild.py' | |||
272 | --- lib/lp/soyuz/model/binarypackagebuild.py 2012-11-15 01:42:33 +0000 | |||
273 | +++ lib/lp/soyuz/model/binarypackagebuild.py 2013-01-03 00:18:23 +0000 | |||
274 | @@ -180,7 +180,7 @@ | |||
275 | 180 | Join(PackageUpload, | 180 | Join(PackageUpload, |
276 | 181 | PackageUploadBuild.packageuploadID == PackageUpload.id), | 181 | PackageUploadBuild.packageuploadID == PackageUpload.id), |
277 | 182 | Join(LibraryFileAlias, | 182 | Join(LibraryFileAlias, |
279 | 183 | LibraryFileAlias.id == PackageUpload.changesfileID), | 183 | LibraryFileAlias.id == PackageUpload.changes_file_id), |
280 | 184 | Join(LibraryFileContent, | 184 | Join(LibraryFileContent, |
281 | 185 | LibraryFileContent.id == LibraryFileAlias.contentID), | 185 | LibraryFileContent.id == LibraryFileAlias.contentID), |
282 | 186 | ] | 186 | ] |
283 | 187 | 187 | ||
284 | === modified file 'lib/lp/soyuz/model/publishing.py' | |||
285 | --- lib/lp/soyuz/model/publishing.py 2012-11-15 23:28:13 +0000 | |||
286 | +++ lib/lp/soyuz/model/publishing.py 2013-01-03 00:18:23 +0000 | |||
287 | @@ -1,8 +1,6 @@ | |||
288 | 1 | # Copyright 2009-2012 Canonical Ltd. This software is licensed under the | 1 | # Copyright 2009-2012 Canonical Ltd. This software is licensed under the |
289 | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). |
290 | 3 | 3 | ||
291 | 4 | # pylint: disable-msg=E0611,W0212 | ||
292 | 5 | |||
293 | 6 | __metaclass__ = type | 4 | __metaclass__ = type |
294 | 7 | 5 | ||
295 | 8 | __all__ = [ | 6 | __all__ = [ |
296 | @@ -1560,12 +1558,12 @@ | |||
297 | 1560 | packageupload=packageupload) | 1558 | packageupload=packageupload) |
298 | 1561 | DistributionSourcePackage.ensure(pub) | 1559 | DistributionSourcePackage.ensure(pub) |
299 | 1562 | 1560 | ||
306 | 1563 | if create_dsd_job: | 1561 | if create_dsd_job and archive == distroseries.main_archive: |
307 | 1564 | if archive == distroseries.main_archive: | 1562 | dsd_job_source = getUtility(IDistroSeriesDifferenceJobSource) |
308 | 1565 | dsd_job_source = getUtility(IDistroSeriesDifferenceJobSource) | 1563 | dsd_job_source.createForPackagePublication( |
309 | 1566 | dsd_job_source.createForPackagePublication( | 1564 | distroseries, sourcepackagerelease.sourcepackagename, pocket) |
310 | 1567 | distroseries, sourcepackagerelease.sourcepackagename, | 1565 | Store.of(sourcepackagerelease).flush() |
311 | 1568 | pocket) | 1566 | del get_property_cache(sourcepackagerelease).published_archives |
312 | 1569 | return pub | 1567 | return pub |
313 | 1570 | 1568 | ||
314 | 1571 | def getBuildsForSourceIds(self, source_publication_ids, archive=None, | 1569 | def getBuildsForSourceIds(self, source_publication_ids, archive=None, |
315 | @@ -1802,8 +1800,7 @@ | |||
316 | 1802 | 1800 | ||
317 | 1803 | return result_set | 1801 | return result_set |
318 | 1804 | 1802 | ||
321 | 1805 | def getBinaryPublicationsForSources(self, | 1803 | def getBinaryPublicationsForSources(self, one_or_more_source_publications): |
320 | 1806 | one_or_more_source_publications): | ||
322 | 1807 | """See `IPublishingSet`.""" | 1804 | """See `IPublishingSet`.""" |
323 | 1808 | source_publication_ids = self._extractIDs( | 1805 | source_publication_ids = self._extractIDs( |
324 | 1809 | one_or_more_source_publications) | 1806 | one_or_more_source_publications) |
325 | @@ -1850,11 +1847,8 @@ | |||
326 | 1850 | 1847 | ||
327 | 1851 | def getChangesFilesForSources(self, one_or_more_source_publications): | 1848 | def getChangesFilesForSources(self, one_or_more_source_publications): |
328 | 1852 | """See `IPublishingSet`.""" | 1849 | """See `IPublishingSet`.""" |
334 | 1853 | # Import PackageUpload and PackageUploadSource locally | 1850 | # Avoid circular imports. |
335 | 1854 | # to avoid circular imports, since PackageUpload uses | 1851 | from lp.soyuz.model.queue import PackageUpload, PackageUploadSource |
331 | 1855 | # SourcePackagePublishingHistory. | ||
332 | 1856 | from lp.soyuz.model.queue import ( | ||
333 | 1857 | PackageUpload, PackageUploadSource) | ||
336 | 1858 | 1852 | ||
337 | 1859 | source_publication_ids = self._extractIDs( | 1853 | source_publication_ids = self._extractIDs( |
338 | 1860 | one_or_more_source_publications) | 1854 | one_or_more_source_publications) |
339 | @@ -1864,7 +1858,7 @@ | |||
340 | 1864 | (SourcePackagePublishingHistory, PackageUpload, | 1858 | (SourcePackagePublishingHistory, PackageUpload, |
341 | 1865 | SourcePackageRelease, LibraryFileAlias, LibraryFileContent), | 1859 | SourcePackageRelease, LibraryFileAlias, LibraryFileContent), |
342 | 1866 | LibraryFileContent.id == LibraryFileAlias.contentID, | 1860 | LibraryFileContent.id == LibraryFileAlias.contentID, |
344 | 1867 | LibraryFileAlias.id == PackageUpload.changesfileID, | 1861 | LibraryFileAlias.id == PackageUpload.changes_file_id, |
345 | 1868 | PackageUpload.id == PackageUploadSource.packageuploadID, | 1862 | PackageUpload.id == PackageUploadSource.packageuploadID, |
346 | 1869 | PackageUpload.status == PackageUploadStatus.DONE, | 1863 | PackageUpload.status == PackageUploadStatus.DONE, |
347 | 1870 | PackageUpload.distroseriesID == | 1864 | PackageUpload.distroseriesID == |
348 | @@ -1882,14 +1876,13 @@ | |||
349 | 1882 | 1876 | ||
350 | 1883 | def getChangesFileLFA(self, spr): | 1877 | def getChangesFileLFA(self, spr): |
351 | 1884 | """See `IPublishingSet`.""" | 1878 | """See `IPublishingSet`.""" |
354 | 1885 | # Import PackageUpload and PackageUploadSource locally to avoid | 1879 | # Avoid circular imports. |
353 | 1886 | # circular imports. | ||
355 | 1887 | from lp.soyuz.model.queue import PackageUpload, PackageUploadSource | 1880 | from lp.soyuz.model.queue import PackageUpload, PackageUploadSource |
356 | 1888 | 1881 | ||
357 | 1889 | store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR) | 1882 | store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR) |
358 | 1890 | result_set = store.find( | 1883 | result_set = store.find( |
359 | 1891 | LibraryFileAlias, | 1884 | LibraryFileAlias, |
361 | 1892 | LibraryFileAlias.id == PackageUpload.changesfileID, | 1885 | LibraryFileAlias.id == PackageUpload.changes_file_id, |
362 | 1893 | PackageUpload.status == PackageUploadStatus.DONE, | 1886 | PackageUpload.status == PackageUploadStatus.DONE, |
363 | 1894 | PackageUpload.distroseriesID == spr.upload_distroseries.id, | 1887 | PackageUpload.distroseriesID == spr.upload_distroseries.id, |
364 | 1895 | PackageUpload.archiveID == spr.upload_archive.id, | 1888 | PackageUpload.archiveID == spr.upload_archive.id, |
365 | 1896 | 1889 | ||
366 | === modified file 'lib/lp/soyuz/model/queue.py' | |||
367 | --- lib/lp/soyuz/model/queue.py 2012-12-26 01:04:05 +0000 | |||
368 | +++ lib/lp/soyuz/model/queue.py 2013-01-03 00:18:23 +0000 | |||
369 | @@ -52,7 +52,10 @@ | |||
370 | 52 | from lp.registry.model.sourcepackagename import SourcePackageName | 52 | from lp.registry.model.sourcepackagename import SourcePackageName |
371 | 53 | from lp.services.auditor.client import AuditorClient | 53 | from lp.services.auditor.client import AuditorClient |
372 | 54 | from lp.services.config import config | 54 | from lp.services.config import config |
374 | 55 | from lp.services.database.bulk import load_referencing | 55 | from lp.services.database.bulk import ( |
375 | 56 | load_referencing, | ||
376 | 57 | load_related, | ||
377 | 58 | ) | ||
378 | 56 | from lp.services.database.constants import UTC_NOW | 59 | from lp.services.database.constants import UTC_NOW |
379 | 57 | from lp.services.database.datetimecol import UtcDateTimeCol | 60 | from lp.services.database.datetimecol import UtcDateTimeCol |
380 | 58 | from lp.services.database.decoratedresultset import DecoratedResultSet | 61 | from lp.services.database.decoratedresultset import DecoratedResultSet |
381 | @@ -112,6 +115,7 @@ | |||
382 | 112 | QueueStateWriteProtectedError, | 115 | QueueStateWriteProtectedError, |
383 | 113 | ) | 116 | ) |
384 | 114 | from lp.soyuz.interfaces.section import ISectionSet | 117 | from lp.soyuz.interfaces.section import ISectionSet |
385 | 118 | from lp.soyuz.model.distroarchseries import DistroArchSeries | ||
386 | 115 | from lp.soyuz.pas import BuildDaemonPackagesArchSpecific | 119 | from lp.soyuz.pas import BuildDaemonPackagesArchSpecific |
387 | 116 | 120 | ||
388 | 117 | # There are imports below in PackageUploadCustom for various bits | 121 | # There are imports below in PackageUploadCustom for various bits |
389 | @@ -174,8 +178,8 @@ | |||
390 | 174 | dbName='pocket', unique=False, notNull=True, | 178 | dbName='pocket', unique=False, notNull=True, |
391 | 175 | schema=PackagePublishingPocket) | 179 | schema=PackagePublishingPocket) |
392 | 176 | 180 | ||
395 | 177 | changesfile = ForeignKey( | 181 | changes_file_id = Int(name='changesfile') |
396 | 178 | dbName='changesfile', foreignKey="LibraryFileAlias", notNull=False) | 182 | changesfile = Reference(changes_file_id, 'LibraryFileAlias.id') |
397 | 179 | 183 | ||
398 | 180 | archive = ForeignKey(dbName="archive", foreignKey="Archive", notNull=True) | 184 | archive = ForeignKey(dbName="archive", foreignKey="Archive", notNull=True) |
399 | 181 | 185 | ||
400 | @@ -814,15 +818,16 @@ | |||
401 | 814 | 818 | ||
402 | 815 | def addSource(self, spr): | 819 | def addSource(self, spr): |
403 | 816 | """See `IPackageUpload`.""" | 820 | """See `IPackageUpload`.""" |
404 | 817 | del get_property_cache(self).sources | ||
405 | 818 | self.addSearchableNames([spr.name]) | 821 | self.addSearchableNames([spr.name]) |
406 | 819 | self.addSearchableVersions([spr.version]) | 822 | self.addSearchableVersions([spr.version]) |
408 | 820 | return PackageUploadSource( | 823 | pus = PackageUploadSource( |
409 | 821 | packageupload=self, sourcepackagerelease=spr.id) | 824 | packageupload=self, sourcepackagerelease=spr.id) |
410 | 825 | Store.of(self).flush() | ||
411 | 826 | del get_property_cache(self).sources | ||
412 | 827 | return pus | ||
413 | 822 | 828 | ||
414 | 823 | def addBuild(self, build): | 829 | def addBuild(self, build): |
415 | 824 | """See `IPackageUpload`.""" | 830 | """See `IPackageUpload`.""" |
416 | 825 | del get_property_cache(self).builds | ||
417 | 826 | names = [build.source_package_release.name] | 831 | names = [build.source_package_release.name] |
418 | 827 | versions = [] | 832 | versions = [] |
419 | 828 | for bpr in build.binarypackages: | 833 | for bpr in build.binarypackages: |
420 | @@ -830,15 +835,20 @@ | |||
421 | 830 | versions.append(bpr.version) | 835 | versions.append(bpr.version) |
422 | 831 | self.addSearchableNames(names) | 836 | self.addSearchableNames(names) |
423 | 832 | self.addSearchableVersions(versions) | 837 | self.addSearchableVersions(versions) |
425 | 833 | return PackageUploadBuild(packageupload=self, build=build.id) | 838 | pub = PackageUploadBuild(packageupload=self, build=build.id) |
426 | 839 | Store.of(self).flush() | ||
427 | 840 | del get_property_cache(self).builds | ||
428 | 841 | return pub | ||
429 | 834 | 842 | ||
430 | 835 | def addCustom(self, library_file, custom_type): | 843 | def addCustom(self, library_file, custom_type): |
431 | 836 | """See `IPackageUpload`.""" | 844 | """See `IPackageUpload`.""" |
432 | 837 | del get_property_cache(self).customfiles | ||
433 | 838 | self.addSearchableNames([library_file.filename]) | 845 | self.addSearchableNames([library_file.filename]) |
435 | 839 | return PackageUploadCustom( | 846 | puc = PackageUploadCustom( |
436 | 840 | packageupload=self, libraryfilealias=library_file.id, | 847 | packageupload=self, libraryfilealias=library_file.id, |
437 | 841 | customformat=custom_type) | 848 | customformat=custom_type) |
438 | 849 | Store.of(self).flush() | ||
439 | 850 | del get_property_cache(self).customfiles | ||
440 | 851 | return puc | ||
441 | 842 | 852 | ||
442 | 843 | def isPPA(self): | 853 | def isPPA(self): |
443 | 844 | """See `IPackageUpload`.""" | 854 | """See `IPackageUpload`.""" |
444 | @@ -1665,18 +1675,7 @@ | |||
445 | 1665 | pucs = load_referencing( | 1675 | pucs = load_referencing( |
446 | 1666 | PackageUploadCustom, rows, ["packageuploadID"]) | 1676 | PackageUploadCustom, rows, ["packageuploadID"]) |
447 | 1667 | 1677 | ||
460 | 1668 | for pu in rows: | 1678 | prefill_packageupload_caches(rows, puses, pubs, pucs) |
449 | 1669 | cache = get_property_cache(pu) | ||
450 | 1670 | cache.sources = [] | ||
451 | 1671 | cache.builds = [] | ||
452 | 1672 | cache.customfiles = [] | ||
453 | 1673 | |||
454 | 1674 | for pus in puses: | ||
455 | 1675 | get_property_cache(pus.packageupload).sources.append(pus) | ||
456 | 1676 | for pub in pubs: | ||
457 | 1677 | get_property_cache(pub.packageupload).builds.append(pub) | ||
458 | 1678 | for puc in pucs: | ||
459 | 1679 | get_property_cache(puc.packageupload).customfiles.append(puc) | ||
461 | 1680 | 1679 | ||
462 | 1681 | return DecoratedResultSet(query, pre_iter_hook=preload_hook) | 1680 | return DecoratedResultSet(query, pre_iter_hook=preload_hook) |
463 | 1682 | 1681 | ||
464 | @@ -1704,3 +1703,43 @@ | |||
465 | 1704 | return IStore(PackageUpload).find( | 1703 | return IStore(PackageUpload).find( |
466 | 1705 | PackageUpload, | 1704 | PackageUpload, |
467 | 1706 | PackageUpload.package_copy_job_id.is_in(pcj_ids)) | 1705 | PackageUpload.package_copy_job_id.is_in(pcj_ids)) |
468 | 1706 | |||
469 | 1707 | |||
470 | 1708 | def prefill_packageupload_caches(uploads, puses, pubs, pucs): | ||
471 | 1709 | # Circular imports. | ||
472 | 1710 | from lp.soyuz.model.archive import Archive | ||
473 | 1711 | from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild | ||
474 | 1712 | from lp.soyuz.model.publishing import SourcePackagePublishingHistory | ||
475 | 1713 | from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease | ||
476 | 1714 | |||
477 | 1715 | for pu in uploads: | ||
478 | 1716 | cache = get_property_cache(pu) | ||
479 | 1717 | cache.sources = [] | ||
480 | 1718 | cache.builds = [] | ||
481 | 1719 | cache.customfiles = [] | ||
482 | 1720 | |||
483 | 1721 | for pus in puses: | ||
484 | 1722 | get_property_cache(pus.packageupload).sources.append(pus) | ||
485 | 1723 | for pub in pubs: | ||
486 | 1724 | get_property_cache(pub.packageupload).builds.append(pub) | ||
487 | 1725 | for puc in pucs: | ||
488 | 1726 | get_property_cache(puc.packageupload).customfiles.append(puc) | ||
489 | 1727 | |||
490 | 1728 | source_sprs = load_related( | ||
491 | 1729 | SourcePackageRelease, puses, ['sourcepackagereleaseID']) | ||
492 | 1730 | bpbs = load_related(BinaryPackageBuild, pubs, ['buildID']) | ||
493 | 1731 | load_related(DistroArchSeries, bpbs, ['distro_arch_series_id']) | ||
494 | 1732 | binary_sprs = load_related( | ||
495 | 1733 | SourcePackageRelease, bpbs, ['source_package_release_id']) | ||
496 | 1734 | sprs = source_sprs + binary_sprs | ||
497 | 1735 | |||
498 | 1736 | load_related(SourcePackageName, sprs, ['sourcepackagenameID']) | ||
499 | 1737 | load_related(LibraryFileAlias, uploads, ['changes_file_id']) | ||
500 | 1738 | publications = load_referencing( | ||
501 | 1739 | SourcePackagePublishingHistory, sprs, ['sourcepackagereleaseID']) | ||
502 | 1740 | load_related(Archive, publications, ['archiveID']) | ||
503 | 1741 | for spr_cache in sprs: | ||
504 | 1742 | get_property_cache(spr_cache).published_archives = [] | ||
505 | 1743 | for publication in publications: | ||
506 | 1744 | spr_cache = get_property_cache(publication.sourcepackagerelease) | ||
507 | 1745 | spr_cache.published_archives.append(publication.archive) | ||
508 | 1707 | 1746 | ||
509 | === modified file 'lib/lp/soyuz/model/sourcepackagerelease.py' | |||
510 | --- lib/lp/soyuz/model/sourcepackagerelease.py 2012-11-26 12:53:30 +0000 | |||
511 | +++ lib/lp/soyuz/model/sourcepackagerelease.py 2013-01-03 00:18:23 +0000 | |||
512 | @@ -1,8 +1,6 @@ | |||
513 | 1 | # Copyright 2009-2012 Canonical Ltd. This software is licensed under the | 1 | # Copyright 2009-2012 Canonical Ltd. This software is licensed under the |
514 | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). |
515 | 3 | 3 | ||
516 | 4 | # pylint: disable-msg=E0611,W0212 | ||
517 | 5 | |||
518 | 6 | __metaclass__ = type | 4 | __metaclass__ = type |
519 | 7 | __all__ = [ | 5 | __all__ = [ |
520 | 8 | 'SourcePackageRelease', | 6 | 'SourcePackageRelease', |
521 | @@ -291,14 +289,13 @@ | |||
522 | 291 | @property | 289 | @property |
523 | 292 | def current_publishings(self): | 290 | def current_publishings(self): |
524 | 293 | """See ISourcePackageRelease.""" | 291 | """See ISourcePackageRelease.""" |
527 | 294 | from lp.soyuz.model.distroseriessourcepackagerelease \ | 292 | from lp.soyuz.model.distroseriessourcepackagerelease import ( |
528 | 295 | import DistroSeriesSourcePackageRelease | 293 | DistroSeriesSourcePackageRelease) |
529 | 296 | return [DistroSeriesSourcePackageRelease(pub.distroseries, self) | 294 | return [DistroSeriesSourcePackageRelease(pub.distroseries, self) |
530 | 297 | for pub in self.publishings] | 295 | for pub in self.publishings] |
531 | 298 | 296 | ||
533 | 299 | @property | 297 | @cachedproperty |
534 | 300 | def published_archives(self): | 298 | def published_archives(self): |
535 | 301 | """See `ISourcePackageRelease`.""" | ||
536 | 302 | archives = set( | 299 | archives = set( |
537 | 303 | pub.archive for pub in self.publishings.prejoin(['archive'])) | 300 | pub.archive for pub in self.publishings.prejoin(['archive'])) |
538 | 304 | return sorted(archives, key=operator.attrgetter('id')) | 301 | return sorted(archives, key=operator.attrgetter('id')) |
539 | @@ -503,7 +500,7 @@ | |||
540 | 503 | Join(PackageUpload, | 500 | Join(PackageUpload, |
541 | 504 | PackageUploadSource.packageuploadID == PackageUpload.id), | 501 | PackageUploadSource.packageuploadID == PackageUpload.id), |
542 | 505 | Join(LibraryFileAlias, | 502 | Join(LibraryFileAlias, |
544 | 506 | LibraryFileAlias.id == PackageUpload.changesfileID), | 503 | LibraryFileAlias.id == PackageUpload.changes_file_id), |
545 | 507 | Join(LibraryFileContent, | 504 | Join(LibraryFileContent, |
546 | 508 | LibraryFileContent.id == LibraryFileAlias.contentID), | 505 | LibraryFileContent.id == LibraryFileAlias.contentID), |
547 | 509 | ] | 506 | ] |
548 | 510 | 507 | ||
549 | === modified file 'lib/lp/soyuz/tests/test_packageupload.py' | |||
550 | --- lib/lp/soyuz/tests/test_packageupload.py 2012-12-17 05:10:29 +0000 | |||
551 | +++ lib/lp/soyuz/tests/test_packageupload.py 2013-01-03 00:18:23 +0000 | |||
552 | @@ -1292,3 +1292,16 @@ | |||
553 | 1292 | "customformat": "raw-translations", | 1292 | "customformat": "raw-translations", |
554 | 1293 | } | 1293 | } |
555 | 1294 | self.assertEqual(expected_custom, ws_binaries[-1]) | 1294 | self.assertEqual(expected_custom, ws_binaries[-1]) |
556 | 1295 | |||
557 | 1296 | def test_getPackageUploads_query_count(self): | ||
558 | 1297 | person = self.makeQueueAdmin([self.universe]) | ||
559 | 1298 | uploads = [] | ||
560 | 1299 | for i in range(5): | ||
561 | 1300 | upload, _ = self.makeBinaryPackageUpload( | ||
562 | 1301 | person, component=self.universe) | ||
563 | 1302 | uploads.append(upload) | ||
564 | 1303 | ws_distroseries = self.load(self.distroseries, person) | ||
565 | 1304 | IStore(uploads[0].__class__).invalidate() | ||
566 | 1305 | with StormStatementRecorder() as recorder: | ||
567 | 1306 | ws_distroseries.getPackageUploads() | ||
568 | 1307 | self.assertThat(recorder, HasQueryCount(Equals(27))) |
143 + sprs = prefill_packageupload_caches(
144 + uploads, packageuploadsources, pubs, pucs)
Is this not already done by the underlying model method?
444 +def prefill_packageupload_caches(uploads, puses, pubs, pucs):
My eyes are burning.
Oh, god, they burn.
Can you add a bit of VWS, perhaps? There are several logical sections which could be separated.
468 + for spr in sprs:
469 + get_property_cache(spr).published_archives = []
The other three caches get away without this. Are they initialised elsewhere? Perhaps move them into this function as well.
471 + spr = get_property_cache(publication.sourcepackagerelease)
That's no SPR.
506 + @cachedproperty
507 + def published_archives(self):
508 + return list(self._published_archives())
There's no need for a separate _published_archives here, as nothing needs the actual ResultSet. Just inline it.
Additionally, you might want to invalidate this sometimes, though it's not used much so it's not hugely important. But IIRC SPPH creation is well encapsulated in PS.newSourcePublication,