Merge lp:~wgrant/launchpad/bug-675621-packages-binary-scaling into lp:launchpad

Proposed by William Grant
Status: Merged
Approved by: Abel Deuring
Approved revision: no longer in the source branch.
Merged at revision: 12031
Proposed branch: lp:~wgrant/launchpad/bug-675621-packages-binary-scaling
Merge into: lp:launchpad
Diff against target: 298 lines (+76/-46)
4 files modified
lib/lp/soyuz/browser/tests/test_archive_packages.py (+18/-17)
lib/lp/soyuz/interfaces/publishing.py (+12/-5)
lib/lp/soyuz/model/publishing.py (+24/-13)
lib/lp/testing/factory.py (+22/-11)
To merge this branch: bzr merge lp:~wgrant/launchpad/bug-675621-packages-binary-scaling
Reviewer Review Type Date Requested Status
Abel Deuring (community) code Approve
Review via email: mp+42607@code.launchpad.net

Commit message

[r=adeuring][ui=none][bug=669717,675621] Fix getBuildStatusSummariesBySourceIds to have a constant query count. Landed by henninge.

Description of the change

Archive:+index and Archive:+packages have been timing out a bit lately, with lots of queries for single BuildFarmJobs and PackageBuilds. This branch fixes the culprit, PublishingSet.getBuildStatusSummariesForSourceIdsAndArchive, by making it prepopulate the cache, eliminating those queries and making the pages' query count constant.

I also fixed test_binary_query_counts to actually test scaling. This necessitated a change in the factory to coerce makeBinaryPackagePublishingHistory into creating a BinaryPackageBuild in the same context (as builds from other contexts are omitted from the PPA index). This same bug was making the baseline artificially low, so it has been increased to the real value.

And there are some lint fixes too.

To post a comment you must log in.
Revision history for this message
Abel Deuring (adeuring) :
review: Approve (code)

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1=== modified file 'lib/lp/soyuz/browser/tests/test_archive_packages.py'
2--- lib/lp/soyuz/browser/tests/test_archive_packages.py 2010-11-10 05:53:57 +0000
3+++ lib/lp/soyuz/browser/tests/test_archive_packages.py 2010-12-03 11:47:00 +0000
4@@ -133,9 +133,10 @@
5 viewer = self.factory.makePerson(password="test")
6 browser = self.getUserBrowser(user=viewer)
7 with person_logged_in(viewer):
8- # The baseline has one package, because otherwise the short-circuit
9- # prevents the packages iteration happening at all and we're not
10- # actually measuring scaling appropriately.
11+ # The baseline has one package, because otherwise the
12+ # short-circuit prevents the packages iteration happening at
13+ # all and we're not actually measuring scaling
14+ # appropriately.
15 self.factory.makeSourcePackagePublishingHistory(archive=ppa)
16 url = canonical_url(ppa) + "/+packages"
17 browser.open(url)
18@@ -144,11 +145,11 @@
19 # We scale with 1 query per distro series because of
20 # getCurrentSourceReleases.
21 expected_count += 1
22- # We need a fuzz of one because if the test is the first to run a
23+ # We need a fuzz of one because if the test is the first to run a
24 # credentials lookup is done as well (and accrued to the collector).
25 expected_count += 1
26- # Use all new objects - avoids caching issues invalidating the gathered
27- # metrics.
28+ # Use all new objects - avoids caching issues invalidating the
29+ # gathered metrics.
30 login(ADMIN_EMAIL)
31 ppa = self.factory.makeArchive()
32 viewer = self.factory.makePerson(password="test")
33@@ -164,7 +165,7 @@
34 self.assertThat(collector, HasQueryCount(LessThan(expected_count)))
35
36 def test_binary_query_counts(self):
37- query_baseline = 26
38+ query_baseline = 40
39 # Assess the baseline.
40 collector = QueryCollector()
41 collector.register()
42@@ -173,27 +174,27 @@
43 viewer = self.factory.makePerson(password="test")
44 browser = self.getUserBrowser(user=viewer)
45 with person_logged_in(viewer):
46- # The baseline has one package, because otherwise the short-circuit
47- # prevents the packages iteration happening at all and we're not
48- # actually measuring scaling appropriately.
49- self.factory.makeBinaryPackagePublishingHistory(archive=ppa)
50+ # The baseline has one package, because otherwise the
51+ # short-circuit prevents the packages iteration happening at
52+ # all and we're not actually measuring scaling
53+ # appropriately.
54+ pkg = self.factory.makeBinaryPackagePublishingHistory(
55+ archive=ppa)
56 url = canonical_url(ppa) + "/+packages"
57 browser.open(url)
58 self.assertThat(collector, HasQueryCount(
59 MatchesAny(LessThan(query_baseline), Equals(query_baseline))))
60 expected_count = collector.count
61- # Use all new objects - avoids caching issues invalidating the gathered
62- # metrics.
63+ # Use all new objects - avoids caching issues invalidating the
64+ # gathered metrics.
65 login(ADMIN_EMAIL)
66 ppa = self.factory.makeArchive()
67 viewer = self.factory.makePerson(password="test")
68 browser = self.getUserBrowser(user=viewer)
69 with person_logged_in(viewer):
70- for i in range(2):
71+ for i in range(3):
72 pkg = self.factory.makeBinaryPackagePublishingHistory(
73- archive=ppa)
74- self.factory.makeBinaryPackagePublishingHistory(archive=ppa,
75- distroarchseries=pkg.distroarchseries)
76+ archive=ppa, distroarchseries=pkg.distroarchseries)
77 url = canonical_url(ppa) + "/+packages"
78 browser.open(url)
79 self.assertThat(collector, HasQueryCount(
80
81=== modified file 'lib/lp/soyuz/interfaces/publishing.py'
82--- lib/lp/soyuz/interfaces/publishing.py 2010-11-10 04:25:52 +0000
83+++ lib/lp/soyuz/interfaces/publishing.py 2010-12-03 11:47:00 +0000
84@@ -561,7 +561,8 @@
85 It is changed only if the argument is not None.
86
87 Return the overridden publishing record, either a
88- `ISourcePackagePublishingHistory` or `IBinaryPackagePublishingHistory`.
89+ `ISourcePackagePublishingHistory` or
90+ `IBinaryPackagePublishingHistory`.
91 """
92
93 def copyTo(distroseries, pocket, archive):
94@@ -788,7 +789,8 @@
95 It is changed only if the argument is not None.
96
97 Return the overridden publishing record, either a
98- `ISourcePackagePublishingHistory` or `IBinaryPackagePublishingHistory`.
99+ `ISourcePackagePublishingHistory` or
100+ `IBinaryPackagePublishingHistory`.
101 """
102
103 def copyTo(distroseries, pocket, archive):
104@@ -896,7 +898,8 @@
105 binary publications.
106 """
107
108- def getBuildsForSourceIds(source_ids, archive=None, build_states=None):
109+ def getBuildsForSourceIds(source_ids, archive=None, build_states=None,
110+ need_build_farm_job=False):
111 """Return all builds related with each given source publication.
112
113 The returned ResultSet contains entries with the wanted `Build`s
114@@ -917,13 +920,17 @@
115 `SourcePackagePublishingHistory` object.
116 :type source_ids: ``list`` or `SourcePackagePublishingHistory`
117 :param archive: An optional archive with which to filter the source
118- ids.
119+ ids.
120 :type archive: `IArchive`
121 :param build_states: optional list of build states to which the
122 result will be limited. Defaults to all states if ommitted.
123 :type build_states: ``list`` or None
124+ :param need_build_farm_job: whether to include the `PackageBuild`
125+ and `BuildFarmJob` in the result.
126+ :type need_build_farm_job: bool
127 :return: a storm ResultSet containing tuples as
128- (`SourcePackagePublishingHistory`, `Build`, `DistroArchSeries`)
129+ (`SourcePackagePublishingHistory`, `Build`, `DistroArchSeries`,
130+ [`PackageBuild`, `BuildFarmJob` if need_build_farm_job])
131 :rtype: `storm.store.ResultSet`.
132 """
133
134
135=== modified file 'lib/lp/soyuz/model/publishing.py'
136--- lib/lp/soyuz/model/publishing.py 2010-11-11 11:55:53 +0000
137+++ lib/lp/soyuz/model/publishing.py 2010-12-03 11:47:00 +0000
138@@ -506,7 +506,7 @@
139 @staticmethod
140 def _convertBuilds(builds_for_sources):
141 """Convert from IPublishingSet getBuilds to SPPH getBuilds."""
142- return [build for source, build, arch in builds_for_sources]
143+ return [build[1] for build in builds_for_sources]
144
145 def getBuilds(self):
146 """See `ISourcePackagePublishingHistory`."""
147@@ -1331,7 +1331,8 @@
148 return pub
149
150 def getBuildsForSourceIds(
151- self, source_publication_ids, archive=None, build_states=None):
152+ self, source_publication_ids, archive=None, build_states=None,
153+ need_build_farm_job=False):
154 """See `IPublishingSet`."""
155 # Import Build and DistroArchSeries locally to avoid circular
156 # imports, since that Build uses SourcePackagePublishingHistory
157@@ -1404,16 +1405,22 @@
158 SourcePackagePublishingHistory,
159 BinaryPackageBuild,
160 DistroArchSeries,
161- )
162+ ) + ((PackageBuild, BuildFarmJob) if need_build_farm_job else ())
163
164 # Storm doesn't let us do builds_union.values('id') -
165 # ('Union' object has no attribute 'columns'). So instead
166 # we have to instantiate the objects just to get the id.
167 build_ids = [build.id for build in builds_union]
168
169+ prejoin_exprs = (
170+ BinaryPackageBuild.package_build == PackageBuild.id,
171+ PackageBuild.build_farm_job == BuildFarmJob.id,
172+ ) if need_build_farm_job else ()
173+
174 result_set = store.find(
175 find_spec, builds_for_distroseries_expr,
176- BinaryPackageBuild.id.is_in(build_ids))
177+ BinaryPackageBuild.id.is_in(build_ids),
178+ *prejoin_exprs)
179
180 return result_set.order_by(
181 SourcePackagePublishingHistory.id,
182@@ -1697,8 +1704,11 @@
183 return {}
184
185 store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
186+ # Find relevant builds while also getting PackageBuilds and
187+ # BuildFarmJobs into the cache. They're used later.
188 build_info = list(
189- self.getBuildsForSourceIds(source_ids, archive=archive))
190+ self.getBuildsForSourceIds(
191+ source_ids, archive=archive, need_build_farm_job=True))
192 source_pubs = set()
193 found_source_ids = set()
194 for row in build_info:
195@@ -1722,20 +1732,21 @@
196 source_build_statuses = {}
197 need_unpublished = set()
198 for source_pub in source_pubs:
199- source_builds = [build for build in build_info
200- if build[0].id == source_pub.id]
201- builds = SourcePackagePublishingHistory._convertBuilds(source_builds)
202+ source_builds = [
203+ build for build in build_info if build[0].id == source_pub.id]
204+ builds = SourcePackagePublishingHistory._convertBuilds(
205+ source_builds)
206 summary = binarypackages.getStatusSummaryForBuilds(builds)
207 source_build_statuses[source_pub.id] = summary
208
209 # If:
210 # 1. the SPPH is in an active publishing state, and
211 # 2. all the builds are fully-built, and
212- # 3. the SPPH is not being published in a rebuild/copy archive (in
213- # which case the binaries are not published)
214+ # 3. the SPPH is not being published in a rebuild/copy
215+ # archive (in which case the binaries are not published)
216 # 4. There are unpublished builds
217- # Then we augment the result with FULLYBUILT_PENDING and attach the
218- # unpublished builds.
219+ # Then we augment the result with FULLYBUILT_PENDING and
220+ # attach the unpublished builds.
221 if (source_pub.status in active_publishing_status and
222 summary['status'] == BuildSetStatus.FULLYBUILT and
223 not source_pub.archive.is_copy):
224@@ -1750,7 +1761,7 @@
225 for source_pub, builds in unpublished_per_source.items():
226 summary = {
227 'status': BuildSetStatus.FULLYBUILT_PENDING,
228- 'builds': builds
229+ 'builds': builds,
230 }
231 source_build_statuses[source_pub.id] = summary
232
233
234=== modified file 'lib/lp/testing/factory.py'
235--- lib/lp/testing/factory.py 2010-11-30 02:38:38 +0000
236+++ lib/lp/testing/factory.py 2010-12-03 11:47:00 +0000
237@@ -104,7 +104,6 @@
238 from lp.archiveuploader.uploadpolicy import BuildDaemonUploadPolicy
239 from lp.blueprints.enums import (
240 SpecificationDefinitionStatus,
241- SpecificationGoalStatus,
242 SpecificationPriority,
243 )
244 from lp.blueprints.interfaces.specification import ISpecificationSet
245@@ -2788,23 +2787,29 @@
246 archive = self.makeArchive()
247 else:
248 archive = source_package_release.upload_archive
249+ if processor is None:
250+ processor = self.makeProcessor()
251+ if distroarchseries is None:
252+ if source_package_release is not None:
253+ distroseries = source_package_release.upload_distroseries
254+ else:
255+ distroseries = self.makeDistroSeries()
256+ distroarchseries = self.makeDistroArchSeries(
257+ distroseries=distroseries,
258+ processorfamily=processor.family)
259+ if pocket is None:
260+ pocket = PackagePublishingPocket.RELEASE
261 if source_package_release is None:
262 multiverse = self.makeComponent(name='multiverse')
263 source_package_release = self.makeSourcePackageRelease(
264- archive, component=multiverse)
265+ archive, component=multiverse,
266+ distroseries=distroarchseries.distroseries)
267 self.makeSourcePackagePublishingHistory(
268 distroseries=source_package_release.upload_distroseries,
269- archive=archive, sourcepackagerelease=source_package_release)
270- if processor is None:
271- processor = self.makeProcessor()
272- if distroarchseries is None:
273- distroarchseries = self.makeDistroArchSeries(
274- distroseries=source_package_release.upload_distroseries,
275- processorfamily=processor.family)
276+ archive=archive, sourcepackagerelease=source_package_release,
277+ pocket=pocket)
278 if status is None:
279 status = BuildStatus.NEEDSBUILD
280- if pocket is None:
281- pocket = PackagePublishingPocket.RELEASE
282 if date_created is None:
283 date_created = self.getUniqueDate()
284 binary_package_build = getUtility(IBinaryPackageBuildSet).new(
285@@ -2925,7 +2930,13 @@
286 priority = PackagePublishingPriority.OPTIONAL
287
288 if binarypackagerelease is None:
289+ # Create a new BinaryPackageBuild and BinaryPackageRelease
290+ # in the same archive and suite.
291+ binarypackagebuild = self.makeBinaryPackageBuild(
292+ archive=archive, distroarchseries=distroarchseries,
293+ pocket=pocket)
294 binarypackagerelease = self.makeBinaryPackageRelease(
295+ build=binarypackagebuild,
296 component=component,
297 section_name=section_name,
298 priority=priority)