Merge lp:~stevenk/launchpad/populate-searchables-for-pu into lp:launchpad
- populate-searchables-for-pu
- Merge into devel
Status: | Merged | ||||||||
---|---|---|---|---|---|---|---|---|---|
Approved by: | Steve Kowalik | ||||||||
Approved revision: | no longer in the source branch. | ||||||||
Merged at revision: | 16371 | ||||||||
Proposed branch: | lp:~stevenk/launchpad/populate-searchables-for-pu | ||||||||
Merge into: | lp:launchpad | ||||||||
Diff against target: |
619 lines (+294/-56) 9 files modified
database/schema/security.cfg (+1/-0) lib/lp/registry/model/distroseries.py (+2/-3) lib/lp/scripts/garbo.py (+128/-0) lib/lp/scripts/tests/test_garbo.py (+49/-0) lib/lp/soyuz/configure.zcml (+3/-1) lib/lp/soyuz/interfaces/queue.py (+5/-2) lib/lp/soyuz/model/queue.py (+66/-48) lib/lp/soyuz/tests/test_packageupload.py (+39/-1) lib/lp/testing/factory.py (+1/-1) |
||||||||
To merge this branch: | bzr merge lp:~stevenk/launchpad/populate-searchables-for-pu | ||||||||
Related bugs: |
|
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
William Grant | code | Approve | |
Review via email: mp+138906@code.launchpad.net |
Commit message
Populate the new searchable_names and searchable_versions columns on PackageUpload.
Description of the change
Populate the new searchable_names and searchable_versions columns on PackageUpload. The queries the garbo job runs are pretty hideous, but they're temporary and actually quite performant -- dogfood is averaging 840 rows each loop through.
LoC justification is the garbo job is temporary and will be dropped when it's done, and after that we can delete most of IPackageUploadS
This can not land until https:/
Preview Diff
1 | === modified file 'database/schema/security.cfg' |
2 | --- database/schema/security.cfg 2012-12-03 21:56:19 +0000 |
3 | +++ database/schema/security.cfg 2012-12-14 00:18:23 +0000 |
4 | @@ -2253,6 +2253,7 @@ |
5 | public.oauthnonce = SELECT, DELETE |
6 | public.openidconsumerassociation = SELECT, DELETE |
7 | public.openidconsumernonce = SELECT, DELETE |
8 | +public.packageupload = SELECT, UPDATE |
9 | public.person = SELECT, DELETE |
10 | public.product = SELECT, UPDATE |
11 | public.pofiletranslator = SELECT, INSERT, UPDATE, DELETE |
12 | |
13 | === modified file 'lib/lp/registry/model/distroseries.py' |
14 | --- lib/lp/registry/model/distroseries.py 2012-12-06 13:33:46 +0000 |
15 | +++ lib/lp/registry/model/distroseries.py 2012-12-14 00:18:23 +0000 |
16 | @@ -1342,9 +1342,8 @@ |
17 | |
18 | return PackageUpload( |
19 | distroseries=self, status=PackageUploadStatus.NEW, |
20 | - pocket=pocket, archive=archive, |
21 | - changesfile=changes_file_alias, signing_key=signing_key, |
22 | - package_copy_job=package_copy_job) |
23 | + pocket=pocket, archive=archive, changesfile=changes_file_alias, |
24 | + signing_key=signing_key, package_copy_job=package_copy_job) |
25 | |
26 | def getPackageUploadQueue(self, state): |
27 | """See `IDistroSeries`.""" |
28 | |
29 | === modified file 'lib/lp/scripts/garbo.py' |
30 | --- lib/lp/scripts/garbo.py 2012-12-04 16:34:40 +0000 |
31 | +++ lib/lp/scripts/garbo.py 2012-12-14 00:18:23 +0000 |
32 | @@ -18,6 +18,7 @@ |
33 | ) |
34 | import logging |
35 | import multiprocessing |
36 | +from operator import itemgetter |
37 | import os |
38 | import threading |
39 | import time |
40 | @@ -122,6 +123,7 @@ |
41 | from lp.services.verification.model.logintoken import LoginToken |
42 | from lp.soyuz.model.archive import Archive |
43 | from lp.soyuz.model.publishing import SourcePackagePublishingHistory |
44 | +from lp.soyuz.model.queue import PackageUpload |
45 | from lp.soyuz.model.reporting import LatestPersonSourcePackageReleaseCache |
46 | from lp.soyuz.model.sourcepackagerelease import SourcePackageRelease |
47 | from lp.translations.interfaces.potemplate import IPOTemplateSet |
48 | @@ -1337,6 +1339,131 @@ |
49 | transaction.commit() |
50 | |
51 | |
52 | +class PopulatePackageUploadSearchables(TunableLoop): |
53 | + """Populates PackageUpload.searchable_names and |
54 | + PackageUpload.searchable_versions.""" |
55 | + |
56 | + maximum_chunk_size = 5000 |
57 | + |
58 | + def __init__(self, log, abort_time=None): |
59 | + super(PopulatePackageUploadSearchables, self).__init__(log, abort_time) |
60 | + self.start_at = 1 |
61 | + self.store = IMasterStore(PackageUpload) |
62 | + |
63 | + def findPackageUploadIDs(self): |
64 | + return self.store.find( |
65 | + (PackageUpload.id,), |
66 | + Or(PackageUpload.searchable_names == None, |
67 | + PackageUpload.searchable_versions == None), |
68 | + PackageUpload.id >= self.start_at).order_by(PackageUpload.id) |
69 | + |
70 | + def isDone(self): |
71 | + return self.findPackageUploadIDs().is_empty() |
72 | + |
73 | + def __call__(self, chunk_size): |
74 | + packageupload_ids = map( |
75 | + itemgetter(0), list(self.findPackageUploadIDs()[:chunk_size])) |
76 | + # The following SQL links from PU[SBC] to fetch all of the relevant |
77 | + # source names, binary names, libraryfile filenames and their versions. |
78 | + results = self.store.find( |
79 | + (PackageUpload.id, SQL(""" |
80 | + (SELECT COALESCE( |
81 | + string_agg(DISTINCT name, ' ' ORDER BY name), '') FROM ( |
82 | + (SELECT spn.name |
83 | + FROM |
84 | + packageuploadbuild |
85 | + JOIN binarypackagebuild AS bpb ON |
86 | + bpb.id = packageuploadbuild.build |
87 | + JOIN sourcepackagerelease AS spr ON |
88 | + spr.id = bpb.source_package_release |
89 | + JOIN sourcepackagename AS spn ON |
90 | + spn.id = spr.sourcepackagename |
91 | + WHERE packageuploadbuild.packageupload = packageupload.id |
92 | + ) |
93 | + UNION |
94 | + (SELECT bpn.name |
95 | + FROM |
96 | + packageuploadbuild |
97 | + JOIN binarypackagerelease ON |
98 | + binarypackagerelease.build = packageuploadbuild.build |
99 | + JOIN binarypackagename AS bpn ON |
100 | + bpn.id = binarypackagerelease.binarypackagename |
101 | + WHERE packageuploadbuild.packageupload = packageupload.id |
102 | + ) |
103 | + UNION |
104 | + (SELECT sourcepackagename.name |
105 | + FROM |
106 | + packageuploadsource |
107 | + JOIN sourcepackagerelease AS spr ON |
108 | + spr.id = packageuploadsource.sourcepackagerelease |
109 | + JOIN sourcepackagename ON |
110 | + sourcepackagename.id = spr.sourcepackagename |
111 | + WHERE packageuploadsource.packageupload = packageupload.id |
112 | + ) |
113 | + UNION |
114 | + (SELECT lfa.filename |
115 | + FROM |
116 | + packageuploadcustom |
117 | + JOIN libraryfilealias AS lfa ON |
118 | + lfa.id = packageuploadcustom.libraryfilealias |
119 | + WHERE packageuploadcustom.packageupload = packageupload.id |
120 | + ) |
121 | + UNION |
122 | + (SELECT package_name FROM packagecopyjob |
123 | + WHERE packageupload.package_copy_job = packagecopyjob.id |
124 | + )) AS names (name)) |
125 | + """), SQL(""" |
126 | + (SELECT COALESCE(array_agg(DISTINCT version ORDER BY version)::text[], |
127 | + ARRAY[]::text[]) FROM ( |
128 | + ( |
129 | + SELECT spr.version |
130 | + FROM packageuploadsource |
131 | + JOIN sourcepackagerelease AS spr ON |
132 | + spr.id = packageuploadsource.sourcepackagerelease |
133 | + WHERE packageuploadsource.packageupload = packageupload.id |
134 | + ) |
135 | + UNION |
136 | + ( |
137 | + SELECT binarypackagerelease.version |
138 | + FROM packageuploadbuild |
139 | + JOIN binarypackagerelease ON |
140 | + binarypackagerelease.build = packageuploadbuild.build |
141 | + WHERE packageuploadbuild.packageupload = packageupload.id |
142 | + ) |
143 | + UNION |
144 | + ( |
145 | + SELECT (regexp_matches(json_data, |
146 | + '"package_version": "([^"]+)"')::debversion[])[1] |
147 | + FROM packagecopyjob |
148 | + WHERE packageupload.package_copy_job = packagecopyjob.id |
149 | + )) AS versions (version)) |
150 | + """)), PackageUpload.id.is_in(packageupload_ids)) |
151 | + # Construct our cache data and populate our Values expression. |
152 | + cache_data = ClassAlias(PackageUpload, "cache_data") |
153 | + updated_columns = dict( |
154 | + [(PackageUpload.searchable_names, cache_data.searchable_names), |
155 | + (PackageUpload.searchable_versions, |
156 | + cache_data.searchable_versions)]) |
157 | + values = [ |
158 | + [dbify_value(col, val)[0] |
159 | + for (col, val) in zip( |
160 | + (PackageUpload.id, PackageUpload.searchable_names, |
161 | + PackageUpload.searchable_versions), data)] |
162 | + for data in results] |
163 | + cols = [ |
164 | + ('id', 'integer'), ('searchable_names', 'text'), |
165 | + ('searchable_versions', 'text[]')] |
166 | + cache_data_expr = Values('cache_data', cols, values) |
167 | + # Using the PackageUpload table, and the pseudo-table Values, set |
168 | + # updated_columns for every row in this loop. |
169 | + self.store.execute( |
170 | + BulkUpdate( |
171 | + updated_columns, table=PackageUpload, values=cache_data_expr, |
172 | + where=PackageUpload.id == cache_data.id)) |
173 | + self.start_at = packageupload_ids[-1] + 1 |
174 | + transaction.commit() |
175 | + |
176 | + |
177 | class BaseDatabaseGarbageCollector(LaunchpadCronScript): |
178 | """Abstract base class to run a collection of TunableLoops.""" |
179 | script_name = None # Script name for locking and database user. Override. |
180 | @@ -1592,6 +1719,7 @@ |
181 | UnusedSessionPruner, |
182 | DuplicateSessionPruner, |
183 | BugHeatUpdater, |
184 | + PopulatePackageUploadSearchables, |
185 | ] |
186 | experimental_tunable_loops = [] |
187 | |
188 | |
189 | === modified file 'lib/lp/scripts/tests/test_garbo.py' |
190 | --- lib/lp/scripts/tests/test_garbo.py 2012-12-04 16:34:40 +0000 |
191 | +++ lib/lp/scripts/tests/test_garbo.py 2012-12-14 00:18:23 +0000 |
192 | @@ -1275,6 +1275,55 @@ |
193 | 'PopulateLatestPersonSourcePackageReleaseCache') |
194 | self.assertEqual(spph_2.id, job_data['last_spph_id']) |
195 | |
196 | + def test_PopulatePackageUploadSearchables(self): |
197 | + # PopulatePackageUploadSearchables sets searchable_names and |
198 | + # searchable_versions for existing uploads correctly. |
199 | + switch_dbuser('testadmin') |
200 | + distroseries = self.factory.makeDistroSeries() |
201 | + source = self.factory.makeSourcePackageUpload(distroseries) |
202 | + binary = self.factory.makeBuildPackageUpload(distroseries) |
203 | + build = self.factory.makeBinaryPackageBuild() |
204 | + self.factory.makeBinaryPackageRelease(build=build) |
205 | + binary.addBuild(build) |
206 | + custom = self.factory.makeCustomPackageUpload(distroseries) |
207 | + # They are all have searchable_{names,versions} set, so unset them. |
208 | + for kind in (source, binary, custom): |
209 | + removeSecurityProxy(kind).searchable_names = None |
210 | + removeSecurityProxy(kind).searchable_versions = None |
211 | + transaction.commit() |
212 | + self.runHourly() |
213 | + source_name = source.sources[0].sourcepackagerelease.name |
214 | + binary_names = ' '.join( |
215 | + [build.build.binarypackages[0].name for build in binary.builds] + [ |
216 | + build.build.source_package_release.name |
217 | + for build in binary.builds]) |
218 | + filename = custom.customfiles[0].libraryfilealias.filename |
219 | + self.assertEqual(source.searchable_names, source_name) |
220 | + self.assertEqual(binary.searchable_names, binary_names) |
221 | + self.assertEqual(custom.searchable_names, filename) |
222 | + source_version = [source.sources[0].sourcepackagerelease.version] |
223 | + binary_versions = [ |
224 | + build.build.binarypackages[0].version for build in binary.builds] |
225 | + self.assertContentEqual(source_version, source.searchable_versions) |
226 | + self.assertContentEqual(binary_versions, binary.searchable_versions) |
227 | + self.assertEqual([], custom.searchable_versions) |
228 | + |
229 | + def test_PopulatePackageUploadSearchables_deduplication(self): |
230 | + # When the SPN and the BPN are the same for a build, the |
231 | + # searchable_names field is set to just one name. |
232 | + switch_dbuser('testadmin') |
233 | + distroseries = self.factory.makeDistroSeries() |
234 | + spr = self.factory.makeSourcePackageRelease() |
235 | + bpn = self.factory.makeBinaryPackageName(name=spr.name) |
236 | + binary = self.factory.makeBuildPackageUpload( |
237 | + distroseries=distroseries, binarypackagename=bpn, |
238 | + source_package_release=spr) |
239 | + removeSecurityProxy(binary).searchable_names = None |
240 | + removeSecurityProxy(binary).searchable_versions = None |
241 | + transaction.commit() |
242 | + self.runHourly() |
243 | + self.assertEqual(spr.name, binary.searchable_names) |
244 | + |
245 | |
246 | class TestGarboTasks(TestCaseWithFactory): |
247 | layer = LaunchpadZopelessLayer |
248 | |
249 | === modified file 'lib/lp/soyuz/configure.zcml' |
250 | --- lib/lp/soyuz/configure.zcml 2012-11-15 01:41:14 +0000 |
251 | +++ lib/lp/soyuz/configure.zcml 2012-12-14 00:18:23 +0000 |
252 | @@ -190,7 +190,9 @@ |
253 | package_name |
254 | package_version |
255 | section_name |
256 | - components"/> |
257 | + components |
258 | + searchable_names |
259 | + searchable_versions"/> |
260 | <require |
261 | permission="launchpad.Edit" |
262 | attributes=" |
263 | |
264 | === modified file 'lib/lp/soyuz/interfaces/queue.py' |
265 | --- lib/lp/soyuz/interfaces/queue.py 2012-11-15 01:41:14 +0000 |
266 | +++ lib/lp/soyuz/interfaces/queue.py 2012-12-14 00:18:23 +0000 |
267 | @@ -1,8 +1,6 @@ |
268 | # Copyright 2009-2012 Canonical Ltd. This software is licensed under the |
269 | # GNU Affero General Public License version 3 (see the file LICENSE). |
270 | |
271 | -# pylint: disable-msg=E0211,E0213 |
272 | - |
273 | """Queue interfaces.""" |
274 | |
275 | __metaclass__ = type |
276 | @@ -212,6 +210,11 @@ |
277 | sourcepackagerelease = Attribute( |
278 | "The source package release for this item") |
279 | |
280 | + searchable_names = TextLine( |
281 | + title=_("Searchable names for this item"), readonly=True) |
282 | + searchable_versions = List( |
283 | + title=_("Searchable versions for this item"), readonly=True) |
284 | + |
285 | package_name = exported( |
286 | TextLine( |
287 | title=_("Name of the uploaded source package"), readonly=True), |
288 | |
289 | === modified file 'lib/lp/soyuz/model/queue.py' |
290 | --- lib/lp/soyuz/model/queue.py 2012-11-15 02:45:58 +0000 |
291 | +++ lib/lp/soyuz/model/queue.py 2012-12-14 00:18:23 +0000 |
292 | @@ -1,8 +1,6 @@ |
293 | # Copyright 2009-2012 Canonical Ltd. This software is licensed under the |
294 | # GNU Affero General Public License version 3 (see the file LICENSE). |
295 | |
296 | -# pylint: disable-msg=E0611,W0212 |
297 | - |
298 | __metaclass__ = type |
299 | __all__ = [ |
300 | 'PackageUploadQueue', |
301 | @@ -23,6 +21,7 @@ |
302 | ForeignKey, |
303 | SQLMultipleJoin, |
304 | SQLObjectNotFound, |
305 | + StringCol, |
306 | ) |
307 | from storm.expr import LeftJoin |
308 | from storm.locals import ( |
309 | @@ -30,8 +29,10 @@ |
310 | Desc, |
311 | Int, |
312 | Join, |
313 | + List, |
314 | Or, |
315 | Reference, |
316 | + Unicode, |
317 | ) |
318 | from storm.store import ( |
319 | EmptyResultSet, |
320 | @@ -216,30 +217,33 @@ |
321 | |
322 | _defaultOrder = ['id'] |
323 | |
324 | - status = EnumCol(dbName='status', unique=False, notNull=True, |
325 | - default=PackageUploadStatus.NEW, |
326 | - schema=PackageUploadStatus, |
327 | - storm_validator=validate_status) |
328 | + status = EnumCol( |
329 | + dbName='status', unique=False, notNull=True, |
330 | + default=PackageUploadStatus.NEW, schema=PackageUploadStatus, |
331 | + storm_validator=validate_status) |
332 | |
333 | date_created = UtcDateTimeCol(notNull=False, default=UTC_NOW) |
334 | |
335 | - distroseries = ForeignKey(dbName="distroseries", |
336 | - foreignKey='DistroSeries') |
337 | + distroseries = ForeignKey(dbName="distroseries", foreignKey='DistroSeries') |
338 | |
339 | - pocket = EnumCol(dbName='pocket', unique=False, notNull=True, |
340 | - schema=PackagePublishingPocket) |
341 | + pocket = EnumCol( |
342 | + dbName='pocket', unique=False, notNull=True, |
343 | + schema=PackagePublishingPocket) |
344 | |
345 | changesfile = ForeignKey( |
346 | dbName='changesfile', foreignKey="LibraryFileAlias", notNull=False) |
347 | |
348 | archive = ForeignKey(dbName="archive", foreignKey="Archive", notNull=True) |
349 | |
350 | - signing_key = ForeignKey(foreignKey='GPGKey', dbName='signing_key', |
351 | - notNull=False) |
352 | + signing_key = ForeignKey( |
353 | + foreignKey='GPGKey', dbName='signing_key', notNull=False) |
354 | |
355 | package_copy_job_id = Int(name='package_copy_job', allow_none=True) |
356 | package_copy_job = Reference(package_copy_job_id, 'PackageCopyJob.id') |
357 | |
358 | + searchable_names = StringCol(name='searchable_names', default='') |
359 | + searchable_versions = List(type=Unicode(), default_factory=list) |
360 | + |
361 | # XXX julian 2007-05-06: |
362 | # Sources should not be SQLMultipleJoin, there is only ever one |
363 | # of each at most. |
364 | @@ -252,6 +256,14 @@ |
365 | _builds = SQLMultipleJoin('PackageUploadBuild', |
366 | joinColumn='packageupload') |
367 | |
368 | + def __init__(self, *args, **kwargs): |
369 | + super(PackageUpload, self).__init__(*args, **kwargs) |
370 | + # searchable_{name,version}s are set for the other cases when |
371 | + # add{Source,Build,Custom} are called. |
372 | + if self.package_copy_job: |
373 | + self.addSearchableNames([self.package_copy_job.package_name]) |
374 | + self.addSearchableVersions([self.package_copy_job.package_version]) |
375 | + |
376 | @cachedproperty |
377 | def sources(self): |
378 | return list(self._sources) |
379 | @@ -353,15 +365,13 @@ |
380 | def setNew(self): |
381 | """See `IPackageUpload`.""" |
382 | if self.status == PackageUploadStatus.NEW: |
383 | - raise QueueInconsistentStateError( |
384 | - 'Queue item already new') |
385 | + raise QueueInconsistentStateError('Queue item already new') |
386 | self.status = PassthroughStatusValue(PackageUploadStatus.NEW) |
387 | |
388 | def setUnapproved(self): |
389 | """See `IPackageUpload`.""" |
390 | if self.status == PackageUploadStatus.UNAPPROVED: |
391 | - raise QueueInconsistentStateError( |
392 | - 'Queue item already unapproved') |
393 | + raise QueueInconsistentStateError('Queue item already unapproved') |
394 | self.status = PassthroughStatusValue(PackageUploadStatus.UNAPPROVED) |
395 | |
396 | def setAccepted(self): |
397 | @@ -374,8 +384,7 @@ |
398 | self.pocket.name, self.distroseries.status.name)) |
399 | |
400 | if self.status == PackageUploadStatus.ACCEPTED: |
401 | - raise QueueInconsistentStateError( |
402 | - 'Queue item already accepted') |
403 | + raise QueueInconsistentStateError('Queue item already accepted') |
404 | |
405 | for source in self.sources: |
406 | source.verifyBeforeAccept() |
407 | @@ -461,15 +470,13 @@ |
408 | def setDone(self): |
409 | """See `IPackageUpload`.""" |
410 | if self.status == PackageUploadStatus.DONE: |
411 | - raise QueueInconsistentStateError( |
412 | - 'Queue item already done') |
413 | + raise QueueInconsistentStateError('Queue item already done') |
414 | self.status = PassthroughStatusValue(PackageUploadStatus.DONE) |
415 | |
416 | def setRejected(self): |
417 | """See `IPackageUpload`.""" |
418 | if self.status == PackageUploadStatus.REJECTED: |
419 | - raise QueueInconsistentStateError( |
420 | - 'Queue item already rejected') |
421 | + raise QueueInconsistentStateError('Queue item already rejected') |
422 | self.status = PassthroughStatusValue(PackageUploadStatus.REJECTED) |
423 | |
424 | def _closeBugs(self, changesfile_path, logger=None): |
425 | @@ -709,20 +716,17 @@ |
426 | @cachedproperty |
427 | def contains_upgrader(self): |
428 | """See `IPackageUpload`.""" |
429 | - return (PackageUploadCustomFormat.DIST_UPGRADER |
430 | - in self._customFormats) |
431 | + return PackageUploadCustomFormat.DIST_UPGRADER in self._customFormats |
432 | |
433 | @cachedproperty |
434 | def contains_ddtp(self): |
435 | """See `IPackageUpload`.""" |
436 | - return (PackageUploadCustomFormat.DDTP_TARBALL |
437 | - in self._customFormats) |
438 | + return PackageUploadCustomFormat.DDTP_TARBALL in self._customFormats |
439 | |
440 | @cachedproperty |
441 | def contains_uefi(self): |
442 | """See `IPackageUpload`.""" |
443 | - return (PackageUploadCustomFormat.UEFI |
444 | - in self._customFormats) |
445 | + return PackageUploadCustomFormat.UEFI in self._customFormats |
446 | |
447 | @property |
448 | def package_name(self): |
449 | @@ -853,26 +857,43 @@ |
450 | |
451 | return publishing_records |
452 | |
453 | + def _appendSearchables(self, existing, new): |
454 | + return sorted(filter(None, set(existing) | set(new))) |
455 | + |
456 | + def addSearchableNames(self, names): |
457 | + self.searchable_names = ' '.join( |
458 | + self._appendSearchables(self.searchable_names.split(' '), names)) |
459 | + |
460 | + def addSearchableVersions(self, versions): |
461 | + self.searchable_versions = self._appendSearchables( |
462 | + self.searchable_versions, versions) |
463 | + |
464 | def addSource(self, spr): |
465 | """See `IPackageUpload`.""" |
466 | del get_property_cache(self).sources |
467 | + self.addSearchableNames([spr.name]) |
468 | + self.addSearchableVersions([spr.version]) |
469 | return PackageUploadSource( |
470 | - packageupload=self, |
471 | - sourcepackagerelease=spr.id) |
472 | + packageupload=self, sourcepackagerelease=spr.id) |
473 | |
474 | def addBuild(self, build): |
475 | """See `IPackageUpload`.""" |
476 | del get_property_cache(self).builds |
477 | - return PackageUploadBuild( |
478 | - packageupload=self, |
479 | - build=build.id) |
480 | + names = [build.source_package_release.name] |
481 | + versions = [] |
482 | + for bpr in build.binarypackages: |
483 | + names.append(bpr.name) |
484 | + versions.append(bpr.version) |
485 | + self.addSearchableNames(names) |
486 | + self.addSearchableVersions(versions) |
487 | + return PackageUploadBuild(packageupload=self, build=build.id) |
488 | |
489 | def addCustom(self, library_file, custom_type): |
490 | """See `IPackageUpload`.""" |
491 | del get_property_cache(self).customfiles |
492 | + self.addSearchableNames([library_file.filename]) |
493 | return PackageUploadCustom( |
494 | - packageupload=self, |
495 | - libraryfilealias=library_file.id, |
496 | + packageupload=self, libraryfilealias=library_file.id, |
497 | customformat=custom_type) |
498 | |
499 | def isPPA(self): |
500 | @@ -1361,15 +1382,14 @@ |
501 | _defaultOrder = ['id'] |
502 | |
503 | packageupload = ForeignKey( |
504 | - dbName='packageupload', |
505 | - foreignKey='PackageUpload') |
506 | - |
507 | - customformat = EnumCol(dbName='customformat', unique=False, |
508 | - notNull=True, schema=PackageUploadCustomFormat) |
509 | - |
510 | - libraryfilealias = ForeignKey(dbName='libraryfilealias', |
511 | - foreignKey="LibraryFileAlias", |
512 | - notNull=True) |
513 | + dbName='packageupload', foreignKey='PackageUpload') |
514 | + |
515 | + customformat = EnumCol( |
516 | + dbName='customformat', unique=False, notNull=True, |
517 | + schema=PackageUploadCustomFormat) |
518 | + |
519 | + libraryfilealias = ForeignKey( |
520 | + dbName='libraryfilealias', foreignKey="LibraryFileAlias", notNull=True) |
521 | |
522 | def publish(self, logger=None): |
523 | """See `IPackageUploadCustom`.""" |
524 | @@ -1425,8 +1445,7 @@ |
525 | """See `IPackageUploadCustom`.""" |
526 | # XXX cprov 2005-03-03: We need to use the Zope Component Lookup |
527 | # to instantiate the object in question and avoid circular imports |
528 | - from lp.archivepublisher.dist_upgrader import ( |
529 | - process_dist_upgrader) |
530 | + from lp.archivepublisher.dist_upgrader import process_dist_upgrader |
531 | |
532 | self._publishCustom(process_dist_upgrader, logger=logger) |
533 | |
534 | @@ -1434,8 +1453,7 @@ |
535 | """See `IPackageUploadCustom`.""" |
536 | # XXX cprov 2005-03-03: We need to use the Zope Component Lookup |
537 | # to instantiate the object in question and avoid circular imports |
538 | - from lp.archivepublisher.ddtp_tarball import ( |
539 | - process_ddtp_tarball) |
540 | + from lp.archivepublisher.ddtp_tarball import process_ddtp_tarball |
541 | |
542 | self._publishCustom(process_ddtp_tarball, logger=logger) |
543 | |
544 | |
545 | === modified file 'lib/lp/soyuz/tests/test_packageupload.py' |
546 | --- lib/lp/soyuz/tests/test_packageupload.py 2012-11-15 01:41:14 +0000 |
547 | +++ lib/lp/soyuz/tests/test_packageupload.py 2012-12-14 00:18:23 +0000 |
548 | @@ -116,6 +116,8 @@ |
549 | upload.addSource(spr) |
550 | self.assertEqual(spr.sourcepackagename.name, upload.package_name) |
551 | self.assertEqual(spr.version, upload.package_version) |
552 | + self.assertEqual(spr.name, upload.searchable_names) |
553 | + self.assertContentEqual([spr.version], upload.searchable_versions) |
554 | |
555 | def test_publish_sets_packageupload(self): |
556 | # Publishing a PackageUploadSource will pass itself to the source |
557 | @@ -459,9 +461,45 @@ |
558 | upload, job = self.makeUploadWithPackageCopyJob() |
559 | self.assertEqual(job.package_name, upload.package_name) |
560 | self.assertEqual(job.package_version, upload.package_version) |
561 | + self.assertEqual(job.package_name, upload.searchable_names) |
562 | + self.assertContentEqual( |
563 | + [job.package_version], upload.searchable_versions) |
564 | + |
565 | + def test_searchables_for_builds(self): |
566 | + distroseries = self.factory.makeDistroSeries() |
567 | + upload = self.factory.makeBuildPackageUpload(distroseries) |
568 | + build = self.factory.makeBinaryPackageBuild() |
569 | + self.factory.makeBinaryPackageRelease(build=build) |
570 | + upload.addBuild(build) |
571 | + name_array = [ |
572 | + build.build.binarypackages[0].name for build in upload.builds] |
573 | + name_array.extend( |
574 | + [b.build.source_package_release.name for b in upload.builds]) |
575 | + names = ' '.join(sorted(name_array)) |
576 | + self.assertEqual(names, upload.searchable_names) |
577 | + self.assertContentEqual( |
578 | + [build.build.binarypackages[0].version for build in upload.builds], |
579 | + upload.searchable_versions) |
580 | + |
581 | + def test_searchables_for_builds_duplication(self): |
582 | + distroseries = self.factory.makeDistroSeries() |
583 | + spr = self.factory.makeSourcePackageRelease() |
584 | + bpn = self.factory.makeBinaryPackageName(name=spr.name) |
585 | + binary = self.factory.makeBuildPackageUpload( |
586 | + distroseries=distroseries, binarypackagename=bpn, |
587 | + source_package_release=spr) |
588 | + self.assertEqual(spr.name, binary.searchable_names) |
589 | + |
590 | + def test_searchables_for_custom(self): |
591 | + distroseries = self.factory.makeDistroSeries() |
592 | + upload = self.factory.makeCustomPackageUpload(distroseries) |
593 | + self.assertEqual( |
594 | + upload.searchable_names, |
595 | + upload.customfiles[0].libraryfilealias.filename) |
596 | + self.assertEqual([], upload.searchable_versions) |
597 | |
598 | def test_displayarchs_for_copy_job_is_sync(self): |
599 | - # For copy jobs, displayarchs is "source." |
600 | + # For copy jobs, displayarchs is "sync." |
601 | upload, job = self.makeUploadWithPackageCopyJob() |
602 | self.assertEqual('sync', upload.displayarchs) |
603 | |
604 | |
605 | === modified file 'lib/lp/testing/factory.py' |
606 | --- lib/lp/testing/factory.py 2012-12-06 02:03:50 +0000 |
607 | +++ lib/lp/testing/factory.py 2012-12-14 00:18:23 +0000 |
608 | @@ -3437,10 +3437,10 @@ |
609 | pocket=pocket) |
610 | build = self.makeBinaryPackageBuild( |
611 | source_package_release=source_package_release, pocket=pocket) |
612 | - upload.addBuild(build) |
613 | self.makeBinaryPackageRelease( |
614 | binarypackagename=binarypackagename, build=build, |
615 | component=component) |
616 | + upload.addBuild(build) |
617 | return upload |
618 | |
619 | def makeCustomPackageUpload(self, distroseries=None, archive=None, |
20	+        searchable_names = None
21	+        searchable_versions = None
22	+        if package_copy_job is not None:
23	+            searchable_names = package_copy_job.package_name
24	+            searchable_versions = [package_copy_job.package_version]
Could you make it clear that the non-PCJ case is handled when the PU[SBC] are created? Wouldn't it also make more sense to set searchable_names and searchable_versions to '' and [] respectively in the constructor, and then use the normal add mechanism with the PCJ data?
74	+        return self.store.find(
75	+            (PackageUpload.id,),
76	+            PackageUpload.searchable_names == None,
77	+            PackageUpload.searchable_versions == None,
78	+            PackageUpload.id >= self.start_at).order_by(PackageUpload.id)
I generally prefer to the start the conditions on the line after the findspec, so it's easier to scan.
443	+    def setSearchableNames(self, names):
444	+        self.searchable_names = ' '.join(
445	+            self._appendSearchables(self.searchable_names, names))
446	+
447	+    def setSearchableVersions(self, versions):
448	+        self.searchable_versions = self._appendSearchables(
449	+            self.searchable_versions, versions)
These look like they actually add, not set. It would also be nice to sort the sets (probably in _appendSearchables), so that this is all deterministic.