Merge lp:~cjwatson/launchpad/getpublishedbinaries-sorting into lp:launchpad
- getpublishedbinaries-sorting
- Merge into devel
Status: | Merged |
---|---|
Merged at revision: | 17425 |
Proposed branch: | lp:~cjwatson/launchpad/getpublishedbinaries-sorting |
Merge into: | lp:launchpad |
Diff against target: |
752 lines (+198/-148) 12 files modified
lib/lp/archivepublisher/tests/test_ftparchive.py (+2/-2) lib/lp/archivepublisher/tests/test_publisher.py (+1/-1) lib/lp/registry/doc/distroseries.txt (+2/-2) lib/lp/soyuz/doc/archive.txt (+23/-23) lib/lp/soyuz/interfaces/archive.py (+23/-4) lib/lp/soyuz/model/archive.py (+110/-102) lib/lp/soyuz/scripts/tests/test_initialize_distroseries.py (+3/-3) lib/lp/soyuz/stories/ppa/xx-ppa-packages.txt (+1/-1) lib/lp/soyuz/stories/webservice/xx-binary-package-publishing.txt (+1/-1) lib/lp/soyuz/stories/webservice/xx-source-package-publishing.txt (+1/-1) lib/lp/soyuz/tests/test_archive.py (+28/-6) lib/lp/soyuz/tests/test_packagecopyjob.py (+3/-2) |
To merge this branch: | bzr merge lp:~cjwatson/launchpad/getpublishedbinaries-sorting |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
William Grant | code | Approve | |
Review via email: mp+255822@code.launchpad.net |
Commit message
Improve performance of Archive.getAllPublishedBinaries.
Description of the change
Improve performance of Archive.getAllPublishedBinaries.
It would be nice to insert StormRangeFactory in here too, providing a stable way to slice the returned collection. As explained in the linked bug, though, that is currently difficult, and this is enough to provide a safe API provided that the status filter is omitted (so that entries don't disappear from the collection during iteration).
William Grant (wgrant) : | # |
Preview Diff
1 | === modified file 'lib/lp/archivepublisher/tests/test_ftparchive.py' |
2 | --- lib/lp/archivepublisher/tests/test_ftparchive.py 2014-11-24 16:03:20 +0000 |
3 | +++ lib/lp/archivepublisher/tests/test_ftparchive.py 2015-04-13 17:14:35 +0000 |
4 | @@ -1,4 +1,4 @@ |
5 | -# Copyright 2009-2013 Canonical Ltd. This software is licensed under the |
6 | +# Copyright 2009-2015 Canonical Ltd. This software is licensed under the |
7 | # GNU Affero General Public License version 3 (see the file LICENSE). |
8 | |
9 | """Tests for ftparchive.py""" |
10 | @@ -308,7 +308,7 @@ |
11 | self._publisher = SamplePublisher(self._archive) |
12 | fa = self._setUpFTPArchiveHandler() |
13 | pubs = self._archive.getAllPublishedBinaries( |
14 | - name="pmount", status=PackagePublishingStatus.PUBLISHED, |
15 | + name=u"pmount", status=PackagePublishingStatus.PUBLISHED, |
16 | distroarchseries=self._distribution.getSeries("hoary")["hppa"]) |
17 | for pub in pubs: |
18 | pub.changeOverride(new_phased_update_percentage=30).setPublished() |
19 | |
20 | === modified file 'lib/lp/archivepublisher/tests/test_publisher.py' |
21 | --- lib/lp/archivepublisher/tests/test_publisher.py 2015-03-18 17:51:16 +0000 |
22 | +++ lib/lp/archivepublisher/tests/test_publisher.py 2015-04-13 17:14:35 +0000 |
23 | @@ -407,7 +407,7 @@ |
24 | for pu_build in pu_i386.builds: |
25 | pu_build.publish() |
26 | |
27 | - publications = archive.getAllPublishedBinaries(name="bin-i386") |
28 | + publications = archive.getAllPublishedBinaries(name=u"bin-i386") |
29 | |
30 | self.assertEqual(1, publications.count()) |
31 | self.assertEqual( |
32 | |
33 | === modified file 'lib/lp/registry/doc/distroseries.txt' |
34 | --- lib/lp/registry/doc/distroseries.txt 2014-11-27 22:13:36 +0000 |
35 | +++ lib/lp/registry/doc/distroseries.txt 2015-04-13 17:14:35 +0000 |
36 | @@ -342,11 +342,11 @@ |
37 | >>> humpy.getPublishedSources('pmount').count() |
38 | 1 |
39 | >>> hoary.main_archive.getAllPublishedBinaries( |
40 | - ... distroarchseries=hoary['i386'], name='pmount', |
41 | + ... distroarchseries=hoary['i386'], name=u'pmount', |
42 | ... status=PackagePublishingStatus.PUBLISHED).count() |
43 | 1 |
44 | >>> humpy.main_archive.getAllPublishedBinaries( |
45 | - ... distroarchseries=humpy['i386'], name='pmount').count() |
46 | + ... distroarchseries=humpy['i386'], name=u'pmount').count() |
47 | 1 |
48 | |
49 | Check if the attributes of an DRSPR instance for the just initialized |
50 | |
51 | === modified file 'lib/lp/soyuz/doc/archive.txt' |
52 | --- lib/lp/soyuz/doc/archive.txt 2014-07-15 02:12:04 +0000 |
53 | +++ lib/lp/soyuz/doc/archive.txt 2015-04-13 17:14:35 +0000 |
54 | @@ -295,46 +295,46 @@ |
55 | |
56 | 'name' filter supporting partial string matching and 'not-found': |
57 | |
58 | - >>> cprov_archive.getPublishedOnDiskBinaries(name='pmou').count() |
59 | + >>> cprov_archive.getPublishedOnDiskBinaries(name=u'pmou').count() |
60 | 1 |
61 | - >>> cprov_archive.getAllPublishedBinaries(name='pmou').count() |
62 | + >>> cprov_archive.getAllPublishedBinaries(name=u'pmou').count() |
63 | 2 |
64 | - >>> cprov_archive.getPublishedOnDiskBinaries(name='foo').count() |
65 | + >>> cprov_archive.getPublishedOnDiskBinaries(name=u'foo').count() |
66 | 0 |
67 | - >>> cprov_archive.getAllPublishedBinaries(name='foo').count() |
68 | + >>> cprov_archive.getAllPublishedBinaries(name=u'foo').count() |
69 | 0 |
70 | |
71 | Combining 'name' filter and 'exact_match' flag: |
72 | |
73 | >>> cprov_archive.getAllPublishedBinaries( |
74 | - ... name='pmou', exact_match=True).count() |
75 | + ... name=u'pmou', exact_match=True).count() |
76 | 0 |
77 | >>> cprov_archive.getAllPublishedBinaries( |
78 | - ... name='pmount', exact_match=True).count() |
79 | + ... name=u'pmount', exact_match=True).count() |
80 | 2 |
81 | >>> cprov_archive.getPublishedOnDiskBinaries( |
82 | - ... name='pmou', exact_match=True).count() |
83 | + ... name=u'pmou', exact_match=True).count() |
84 | 0 |
85 | >>> cprov_archive.getPublishedOnDiskBinaries( |
86 | - ... name='pmount', exact_match=True).count() |
87 | + ... name=u'pmount', exact_match=True).count() |
88 | 1 |
89 | |
90 | It's possible to associate 'name' and 'version' filters: |
91 | |
92 | >>> cprov_archive.getPublishedOnDiskBinaries( |
93 | - ... name='moz', version='1.0').count() |
94 | + ... name=u'moz', version='1.0').count() |
95 | 2 |
96 | |
97 | >>> cprov_archive.getAllPublishedBinaries( |
98 | - ... name='moz', version='1.0').count() |
99 | + ... name=u'moz', version='1.0').count() |
100 | 2 |
101 | |
102 | >>> cprov_archive.getPublishedOnDiskBinaries( |
103 | - ... name='moz', version='666').count() |
104 | + ... name=u'moz', version='666').count() |
105 | 0 |
106 | |
107 | >>> cprov_archive.getAllPublishedBinaries( |
108 | - ... name='moz', version='666').count() |
109 | + ... name=u'moz', version='666').count() |
110 | 0 |
111 | |
112 | Both methods do not support passing the 'version' filter if the 'name' |
113 | @@ -429,44 +429,44 @@ |
114 | Associating 'name' and 'status' filters: |
115 | |
116 | >>> status_lookup = cprov_archive.getPublishedOnDiskBinaries( |
117 | - ... name='pmount', status=active_status) |
118 | + ... name=u'pmount', status=active_status) |
119 | >>> status_lookup.count() |
120 | 1 |
121 | |
122 | >>> status_lookup = cprov_archive.getAllPublishedBinaries( |
123 | - ... name='pmount', status=active_status) |
124 | + ... name=u'pmount', status=active_status) |
125 | >>> status_lookup.count() |
126 | 2 |
127 | |
128 | >>> status_lookup = cprov_archive.getPublishedOnDiskBinaries( |
129 | - ... name='foo', status=active_status) |
130 | + ... name=u'foo', status=active_status) |
131 | >>> status_lookup.count() |
132 | 0 |
133 | |
134 | >>> status_lookup = cprov_archive.getAllPublishedBinaries( |
135 | - ... name='foo', status=active_status) |
136 | + ... name=u'foo', status=active_status) |
137 | >>> status_lookup.count() |
138 | 0 |
139 | |
140 | Associating 'name', 'version' and 'status' filters: |
141 | |
142 | >>> status_lookup = cprov_archive.getPublishedOnDiskBinaries( |
143 | - ... name='pmount', version='0.1-1', status=active_status) |
144 | + ... name=u'pmount', version='0.1-1', status=active_status) |
145 | >>> status_lookup.count() |
146 | 1 |
147 | |
148 | >>> status_lookup = cprov_archive.getAllPublishedBinaries( |
149 | - ... name='pmount', version='0.1-1', status=active_status) |
150 | + ... name=u'pmount', version='0.1-1', status=active_status) |
151 | >>> status_lookup.count() |
152 | 2 |
153 | |
154 | >>> status_lookup = cprov_archive.getPublishedOnDiskBinaries( |
155 | - ... name='pmount', version='666', status=active_status) |
156 | + ... name=u'pmount', version='666', status=active_status) |
157 | >>> status_lookup.count() |
158 | 0 |
159 | |
160 | >>> status_lookup = cprov_archive.getAllPublishedBinaries( |
161 | - ... name='pmount', version='666', status=active_status) |
162 | + ... name=u'pmount', version='666', status=active_status) |
163 | >>> status_lookup.count() |
164 | 0 |
165 | |
166 | @@ -474,13 +474,13 @@ |
167 | and 'exact_match' flag: |
168 | |
169 | >>> status_lookup = cprov_archive.getAllPublishedBinaries( |
170 | - ... name='pmount', version='0.1-1', distroarchseries=warty_i386, |
171 | + ... name=u'pmount', version='0.1-1', distroarchseries=warty_i386, |
172 | ... status=active_status, exact_match=True) |
173 | >>> status_lookup.count() |
174 | 1 |
175 | |
176 | >>> status_lookup = cprov_archive.getAllPublishedBinaries( |
177 | - ... name='pmount', version='0.1-1', |
178 | + ... name=u'pmount', version='0.1-1', |
179 | ... distroarchseries=[warty_i386, warty_hppa], |
180 | ... status=active_status, exact_match=True) |
181 | >>> status_lookup.count() |
182 | @@ -592,7 +592,7 @@ |
183 | >>> cprov_archive.number_of_binaries |
184 | 3 |
185 | >>> cprov_archive.getAllPublishedBinaries( |
186 | - ... name='mozilla-firefox')[0].supersede() |
187 | + ... name=u'mozilla-firefox')[0].supersede() |
188 | |
189 | >>> cprov_archive.number_of_binaries |
190 | 2 |
191 | |
192 | === modified file 'lib/lp/soyuz/interfaces/archive.py' |
193 | --- lib/lp/soyuz/interfaces/archive.py 2014-08-14 10:08:28 +0000 |
194 | +++ lib/lp/soyuz/interfaces/archive.py 2015-04-13 17:14:35 +0000 |
195 | @@ -1,4 +1,4 @@ |
196 | -# Copyright 2009-2013 Canonical Ltd. This software is licensed under the |
197 | +# Copyright 2009-2015 Canonical Ltd. This software is licensed under the |
198 | # GNU Affero General Public License version 3 (see the file LICENSE). |
199 | |
200 | """Archive interfaces.""" |
201 | @@ -449,6 +449,12 @@ |
202 | "than or equal to this date."), |
203 | required=False), |
204 | component_name=TextLine(title=_("Component name"), required=False), |
205 | + order_by_date=Bool( |
206 | + title=_("Order by creation date"), |
207 | + description=_("Return newest results first. This is recommended " |
208 | + "for applications that need to catch up with " |
209 | + "publications since their last run."), |
210 | + required=False), |
211 | ) |
212 | # Really returns ISourcePackagePublishingHistory, see below for |
213 | # patch to avoid circular import. |
214 | @@ -457,7 +463,7 @@ |
215 | def api_getPublishedSources(name=None, version=None, status=None, |
216 | distroseries=None, pocket=None, |
217 | exact_match=False, created_since_date=None, |
218 | - component_name=None): |
219 | + component_name=None, order_by_date=False): |
220 | """All `ISourcePackagePublishingHistory` target to this archive.""" |
221 | # It loads additional related objects only needed in the API call |
222 | # context (i.e. security checks and entries marshalling). |
223 | @@ -465,7 +471,8 @@ |
224 | def getPublishedSources(name=None, version=None, status=None, |
225 | distroseries=None, pocket=None, |
226 | exact_match=False, created_since_date=None, |
227 | - eager_load=False, component_name=None): |
228 | + eager_load=False, component_name=None, |
229 | + order_by_date=False): |
230 | """All `ISourcePackagePublishingHistory` target to this archive. |
231 | |
232 | :param name: source name filter (exact match or SQL LIKE controlled |
233 | @@ -482,6 +489,9 @@ |
234 | is greater than or equal to this date. |
235 | :param component_name: component filter. Only return source packages |
236 | that are in this component. |
237 | + :param order_by_date: Order publications by descending creation date |
238 | + and then by descending ID. This is suitable for applications |
239 | + that need to catch up with publications since their last run. |
240 | |
241 | :return: SelectResults containing `ISourcePackagePublishingHistory`, |
242 | ordered by name. If there are multiple results for the same |
243 | @@ -1110,6 +1120,12 @@ |
244 | description=_("Return ordered results by default, but specifying " |
245 | "False will return results more quickly."), |
246 | required=False, readonly=True), |
247 | + order_by_date=Bool( |
248 | + title=_("Order by creation date"), |
249 | + description=_("Return newest results first. This is recommended " |
250 | + "for applications that need to catch up with " |
251 | + "publications since their last run."), |
252 | + required=False), |
253 | ) |
254 | # Really returns ISourcePackagePublishingHistory, see below for |
255 | # patch to avoid circular import. |
256 | @@ -1119,7 +1135,7 @@ |
257 | def getAllPublishedBinaries(name=None, version=None, status=None, |
258 | distroarchseries=None, pocket=None, |
259 | exact_match=False, created_since_date=None, |
260 | - ordered=True): |
261 | + ordered=True, order_by_date=False): |
262 | """All `IBinaryPackagePublishingHistory` target to this archive. |
263 | |
264 | :param name: binary name filter (exact match or SQL LIKE controlled |
265 | @@ -1137,6 +1153,9 @@ |
266 | False then the results will be unordered. This will make the |
267 | operation much quicker to return results if you don't care about |
268 | ordering. |
269 | + :param order_by_date: Order publications by descending creation date |
270 | + and then by descending ID. This is suitable for applications |
271 | + that need to catch up with publications since their last run. |
272 | |
273 | :return: A collection containing `BinaryPackagePublishingHistory`. |
274 | """ |
275 | |
276 | === modified file 'lib/lp/soyuz/model/archive.py' |
277 | --- lib/lp/soyuz/model/archive.py 2015-03-12 13:59:27 +0000 |
278 | +++ lib/lp/soyuz/model/archive.py 2015-04-13 17:14:35 +0000 |
279 | @@ -97,7 +97,6 @@ |
280 | ) |
281 | from lp.services.database.sqlbase import ( |
282 | cursor, |
283 | - quote_like, |
284 | SQLBase, |
285 | sqlvalues, |
286 | ) |
287 | @@ -521,13 +520,16 @@ |
288 | def api_getPublishedSources(self, name=None, version=None, status=None, |
289 | distroseries=None, pocket=None, |
290 | exact_match=False, created_since_date=None, |
291 | - component_name=None): |
292 | + order_by_date=False, component_name=None): |
293 | """See `IArchive`.""" |
294 | # 'eager_load' and 'include_removed' arguments are always True |
295 | # for API calls. |
296 | published_sources = self.getPublishedSources( |
297 | - name, version, status, distroseries, pocket, exact_match, |
298 | - created_since_date, True, component_name, True) |
299 | + name=name, version=version, status=status, |
300 | + distroseries=distroseries, pocket=pocket, exact_match=exact_match, |
301 | + created_since_date=created_since_date, eager_load=True, |
302 | + component_name=component_name, order_by_date=order_by_date, |
303 | + include_removed=True) |
304 | |
305 | def load_api_extra_objects(rows): |
306 | """Load extra related-objects needed by API calls.""" |
307 | @@ -565,20 +567,23 @@ |
308 | distroseries=None, pocket=None, |
309 | exact_match=False, created_since_date=None, |
310 | eager_load=False, component_name=None, |
311 | - include_removed=True): |
312 | + order_by_date=False, include_removed=True): |
313 | """See `IArchive`.""" |
314 | - # clauses contains literal sql expressions for things that don't work |
315 | - # easily in storm : this method was migrated from sqlobject but some |
316 | - # callers are problematic. (Migrate them and test to see). |
317 | - clauses = [ |
318 | - SourcePackagePublishingHistory.archiveID == self.id, |
319 | - SourcePackagePublishingHistory.sourcepackagereleaseID == |
320 | - SourcePackageRelease.id, |
321 | - SourcePackagePublishingHistory.sourcepackagenameID == |
322 | - SourcePackageName.id] |
323 | - orderBy = [ |
324 | - SourcePackageName.name, |
325 | - Desc(SourcePackagePublishingHistory.id)] |
326 | + clauses = [SourcePackagePublishingHistory.archiveID == self.id] |
327 | + |
328 | + if order_by_date: |
329 | + order_by = [ |
330 | + Desc(SourcePackagePublishingHistory.datecreated), |
331 | + Desc(SourcePackagePublishingHistory.id)] |
332 | + else: |
333 | + order_by = [ |
334 | + SourcePackageName.name, |
335 | + Desc(SourcePackagePublishingHistory.id)] |
336 | + |
337 | + if not order_by_date or name is not None: |
338 | + clauses.append( |
339 | + SourcePackagePublishingHistory.sourcepackagenameID == |
340 | + SourcePackageName.id) |
341 | |
342 | if name is not None: |
343 | if type(name) in (str, unicode): |
344 | @@ -590,14 +595,19 @@ |
345 | elif len(name) != 0: |
346 | clauses.append(SourcePackageName.name.is_in(name)) |
347 | |
348 | + if not order_by_date or version is not None: |
349 | + clauses.append( |
350 | + SourcePackagePublishingHistory.sourcepackagereleaseID == |
351 | + SourcePackageRelease.id) |
352 | + |
353 | if version is not None: |
354 | if name is None: |
355 | raise VersionRequiresName( |
356 | "The 'version' parameter can be used only together with" |
357 | " the 'name' parameter.") |
358 | clauses.append(SourcePackageRelease.version == version) |
359 | - else: |
360 | - orderBy.insert(1, Desc(SourcePackageRelease.version)) |
361 | + elif not order_by_date: |
362 | + order_by.insert(1, Desc(SourcePackageRelease.version)) |
363 | |
364 | if component_name is not None: |
365 | clauses.extend( |
366 | @@ -635,7 +645,7 @@ |
367 | |
368 | store = Store.of(self) |
369 | resultset = store.find( |
370 | - SourcePackagePublishingHistory, *clauses).order_by(*orderBy) |
371 | + SourcePackagePublishingHistory, *clauses).order_by(*order_by) |
372 | if not eager_load: |
373 | return resultset |
374 | |
375 | @@ -747,38 +757,46 @@ |
376 | def _getBinaryPublishingBaseClauses( |
377 | self, name=None, version=None, status=None, distroarchseries=None, |
378 | pocket=None, exact_match=False, created_since_date=None, |
379 | - ordered=True, include_removed=True): |
380 | - """Base clauses and clauseTables for binary publishing queries. |
381 | + ordered=True, order_by_date=False, include_removed=True, |
382 | + need_bpr=False): |
383 | + """Base clauses for binary publishing queries. |
384 | |
385 | - Returns a list of 'clauses' (to be joined in the callsite) and |
386 | - a list of clauseTables required according to the given arguments. |
387 | + Returns a list of 'clauses' (to be joined in the callsite). |
388 | """ |
389 | - clauses = [""" |
390 | - BinaryPackagePublishingHistory.archive = %s AND |
391 | - BinaryPackagePublishingHistory.binarypackagerelease = |
392 | - BinaryPackageRelease.id AND |
393 | - BinaryPackagePublishingHistory.binarypackagename = |
394 | - BinaryPackageName.id |
395 | - """ % sqlvalues(self)] |
396 | - clauseTables = ['BinaryPackageRelease', 'BinaryPackageName'] |
397 | - if ordered: |
398 | - orderBy = ['BinaryPackageName.name', |
399 | - '-BinaryPackagePublishingHistory.id'] |
400 | + clauses = [BinaryPackagePublishingHistory.archiveID == self.id] |
401 | + |
402 | + if order_by_date: |
403 | + ordered = False |
404 | + |
405 | + if order_by_date: |
406 | + order_by = [ |
407 | + Desc(BinaryPackagePublishingHistory.datecreated), |
408 | + Desc(BinaryPackagePublishingHistory.id)] |
409 | + elif ordered: |
410 | + order_by = [ |
411 | + BinaryPackageName.name, |
412 | + Desc(BinaryPackagePublishingHistory.id)] |
413 | else: |
414 | # Strictly speaking, this is ordering, but it's an indexed |
415 | # ordering so it will be quick. It's needed so that we can |
416 | # batch results on the webservice. |
417 | - orderBy = ['-BinaryPackagePublishingHistory.id'] |
418 | + order_by = [Desc(BinaryPackagePublishingHistory.id)] |
419 | + |
420 | + if ordered or name is not None: |
421 | + clauses.append( |
422 | + BinaryPackagePublishingHistory.binarypackagenameID == |
423 | + BinaryPackageName.id) |
424 | |
425 | if name is not None: |
426 | if exact_match: |
427 | - clauses.append(""" |
428 | - BinaryPackageName.name=%s |
429 | - """ % sqlvalues(name)) |
430 | + clauses.append(BinaryPackageName.name == name) |
431 | else: |
432 | - clauses.append(""" |
433 | - BinaryPackageName.name LIKE '%%' || %s || '%%' |
434 | - """ % quote_like(name)) |
435 | + clauses.append(BinaryPackageName.name.contains_string(name)) |
436 | + |
437 | + if need_bpr or ordered or version is not None: |
438 | + clauses.append( |
439 | + BinaryPackagePublishingHistory.binarypackagereleaseID == |
440 | + BinaryPackageRelease.id) |
441 | |
442 | if version is not None: |
443 | if name is None: |
444 | @@ -786,115 +804,105 @@ |
445 | "The 'version' parameter can be used only together with" |
446 | " the 'name' parameter.") |
447 | |
448 | - clauses.append(""" |
449 | - BinaryPackageRelease.version = %s |
450 | - """ % sqlvalues(version)) |
451 | + clauses.append(BinaryPackageRelease.version == version) |
452 | elif ordered: |
453 | - orderBy.insert(1, Desc(BinaryPackageRelease.version)) |
454 | + order_by.insert(1, Desc(BinaryPackageRelease.version)) |
455 | |
456 | if status is not None: |
457 | try: |
458 | status = tuple(status) |
459 | except TypeError: |
460 | status = (status, ) |
461 | - clauses.append(""" |
462 | - BinaryPackagePublishingHistory.status IN %s |
463 | - """ % sqlvalues(status)) |
464 | + clauses.append(BinaryPackagePublishingHistory.status.is_in(status)) |
465 | |
466 | if distroarchseries is not None: |
467 | try: |
468 | distroarchseries = tuple(distroarchseries) |
469 | except TypeError: |
470 | distroarchseries = (distroarchseries, ) |
471 | - # XXX cprov 20071016: there is no sqlrepr for DistroArchSeries |
472 | - # uhmm, how so ? |
473 | - das_ids = "(%s)" % ", ".join(str(d.id) for d in distroarchseries) |
474 | - clauses.append(""" |
475 | - BinaryPackagePublishingHistory.distroarchseries IN %s |
476 | - """ % das_ids) |
477 | + clauses.append( |
478 | + BinaryPackagePublishingHistory.distroarchseriesID.is_in( |
479 | + [d.id for d in distroarchseries])) |
480 | |
481 | if pocket is not None: |
482 | - clauses.append(""" |
483 | - BinaryPackagePublishingHistory.pocket = %s |
484 | - """ % sqlvalues(pocket)) |
485 | + clauses.append(BinaryPackagePublishingHistory.pocket == pocket) |
486 | |
487 | if created_since_date is not None: |
488 | clauses.append( |
489 | - "BinaryPackagePublishingHistory.datecreated >= %s" |
490 | - % sqlvalues(created_since_date)) |
491 | + BinaryPackagePublishingHistory.datecreated >= |
492 | + created_since_date) |
493 | |
494 | if not include_removed: |
495 | - clauses.append( |
496 | - "BinaryPackagePublishingHistory.dateremoved IS NULL") |
497 | + clauses.append(BinaryPackagePublishingHistory.dateremoved == None) |
498 | |
499 | - return clauses, clauseTables, orderBy |
500 | + return clauses, order_by |
501 | |
502 | def getAllPublishedBinaries(self, name=None, version=None, status=None, |
503 | distroarchseries=None, pocket=None, |
504 | exact_match=False, created_since_date=None, |
505 | - ordered=True, include_removed=True): |
506 | + ordered=True, order_by_date=False, |
507 | + include_removed=True): |
508 | """See `IArchive`.""" |
509 | - clauses, clauseTables, orderBy = self._getBinaryPublishingBaseClauses( |
510 | + clauses, order_by = self._getBinaryPublishingBaseClauses( |
511 | name=name, version=version, status=status, pocket=pocket, |
512 | distroarchseries=distroarchseries, exact_match=exact_match, |
513 | created_since_date=created_since_date, ordered=ordered, |
514 | - include_removed=include_removed) |
515 | - |
516 | - all_binaries = BinaryPackagePublishingHistory.select( |
517 | - ' AND '.join(clauses), clauseTables=clauseTables, |
518 | - orderBy=orderBy) |
519 | - |
520 | - return all_binaries |
521 | + order_by_date=order_by_date, include_removed=include_removed) |
522 | + |
523 | + return Store.of(self).find( |
524 | + BinaryPackagePublishingHistory, *clauses).order_by(*order_by) |
525 | |
526 | def getPublishedOnDiskBinaries(self, name=None, version=None, status=None, |
527 | distroarchseries=None, pocket=None, |
528 | - exact_match=False, |
529 | - created_since_date=None): |
530 | + exact_match=False): |
531 | """See `IArchive`.""" |
532 | - clauses, clauseTables, orderBy = self._getBinaryPublishingBaseClauses( |
533 | + # Circular imports. |
534 | + from lp.registry.model.distroseries import DistroSeries |
535 | + from lp.soyuz.model.distroarchseries import DistroArchSeries |
536 | + |
537 | + clauses, order_by = self._getBinaryPublishingBaseClauses( |
538 | name=name, version=version, status=status, pocket=pocket, |
539 | distroarchseries=distroarchseries, exact_match=exact_match, |
540 | - created_since_date=created_since_date) |
541 | - |
542 | - clauses.append(""" |
543 | - BinaryPackagePublishingHistory.distroarchseries = |
544 | - DistroArchSeries.id AND |
545 | - DistroArchSeries.distroseries = DistroSeries.id |
546 | - """) |
547 | - clauseTables.extend(['DistroSeries', 'DistroArchSeries']) |
548 | + need_bpr=True) |
549 | + |
550 | + clauses.extend([ |
551 | + BinaryPackagePublishingHistory.distroarchseriesID == |
552 | + DistroArchSeries.id, |
553 | + DistroArchSeries.distroseriesID == DistroSeries.id, |
554 | + ]) |
555 | + |
556 | + store = Store.of(self) |
557 | |
558 | # Retrieve only the binaries published for the 'nominated architecture |
559 | # independent' (usually i386) in the distroseries in question. |
560 | # It includes all architecture-independent binaries only once and the |
561 | # architecture-specific built for 'nominatedarchindep'. |
562 | - nominated_arch_independent_clause = [""" |
563 | - DistroSeries.nominatedarchindep = |
564 | - BinaryPackagePublishingHistory.distroarchseries |
565 | - """] |
566 | - nominated_arch_independent_query = ' AND '.join( |
567 | - clauses + nominated_arch_independent_clause) |
568 | - nominated_arch_independents = BinaryPackagePublishingHistory.select( |
569 | - nominated_arch_independent_query, clauseTables=clauseTables) |
570 | + nominated_arch_independent_clauses = clauses + [ |
571 | + DistroSeries.nominatedarchindepID == |
572 | + BinaryPackagePublishingHistory.distroarchseriesID, |
573 | + ] |
574 | + nominated_arch_independents = store.find( |
575 | + BinaryPackagePublishingHistory, |
576 | + *nominated_arch_independent_clauses) |
577 | |
578 | # Retrieve all architecture-specific binary publications except |
579 | # 'nominatedarchindep' (already included in the previous query). |
580 | - no_nominated_arch_independent_clause = [""" |
581 | - DistroSeries.nominatedarchindep != |
582 | - BinaryPackagePublishingHistory.distroarchseries AND |
583 | - BinaryPackageRelease.architecturespecific = true |
584 | - """] |
585 | - no_nominated_arch_independent_query = ' AND '.join( |
586 | - clauses + no_nominated_arch_independent_clause) |
587 | - no_nominated_arch_independents = ( |
588 | - BinaryPackagePublishingHistory.select( |
589 | - no_nominated_arch_independent_query, clauseTables=clauseTables)) |
590 | + no_nominated_arch_independent_clauses = clauses + [ |
591 | + DistroSeries.nominatedarchindepID != |
592 | + BinaryPackagePublishingHistory.distroarchseriesID, |
593 | + BinaryPackageRelease.architecturespecific == True, |
594 | + ] |
595 | + no_nominated_arch_independents = store.find( |
596 | + BinaryPackagePublishingHistory, |
597 | + *no_nominated_arch_independent_clauses) |
598 | |
599 | # XXX cprov 20071016: It's not possible to use the same ordering |
600 | # schema returned by self._getBinaryPublishingBaseClauses. |
601 | # It results in: |
602 | # ERROR: missing FROM-clause entry for table "binarypackagename" |
603 | unique_binary_publications = nominated_arch_independents.union( |
604 | - no_nominated_arch_independents).orderBy("id") |
605 | + no_nominated_arch_independents).order_by( |
606 | + BinaryPackagePublishingHistory.id) |
607 | |
608 | return unique_binary_publications |
609 | |
610 | |
611 | === modified file 'lib/lp/soyuz/scripts/tests/test_initialize_distroseries.py' |
612 | --- lib/lp/soyuz/scripts/tests/test_initialize_distroseries.py 2014-11-06 01:42:35 +0000 |
613 | +++ lib/lp/soyuz/scripts/tests/test_initialize_distroseries.py 2015-04-13 17:14:35 +0000 |
614 | @@ -1,4 +1,4 @@ |
615 | -# Copyright 2010-2014 Canonical Ltd. This software is licensed under the |
616 | +# Copyright 2010-2015 Canonical Ltd. This software is licensed under the |
617 | # GNU Affero General Public License version 3 (see the file LICENSE). |
618 | |
619 | """Test the initialize_distroseries script machinery.""" |
620 | @@ -581,9 +581,9 @@ |
621 | self.assertEqual( |
622 | parent_udev_pubs.count(), child_udev_pubs.count()) |
623 | parent_arch_udev_pubs = parent.main_archive.getAllPublishedBinaries( |
624 | - distroarchseries=parent[parent_das.architecturetag], name='udev') |
625 | + distroarchseries=parent[parent_das.architecturetag], name=u'udev') |
626 | child_arch_udev_pubs = child.main_archive.getAllPublishedBinaries( |
627 | - distroarchseries=child[parent_das.architecturetag], name='udev') |
628 | + distroarchseries=child[parent_das.architecturetag], name=u'udev') |
629 | self.assertEqual( |
630 | parent_arch_udev_pubs.count(), child_arch_udev_pubs.count()) |
631 | # And the binary package, and linked source package look fine too. |
632 | |
633 | === modified file 'lib/lp/soyuz/stories/ppa/xx-ppa-packages.txt' |
634 | --- lib/lp/soyuz/stories/ppa/xx-ppa-packages.txt 2014-07-24 09:37:03 +0000 |
635 | +++ lib/lp/soyuz/stories/ppa/xx-ppa-packages.txt 2015-04-13 17:14:35 +0000 |
636 | @@ -137,7 +137,7 @@ |
637 | >>> cprov = getUtility(IPersonSet).getByName('cprov') |
638 | >>> cprov_ppa = cprov.archive |
639 | >>> pmount_i386_pub = cprov_ppa.getAllPublishedBinaries( |
640 | - ... name='pmount', version='0.1-1')[1] |
641 | + ... name=u'pmount', version='0.1-1')[1] |
642 | >>> print pmount_i386_pub.displayname |
643 | pmount 0.1-1 in warty i386 |
644 | >>> from lp.soyuz.enums import PackagePublishingStatus |
645 | |
646 | === modified file 'lib/lp/soyuz/stories/webservice/xx-binary-package-publishing.txt' |
647 | --- lib/lp/soyuz/stories/webservice/xx-binary-package-publishing.txt 2015-04-08 13:19:19 +0000 |
648 | +++ lib/lp/soyuz/stories/webservice/xx-binary-package-publishing.txt 2015-04-13 17:14:35 +0000 |
649 | @@ -159,7 +159,7 @@ |
650 | >>> australia = getUtility(ICountrySet)['AU'] |
651 | |
652 | >>> firefox_db = cprov_db.archive.getAllPublishedBinaries( |
653 | - ... name='mozilla-firefox')[0] |
654 | + ... name=u'mozilla-firefox')[0] |
655 | >>> firefox_db.archive.updatePackageDownloadCount( |
656 | ... firefox_db.binarypackagerelease, date(2010, 2, 21), australia, 10) |
657 | >>> firefox_db.archive.updatePackageDownloadCount( |
658 | |
659 | === modified file 'lib/lp/soyuz/stories/webservice/xx-source-package-publishing.txt' |
660 | --- lib/lp/soyuz/stories/webservice/xx-source-package-publishing.txt 2014-07-24 09:37:03 +0000 |
661 | +++ lib/lp/soyuz/stories/webservice/xx-source-package-publishing.txt 2015-04-13 17:14:35 +0000 |
662 | @@ -216,7 +216,7 @@ |
663 | |
664 | >>> login("admin@canonical.com") |
665 | >>> for bin in cprov_ppa.getAllPublishedBinaries( |
666 | - ... name="testwebservice-bin"): |
667 | + ... name=u"testwebservice-bin"): |
668 | ... if bin.status != PackagePublishingStatus.DELETED: |
669 | ... print "%s is not deleted when it should be" % bin.displayname |
670 | ... else: |
671 | |
672 | === modified file 'lib/lp/soyuz/tests/test_archive.py' |
673 | --- lib/lp/soyuz/tests/test_archive.py 2015-03-12 13:59:27 +0000 |
674 | +++ lib/lp/soyuz/tests/test_archive.py 2015-04-13 17:14:35 +0000 |
675 | @@ -2187,14 +2187,24 @@ |
676 | component_name='universe') |
677 | self.assertEqual('universe', filtered.component.name) |
678 | |
679 | - |
680 | -class GetPublishedSourcesWebServiceTests(TestCaseWithFactory): |
681 | + def test_order_by_date(self): |
682 | + archive = self.factory.makeArchive() |
683 | + dates = [self.factory.getUniqueDate() for _ in range(5)] |
684 | + # Make sure the ID ordering and date ordering don't match so that we |
685 | + # can spot a date-ordered result. |
686 | + pubs = [ |
687 | + self.factory.makeSourcePackagePublishingHistory( |
688 | + archive=archive, date_uploaded=dates[(i + 1) % 5]) |
689 | + for i in range(5)] |
690 | + self.assertEqual( |
691 | + [pubs[i] for i in (3, 2, 1, 0, 4)], |
692 | + list(archive.getPublishedSources(order_by_date=True))) |
693 | + |
694 | + |
695 | +class TestGetPublishedSourcesWebService(TestCaseWithFactory): |
696 | |
697 | layer = LaunchpadFunctionalLayer |
698 | |
699 | - def setUp(self): |
700 | - super(GetPublishedSourcesWebServiceTests, self).setUp() |
701 | - |
702 | def createTestingPPA(self): |
703 | """Creates and populates a PPA for API performance tests. |
704 | |
705 | @@ -2208,7 +2218,6 @@ |
706 | # XXX cprov 2014-04-22: currently the target archive owner cannot |
707 | # 'addSource' to a `PackageUpload` ('launchpad.Edit'). It seems |
708 | # too restrive to me. |
709 | - from zope.security.proxy import removeSecurityProxy |
710 | with person_logged_in(ppa.owner): |
711 | for i in range(5): |
712 | upload = self.factory.makePackageUpload( |
713 | @@ -2814,6 +2823,19 @@ |
714 | publications, |
715 | [first_publication, middle_publication, later_publication]) |
716 | |
717 | + def test_order_by_date(self): |
718 | + archive = self.factory.makeArchive() |
719 | + dates = [self.factory.getUniqueDate() for _ in range(5)] |
720 | + # Make sure the ID ordering and date ordering don't match so that we |
721 | + # can spot a date-ordered result. |
722 | + pubs = [ |
723 | + self.factory.makeBinaryPackagePublishingHistory( |
724 | + archive=archive, datecreated=dates[(i + 1) % 5]) |
725 | + for i in range(5)] |
726 | + self.assertEqual( |
727 | + [pubs[i] for i in (3, 2, 1, 0, 4)], |
728 | + list(archive.getAllPublishedBinaries(order_by_date=True))) |
729 | + |
730 | |
731 | class TestRemovingPermissions(TestCaseWithFactory): |
732 | |
733 | |
734 | === modified file 'lib/lp/soyuz/tests/test_packagecopyjob.py' |
735 | --- lib/lp/soyuz/tests/test_packagecopyjob.py 2014-08-14 09:54:33 +0000 |
736 | +++ lib/lp/soyuz/tests/test_packagecopyjob.py 2015-04-13 17:14:35 +0000 |
737 | @@ -1,4 +1,4 @@ |
738 | -# Copyright 2010-2014 Canonical Ltd. This software is licensed under the |
739 | +# Copyright 2010-2015 Canonical Ltd. This software is licensed under the |
740 | # GNU Affero General Public License version 3 (see the file LICENSE). |
741 | |
742 | """Tests for sync package jobs.""" |
743 | @@ -1392,7 +1392,8 @@ |
744 | copied_sources = target_archive.getPublishedSources( |
745 | name=u"copyme", version="2.8-1") |
746 | self.assertNotEqual(0, copied_sources.count()) |
747 | - copied_binaries = target_archive.getAllPublishedBinaries(name="copyme") |
748 | + copied_binaries = target_archive.getAllPublishedBinaries( |
749 | + name=u"copyme") |
750 | self.assertNotEqual(0, copied_binaries.count()) |
751 | |
752 | # Check that files were unembargoed. |