Merge ~cjwatson/launchpad:stormify-archive into launchpad:master
- Git
- lp:~cjwatson/launchpad
- stormify-archive
- Merge into master
Proposed by
Colin Watson
Status: | Merged |
---|---|
Approved by: | Colin Watson |
Approved revision: | 25fb06249a97ebbcae748064428ba4fc2151d243 |
Merge reported by: | Otto Co-Pilot |
Merged at revision: | not available |
Proposed branch: | ~cjwatson/launchpad:stormify-archive |
Merge into: | launchpad:master |
Diff against target: |
984 lines (+216/-185) 18 files modified
lib/lp/code/model/sourcepackagerecipebuild.py (+1/-1) lib/lp/registry/browser/person.py (+1/-1) lib/lp/registry/model/distribution.py (+12/-15) lib/lp/registry/model/person.py (+4/-2) lib/lp/registry/scripts/populate_distroseriesdiff.py (+1/-1) lib/lp/scripts/garbo.py (+1/-1) lib/lp/snappy/model/snapbuild.py (+1/-1) lib/lp/soyuz/browser/queue.py (+1/-1) lib/lp/soyuz/model/archive.py (+150/-114) lib/lp/soyuz/model/archivesubscriber.py (+1/-1) lib/lp/soyuz/model/binarypackagebuild.py (+1/-1) lib/lp/soyuz/model/livefsbuild.py (+1/-1) lib/lp/soyuz/model/packagecloner.py (+8/-8) lib/lp/soyuz/model/queue.py (+1/-1) lib/lp/soyuz/scripts/initialize_distroseries.py (+2/-2) lib/lp/soyuz/scripts/packagecopier.py (+1/-1) lib/lp/soyuz/tests/test_archive.py (+14/-15) lib/lp/soyuz/vocabularies.py (+15/-18) |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Jürgen Gmach | Approve | ||
Review via email: mp+450462@code.launchpad.net |
Commit message
Convert Archive to Storm
Description of the change
To post a comment you must log in.
- 25fb062... by Colin Watson
-
Expand comment slightly
Revision history for this message
Colin Watson (cjwatson) : | # |
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | diff --git a/lib/lp/code/model/sourcepackagerecipebuild.py b/lib/lp/code/model/sourcepackagerecipebuild.py |
2 | index 1ec64c9..00d6ddb 100644 |
3 | --- a/lib/lp/code/model/sourcepackagerecipebuild.py |
4 | +++ b/lib/lp/code/model/sourcepackagerecipebuild.py |
5 | @@ -328,7 +328,7 @@ class SourcePackageRecipeBuild( |
6 | |
7 | load_related(LibraryFileAlias, builds, ["log_id"]) |
8 | archives = load_related(Archive, builds, ["archive_id"]) |
9 | - load_related(Person, archives, ["ownerID"]) |
10 | + load_related(Person, archives, ["owner_id"]) |
11 | distroseries = load_related(DistroSeries, builds, ["distroseries_id"]) |
12 | load_related(Distribution, distroseries, ["distribution_id"]) |
13 | sprs = load_related(SourcePackageRecipe, builds, ["recipe_id"]) |
14 | diff --git a/lib/lp/registry/browser/person.py b/lib/lp/registry/browser/person.py |
15 | index 5ca39fe..d14004b 100644 |
16 | --- a/lib/lp/registry/browser/person.py |
17 | +++ b/lib/lp/registry/browser/person.py |
18 | @@ -576,7 +576,7 @@ class PersonNavigation(BranchTraversalMixin, Navigation): |
19 | if not archive_id.isdigit(): |
20 | return None |
21 | return traverse_archive_subscription_for_subscriber( |
22 | - self.context, archive_id |
23 | + self.context, int(archive_id) |
24 | ) |
25 | else: |
26 | # Otherwise we return the normal view for a person's |
27 | diff --git a/lib/lp/registry/model/distribution.py b/lib/lp/registry/model/distribution.py |
28 | index 8f4bb03..924c672 100644 |
29 | --- a/lib/lp/registry/model/distribution.py |
30 | +++ b/lib/lp/registry/model/distribution.py |
31 | @@ -158,7 +158,6 @@ from lp.services.database.constants import UTC_NOW |
32 | from lp.services.database.decoratedresultset import DecoratedResultSet |
33 | from lp.services.database.enumcol import DBEnum |
34 | from lp.services.database.interfaces import IStore |
35 | -from lp.services.database.sqlbase import sqlvalues |
36 | from lp.services.database.stormbase import StormBase |
37 | from lp.services.database.stormexpr import ( |
38 | ArrayAgg, |
39 | @@ -195,6 +194,7 @@ from lp.soyuz.model.publishing import ( |
40 | SourcePackagePublishingHistory, |
41 | get_current_source_releases, |
42 | ) |
43 | +from lp.soyuz.model.queue import PackageUpload |
44 | from lp.translations.enums import TranslationPermission |
45 | from lp.translations.model.hastranslationimports import ( |
46 | HasTranslationImportsMixin, |
47 | @@ -1877,20 +1877,17 @@ class Distribution( |
48 | |
49 | def getPendingAcceptancePPAs(self): |
50 | """See `IDistribution`.""" |
51 | - query = """ |
52 | - Archive.purpose = %s AND |
53 | - Archive.distribution = %s AND |
54 | - PackageUpload.archive = Archive.id AND |
55 | - PackageUpload.status = %s |
56 | - """ % sqlvalues( |
57 | - ArchivePurpose.PPA, self.id, PackageUploadStatus.ACCEPTED |
58 | - ) |
59 | - |
60 | - return Archive.select( |
61 | - query, |
62 | - clauseTables=["PackageUpload"], |
63 | - orderBy=["archive.id"], |
64 | - distinct=True, |
65 | + return ( |
66 | + IStore(Archive) |
67 | + .find( |
68 | + Archive, |
69 | + Archive.purpose == ArchivePurpose.PPA, |
70 | + Archive.distribution == self, |
71 | + PackageUpload.archive == Archive.id, |
72 | + PackageUpload.status == PackageUploadStatus.ACCEPTED, |
73 | + ) |
74 | + .order_by(Archive.id) |
75 | + .config(distinct=True) |
76 | ) |
77 | |
78 | def getPendingPublicationPPAs(self): |
79 | diff --git a/lib/lp/registry/model/person.py b/lib/lp/registry/model/person.py |
80 | index 5aab17d..81f4187 100644 |
81 | --- a/lib/lp/registry/model/person.py |
82 | +++ b/lib/lp/registry/model/person.py |
83 | @@ -3573,8 +3573,10 @@ class Person( |
84 | @property |
85 | def ppas(self): |
86 | """See `IPerson`.""" |
87 | - return Archive.selectBy( |
88 | - owner=self, purpose=ArchivePurpose.PPA, orderBy="name" |
89 | + return ( |
90 | + IStore(Archive) |
91 | + .find(Archive, owner=self, purpose=ArchivePurpose.PPA) |
92 | + .order_by(Archive.name) |
93 | ) |
94 | |
95 | def getVisiblePPAs(self, user): |
96 | diff --git a/lib/lp/registry/scripts/populate_distroseriesdiff.py b/lib/lp/registry/scripts/populate_distroseriesdiff.py |
97 | index 7847ee6..97adebd 100644 |
98 | --- a/lib/lp/registry/scripts/populate_distroseriesdiff.py |
99 | +++ b/lib/lp/registry/scripts/populate_distroseriesdiff.py |
100 | @@ -56,7 +56,7 @@ def compose_sql_find_latest_source_package_releases(distroseries): |
101 | parameters = { |
102 | "active_status": quote(active_publishing_status), |
103 | "distroseries": quote(distroseries.id), |
104 | - "main_archive": quote(distroseries.distribution.main_archive), |
105 | + "main_archive": quote(distroseries.distribution.main_archive.id), |
106 | "release_pocket": quote(PackagePublishingPocket.RELEASE), |
107 | } |
108 | return ( |
109 | diff --git a/lib/lp/scripts/garbo.py b/lib/lp/scripts/garbo.py |
110 | index eab11db..06da799 100644 |
111 | --- a/lib/lp/scripts/garbo.py |
112 | +++ b/lib/lp/scripts/garbo.py |
113 | @@ -1994,7 +1994,7 @@ class ArchiveAuthTokenDeactivator(BulkPruner): |
114 | ) |
115 | ) |
116 | affected_ppas = load_related(Archive, tokens, ["archive_id"]) |
117 | - load_related(Person, affected_ppas, ["ownerID"]) |
118 | + load_related(Person, affected_ppas, ["owner_id"]) |
119 | getUtility(IPersonSet).getPrecachedPersonsFromIDs( |
120 | [token.person_id for token in tokens], need_preferred_email=True |
121 | ) |
122 | diff --git a/lib/lp/snappy/model/snapbuild.py b/lib/lp/snappy/model/snapbuild.py |
123 | index afc3275..0f4f041 100644 |
124 | --- a/lib/lp/snappy/model/snapbuild.py |
125 | +++ b/lib/lp/snappy/model/snapbuild.py |
126 | @@ -604,7 +604,7 @@ class SnapBuildSet(SpecificBuildFarmJobSourceMixin): |
127 | lfas = load_related(LibraryFileAlias, builds, ["log_id"]) |
128 | load_related(LibraryFileContent, lfas, ["contentID"]) |
129 | archives = load_related(Archive, builds, ["archive_id"]) |
130 | - load_related(Person, archives, ["ownerID"]) |
131 | + load_related(Person, archives, ["owner_id"]) |
132 | distroarchseries = load_related( |
133 | DistroArchSeries, builds, ["distro_arch_series_id"] |
134 | ) |
135 | diff --git a/lib/lp/soyuz/browser/queue.py b/lib/lp/soyuz/browser/queue.py |
136 | index bcae209..5112ea7 100644 |
137 | --- a/lib/lp/soyuz/browser/queue.py |
138 | +++ b/lib/lp/soyuz/browser/queue.py |
139 | @@ -190,7 +190,7 @@ class QueueItemsView(LaunchpadView): |
140 | if upload.package_copy_job_id is not None |
141 | } |
142 | archives = {pcj.source_archive for pcj in package_copy_jobs} |
143 | - person_ids = [archive.ownerID for archive in archives] |
144 | + person_ids = [archive.owner_id for archive in archives] |
145 | jobs = load_related(Job, package_copy_jobs, ["job_id"]) |
146 | person_ids.extend(job.requester_id for job in jobs) |
147 | list( |
148 | diff --git a/lib/lp/soyuz/model/archive.py b/lib/lp/soyuz/model/archive.py |
149 | index bae615f..fe36739 100644 |
150 | --- a/lib/lp/soyuz/model/archive.py |
151 | +++ b/lib/lp/soyuz/model/archive.py |
152 | @@ -14,18 +14,20 @@ __all__ = [ |
153 | import logging |
154 | import re |
155 | import typing |
156 | -from datetime import datetime |
157 | +from datetime import datetime, timedelta, timezone |
158 | from operator import attrgetter |
159 | from pathlib import PurePath |
160 | |
161 | import six |
162 | from lazr.lifecycle.event import ObjectCreatedEvent |
163 | from storm.expr import ( |
164 | + Alias, |
165 | And, |
166 | Cast, |
167 | Count, |
168 | Desc, |
169 | Exists, |
170 | + Is, |
171 | Join, |
172 | Not, |
173 | Or, |
174 | @@ -33,7 +35,7 @@ from storm.expr import ( |
175 | Sum, |
176 | Union, |
177 | ) |
178 | -from storm.properties import JSON, Int, Unicode |
179 | +from storm.properties import JSON, Bool, DateTime, Int, Unicode |
180 | from storm.references import Reference |
181 | from storm.store import EmptyResultSet, Store |
182 | from zope.component import getAdapter, getUtility |
183 | @@ -81,17 +83,9 @@ from lp.registry.model.teammembership import TeamParticipation |
184 | from lp.services.config import config |
185 | from lp.services.database.bulk import create, load_referencing, load_related |
186 | from lp.services.database.constants import UTC_NOW |
187 | -from lp.services.database.datetimecol import UtcDateTimeCol |
188 | from lp.services.database.decoratedresultset import DecoratedResultSet |
189 | from lp.services.database.enumcol import DBEnum |
190 | from lp.services.database.interfaces import IStandbyStore, IStore |
191 | -from lp.services.database.sqlbase import SQLBase, cursor, sqlvalues |
192 | -from lp.services.database.sqlobject import ( |
193 | - BoolCol, |
194 | - ForeignKey, |
195 | - IntCol, |
196 | - StringCol, |
197 | -) |
198 | from lp.services.database.stormbase import StormBase |
199 | from lp.services.database.stormexpr import BulkUpdate |
200 | from lp.services.features import getFeatureFlag |
201 | @@ -209,23 +203,21 @@ def storm_validate_external_dependencies(archive, attr, value): |
202 | |
203 | |
204 | @implementer(IArchive, IHasOwner, IHasBuildRecords) |
205 | -class Archive(SQLBase): |
206 | - _table = "Archive" |
207 | - _defaultOrder = "id" |
208 | - |
209 | - owner = ForeignKey( |
210 | - dbName="owner", |
211 | - foreignKey="Person", |
212 | - storm_validator=validate_person, |
213 | - notNull=True, |
214 | - ) |
215 | +class Archive(StormBase): |
216 | + __storm_table__ = "Archive" |
217 | + __storm_order__ = "id" |
218 | + |
219 | + id = Int(primary=True) |
220 | + |
221 | + owner_id = Int(name="owner", validator=validate_person, allow_none=False) |
222 | + owner = Reference(owner_id, "Person.id") |
223 | |
224 | def _validate_archive_name(self, attr, value): |
225 | """Only allow renaming of COPY archives. |
226 | |
227 | Also assert the name is valid when set via an unproxied object. |
228 | """ |
229 | - if not self._SO_creating: |
230 | + if not self._creating: |
231 | renamable = self.is_copy or ( |
232 | self.is_ppa and self.status == ArchiveStatus.DELETED |
233 | ) |
234 | @@ -277,13 +269,13 @@ class Archive(SQLBase): |
235 | |
236 | return value |
237 | |
238 | - name = StringCol( |
239 | - dbName="name", notNull=True, storm_validator=_validate_archive_name |
240 | + name = Unicode( |
241 | + name="name", allow_none=False, validator=_validate_archive_name |
242 | ) |
243 | |
244 | - displayname = StringCol(dbName="displayname", notNull=True) |
245 | + displayname = Unicode(name="displayname", allow_none=False) |
246 | |
247 | - description = StringCol(dbName="description", notNull=False, default=None) |
248 | + description = Unicode(name="description", allow_none=True, default=None) |
249 | |
250 | distribution_id = Int(name="distribution", allow_none=True) |
251 | distribution = Reference(distribution_id, "Distribution.id") |
252 | @@ -297,78 +289,76 @@ class Archive(SQLBase): |
253 | default=ArchiveStatus.ACTIVE, |
254 | ) |
255 | |
256 | - _enabled = BoolCol(dbName="enabled", notNull=True, default=True) |
257 | + _enabled = Bool(name="enabled", allow_none=False, default=True) |
258 | enabled = property(lambda x: x._enabled) |
259 | |
260 | - publish = BoolCol(dbName="publish", notNull=True, default=True) |
261 | + publish = Bool(name="publish", allow_none=False, default=True) |
262 | |
263 | - private = BoolCol( |
264 | - dbName="private", |
265 | - notNull=True, |
266 | + private = Bool( |
267 | + name="private", |
268 | + allow_none=False, |
269 | default=False, |
270 | - storm_validator=_validate_archive_privacy, |
271 | + validator=_validate_archive_privacy, |
272 | ) |
273 | |
274 | - require_virtualized = BoolCol( |
275 | - dbName="require_virtualized", notNull=True, default=True |
276 | + require_virtualized = Bool( |
277 | + name="require_virtualized", allow_none=False, default=True |
278 | ) |
279 | |
280 | - build_debug_symbols = BoolCol( |
281 | - dbName="build_debug_symbols", notNull=True, default=False |
282 | + build_debug_symbols = Bool( |
283 | + name="build_debug_symbols", allow_none=False, default=False |
284 | ) |
285 | - publish_debug_symbols = BoolCol( |
286 | - dbName="publish_debug_symbols", notNull=False, default=False |
287 | + publish_debug_symbols = Bool( |
288 | + name="publish_debug_symbols", allow_none=True, default=False |
289 | ) |
290 | |
291 | - permit_obsolete_series_uploads = BoolCol( |
292 | - dbName="permit_obsolete_series_uploads", default=False |
293 | + permit_obsolete_series_uploads = Bool( |
294 | + name="permit_obsolete_series_uploads", default=False |
295 | ) |
296 | |
297 | - authorized_size = IntCol(dbName="authorized_size", notNull=False) |
298 | + authorized_size = Int(name="authorized_size", allow_none=True) |
299 | |
300 | - sources_cached = IntCol(dbName="sources_cached", notNull=False, default=0) |
301 | + sources_cached = Int(name="sources_cached", allow_none=True, default=0) |
302 | |
303 | - binaries_cached = IntCol( |
304 | - dbName="binaries_cached", notNull=False, default=0 |
305 | - ) |
306 | + binaries_cached = Int(name="binaries_cached", allow_none=True, default=0) |
307 | |
308 | - package_description_cache = StringCol( |
309 | - dbName="package_description_cache", notNull=False, default=None |
310 | + package_description_cache = Unicode( |
311 | + name="package_description_cache", allow_none=True, default=None |
312 | ) |
313 | |
314 | - total_count = IntCol(dbName="total_count", notNull=True, default=0) |
315 | + total_count = Int(name="total_count", allow_none=False, default=0) |
316 | |
317 | - pending_count = IntCol(dbName="pending_count", notNull=True, default=0) |
318 | + pending_count = Int(name="pending_count", allow_none=False, default=0) |
319 | |
320 | - succeeded_count = IntCol(dbName="succeeded_count", notNull=True, default=0) |
321 | + succeeded_count = Int(name="succeeded_count", allow_none=False, default=0) |
322 | |
323 | - building_count = IntCol(dbName="building_count", notNull=True, default=0) |
324 | + building_count = Int(name="building_count", allow_none=False, default=0) |
325 | |
326 | - failed_count = IntCol(dbName="failed_count", notNull=True, default=0) |
327 | + failed_count = Int(name="failed_count", allow_none=False, default=0) |
328 | |
329 | - date_created = UtcDateTimeCol(dbName="date_created") |
330 | + date_created = DateTime(name="date_created", tzinfo=timezone.utc) |
331 | |
332 | signing_key_owner_id = Int(name="signing_key_owner") |
333 | signing_key_owner = Reference(signing_key_owner_id, "Person.id") |
334 | signing_key_fingerprint = Unicode() |
335 | |
336 | - relative_build_score = IntCol( |
337 | - dbName="relative_build_score", notNull=True, default=0 |
338 | + relative_build_score = Int( |
339 | + name="relative_build_score", allow_none=False, default=0 |
340 | ) |
341 | |
342 | # This field is specifically and only intended for OEM migration to |
343 | # Launchpad and should be re-examined in October 2010 to see if it |
344 | # is still relevant. |
345 | - external_dependencies = StringCol( |
346 | - dbName="external_dependencies", |
347 | - notNull=False, |
348 | + external_dependencies = Unicode( |
349 | + name="external_dependencies", |
350 | + allow_none=True, |
351 | default=None, |
352 | - storm_validator=storm_validate_external_dependencies, |
353 | + validator=storm_validate_external_dependencies, |
354 | ) |
355 | |
356 | - suppress_subscription_notifications = BoolCol( |
357 | - dbName="suppress_subscription_notifications", |
358 | - notNull=True, |
359 | + suppress_subscription_notifications = Bool( |
360 | + name="suppress_subscription_notifications", |
361 | + allow_none=False, |
362 | default=False, |
363 | ) |
364 | |
365 | @@ -382,10 +372,50 @@ class Archive(SQLBase): |
366 | name="repository_format", allow_none=True, enum=ArchiveRepositoryFormat |
367 | ) |
368 | |
369 | - def _init(self, *args, **kw): |
370 | - """Provide the right interface for URL traversal.""" |
371 | - SQLBase._init(self, *args, **kw) |
372 | + _creating = False |
373 | |
374 | + def __init__( |
375 | + self, |
376 | + owner, |
377 | + distribution, |
378 | + name, |
379 | + displayname, |
380 | + purpose, |
381 | + description=None, |
382 | + publish=True, |
383 | + require_virtualized=True, |
384 | + signing_key_owner=None, |
385 | + signing_key_fingerprint=None, |
386 | + publishing_method=None, |
387 | + repository_format=None, |
388 | + ): |
389 | + super().__init__() |
390 | + try: |
391 | + self._creating = True |
392 | + self.owner = owner |
393 | + self.distribution = distribution |
394 | + self.name = name |
395 | + self.displayname = displayname |
396 | + self.purpose = purpose |
397 | + self.description = description |
398 | + self.publish = publish |
399 | + self.require_virtualized = require_virtualized |
400 | + self.signing_key_owner = signing_key_owner |
401 | + self.signing_key_fingerprint = signing_key_fingerprint |
402 | + self.publishing_method = publishing_method |
403 | + self.repository_format = repository_format |
404 | + except Exception: |
405 | + # If validating references such as `owner` fails, then the new |
406 | + # object may have been added to the store first. Remove it |
407 | + # again in that case. |
408 | + store = Store.of(self) |
409 | + if store is not None: |
410 | + store.remove(self) |
411 | + raise |
412 | + self.__storm_loaded__() |
413 | + del self._creating |
414 | + |
415 | + def __storm_loaded__(self): |
416 | # Provide the additional marker interface depending on what type |
417 | # of archive this is. See also the lp:url declarations in |
418 | # zcml/archive.zcml. |
419 | @@ -3246,7 +3276,7 @@ class ArchiveSet: |
420 | |
421 | def get(self, archive_id): |
422 | """See `IArchiveSet`.""" |
423 | - return Archive.get(archive_id) |
424 | + return IStore(Archive).get(Archive, archive_id) |
425 | |
426 | def getByReference(self, reference, check_permissions=False, user=None): |
427 | """See `IArchiveSet`.""" |
428 | @@ -3351,8 +3381,12 @@ class ArchiveSet: |
429 | if name is None: |
430 | name = self._getDefaultArchiveNameByPurpose(purpose) |
431 | |
432 | - return Archive.selectOneBy( |
433 | - distribution=distribution, purpose=purpose, name=name |
434 | + return ( |
435 | + IStore(Archive) |
436 | + .find( |
437 | + Archive, distribution=distribution, purpose=purpose, name=name |
438 | + ) |
439 | + .one() |
440 | ) |
441 | |
442 | def getByDistroAndName(self, distribution, name): |
443 | @@ -3443,11 +3477,16 @@ class ArchiveSet: |
444 | % (name, distribution.name) |
445 | ) |
446 | else: |
447 | - archive = Archive.selectOneBy( |
448 | - owner=owner, |
449 | - distribution=distribution, |
450 | - name=name, |
451 | - purpose=ArchivePurpose.PPA, |
452 | + archive = ( |
453 | + IStore(Archive) |
454 | + .find( |
455 | + Archive, |
456 | + owner=owner, |
457 | + distribution=distribution, |
458 | + name=name, |
459 | + purpose=ArchivePurpose.PPA, |
460 | + ) |
461 | + .one() |
462 | ) |
463 | if archive is not None: |
464 | raise AssertionError( |
465 | @@ -3477,12 +3516,12 @@ class ArchiveSet: |
466 | signing_key_owner=signing_key_owner, |
467 | signing_key_fingerprint=signing_key_fingerprint, |
468 | require_virtualized=require_virtualized, |
469 | - _publishing_method=publishing_method, |
470 | - _repository_format=repository_format, |
471 | + publishing_method=publishing_method, |
472 | + repository_format=repository_format, |
473 | ) |
474 | |
475 | # Upon creation archives are enabled by default. |
476 | - if enabled == False: |
477 | + if not enabled: |
478 | new_archive.disable() |
479 | |
480 | # Private teams cannot have public PPAs. |
481 | @@ -3508,11 +3547,12 @@ class ArchiveSet: |
482 | ] |
483 | new_archive.setProcessors(processors) |
484 | |
485 | + Store.of(new_archive).flush() |
486 | return new_archive |
487 | |
488 | def __iter__(self): |
489 | """See `IArchiveSet`.""" |
490 | - return iter(Archive.select()) |
491 | + return iter(IStore(Archive).find(Archive)) |
492 | |
493 | def getPPAOwnedByPerson( |
494 | self, |
495 | @@ -3545,9 +3585,9 @@ class ArchiveSet: |
496 | direct_membership = Select( |
497 | Archive.id, |
498 | where=And( |
499 | - Archive._enabled == True, |
500 | + Is(Archive._enabled, True), |
501 | Archive.purpose == ArchivePurpose.PPA, |
502 | - TeamParticipation.team == Archive.ownerID, |
503 | + TeamParticipation.team == Archive.owner_id, |
504 | TeamParticipation.person == user, |
505 | ), |
506 | ) |
507 | @@ -3576,7 +3616,7 @@ class ArchiveSet: |
508 | result.order_by(Archive.displayname) |
509 | |
510 | def preload_owners(rows): |
511 | - load_related(Person, rows, ["ownerID"]) |
512 | + load_related(Person, rows, ["owner_id"]) |
513 | |
514 | return DecoratedResultSet(result, pre_iter_hook=preload_owners) |
515 | |
516 | @@ -3611,7 +3651,7 @@ class ArchiveSet: |
517 | Archive, |
518 | Archive.signing_key_fingerprint == None, |
519 | Archive.purpose == purpose, |
520 | - Archive._enabled == True, |
521 | + Is(Archive._enabled, True), |
522 | ) |
523 | ) |
524 | results.order_by(Archive.date_created) |
525 | @@ -3628,8 +3668,8 @@ class ArchiveSet: |
526 | SourcePackagePublishingHistory, |
527 | SourcePackagePublishingHistory.archive == Archive.id, |
528 | SourcePackagePublishingHistory.distroseries == DistroSeries.id, |
529 | - Archive.private == False, |
530 | - Archive._enabled == True, |
531 | + Is(Archive.private, False), |
532 | + Is(Archive._enabled, True), |
533 | Archive.distribution == distribution, |
534 | DistroSeries.distribution == distribution, |
535 | Archive.purpose == ArchivePurpose.PPA, |
536 | @@ -3642,28 +3682,24 @@ class ArchiveSet: |
537 | |
538 | def getMostActivePPAsForDistribution(self, distribution): |
539 | """See `IArchiveSet`.""" |
540 | - cur = cursor() |
541 | - query = """ |
542 | - SELECT a.id, count(*) as C |
543 | - FROM Archive a, SourcePackagePublishingHistory spph |
544 | - WHERE |
545 | - spph.archive = a.id AND |
546 | - a.private = FALSE AND |
547 | - spph.datecreated >= now() - INTERVAL '1 week' AND |
548 | - a.distribution = %s AND |
549 | - a.purpose = %s |
550 | - GROUP BY a.id |
551 | - ORDER BY C DESC, a.id |
552 | - LIMIT 5 |
553 | - """ % sqlvalues( |
554 | - distribution.id, ArchivePurpose.PPA |
555 | - ) |
556 | - |
557 | - cur.execute(query) |
558 | + spph_count = Alias(Count(SourcePackagePublishingHistory.id)) |
559 | + results = ( |
560 | + IStore(Archive) |
561 | + .find( |
562 | + (Archive, spph_count), |
563 | + SourcePackagePublishingHistory.archive == Archive.id, |
564 | + Is(Archive.private, False), |
565 | + SourcePackagePublishingHistory.datecreated |
566 | + >= UTC_NOW - Cast(timedelta(weeks=1), "interval"), |
567 | + Archive.distribution == distribution, |
568 | + Archive.purpose == ArchivePurpose.PPA, |
569 | + ) |
570 | + .group_by(Archive.id) |
571 | + .order_by(Desc(spph_count), Archive.id)[:5] |
572 | + ) |
573 | |
574 | most_active = [] |
575 | - for archive_id, number_of_uploads in cur.fetchall(): |
576 | - archive = Archive.get(int(archive_id)) |
577 | + for archive, number_of_uploads in results: |
578 | the_dict = {"archive": archive, "uploads": number_of_uploads} |
579 | most_active.append(the_dict) |
580 | |
581 | @@ -3673,7 +3709,7 @@ class ArchiveSet: |
582 | """See `IArchiveSet`.""" |
583 | return IStore(Archive).find( |
584 | Archive, |
585 | - Archive.private == True, |
586 | + Is(Archive.private, True), |
587 | Archive.purpose == ArchivePurpose.PPA, |
588 | ) |
589 | |
590 | @@ -3702,7 +3738,7 @@ class ArchiveSet: |
591 | extra_exprs.append(Archive.name == name) |
592 | |
593 | public_archive = And( |
594 | - Archive.private == False, Archive._enabled == True |
595 | + Is(Archive.private, False), Is(Archive._enabled, True) |
596 | ) |
597 | |
598 | if not check_permissions: |
599 | @@ -3720,14 +3756,14 @@ class ArchiveSet: |
600 | TeamParticipation.team_id, |
601 | where=And( |
602 | TeamParticipation.person == user, |
603 | - TeamParticipation.team_id == Archive.ownerID, |
604 | + TeamParticipation.team_id == Archive.owner_id, |
605 | ), |
606 | ) |
607 | |
608 | # Append the extra expression to capture either public |
609 | # archives, or archives owned by the user, or archives |
610 | # owned by a team of which the user is a member: |
611 | - # Note: 'Archive.ownerID == user.id' |
612 | + # Note: 'Archive.owner_id == user.id' |
613 | # is unnecessary below because there is a TeamParticipation |
614 | # entry showing that each person is a member of the "team" |
615 | # that consists of themselves. |
616 | @@ -3736,7 +3772,7 @@ class ArchiveSet: |
617 | extra_exprs.append( |
618 | Or( |
619 | public_archive, |
620 | - Archive.ownerID.is_in(user_teams_subselect), |
621 | + Archive.owner_id.is_in(user_teams_subselect), |
622 | ) |
623 | ) |
624 | else: |
625 | @@ -3745,7 +3781,7 @@ class ArchiveSet: |
626 | extra_exprs.append(public_archive) |
627 | |
628 | if exclude_disabled: |
629 | - extra_exprs.append(Archive._enabled == True) |
630 | + extra_exprs.append(Is(Archive._enabled, True)) |
631 | |
632 | if exclude_pristine: |
633 | extra_exprs.append( |
634 | @@ -3842,7 +3878,7 @@ class ArchiveSet: |
635 | # when a user is the direct owner of the PPA. |
636 | # Team ownership is accounted for in `get_enabled_archive_filter` |
637 | # below |
638 | - elif user.id == removeSecurityProxy(archive).ownerID: |
639 | + elif user.id == removeSecurityProxy(archive).owner_id: |
640 | allowed_ids.add(archive.id) |
641 | |
642 | else: |
643 | @@ -3888,7 +3924,7 @@ def get_archive_privacy_filter(user): |
644 | else: |
645 | privacy_filter = Or( |
646 | Not(Archive.private), |
647 | - Archive.ownerID.is_in( |
648 | + Archive.owner_id.is_in( |
649 | Select( |
650 | TeamParticipation.team_id, |
651 | where=(TeamParticipation.person == user), |
652 | @@ -3912,8 +3948,8 @@ def get_enabled_archive_filter( |
653 | if include_public: |
654 | terms = [ |
655 | purpose_term, |
656 | - Archive.private == False, |
657 | - Archive._enabled == True, |
658 | + Is(Archive.private, False), |
659 | + Is(Archive._enabled, True), |
660 | ] |
661 | return And(*terms) |
662 | else: |
663 | @@ -3929,7 +3965,7 @@ def get_enabled_archive_filter( |
664 | TeamParticipation.team_id, where=TeamParticipation.person == user |
665 | ) |
666 | |
667 | - is_owner = Archive.ownerID.is_in(user_teams) |
668 | + is_owner = Archive.owner_id.is_in(user_teams) |
669 | |
670 | from lp.soyuz.model.archivesubscriber import ArchiveSubscriber |
671 | |
672 | @@ -3970,6 +4006,6 @@ def get_enabled_archive_filter( |
673 | |
674 | if include_public: |
675 | filter_terms.append( |
676 | - And(Archive._enabled == True, Archive.private == False) |
677 | + And(Is(Archive._enabled, True), Is(Archive.private, False)) |
678 | ) |
679 | return And(purpose_term, Or(*filter_terms)) |
680 | diff --git a/lib/lp/soyuz/model/archivesubscriber.py b/lib/lp/soyuz/model/archivesubscriber.py |
681 | index 0ff0cc7..d3173df 100644 |
682 | --- a/lib/lp/soyuz/model/archivesubscriber.py |
683 | +++ b/lib/lp/soyuz/model/archivesubscriber.py |
684 | @@ -240,7 +240,7 @@ class ArchiveSubscriberSet: |
685 | archives = load_related(Archive, subscriptions, ["archive_id"]) |
686 | list( |
687 | getUtility(IPersonSet).getPrecachedPersonsFromIDs( |
688 | - [archive.ownerID for archive in archives], |
689 | + [archive.owner_id for archive in archives], |
690 | need_validity=True, |
691 | ) |
692 | ) |
693 | diff --git a/lib/lp/soyuz/model/binarypackagebuild.py b/lib/lp/soyuz/model/binarypackagebuild.py |
694 | index ca82a8e..4616feb 100644 |
695 | --- a/lib/lp/soyuz/model/binarypackagebuild.py |
696 | +++ b/lib/lp/soyuz/model/binarypackagebuild.py |
697 | @@ -942,7 +942,7 @@ class BinaryPackageBuildSet(SpecificBuildFarmJobSourceMixin): |
698 | self._prefetchBuildData(builds) |
699 | das = load_related(DistroArchSeries, builds, ["distro_arch_series_id"]) |
700 | archives = load_related(Archive, builds, ["archive_id"]) |
701 | - load_related(Person, archives, ["ownerID"]) |
702 | + load_related(Person, archives, ["owner_id"]) |
703 | distroseries = load_related(DistroSeries, das, ["distroseries_id"]) |
704 | load_related(Distribution, distroseries, ["distribution_id"]) |
705 | |
706 | diff --git a/lib/lp/soyuz/model/livefsbuild.py b/lib/lp/soyuz/model/livefsbuild.py |
707 | index d4a3396..f7312a9 100644 |
708 | --- a/lib/lp/soyuz/model/livefsbuild.py |
709 | +++ b/lib/lp/soyuz/model/livefsbuild.py |
710 | @@ -429,7 +429,7 @@ class LiveFSBuildSet(SpecificBuildFarmJobSourceMixin): |
711 | load_related(Person, builds, ["requester_id"]) |
712 | load_related(LibraryFileAlias, builds, ["log_id"]) |
713 | archives = load_related(Archive, builds, ["archive_id"]) |
714 | - load_related(Person, archives, ["ownerID"]) |
715 | + load_related(Person, archives, ["owner_id"]) |
716 | load_related(LiveFS, builds, ["livefs_id"]) |
717 | |
718 | def getByBuildFarmJobs(self, build_farm_jobs): |
719 | diff --git a/lib/lp/soyuz/model/packagecloner.py b/lib/lp/soyuz/model/packagecloner.py |
720 | index 15029b4..fee1b4c 100644 |
721 | --- a/lib/lp/soyuz/model/packagecloner.py |
722 | +++ b/lib/lp/soyuz/model/packagecloner.py |
723 | @@ -171,7 +171,7 @@ class PackageCloner: |
724 | bpph.binarypackagename |
725 | """ % sqlvalues( |
726 | destination_das.id, |
727 | - destination.archive, |
728 | + destination.archive.id, |
729 | UTC_NOW, |
730 | UTC_NOW, |
731 | destination.pocket, |
732 | @@ -188,7 +188,7 @@ class PackageCloner: |
733 | PackagePublishingStatus.PENDING, |
734 | PackagePublishingStatus.PUBLISHED, |
735 | origin.pocket, |
736 | - origin.archive, |
737 | + origin.archive.id, |
738 | ) |
739 | |
740 | if use_names: |
741 | @@ -230,7 +230,7 @@ class PackageCloner: |
742 | """ |
743 | % sqlvalues( |
744 | destination.distroseries.id, |
745 | - destination.archive, |
746 | + destination.archive.id, |
747 | UTC_NOW, |
748 | UTC_NOW, |
749 | destination.pocket, |
750 | @@ -291,7 +291,7 @@ class PackageCloner: |
751 | spn.name = mcd.sourcepackagename AND |
752 | spr.version > mcd.t_version |
753 | """ % sqlvalues( |
754 | - origin.archive, |
755 | + origin.archive.id, |
756 | PackagePublishingStatus.PENDING, |
757 | PackagePublishingStatus.PUBLISHED, |
758 | origin.distroseries.id, |
759 | @@ -334,7 +334,7 @@ class PackageCloner: |
760 | spn.name NOT IN ( |
761 | SELECT sourcepackagename FROM tmp_merge_copy_data) |
762 | """ % sqlvalues( |
763 | - origin.archive, |
764 | + origin.archive.id, |
765 | PackagePublishingStatus.PENDING, |
766 | PackagePublishingStatus.PUBLISHED, |
767 | origin.distroseries.id, |
768 | @@ -414,7 +414,7 @@ class PackageCloner: |
769 | secsrc.distroseries = %s AND |
770 | secsrc.pocket = %s |
771 | """ % sqlvalues( |
772 | - destination.archive, |
773 | + destination.archive.id, |
774 | PackagePublishingStatus.PENDING, |
775 | PackagePublishingStatus.PUBLISHED, |
776 | destination.distroseries.id, |
777 | @@ -465,7 +465,7 @@ class PackageCloner: |
778 | spph.archive = %s |
779 | """ % sqlvalues( |
780 | destination.distroseries.id, |
781 | - destination.archive, |
782 | + destination.archive.id, |
783 | UTC_NOW, |
784 | UTC_NOW, |
785 | destination.pocket, |
786 | @@ -473,7 +473,7 @@ class PackageCloner: |
787 | PackagePublishingStatus.PENDING, |
788 | PackagePublishingStatus.PUBLISHED, |
789 | origin.pocket, |
790 | - origin.archive, |
791 | + origin.archive.id, |
792 | ) |
793 | |
794 | if sourcepackagenames and len(sourcepackagenames) > 0: |
795 | diff --git a/lib/lp/soyuz/model/queue.py b/lib/lp/soyuz/model/queue.py |
796 | index 639640a..f330355 100644 |
797 | --- a/lib/lp/soyuz/model/queue.py |
798 | +++ b/lib/lp/soyuz/model/queue.py |
799 | @@ -451,7 +451,7 @@ class PackageUpload(StormBase): |
800 | AND bpf.libraryfile = lfa.id |
801 | AND lfa.filename IN (%%s) |
802 | """ % sqlvalues( |
803 | - self.archive, self.distroseries.distribution.id |
804 | + self.archive_id, self.distroseries.distribution_id |
805 | ) |
806 | # Inject the inner query. |
807 | query %= inner_query |
808 | diff --git a/lib/lp/soyuz/scripts/initialize_distroseries.py b/lib/lp/soyuz/scripts/initialize_distroseries.py |
809 | index bcb6f28..6a35b71 100644 |
810 | --- a/lib/lp/soyuz/scripts/initialize_distroseries.py |
811 | +++ b/lib/lp/soyuz/scripts/initialize_distroseries.py |
812 | @@ -857,7 +857,7 @@ class InitializeDistroSeries: |
813 | FROM Archivepermission WHERE packageset = %s |
814 | """ |
815 | % sqlvalues( |
816 | - self.distroseries.main_archive, |
817 | + self.distroseries.main_archive.id, |
818 | child_ps.id, |
819 | parent_ps.id, |
820 | ) |
821 | @@ -913,7 +913,7 @@ class InitializeDistroSeries: |
822 | WHERE pocket IS NOT NULL AND distroseries = %s |
823 | """ |
824 | % sqlvalues( |
825 | - self.distroseries.main_archive, |
826 | + self.distroseries.main_archive.id, |
827 | self.distroseries.id, |
828 | parent.id, |
829 | ) |
830 | diff --git a/lib/lp/soyuz/scripts/packagecopier.py b/lib/lp/soyuz/scripts/packagecopier.py |
831 | index d0553f5..f377107 100644 |
832 | --- a/lib/lp/soyuz/scripts/packagecopier.py |
833 | +++ b/lib/lp/soyuz/scripts/packagecopier.py |
834 | @@ -171,7 +171,7 @@ def check_copy_permissions( |
835 | # checks on each source archive. Not all of this is currently |
836 | # preloadable. |
837 | archives = load_related(Archive, sources, ["archive_id"]) |
838 | - load_related(Person, archives, ["ownerID"]) |
839 | + load_related(Person, archives, ["owner_id"]) |
840 | |
841 | # If there is a requester, check that they have upload permission into |
842 | # the destination (archive, component, pocket). This check is done |
843 | diff --git a/lib/lp/soyuz/tests/test_archive.py b/lib/lp/soyuz/tests/test_archive.py |
844 | index a72d9b7..5c17662 100644 |
845 | --- a/lib/lp/soyuz/tests/test_archive.py |
846 | +++ b/lib/lp/soyuz/tests/test_archive.py |
847 | @@ -44,6 +44,7 @@ from lp.buildmaster.interfaces.buildfarmjobbehaviour import ( |
848 | IBuildFarmJobBehaviour, |
849 | ) |
850 | from lp.buildmaster.interfaces.processor import IProcessorSet |
851 | +from lp.buildmaster.model.buildqueue import BuildQueue |
852 | from lp.registry.enums import PersonVisibility, TeamMembershipPolicy |
853 | from lp.registry.interfaces.distribution import IDistributionSet |
854 | from lp.registry.interfaces.person import IPersonSet |
855 | @@ -52,7 +53,6 @@ from lp.registry.interfaces.series import SeriesStatus |
856 | from lp.registry.interfaces.teammembership import TeamMembershipStatus |
857 | from lp.services.authserver.testing import InProcessAuthServerFixture |
858 | from lp.services.database.interfaces import IStore |
859 | -from lp.services.database.sqlbase import sqlvalues |
860 | from lp.services.features import getFeatureFlag |
861 | from lp.services.features.testing import FeatureFixture |
862 | from lp.services.gpg.interfaces import ( |
863 | @@ -116,6 +116,7 @@ from lp.soyuz.model.archivepermission import ( |
864 | ArchivePermission, |
865 | ArchivePermissionSet, |
866 | ) |
867 | +from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild |
868 | from lp.soyuz.model.binarypackagerelease import ( |
869 | BinaryPackageReleaseDownloadCount, |
870 | ) |
871 | @@ -398,20 +399,18 @@ class TestArchiveEnableDisable(TestCaseWithFactory): |
872 | |
873 | def _getBuildQueuesByStatus(self, archive, status): |
874 | # Return the count for archive build jobs with the given status. |
875 | - query = """ |
876 | - SELECT COUNT(BuildQueue.id) |
877 | - FROM BinaryPackageBuild, BuildQueue |
878 | - WHERE |
879 | - BinaryPackageBuild.build_farm_job = |
880 | - BuildQueue.build_farm_job |
881 | - AND BinaryPackageBuild.archive = %s |
882 | - AND BinaryPackageBuild.status = %s |
883 | - AND BuildQueue.status = %s; |
884 | - """ % sqlvalues( |
885 | - archive, BuildStatus.NEEDSBUILD, status |
886 | - ) |
887 | - |
888 | - return IStore(Archive).execute(query).get_one()[0] |
889 | + return ( |
890 | + IStore(BuildQueue) |
891 | + .find( |
892 | + BuildQueue.id, |
893 | + BinaryPackageBuild.build_farm_job_id |
894 | + == BuildQueue._build_farm_job_id, |
895 | + BinaryPackageBuild.archive == archive, |
896 | + BinaryPackageBuild.status == BuildStatus.NEEDSBUILD, |
897 | + BuildQueue.status == status, |
898 | + ) |
899 | + .count() |
900 | + ) |
901 | |
902 | def assertNoBuildQueuesHaveStatus(self, archive, status): |
903 | # Check that that the jobs attached to this archive do not have this |
904 | diff --git a/lib/lp/soyuz/vocabularies.py b/lib/lp/soyuz/vocabularies.py |
905 | index b8dbf55..8254e63 100644 |
906 | --- a/lib/lp/soyuz/vocabularies.py |
907 | +++ b/lib/lp/soyuz/vocabularies.py |
908 | @@ -11,6 +11,7 @@ __all__ = [ |
909 | "PPAVocabulary", |
910 | ] |
911 | |
912 | +from storm.expr import Is |
913 | from storm.locals import And, Or |
914 | from zope.component import getUtility |
915 | from zope.interface import implementer |
916 | @@ -22,11 +23,7 @@ from lp.registry.model.person import Person |
917 | from lp.services.database.interfaces import IStore |
918 | from lp.services.database.stormexpr import fti_search |
919 | from lp.services.webapp.interfaces import ILaunchBag |
920 | -from lp.services.webapp.vocabulary import ( |
921 | - IHugeVocabulary, |
922 | - SQLObjectVocabularyBase, |
923 | - StormVocabularyBase, |
924 | -) |
925 | +from lp.services.webapp.vocabulary import IHugeVocabulary, StormVocabularyBase |
926 | from lp.soyuz.enums import ArchivePurpose |
927 | from lp.soyuz.interfaces.archive import IArchiveSet |
928 | from lp.soyuz.model.archive import Archive, get_enabled_archive_filter |
929 | @@ -86,18 +83,17 @@ class PackageReleaseVocabulary(StormVocabularyBase): |
930 | |
931 | |
932 | @implementer(IHugeVocabulary) |
933 | -class PPAVocabulary(SQLObjectVocabularyBase): |
934 | +class PPAVocabulary(StormVocabularyBase): |
935 | _table = Archive |
936 | - _orderBy = ["Person.name, Archive.name"] |
937 | - _clauseTables = ["Person"] |
938 | + _order_by = ["Person.name", "Archive.name"] |
939 | # This should probably also filter by privacy, but that becomes |
940 | # problematic when you need to remove a dependency that you can no |
941 | # longer see. |
942 | - _filter = And( |
943 | - Archive._enabled == True, |
944 | + _clauses = [ |
945 | + Is(Archive._enabled, True), |
946 | Archive.owner == Person.id, |
947 | - Archive.q.purpose == ArchivePurpose.PPA, |
948 | - ) |
949 | + Archive.purpose == ArchivePurpose.PPA, |
950 | + ] |
951 | displayname = "Select a PPA" |
952 | step_title = "Search" |
953 | |
954 | @@ -121,7 +117,7 @@ class PPAVocabulary(SQLObjectVocabularyBase): |
955 | def search(self, query, vocab_filter=None): |
956 | """Return a resultset of archives. |
957 | |
958 | - This is a helper required by `SQLObjectVocabularyBase.searchForTerms`. |
959 | + This is a helper required by `StormVocabularyBase.searchForTerms`. |
960 | """ |
961 | if not query: |
962 | return self.emptySelectResults() |
963 | @@ -147,17 +143,18 @@ class PPAVocabulary(SQLObjectVocabularyBase): |
964 | Person.name == owner_name, Archive.name == archive_name |
965 | ) |
966 | |
967 | - clause = And( |
968 | - self._filter, |
969 | + extra_clauses = [ |
970 | get_enabled_archive_filter( |
971 | getUtility(ILaunchBag).user, |
972 | purpose=ArchivePurpose.PPA, |
973 | include_public=True, |
974 | ), |
975 | search_clause, |
976 | - ) |
977 | - return self._table.select( |
978 | - clause, orderBy=self._orderBy, clauseTables=self._clauseTables |
979 | + ] |
980 | + return ( |
981 | + IStore(self._table) |
982 | + .find(self._table, *self._clauses, *extra_clauses) |
983 | + .order_by(self._order_by) |
984 | ) |
985 | |
986 |
Thanks for your ongoing dedication to removing old cruft!