Merge ~cjwatson/launchpad:stormify-archivepublisher-xpph-queries into launchpad:master

Proposed by Colin Watson
Status: Merged
Approved by: Colin Watson
Approved revision: 99e45842ce2ce1c7d0a4f5b2c2afd3608905bcaf
Merge reported by: Otto Co-Pilot
Merged at revision: not available
Proposed branch: ~cjwatson/launchpad:stormify-archivepublisher-xpph-queries
Merge into: launchpad:master
Diff against target: 359 lines (+118/-112)
3 files modified
lib/lp/archivepublisher/deathrow.py (+72/-62)
lib/lp/archivepublisher/domination.py (+38/-46)
lib/lp/archivepublisher/tests/test_processdeathrow.py (+8/-4)
Reviewer      Review Type    Date Requested    Status
Ioana Lasc    community                        Approve
Review via email: mp+394585@code.launchpad.net

Commit message

Convert archivepublisher publishing history queries to Storm
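
The pattern throughout the branch is mechanical: each raw SQL string built with sqlvalues() becomes a set of Storm expression clauses passed to IStore(...).find(), with "IN (...)" conditions expressed via is_in(). A minimal sketch of the judge() conversion, assuming the Launchpad models used in the diff are importable (the helper name superseded_sources below is illustrative only, not a function in the tree):

    from lp.services.database.interfaces import IStore
    from lp.soyuz.model.publishing import SourcePackagePublishingHistory

    def superseded_sources(archive, distroseries, pocket, inactive_publishing_status):
        # Each SQL condition from the old string query becomes a Storm
        # comparison expression; "status IN %s" becomes status.is_in(...).
        return IStore(SourcePackagePublishingHistory).find(
            SourcePackagePublishingHistory,
            SourcePackagePublishingHistory.distroseries == distroseries,
            SourcePackagePublishingHistory.archive == archive,
            SourcePackagePublishingHistory.pocket == pocket,
            SourcePackagePublishingHistory.status.is_in(inactive_publishing_status),
            SourcePackagePublishingHistory.scheduleddeletiondate == None,
            SourcePackagePublishingHistory.dateremoved == None)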

Ioana Lasc (ilasc):
review: Approve

Preview Diff

diff --git a/lib/lp/archivepublisher/deathrow.py b/lib/lp/archivepublisher/deathrow.py
index 82faacf..af6e090 100644
--- a/lib/lp/archivepublisher/deathrow.py
+++ b/lib/lp/archivepublisher/deathrow.py
@@ -1,4 +1,4 @@
-# Copyright 2009-2011 Canonical Ltd. This software is licensed under the
+# Copyright 2009-2020 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).

 """
@@ -11,11 +11,22 @@ import logging
 import os

 import pytz
+from storm.expr import Exists
+from storm.locals import (
+ And,
+ ClassAlias,
+ Not,
+ Select,
+ )

 from lp.archivepublisher.config import getPubConfig
 from lp.archivepublisher.diskpool import DiskPool
 from lp.services.database.constants import UTC_NOW
-from lp.services.database.sqlbase import sqlvalues
+from lp.services.database.interfaces import IStore
+from lp.services.librarian.model import (
+ LibraryFileAlias,
+ LibraryFileContent,
+ )
 from lp.soyuz.enums import ArchivePurpose
 from lp.soyuz.interfaces.publishing import (
 IBinaryPackagePublishingHistory,
@@ -24,6 +35,10 @@ from lp.soyuz.interfaces.publishing import (
 MissingSymlinkInPool,
 NotInPool,
 )
+from lp.soyuz.model.files import (
+ BinaryPackageFile,
+ SourcePackageReleaseFile,
+ )
 from lp.soyuz.model.publishing import (
 BinaryPackagePublishingHistory,
 SourcePackagePublishingHistory,
@@ -108,35 +123,37 @@ class DeathRow:

 Both sources and binaries are lists.
 """
- sources = SourcePackagePublishingHistory.select("""
- SourcePackagePublishingHistory.archive = %s AND
- SourcePackagePublishingHistory.scheduleddeletiondate < %s AND
- SourcePackagePublishingHistory.dateremoved IS NULL AND
- NOT EXISTS (
- SELECT 1 FROM sourcepackagepublishinghistory as spph
- WHERE
- SourcePackagePublishingHistory.sourcepackagerelease =
- spph.sourcepackagerelease AND
- spph.archive = %s AND
- spph.status NOT IN %s)
- """ % sqlvalues(self.archive, UTC_NOW, self.archive,
- inactive_publishing_status), orderBy="id")
- self.logger.debug("%d Sources" % sources.count())
-
- binaries = BinaryPackagePublishingHistory.select("""
- BinaryPackagePublishingHistory.archive = %s AND
- BinaryPackagePublishingHistory.scheduleddeletiondate < %s AND
- BinaryPackagePublishingHistory.dateremoved IS NULL AND
- NOT EXISTS (
- SELECT 1 FROM binarypackagepublishinghistory as bpph
- WHERE
- BinaryPackagePublishingHistory.binarypackagerelease =
- bpph.binarypackagerelease AND
- bpph.archive = %s AND
- bpph.status NOT IN %s)
- """ % sqlvalues(self.archive, UTC_NOW, self.archive,
- inactive_publishing_status), orderBy="id")
- self.logger.debug("%d Binaries" % binaries.count())
+ OtherSPPH = ClassAlias(SourcePackagePublishingHistory)
+ sources = list(IStore(SourcePackagePublishingHistory).find(
+ SourcePackagePublishingHistory,
+ SourcePackagePublishingHistory.archive == self.archive,
+ SourcePackagePublishingHistory.scheduleddeletiondate < UTC_NOW,
+ SourcePackagePublishingHistory.dateremoved == None,
+ Not(Exists(Select(
+ 1, tables=[OtherSPPH],
+ where=And(
+ SourcePackagePublishingHistory.sourcepackagereleaseID ==
+ OtherSPPH.sourcepackagereleaseID,
+ OtherSPPH.archiveID == self.archive.id,
+ Not(OtherSPPH.status.is_in(inactive_publishing_status))),
+ )))).order_by(SourcePackagePublishingHistory.id))
+ self.logger.debug("%d Sources" % len(sources))
+
+ OtherBPPH = ClassAlias(BinaryPackagePublishingHistory)
+ binaries = list(IStore(BinaryPackagePublishingHistory).find(
+ BinaryPackagePublishingHistory,
+ BinaryPackagePublishingHistory.archive == self.archive,
+ BinaryPackagePublishingHistory.scheduleddeletiondate < UTC_NOW,
+ BinaryPackagePublishingHistory.dateremoved == None,
+ Not(Exists(Select(
+ 1, tables=[OtherBPPH],
+ where=And(
+ BinaryPackagePublishingHistory.binarypackagereleaseID ==
+ OtherBPPH.binarypackagereleaseID,
+ OtherBPPH.archiveID == self.archive.id,
+ Not(OtherBPPH.status.is_in(inactive_publishing_status))),
+ )))).order_by(BinaryPackagePublishingHistory.id))
+ self.logger.debug("%d Binaries" % len(binaries))

 return (sources, binaries)

@@ -150,41 +167,34 @@ class DeathRow:
 Only allow removal of unnecessary files.
 """
 clauses = []
- clauseTables = []
-
- if ISourcePackagePublishingHistory.implementedBy(
- publication_class):
- clauses.append("""
- SourcePackagePublishingHistory.archive = %s AND
- SourcePackagePublishingHistory.dateremoved is NULL AND
- SourcePackagePublishingHistory.sourcepackagerelease =
- SourcePackageReleaseFile.sourcepackagerelease AND
- SourcePackageReleaseFile.libraryfile = LibraryFileAlias.id
- """ % sqlvalues(self.archive))
- clauseTables.append('SourcePackageReleaseFile')
- elif IBinaryPackagePublishingHistory.implementedBy(
- publication_class):
- clauses.append("""
- BinaryPackagePublishingHistory.archive = %s AND
- BinaryPackagePublishingHistory.dateremoved is NULL AND
- BinaryPackagePublishingHistory.binarypackagerelease =
- BinaryPackageFile.binarypackagerelease AND
- BinaryPackageFile.libraryfile = LibraryFileAlias.id
- """ % sqlvalues(self.archive))
- clauseTables.append('BinaryPackageFile')
+
+ if ISourcePackagePublishingHistory.implementedBy(publication_class):
+ clauses.extend([
+ SourcePackagePublishingHistory.archive == self.archive,
+ SourcePackagePublishingHistory.dateremoved == None,
+ SourcePackagePublishingHistory.sourcepackagerelease ==
+ SourcePackageReleaseFile.sourcepackagereleaseID,
+ SourcePackageReleaseFile.libraryfile == LibraryFileAlias.id,
+ ])
+ elif IBinaryPackagePublishingHistory.implementedBy(publication_class):
+ clauses.extend([
+ BinaryPackagePublishingHistory.archive == self.archive,
+ BinaryPackagePublishingHistory.dateremoved == None,
+ BinaryPackagePublishingHistory.binarypackagerelease ==
+ BinaryPackageFile.binarypackagereleaseID,
+ BinaryPackageFile.libraryfile == LibraryFileAlias.id,
+ ])
 else:
 raise AssertionError("%r is not supported." % publication_class)

- clauses.append("""
- LibraryFileAlias.content = LibraryFileContent.id AND
- LibraryFileAlias.filename = %s AND
- LibraryFileContent.md5 = %s
- """ % sqlvalues(filename, file_md5))
- clauseTables.extend(
- ['LibraryFileAlias', 'LibraryFileContent'])
+ clauses.extend([
+ LibraryFileAlias.content == LibraryFileContent.id,
+ LibraryFileAlias.filename == filename,
+ LibraryFileContent.md5 == file_md5,
+ ])

- all_publications = publication_class.select(
- " AND ".join(clauses), clauseTables=clauseTables)
+ all_publications = IStore(publication_class).find(
+ publication_class, *clauses)

 right_now = datetime.datetime.now(pytz.timezone('UTC'))
 for pub in all_publications:
diff --git a/lib/lp/archivepublisher/domination.py b/lib/lp/archivepublisher/domination.py
index 0e9a8b0..dc6f1cf 100644
--- a/lib/lp/archivepublisher/domination.py
+++ b/lib/lp/archivepublisher/domination.py
@@ -1,4 +1,4 @@
-# Copyright 2009-2019 Canonical Ltd. This software is licensed under the
+# Copyright 2009-2020 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).

 """Archive Domination class.
@@ -79,10 +79,7 @@ from lp.services.database.bulk import load_related
 from lp.services.database.constants import UTC_NOW
 from lp.services.database.decoratedresultset import DecoratedResultSet
 from lp.services.database.interfaces import IStore
-from lp.services.database.sqlbase import (
- flush_database_updates,
- sqlvalues,
- )
+from lp.services.database.sqlbase import flush_database_updates
 from lp.services.orderingcheck import OrderingCheck
 from lp.soyuz.enums import (
 BinaryPackageFormat,
@@ -95,6 +92,7 @@ from lp.soyuz.interfaces.publishing import (
 from lp.soyuz.model.binarypackagebuild import BinaryPackageBuild
 from lp.soyuz.model.binarypackagename import BinaryPackageName
 from lp.soyuz.model.binarypackagerelease import BinaryPackageRelease
+from lp.soyuz.model.distroarchseries import DistroArchSeries
 from lp.soyuz.model.publishing import (
 BinaryPackagePublishingHistory,
 SourcePackagePublishingHistory,
@@ -537,34 +535,31 @@ class Dominator:
 # Attempt to find all binaries of this
 # SourcePackageRelease which are/have been in this
 # distroseries...
- considered_binaries = BinaryPackagePublishingHistory.select("""
- binarypackagepublishinghistory.distroarchseries =
- distroarchseries.id AND
- binarypackagepublishinghistory.scheduleddeletiondate IS NULL AND
- binarypackagepublishinghistory.dateremoved IS NULL AND
- binarypackagepublishinghistory.archive = %s AND
- binarypackagebuild.source_package_release = %s AND
- distroarchseries.distroseries = %s AND
- binarypackagepublishinghistory.binarypackagerelease =
- binarypackagerelease.id AND
- binarypackagerelease.build = binarypackagebuild.id AND
- binarypackagepublishinghistory.pocket = %s
- """ % sqlvalues(self.archive, srcpkg_release,
- pub_record.distroseries, pub_record.pocket),
- clauseTables=['DistroArchSeries', 'BinaryPackageRelease',
- 'BinaryPackageBuild'])
+ considered_binaries = IStore(BinaryPackagePublishingHistory).find(
+ BinaryPackagePublishingHistory.distroarchseries ==
+ DistroArchSeries.id,
+ BinaryPackagePublishingHistory.scheduleddeletiondate == None,
+ BinaryPackagePublishingHistory.dateremoved == None,
+ BinaryPackagePublishingHistory.archive == self.archive,
+ BinaryPackageBuild.source_package_release == srcpkg_release,
+ DistroArchSeries.distroseries == pub_record.distroseries,
+ BinaryPackagePublishingHistory.binarypackagerelease ==
+ BinaryPackageRelease.id,
+ BinaryPackageRelease.build == BinaryPackageBuild.id,
+ BinaryPackagePublishingHistory.pocket == pub_record.pocket)

 # There is at least one non-removed binary to consider
 if not considered_binaries.is_empty():
 # However we can still remove *this* record if there's
 # at least one other PUBLISHED for the spr. This happens
 # when a package is moved between components.
- published = SourcePackagePublishingHistory.selectBy(
+ published = IStore(SourcePackagePublishingHistory).find(
+ SourcePackagePublishingHistory,
 distroseries=pub_record.distroseries,
 pocket=pub_record.pocket,
 status=PackagePublishingStatus.PUBLISHED,
 archive=self.archive,
- sourcepackagereleaseID=srcpkg_release.id)
+ sourcepackagerelease=srcpkg_release)
 # Zero PUBLISHED for this spr, so nothing to take over
 # for us, so leave it for consideration next time.
 if published.is_empty():
@@ -857,30 +852,27 @@ class Dominator:

 def judge(self, distroseries, pocket):
 """Judge superseded sources and binaries."""
- sources = SourcePackagePublishingHistory.select("""
- sourcepackagepublishinghistory.distroseries = %s AND
- sourcepackagepublishinghistory.archive = %s AND
- sourcepackagepublishinghistory.pocket = %s AND
- sourcepackagepublishinghistory.status IN %s AND
- sourcepackagepublishinghistory.scheduleddeletiondate is NULL AND
- sourcepackagepublishinghistory.dateremoved is NULL
- """ % sqlvalues(
- distroseries, self.archive, pocket,
- inactive_publishing_status))
-
- binaries = BinaryPackagePublishingHistory.select("""
- binarypackagepublishinghistory.distroarchseries =
- distroarchseries.id AND
- distroarchseries.distroseries = %s AND
- binarypackagepublishinghistory.archive = %s AND
- binarypackagepublishinghistory.pocket = %s AND
- binarypackagepublishinghistory.status IN %s AND
- binarypackagepublishinghistory.scheduleddeletiondate is NULL AND
- binarypackagepublishinghistory.dateremoved is NULL
- """ % sqlvalues(
- distroseries, self.archive, pocket,
+ sources = IStore(SourcePackagePublishingHistory).find(
+ SourcePackagePublishingHistory,
+ SourcePackagePublishingHistory.distroseries == distroseries,
+ SourcePackagePublishingHistory.archive == self.archive,
+ SourcePackagePublishingHistory.pocket == pocket,
+ SourcePackagePublishingHistory.status.is_in(
+ inactive_publishing_status),
+ SourcePackagePublishingHistory.scheduleddeletiondate == None,
+ SourcePackagePublishingHistory.dateremoved == None)
+
+ binaries = IStore(BinaryPackagePublishingHistory).find(
+ BinaryPackagePublishingHistory,
+ BinaryPackagePublishingHistory.distroarchseries ==
+ DistroArchSeries.id,
+ DistroArchSeries.distroseries == distroseries,
+ BinaryPackagePublishingHistory.archive == self.archive,
+ BinaryPackagePublishingHistory.pocket == pocket,
+ BinaryPackagePublishingHistory.status.is_in(
 inactive_publishing_status),
- clauseTables=['DistroArchSeries'])
+ BinaryPackagePublishingHistory.scheduleddeletiondate == None,
+ BinaryPackagePublishingHistory.dateremoved == None)

 self._judgeSuperseded(sources, binaries)

diff --git a/lib/lp/archivepublisher/tests/test_processdeathrow.py b/lib/lp/archivepublisher/tests/test_processdeathrow.py
index 32d1bb2..e333727 100644
--- a/lib/lp/archivepublisher/tests/test_processdeathrow.py
+++ b/lib/lp/archivepublisher/tests/test_processdeathrow.py
@@ -1,4 +1,4 @@
-# Copyright 2009-2018 Canonical Ltd. This software is licensed under the
+# Copyright 2009-2020 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).

 """Functional tests for process-death-row.py script.
@@ -26,6 +26,7 @@ from zope.security.proxy import removeSecurityProxy
 from lp.registry.interfaces.distribution import IDistributionSet
 from lp.registry.interfaces.person import IPersonSet
 from lp.services.config import config
+from lp.services.database.interfaces import IStore
 from lp.soyuz.enums import PackagePublishingStatus
 from lp.soyuz.model.publishing import SourcePackagePublishingHistory
 from lp.testing import TestCaseWithFactory
@@ -148,25 +149,28 @@ class TestProcessDeathRow(TestCaseWithFactory):

 def probePublishingStatus(self, pubrec_ids, status):
 """Check if all source publishing records match the given status."""
+ store = IStore(SourcePackagePublishingHistory)
 for pubrec_id in pubrec_ids:
- spph = SourcePackagePublishingHistory.get(pubrec_id)
+ spph = store.get(SourcePackagePublishingHistory, pubrec_id)
 self.assertEqual(
 spph.status, status, "ID %s -> %s (expected %s)" % (
 spph.id, spph.status.title, status.title))

 def probeRemoved(self, pubrec_ids):
 """Check if all source publishing records were removed."""
+ store = IStore(SourcePackagePublishingHistory)
 right_now = datetime.datetime.now(pytz.timezone('UTC'))
 for pubrec_id in pubrec_ids:
- spph = SourcePackagePublishingHistory.get(pubrec_id)
+ spph = store.get(SourcePackagePublishingHistory, pubrec_id)
 self.assertTrue(
 spph.dateremoved < right_now,
 "ID %s -> not removed" % (spph.id))

 def probeNotRemoved(self, pubrec_ids):
 """Check if all source publishing records were not removed."""
+ store = IStore(SourcePackagePublishingHistory)
 for pubrec_id in pubrec_ids:
- spph = SourcePackagePublishingHistory.get(pubrec_id)
+ spph = store.get(SourcePackagePublishingHistory, pubrec_id)
 self.assertTrue(
 spph.dateremoved is None,
 "ID %s -> removed" % (spph.id))
