Merge ~cjwatson/launchpad:archive-api-snapshots into launchpad:master
- Git
- lp:~cjwatson/launchpad
- archive-api-snapshots
- Merge into master
Status: | Merged |
---|---|
Approved by: | Colin Watson |
Approved revision: | 7274fc2883b4443d78b241d07ed3b5a1dce4b0d8 |
Merge reported by: | Otto Co-Pilot |
Merged at revision: | not available |
Proposed branch: | ~cjwatson/launchpad:archive-api-snapshots |
Merge into: | launchpad:master |
Prerequisite: | ~cjwatson/launchpad:archive-file-deeper-history |
Diff against target: |
917 lines (+537/-57) 9 files modified
lib/lp/soyuz/interfaces/archive.py (+7/-1) lib/lp/soyuz/interfaces/archiveapi.py (+8/-1) lib/lp/soyuz/interfaces/archivefile.py (+13/-5) lib/lp/soyuz/model/archive.py (+14/-2) lib/lp/soyuz/model/archivefile.py (+40/-7) lib/lp/soyuz/tests/test_archive.py (+122/-0) lib/lp/soyuz/tests/test_archivefile.py (+125/-23) lib/lp/soyuz/xmlrpc/archive.py (+59/-17) lib/lp/soyuz/xmlrpc/tests/test_archive.py (+149/-1) |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Clinton Fung | Approve | ||
Review via email: mp+436591@code.launchpad.net |
Commit message
Add snapshot handling to ArchiveAPI.
Description of the change
Now that we record enough history of archive files in the database, we can extend the XML-RPC API to allow querying it. This commit adds an optional `live_at` parameter to `ArchiveAPI.translatePath`, allowing callers to ask for the file that was published at a given path at a given point in time.
We needed some corresponding additions to internal APIs:
* `Archive.getPoolFileByPath` now accepts an optional `live_at` parameter, restricting results to pool files that were published (and not yet removed) at that timestamp.
* `ArchiveFileSet.getByArchive` now accepts mutually-exclusive `live_at` and `existed_at` parameters (replacing the previous `condemned` parameter), restricting results to files that held their path, or merely still existed (e.g. in a `by-hash` directory), at that timestamp.
See the linked specification for background (the URL was truncated in this extract and could not be recovered).
Clinton Fung (clinton-fung) : | # |
Preview Diff
1 | diff --git a/lib/lp/soyuz/interfaces/archive.py b/lib/lp/soyuz/interfaces/archive.py |
2 | index 1997cf6..ce90c50 100644 |
3 | --- a/lib/lp/soyuz/interfaces/archive.py |
4 | +++ b/lib/lp/soyuz/interfaces/archive.py |
5 | @@ -55,6 +55,8 @@ __all__ = [ |
6 | import http.client |
7 | import re |
8 | import typing |
9 | +from datetime import datetime |
10 | +from pathlib import PurePath |
11 | from urllib.parse import urlparse |
12 | |
13 | from lazr.restful.declarations import ( |
14 | @@ -792,12 +794,16 @@ class IArchiveSubscriberView(Interface): |
15 | :return: A collection containing `BinaryPackagePublishingHistory`. |
16 | """ |
17 | |
18 | - def getPoolFileByPath(path): |
19 | + def getPoolFileByPath( |
20 | + path: PurePath, live_at: typing.Optional[datetime] = None |
21 | + ): |
22 | """Return the `ILibraryFileAlias` for a path in this archive's pool. |
23 | |
24 | :param path: A `PurePath` for where a source or binary package file |
25 | is published in this archive's pool, e.g. |
26 | "pool/main/p/package/package_1.dsc". |
27 | + :param live_at: If not None, return only files that existed in the |
28 | + archive at this `datetime`. |
29 | :return: An `ILibraryFileAlias`, or None. |
30 | """ |
31 | |
32 | diff --git a/lib/lp/soyuz/interfaces/archiveapi.py b/lib/lp/soyuz/interfaces/archiveapi.py |
33 | index b8acecf..bc424b3 100644 |
34 | --- a/lib/lp/soyuz/interfaces/archiveapi.py |
35 | +++ b/lib/lp/soyuz/interfaces/archiveapi.py |
36 | @@ -8,6 +8,9 @@ __all__ = [ |
37 | "IArchiveApplication", |
38 | ] |
39 | |
40 | +from datetime import datetime |
41 | +from typing import Optional |
42 | + |
43 | from zope.interface import Interface |
44 | |
45 | from lp.services.webapp.interfaces import ILaunchpadApplication |
46 | @@ -42,12 +45,16 @@ class IArchiveAPI(Interface): |
47 | None. |
48 | """ |
49 | |
50 | - def translatePath(archive_reference, path): |
51 | + def translatePath( |
52 | + archive_reference: str, path: str, live_at: Optional[datetime] = None |
53 | + ): |
54 | """Find the librarian URL for a relative path within an archive. |
55 | |
56 | :param archive_reference: The reference form of the archive to check. |
57 | :param path: The relative path within the archive. This should not |
58 | begin with a "/" character. |
59 | + :param live_at: An optional timestamp; if passed, only return paths |
60 | + that existed at this timestamp. |
61 | |
62 | :return: A `NotFound` fault if `archive_reference` does not identify |
63 | an archive, or the archive's repository format is something |
64 | diff --git a/lib/lp/soyuz/interfaces/archivefile.py b/lib/lp/soyuz/interfaces/archivefile.py |
65 | index 6c530b3..3031d12 100644 |
66 | --- a/lib/lp/soyuz/interfaces/archivefile.py |
67 | +++ b/lib/lp/soyuz/interfaces/archivefile.py |
68 | @@ -112,7 +112,8 @@ class IArchiveFileSet(Interface): |
69 | container=None, |
70 | path=None, |
71 | sha256=None, |
72 | - condemned=None, |
73 | + live_at=None, |
74 | + existed_at=None, |
75 | only_published=False, |
76 | eager_load=False, |
77 | ): |
78 | @@ -125,14 +126,21 @@ class IArchiveFileSet(Interface): |
79 | directory is this path. |
80 | :param sha256: If not None, return only files with this SHA-256 |
81 | checksum. |
82 | - :param condemned: If True, return only files with a |
83 | - scheduled_deletion_date set; if False, return only files without |
84 | - a scheduled_deletion_date set; if None (the default), return |
85 | - both. |
86 | + :param live_at: If not None, return only files that held their path |
87 | + in the archive at this `datetime` (or |
88 | + `lp.services.database.constants.UTC_NOW`). |
89 | + :param existed_at: If not None, return only files that existed in |
90 | + the archive at this `datetime` (or |
91 | + `lp.services.database.constants.UTC_NOW`). This includes files |
92 | + that did not hold their path (e.g. `dists/jammy/InRelease`) and |
93 | + that are merely still published in a `by-hash` directory; it |
94 | + should normally be used together with `sha256`. |
95 | :param only_published: If True, return only files without a |
96 | `date_removed` set. |
97 | :param eager_load: If True, preload related `LibraryFileAlias` and |
98 | `LibraryFileContent` rows. |
99 | + :raises IncompatibleArguments: if both `live_at` and `existed_at` |
100 | + are specified. |
101 | :return: An iterable of matched files. |
102 | """ |
103 | |
104 | diff --git a/lib/lp/soyuz/model/archive.py b/lib/lp/soyuz/model/archive.py |
105 | index e5ed691..d96fc8e 100644 |
106 | --- a/lib/lp/soyuz/model/archive.py |
107 | +++ b/lib/lp/soyuz/model/archive.py |
108 | @@ -13,6 +13,7 @@ __all__ = [ |
109 | |
110 | import re |
111 | import typing |
112 | +from datetime import datetime |
113 | from operator import attrgetter |
114 | from pathlib import PurePath |
115 | |
116 | @@ -2047,7 +2048,7 @@ class Archive(SQLBase): |
117 | return archive_file |
118 | |
119 | def getPoolFileByPath( |
120 | - self, path: PurePath |
121 | + self, path: PurePath, live_at: typing.Optional[datetime] = None |
122 | ) -> typing.Optional[LibraryFileAlias]: |
123 | """See `IArchive`.""" |
124 | try: |
125 | @@ -2100,10 +2101,21 @@ class Archive(SQLBase): |
126 | xPPH.archive == self, |
127 | xPPH.component == Component.id, |
128 | xPPH.datepublished != None, |
129 | - xPPH.dateremoved == None, |
130 | xPF.libraryfile == LibraryFileAlias.id, |
131 | ] |
132 | ) |
133 | + if live_at: |
134 | + clauses.extend( |
135 | + [ |
136 | + xPPH.datepublished <= live_at, |
137 | + Or( |
138 | + xPPH.dateremoved == None, |
139 | + xPPH.dateremoved > live_at, |
140 | + ), |
141 | + ] |
142 | + ) |
143 | + else: |
144 | + clauses.append(xPPH.dateremoved == None) |
145 | return ( |
146 | store.find(LibraryFileAlias, *clauses) |
147 | .config(distinct=True) |
148 | diff --git a/lib/lp/soyuz/model/archivefile.py b/lib/lp/soyuz/model/archivefile.py |
149 | index 8272dd2..9e2acc3 100644 |
150 | --- a/lib/lp/soyuz/model/archivefile.py |
151 | +++ b/lib/lp/soyuz/model/archivefile.py |
152 | @@ -12,10 +12,11 @@ import os.path |
153 | import re |
154 | |
155 | import pytz |
156 | -from storm.locals import DateTime, Int, Reference, Unicode |
157 | +from storm.locals import DateTime, Int, Or, Reference, Unicode |
158 | from zope.component import getUtility |
159 | from zope.interface import implementer |
160 | |
161 | +from lp.app.errors import IncompatibleArguments |
162 | from lp.services.database.bulk import load_related |
163 | from lp.services.database.constants import UTC_NOW |
164 | from lp.services.database.decoratedresultset import DecoratedResultSet |
165 | @@ -119,7 +120,8 @@ class ArchiveFileSet: |
166 | path=None, |
167 | path_parent=None, |
168 | sha256=None, |
169 | - condemned=None, |
170 | + live_at=None, |
171 | + existed_at=None, |
172 | only_published=False, |
173 | eager_load=False, |
174 | ): |
175 | @@ -145,11 +147,42 @@ class ArchiveFileSet: |
176 | LibraryFileContent.sha256 == sha256, |
177 | ] |
178 | ) |
179 | - if condemned is not None: |
180 | - if condemned: |
181 | - clauses.append(ArchiveFile.scheduled_deletion_date != None) |
182 | - else: |
183 | - clauses.append(ArchiveFile.scheduled_deletion_date == None) |
184 | + |
185 | + if live_at is not None and existed_at is not None: |
186 | + raise IncompatibleArguments( |
187 | + "You cannot specify both 'live_at' and 'existed_at'." |
188 | + ) |
189 | + if live_at is not None: |
190 | + clauses.extend( |
191 | + [ |
192 | + Or( |
193 | + # Rows predating the introduction of date_created |
194 | + # will have it set to null. |
195 | + ArchiveFile.date_created == None, |
196 | + ArchiveFile.date_created <= live_at, |
197 | + ), |
198 | + Or( |
199 | + ArchiveFile.date_superseded == None, |
200 | + ArchiveFile.date_superseded > live_at, |
201 | + ), |
202 | + ] |
203 | + ) |
204 | + elif existed_at is not None: |
205 | + clauses.extend( |
206 | + [ |
207 | + Or( |
208 | + # Rows predating the introduction of date_created |
209 | + # will have it set to null. |
210 | + ArchiveFile.date_created == None, |
211 | + ArchiveFile.date_created <= existed_at, |
212 | + ), |
213 | + Or( |
214 | + ArchiveFile.date_removed == None, |
215 | + ArchiveFile.date_removed > existed_at, |
216 | + ), |
217 | + ] |
218 | + ) |
219 | + |
220 | if only_published: |
221 | clauses.append(ArchiveFile.date_removed == None) |
222 | archive_files = IStore(ArchiveFile).find(ArchiveFile, *clauses) |
223 | diff --git a/lib/lp/soyuz/tests/test_archive.py b/lib/lp/soyuz/tests/test_archive.py |
224 | index 5851aad..f2b836b 100644 |
225 | --- a/lib/lp/soyuz/tests/test_archive.py |
226 | +++ b/lib/lp/soyuz/tests/test_archive.py |
227 | @@ -3374,6 +3374,65 @@ class TestGetPoolFileByPath(TestCaseWithFactory): |
228 | ), |
229 | ) |
230 | |
231 | + def test_source_live_at(self): |
232 | + now = datetime.now(UTC) |
233 | + archive = self.factory.makeArchive() |
234 | + spph_1 = self.factory.makeSourcePackagePublishingHistory( |
235 | + archive=archive, |
236 | + status=PackagePublishingStatus.DELETED, |
237 | + sourcepackagename="test-package", |
238 | + component="main", |
239 | + version="1", |
240 | + ) |
241 | + removeSecurityProxy(spph_1).datepublished = now - timedelta(days=3) |
242 | + removeSecurityProxy(spph_1).dateremoved = now - timedelta(days=1) |
243 | + sprf_1 = self.factory.makeSourcePackageReleaseFile( |
244 | + sourcepackagerelease=spph_1.sourcepackagerelease, |
245 | + library_file=self.factory.makeLibraryFileAlias( |
246 | + filename="test-package_1.dsc", db_only=True |
247 | + ), |
248 | + ) |
249 | + spph_2 = self.factory.makeSourcePackagePublishingHistory( |
250 | + archive=archive, |
251 | + status=PackagePublishingStatus.PUBLISHED, |
252 | + sourcepackagename="test-package", |
253 | + component="main", |
254 | + version="2", |
255 | + ) |
256 | + removeSecurityProxy(spph_2).datepublished = now - timedelta(days=2) |
257 | + sprf_2 = self.factory.makeSourcePackageReleaseFile( |
258 | + sourcepackagerelease=spph_2.sourcepackagerelease, |
259 | + library_file=self.factory.makeLibraryFileAlias( |
260 | + filename="test-package_2.dsc", db_only=True |
261 | + ), |
262 | + ) |
263 | + IStore(archive).flush() |
264 | + for days, expected_file in ( |
265 | + (4, None), |
266 | + (3, sprf_1.libraryfile), |
267 | + (2, sprf_1.libraryfile), |
268 | + (1, None), |
269 | + ): |
270 | + self.assertEqual( |
271 | + expected_file, |
272 | + archive.getPoolFileByPath( |
273 | + PurePath("pool/main/t/test-package/test-package_1.dsc"), |
274 | + live_at=now - timedelta(days=days), |
275 | + ), |
276 | + ) |
277 | + for days, expected_file in ( |
278 | + (3, None), |
279 | + (2, sprf_2.libraryfile), |
280 | + (1, sprf_2.libraryfile), |
281 | + ): |
282 | + self.assertEqual( |
283 | + expected_file, |
284 | + archive.getPoolFileByPath( |
285 | + PurePath("pool/main/t/test-package/test-package_2.dsc"), |
286 | + live_at=now - timedelta(days=days), |
287 | + ), |
288 | + ) |
289 | + |
290 | def test_binary_not_found(self): |
291 | archive = self.factory.makeArchive() |
292 | self.factory.makeBinaryPackagePublishingHistory( |
293 | @@ -3466,6 +3525,69 @@ class TestGetPoolFileByPath(TestCaseWithFactory): |
294 | ), |
295 | ) |
296 | |
297 | + def test_binary_live_at(self): |
298 | + now = datetime.now(UTC) |
299 | + archive = self.factory.makeArchive() |
300 | + bpph_1 = self.factory.makeBinaryPackagePublishingHistory( |
301 | + archive=archive, |
302 | + status=PackagePublishingStatus.DELETED, |
303 | + sourcepackagename="test-package", |
304 | + component="main", |
305 | + version="1", |
306 | + ) |
307 | + removeSecurityProxy(bpph_1).datepublished = now - timedelta(days=3) |
308 | + removeSecurityProxy(bpph_1).dateremoved = now - timedelta(days=1) |
309 | + bpf_1 = self.factory.makeBinaryPackageFile( |
310 | + binarypackagerelease=bpph_1.binarypackagerelease, |
311 | + library_file=self.factory.makeLibraryFileAlias( |
312 | + filename="test-package_1_amd64.deb", db_only=True |
313 | + ), |
314 | + ) |
315 | + bpph_2 = self.factory.makeBinaryPackagePublishingHistory( |
316 | + archive=archive, |
317 | + status=PackagePublishingStatus.PUBLISHED, |
318 | + sourcepackagename="test-package", |
319 | + component="main", |
320 | + version="2", |
321 | + ) |
322 | + removeSecurityProxy(bpph_2).datepublished = now - timedelta(days=2) |
323 | + bpf_2 = self.factory.makeBinaryPackageFile( |
324 | + binarypackagerelease=bpph_2.binarypackagerelease, |
325 | + library_file=self.factory.makeLibraryFileAlias( |
326 | + filename="test-package_2_amd64.deb", db_only=True |
327 | + ), |
328 | + ) |
329 | + IStore(archive).flush() |
330 | + for days, expected_file in ( |
331 | + (4, None), |
332 | + (3, bpf_1.libraryfile), |
333 | + (2, bpf_1.libraryfile), |
334 | + (1, None), |
335 | + ): |
336 | + self.assertEqual( |
337 | + expected_file, |
338 | + archive.getPoolFileByPath( |
339 | + PurePath( |
340 | + "pool/main/t/test-package/test-package_1_amd64.deb" |
341 | + ), |
342 | + live_at=now - timedelta(days=days), |
343 | + ), |
344 | + ) |
345 | + for days, expected_file in ( |
346 | + (3, None), |
347 | + (2, bpf_2.libraryfile), |
348 | + (1, bpf_2.libraryfile), |
349 | + ): |
350 | + self.assertEqual( |
351 | + expected_file, |
352 | + archive.getPoolFileByPath( |
353 | + PurePath( |
354 | + "pool/main/t/test-package/test-package_2_amd64.deb" |
355 | + ), |
356 | + live_at=now - timedelta(days=days), |
357 | + ), |
358 | + ) |
359 | + |
360 | |
361 | class TestGetPublishedSources(TestCaseWithFactory): |
362 | |
363 | diff --git a/lib/lp/soyuz/tests/test_archivefile.py b/lib/lp/soyuz/tests/test_archivefile.py |
364 | index c4e2a57..9ee7a88 100644 |
365 | --- a/lib/lp/soyuz/tests/test_archivefile.py |
366 | +++ b/lib/lp/soyuz/tests/test_archivefile.py |
367 | @@ -4,14 +4,17 @@ |
368 | """ArchiveFile tests.""" |
369 | |
370 | import os |
371 | -from datetime import timedelta |
372 | +from datetime import datetime, timedelta |
373 | |
374 | +import pytz |
375 | import transaction |
376 | from storm.store import Store |
377 | from testtools.matchers import AfterPreprocessing, Equals, Is, MatchesStructure |
378 | from zope.component import getUtility |
379 | from zope.security.proxy import removeSecurityProxy |
380 | |
381 | +from lp.app.errors import IncompatibleArguments |
382 | +from lp.services.database.constants import UTC_NOW |
383 | from lp.services.database.sqlbase import ( |
384 | flush_database_caches, |
385 | get_transaction_timestamp, |
386 | @@ -84,13 +87,8 @@ class TestArchiveFile(TestCaseWithFactory): |
387 | def test_getByArchive(self): |
388 | archives = [self.factory.makeArchive(), self.factory.makeArchive()] |
389 | archive_files = [] |
390 | - now = get_transaction_timestamp(Store.of(archives[0])) |
391 | for archive in archives: |
392 | - archive_files.append( |
393 | - self.factory.makeArchiveFile( |
394 | - archive=archive, scheduled_deletion_date=now |
395 | - ) |
396 | - ) |
397 | + archive_files.append(self.factory.makeArchiveFile(archive=archive)) |
398 | archive_files.append( |
399 | self.factory.makeArchiveFile(archive=archive, container="foo") |
400 | ) |
401 | @@ -115,14 +113,6 @@ class TestArchiveFile(TestCaseWithFactory): |
402 | [], archive_file_set.getByArchive(archives[0], path="other") |
403 | ) |
404 | self.assertContentEqual( |
405 | - [archive_files[0]], |
406 | - archive_file_set.getByArchive(archives[0], condemned=True), |
407 | - ) |
408 | - self.assertContentEqual( |
409 | - [archive_files[1]], |
410 | - archive_file_set.getByArchive(archives[0], condemned=False), |
411 | - ) |
412 | - self.assertContentEqual( |
413 | archive_files[2:], archive_file_set.getByArchive(archives[1]) |
414 | ) |
415 | self.assertContentEqual( |
416 | @@ -142,14 +132,6 @@ class TestArchiveFile(TestCaseWithFactory): |
417 | [], archive_file_set.getByArchive(archives[1], path="other") |
418 | ) |
419 | self.assertContentEqual( |
420 | - [archive_files[2]], |
421 | - archive_file_set.getByArchive(archives[1], condemned=True), |
422 | - ) |
423 | - self.assertContentEqual( |
424 | - [archive_files[3]], |
425 | - archive_file_set.getByArchive(archives[1], condemned=False), |
426 | - ) |
427 | - self.assertContentEqual( |
428 | [archive_files[0]], |
429 | archive_file_set.getByArchive( |
430 | archives[0], |
431 | @@ -186,6 +168,126 @@ class TestArchiveFile(TestCaseWithFactory): |
432 | archive_file_set.getByArchive(archive, path_parent="dists/xenial"), |
433 | ) |
434 | |
435 | + def test_getByArchive_both_live_at_and_existed_at(self): |
436 | + now = datetime.now(pytz.UTC) |
437 | + archive = self.factory.makeArchive() |
438 | + self.assertRaisesWithContent( |
439 | + IncompatibleArguments, |
440 | + "You cannot specify both 'live_at' and 'existed_at'.", |
441 | + getUtility(IArchiveFileSet).getByArchive, |
442 | + archive, |
443 | + live_at=now, |
444 | + existed_at=now, |
445 | + ) |
446 | + |
447 | + def test_getByArchive_live_at(self): |
448 | + archive = self.factory.makeArchive() |
449 | + now = get_transaction_timestamp(Store.of(archive)) |
450 | + archive_file_1 = self.factory.makeArchiveFile( |
451 | + archive=archive, path="dists/jammy/InRelease" |
452 | + ) |
453 | + naked_archive_file_1 = removeSecurityProxy(archive_file_1) |
454 | + naked_archive_file_1.date_created = now - timedelta(days=3) |
455 | + naked_archive_file_1.date_superseded = now - timedelta(days=1) |
456 | + archive_file_2 = self.factory.makeArchiveFile( |
457 | + archive=archive, path="dists/jammy/InRelease" |
458 | + ) |
459 | + naked_archive_file_2 = removeSecurityProxy(archive_file_2) |
460 | + naked_archive_file_2.date_created = now - timedelta(days=1) |
461 | + archive_file_set = getUtility(IArchiveFileSet) |
462 | + for days, expected_file in ( |
463 | + (4, None), |
464 | + (3, archive_file_1), |
465 | + (2, archive_file_1), |
466 | + (1, archive_file_2), |
467 | + (0, archive_file_2), |
468 | + ): |
469 | + self.assertEqual( |
470 | + expected_file, |
471 | + archive_file_set.getByArchive( |
472 | + archive, |
473 | + path="dists/jammy/InRelease", |
474 | + live_at=now - timedelta(days=days) if days else UTC_NOW, |
475 | + ).one(), |
476 | + ) |
477 | + |
478 | + def test_getByArchive_live_at_without_date_created(self): |
479 | + archive = self.factory.makeArchive() |
480 | + now = get_transaction_timestamp(Store.of(archive)) |
481 | + archive_file = self.factory.makeArchiveFile( |
482 | + archive=archive, path="dists/jammy/InRelease" |
483 | + ) |
484 | + naked_archive_file = removeSecurityProxy(archive_file) |
485 | + naked_archive_file.date_created = None |
486 | + naked_archive_file.date_superseded = now |
487 | + archive_file_set = getUtility(IArchiveFileSet) |
488 | + for days, expected_file in ((1, archive_file), (0, None)): |
489 | + self.assertEqual( |
490 | + expected_file, |
491 | + archive_file_set.getByArchive( |
492 | + archive, |
493 | + path="dists/jammy/InRelease", |
494 | + live_at=now - timedelta(days=days) if days else UTC_NOW, |
495 | + ).one(), |
496 | + ) |
497 | + |
498 | + def test_getByArchive_existed_at(self): |
499 | + archive = self.factory.makeArchive() |
500 | + now = get_transaction_timestamp(Store.of(archive)) |
501 | + archive_file_1 = self.factory.makeArchiveFile( |
502 | + archive=archive, path="dists/jammy/InRelease" |
503 | + ) |
504 | + naked_archive_file_1 = removeSecurityProxy(archive_file_1) |
505 | + naked_archive_file_1.date_created = now - timedelta(days=3) |
506 | + naked_archive_file_1.date_superseded = now - timedelta(days=2) |
507 | + naked_archive_file_1.date_removed = now - timedelta(days=1) |
508 | + archive_file_2 = self.factory.makeArchiveFile( |
509 | + archive=archive, path="dists/jammy/InRelease" |
510 | + ) |
511 | + naked_archive_file_2 = removeSecurityProxy(archive_file_2) |
512 | + naked_archive_file_2.date_created = now - timedelta(days=2) |
513 | + archive_file_set = getUtility(IArchiveFileSet) |
514 | + for days, existed in ((4, False), (3, True), (2, True), (1, False)): |
515 | + self.assertEqual( |
516 | + archive_file_1 if existed else None, |
517 | + archive_file_set.getByArchive( |
518 | + archive, |
519 | + path="dists/jammy/InRelease", |
520 | + sha256=archive_file_1.library_file.content.sha256, |
521 | + existed_at=now - timedelta(days=days), |
522 | + ).one(), |
523 | + ) |
524 | + for days, existed in ((3, False), (2, True), (1, True), (0, True)): |
525 | + self.assertEqual( |
526 | + archive_file_2 if existed else None, |
527 | + archive_file_set.getByArchive( |
528 | + archive, |
529 | + path="dists/jammy/InRelease", |
530 | + sha256=archive_file_2.library_file.content.sha256, |
531 | + existed_at=now - timedelta(days=days) if days else UTC_NOW, |
532 | + ).one(), |
533 | + ) |
534 | + |
535 | + def test_getByArchive_existed_at_without_date_created(self): |
536 | + archive = self.factory.makeArchive() |
537 | + now = get_transaction_timestamp(Store.of(archive)) |
538 | + archive_file = self.factory.makeArchiveFile( |
539 | + archive=archive, path="dists/jammy/InRelease" |
540 | + ) |
541 | + naked_archive_file = removeSecurityProxy(archive_file) |
542 | + naked_archive_file.date_created = None |
543 | + naked_archive_file.date_removed = now |
544 | + archive_file_set = getUtility(IArchiveFileSet) |
545 | + for days, expected_file in ((1, archive_file), (0, None)): |
546 | + self.assertEqual( |
547 | + expected_file, |
548 | + archive_file_set.getByArchive( |
549 | + archive, |
550 | + path="dists/jammy/InRelease", |
551 | + existed_at=now - timedelta(days=days) if days else UTC_NOW, |
552 | + ).one(), |
553 | + ) |
554 | + |
555 | def test_scheduleDeletion(self): |
556 | archive_files = [self.factory.makeArchiveFile() for _ in range(3)] |
557 | getUtility(IArchiveFileSet).scheduleDeletion( |
558 | diff --git a/lib/lp/soyuz/xmlrpc/archive.py b/lib/lp/soyuz/xmlrpc/archive.py |
559 | index f9a35a8..83331ea 100644 |
560 | --- a/lib/lp/soyuz/xmlrpc/archive.py |
561 | +++ b/lib/lp/soyuz/xmlrpc/archive.py |
562 | @@ -8,6 +8,7 @@ __all__ = [ |
563 | ] |
564 | |
565 | import logging |
566 | +from datetime import datetime |
567 | from pathlib import PurePath |
568 | from typing import Optional, Union |
569 | from xmlrpc.client import Fault |
570 | @@ -18,6 +19,7 @@ from zope.interface import implementer |
571 | from zope.interface.interfaces import ComponentLookupError |
572 | from zope.security.proxy import removeSecurityProxy |
573 | |
574 | +from lp.services.database.constants import UTC_NOW |
575 | from lp.services.macaroons.interfaces import NO_USER, IMacaroonIssuer |
576 | from lp.services.webapp import LaunchpadXMLRPCView |
577 | from lp.soyuz.enums import ArchiveRepositoryFormat |
578 | @@ -126,7 +128,11 @@ class ArchiveAPI(LaunchpadXMLRPCView): |
579 | ) |
580 | |
581 | def _translatePathByHash( |
582 | - self, archive_reference: str, archive, path: PurePath |
583 | + self, |
584 | + archive_reference: str, |
585 | + archive, |
586 | + path: PurePath, |
587 | + existed_at: Optional[datetime], |
588 | ) -> Optional[str]: |
589 | suite = path.parts[1] |
590 | checksum_type = path.parts[-2] |
591 | @@ -145,6 +151,7 @@ class ArchiveAPI(LaunchpadXMLRPCView): |
592 | container="release:%s" % suite, |
593 | path_parent="/".join(path.parts[:-3]), |
594 | sha256=checksum, |
595 | + existed_at=UTC_NOW if existed_at is None else existed_at, |
596 | ) |
597 | .any() |
598 | ) |
599 | @@ -152,20 +159,27 @@ class ArchiveAPI(LaunchpadXMLRPCView): |
600 | return None |
601 | |
602 | log.info( |
603 | - "%s: %s (by-hash) -> LFA %d", |
604 | + "%s: %s (by-hash)%s -> LFA %d", |
605 | archive_reference, |
606 | path.as_posix(), |
607 | + "" if existed_at is None else " at %s" % existed_at.isoformat(), |
608 | archive_file.library_file.id, |
609 | ) |
610 | return archive_file.library_file.getURL(include_token=True) |
611 | |
612 | def _translatePathNonPool( |
613 | - self, archive_reference: str, archive, path: PurePath |
614 | + self, |
615 | + archive_reference: str, |
616 | + archive, |
617 | + path: PurePath, |
618 | + live_at: Optional[datetime], |
619 | ) -> Optional[str]: |
620 | archive_file = ( |
621 | getUtility(IArchiveFileSet) |
622 | .getByArchive( |
623 | - archive=archive, path=path.as_posix(), condemned=False |
624 | + archive=archive, |
625 | + path=path.as_posix(), |
626 | + live_at=UTC_NOW if live_at is None else live_at, |
627 | ) |
628 | .one() |
629 | ) |
630 | @@ -173,30 +187,41 @@ class ArchiveAPI(LaunchpadXMLRPCView): |
631 | return None |
632 | |
633 | log.info( |
634 | - "%s: %s (non-pool) -> LFA %d", |
635 | + "%s: %s (non-pool)%s -> LFA %d", |
636 | archive_reference, |
637 | path.as_posix(), |
638 | + "" if live_at is None else " at %s" % live_at.isoformat(), |
639 | archive_file.library_file.id, |
640 | ) |
641 | return archive_file.library_file.getURL(include_token=True) |
642 | |
643 | def _translatePathPool( |
644 | - self, archive_reference: str, archive, path: PurePath |
645 | + self, |
646 | + archive_reference: str, |
647 | + archive, |
648 | + path: PurePath, |
649 | + live_at: Optional[datetime], |
650 | ) -> Optional[str]: |
651 | - lfa = archive.getPoolFileByPath(path) |
652 | + lfa = archive.getPoolFileByPath(path, live_at=live_at) |
653 | if lfa is None or lfa.deleted: |
654 | return None |
655 | |
656 | log.info( |
657 | - "%s: %s (pool) -> LFA %d", |
658 | + "%s: %s (pool)%s -> LFA %d", |
659 | archive_reference, |
660 | path.as_posix(), |
661 | + "" if live_at is None else " at %s" % live_at.isoformat(), |
662 | lfa.id, |
663 | ) |
664 | return lfa.getURL(include_token=True) |
665 | |
666 | @return_fault |
667 | - def _translatePath(self, archive_reference: str, path: PurePath) -> str: |
668 | + def _translatePath( |
669 | + self, |
670 | + archive_reference: str, |
671 | + path: PurePath, |
672 | + live_at: Optional[datetime], |
673 | + ) -> str: |
674 | archive = getUtility(IArchiveSet).getByReference(archive_reference) |
675 | if archive is None: |
676 | log.info("%s: No archive found", archive_reference) |
677 | @@ -214,35 +239,52 @@ class ArchiveAPI(LaunchpadXMLRPCView): |
678 | message="Can't translate paths in '%s' with format %s." |
679 | % (archive_reference, archive.repository_format) |
680 | ) |
681 | + live_at_message = ( |
682 | + "" if live_at is None else " at %s" % live_at.isoformat() |
683 | + ) |
684 | |
685 | # Consider by-hash index files. |
686 | if path.parts[0] == "dists" and path.parts[2:][-3:-2] == ("by-hash",): |
687 | - url = self._translatePathByHash(archive_reference, archive, path) |
688 | + url = self._translatePathByHash( |
689 | + archive_reference, archive, path, live_at |
690 | + ) |
691 | if url is not None: |
692 | return url |
693 | |
694 | # Consider other non-pool files. |
695 | elif path.parts[0] != "pool": |
696 | - url = self._translatePathNonPool(archive_reference, archive, path) |
697 | + url = self._translatePathNonPool( |
698 | + archive_reference, archive, path, live_at |
699 | + ) |
700 | if url is not None: |
701 | return url |
702 | |
703 | # Consider pool files. |
704 | else: |
705 | - url = self._translatePathPool(archive_reference, archive, path) |
706 | + url = self._translatePathPool( |
707 | + archive_reference, archive, path, live_at |
708 | + ) |
709 | if url is not None: |
710 | return url |
711 | |
712 | - log.info("%s: %s not found", archive_reference, path.as_posix()) |
713 | + log.info( |
714 | + "%s: %s not found%s", |
715 | + archive_reference, |
716 | + path.as_posix(), |
717 | + live_at_message, |
718 | + ) |
719 | raise faults.NotFound( |
720 | - message="'%s' not found in '%s'." |
721 | - % (path.as_posix(), archive_reference) |
722 | + message="'%s' not found in '%s'%s." |
723 | + % (path.as_posix(), archive_reference, live_at_message) |
724 | ) |
725 | |
726 | def translatePath( |
727 | - self, archive_reference: str, path: str |
728 | + self, |
729 | + archive_reference: str, |
730 | + path: str, |
731 | + live_at: Optional[datetime] = None, |
732 | ) -> Union[str, Fault]: |
733 | """See `IArchiveAPI`.""" |
734 | # This thunk exists because you can't use a decorated function as |
735 | # the implementation of a method exported over XML-RPC. |
736 | - return self._translatePath(archive_reference, PurePath(path)) |
737 | + return self._translatePath(archive_reference, PurePath(path), live_at) |
738 | diff --git a/lib/lp/soyuz/xmlrpc/tests/test_archive.py b/lib/lp/soyuz/xmlrpc/tests/test_archive.py |
739 | index 5dd81a4..6b74eec 100644 |
740 | --- a/lib/lp/soyuz/xmlrpc/tests/test_archive.py |
741 | +++ b/lib/lp/soyuz/xmlrpc/tests/test_archive.py |
742 | @@ -3,8 +3,9 @@ |
743 | |
744 | """Tests for the internal Soyuz archive API.""" |
745 | |
746 | -from datetime import timedelta |
747 | +from datetime import datetime, timedelta |
748 | |
749 | +import pytz |
750 | from fixtures import FakeLogger |
751 | from zope.component import getUtility |
752 | from zope.security.proxy import removeSecurityProxy |
753 | @@ -398,6 +399,53 @@ class TestArchiveAPI(TestCaseWithFactory): |
754 | % (archive.reference, path, archive_file.library_file.id) |
755 | ) |
756 | |
757 | + def test_translatePath_by_hash_live_at(self): |
758 | + now = datetime.now(pytz.UTC) |
759 | + archive = removeSecurityProxy(self.factory.makeArchive(private=True)) |
760 | + archive_file = self.factory.makeArchiveFile( |
761 | + archive=archive, |
762 | + container="release:jammy", |
763 | + path="dists/jammy/InRelease", |
764 | + ) |
765 | + naked_archive_file = removeSecurityProxy(archive_file) |
766 | + naked_archive_file.date_created = now - timedelta(days=3) |
767 | + naked_archive_file.date_superseded = now - timedelta(days=2) |
768 | + naked_archive_file.date_removed = now - timedelta(days=1) |
769 | + path = ( |
770 | + "dists/jammy/by-hash/SHA256/%s" |
771 | + % archive_file.library_file.content.sha256 |
772 | + ) |
773 | + for days, expected in ((4, False), (3, True), (2, True), (1, False)): |
774 | + self.logger = self.useFixture(FakeLogger()) |
775 | + live_at = now - timedelta(days=days) |
776 | + if expected: |
777 | + self.assertEqual( |
778 | + archive_file.library_file.getURL(), |
779 | + self.archive_api.translatePath( |
780 | + archive.reference, path, live_at=live_at |
781 | + ), |
782 | + ) |
783 | + self.assertLogs( |
784 | + "%s: %s (by-hash) at %s -> LFA %d" |
785 | + % ( |
786 | + archive.reference, |
787 | + path, |
788 | + live_at.isoformat(), |
789 | + archive_file.library_file.id, |
790 | + ) |
791 | + ) |
792 | + else: |
793 | + self.assertNotFound( |
794 | + "translatePath", |
795 | + "'%s' not found in '%s' at %s." |
796 | + % (path, archive.reference, live_at.isoformat()), |
797 | + "%s: %s not found at %s" |
798 | + % (archive.reference, path, live_at.isoformat()), |
799 | + archive.reference, |
800 | + path, |
801 | + live_at=live_at, |
802 | + ) |
803 | + |
804 | def test_translatePath_non_pool_not_found(self): |
805 | archive = removeSecurityProxy(self.factory.makeArchive()) |
806 | self.factory.makeArchiveFile(archive=archive) |
807 | @@ -597,6 +645,56 @@ class TestArchiveAPI(TestCaseWithFactory): |
808 | % (archive.reference, path, sprf.libraryfile.id) |
809 | ) |
810 | |
811 | + def test_translatePath_pool_source_live_at(self): |
812 | + now = datetime.now(pytz.UTC) |
813 | + archive = removeSecurityProxy(self.factory.makeArchive()) |
814 | + spph = self.factory.makeSourcePackagePublishingHistory( |
815 | + archive=archive, |
816 | + status=PackagePublishingStatus.PUBLISHED, |
817 | + sourcepackagename="test-package", |
818 | + component="main", |
819 | + ) |
820 | + removeSecurityProxy(spph).datepublished = now - timedelta(days=2) |
821 | + removeSecurityProxy(spph).dateremoved = now - timedelta(days=1) |
822 | + sprf = self.factory.makeSourcePackageReleaseFile( |
823 | + sourcepackagerelease=spph.sourcepackagerelease, |
824 | + library_file=self.factory.makeLibraryFileAlias( |
825 | + filename="test-package_1.dsc", db_only=True |
826 | + ), |
827 | + ) |
828 | + IStore(sprf).flush() |
829 | + path = "pool/main/t/test-package/test-package_1.dsc" |
830 | + for days, expected in ((3, False), (2, True), (1, False)): |
831 | + self.logger = self.useFixture(FakeLogger()) |
832 | + live_at = now - timedelta(days=days) |
833 | + if expected: |
834 | + self.assertEqual( |
835 | + sprf.libraryfile.getURL(), |
836 | + self.archive_api.translatePath( |
837 | + archive.reference, path, live_at=live_at |
838 | + ), |
839 | + ) |
840 | + self.assertLogs( |
841 | + "%s: %s (pool) at %s -> LFA %d" |
842 | + % ( |
843 | + archive.reference, |
844 | + path, |
845 | + live_at.isoformat(), |
846 | + sprf.libraryfile.id, |
847 | + ) |
848 | + ) |
849 | + else: |
850 | + self.assertNotFound( |
851 | + "translatePath", |
852 | + "'%s' not found in '%s' at %s." |
853 | + % (path, archive.reference, live_at.isoformat()), |
854 | + "%s: %s not found at %s" |
855 | + % (archive.reference, path, live_at.isoformat()), |
856 | + archive.reference, |
857 | + path, |
858 | + live_at=live_at, |
859 | + ) |
860 | + |
861 | def test_translatePath_pool_binary_not_found(self): |
862 | archive = removeSecurityProxy(self.factory.makeArchive()) |
863 | self.factory.makeBinaryPackagePublishingHistory( |
864 | @@ -712,3 +810,53 @@ class TestArchiveAPI(TestCaseWithFactory): |
865 | "%s: %s (pool) -> LFA %d" |
866 | % (archive.reference, path, bpf.libraryfile.id) |
867 | ) |
868 | + |
869 | + def test_translatePath_pool_binary_live_at(self): |
870 | + now = datetime.now(pytz.UTC) |
871 | + archive = removeSecurityProxy(self.factory.makeArchive()) |
872 | + bpph = self.factory.makeBinaryPackagePublishingHistory( |
873 | + archive=archive, |
874 | + status=PackagePublishingStatus.PUBLISHED, |
875 | + sourcepackagename="test-package", |
876 | + component="main", |
877 | + ) |
878 | + removeSecurityProxy(bpph).datepublished = now - timedelta(days=2) |
879 | + removeSecurityProxy(bpph).dateremoved = now - timedelta(days=1) |
880 | + bpf = self.factory.makeBinaryPackageFile( |
881 | + binarypackagerelease=bpph.binarypackagerelease, |
882 | + library_file=self.factory.makeLibraryFileAlias( |
883 | + filename="test-package_1_amd64.deb", db_only=True |
884 | + ), |
885 | + ) |
886 | + IStore(bpf).flush() |
887 | + path = "pool/main/t/test-package/test-package_1_amd64.deb" |
888 | + for days, expected in ((3, False), (2, True), (1, False)): |
889 | + self.logger = self.useFixture(FakeLogger()) |
890 | + live_at = now - timedelta(days=days) |
891 | + if expected: |
892 | + self.assertEqual( |
893 | + bpf.libraryfile.getURL(), |
894 | + self.archive_api.translatePath( |
895 | + archive.reference, path, live_at=live_at |
896 | + ), |
897 | + ) |
898 | + self.assertLogs( |
899 | + "%s: %s (pool) at %s -> LFA %d" |
900 | + % ( |
901 | + archive.reference, |
902 | + path, |
903 | + live_at.isoformat(), |
904 | + bpf.libraryfile.id, |
905 | + ) |
906 | + ) |
907 | + else: |
908 | + self.assertNotFound( |
909 | + "translatePath", |
910 | + "'%s' not found in '%s' at %s." |
911 | + % (path, archive.reference, live_at.isoformat()), |
912 | + "%s: %s not found at %s" |
913 | + % (archive.reference, path, live_at.isoformat()), |
914 | + archive.reference, |
915 | + path, |
916 | + live_at=live_at, |
917 | + ) |