Merge ~cjwatson/launchpad:archive-translate-path-non-pool-multiple into launchpad:master

Proposed by Colin Watson
Status: Merged
Approved by: Colin Watson
Approved revision: b8e715ec5b98135fa2fbbb45a8362beca1b1979f
Merge reported by: Otto Co-Pilot
Merged at revision: not available
Proposed branch: ~cjwatson/launchpad:archive-translate-path-non-pool-multiple
Merge into: launchpad:master
Diff against target: 197 lines (+52/-27)
6 files modified
lib/lp/archivepublisher/publishing.py (+1/-4)
lib/lp/soyuz/interfaces/archivefile.py (+5/-3)
lib/lp/soyuz/model/archivefile.py (+6/-3)
lib/lp/soyuz/tests/test_archivefile.py (+10/-2)
lib/lp/soyuz/xmlrpc/archive.py (+3/-1)
lib/lp/soyuz/xmlrpc/tests/test_archive.py (+27/-14)
Reviewer Review Type Date Requested Status
Jürgen Gmach Approve
Review via email: mp+431673@code.launchpad.net

Commit message

Tolerate multiple non-pool files with the same name

Description of the change

This happens in practice when a file has been "condemned" (scheduled for deletion) but has not yet actually been deleted; publisher runs typically do this to old versions of archive metadata files when publishing new versions of them.
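
For illustration only, here is a self-contained toy (not Launchpad code) of why a unique-result lookup over ArchiveFile rows breaks once a condemned duplicate is still present, and why filtering on the condemned state restores a single match:

    # Toy illustration, not Launchpad code: stand-ins for ArchiveFile rows and
    # for a unique-result lookup like Storm's ResultSet.one().
    from dataclasses import dataclass
    from datetime import datetime, timedelta
    from typing import List, Optional

    @dataclass
    class FakeArchiveFile:
        path: str
        scheduled_deletion_date: Optional[datetime] = None  # set => "condemned"

    def one(results: List[FakeArchiveFile]) -> Optional[FakeArchiveFile]:
        # Mimics ResultSet.one(): error if more than one row matches.
        if len(results) > 1:
            raise ValueError("more than one row matched")
        return results[0] if results else None

    files = [
        # Old InRelease, condemned by an earlier publisher run but not yet deleted.
        FakeArchiveFile("dists/focal/InRelease", datetime.now() + timedelta(days=1)),
        # Current InRelease, published later under the same path.
        FakeArchiveFile("dists/focal/InRelease"),
    ]

    by_path = [f for f in files if f.path == "dists/focal/InRelease"]
    # one(by_path) would raise: both rows share the path.
    current = one([f for f in by_path if f.scheduled_deletion_date is None])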

Revision history for this message
Jürgen Gmach (jugmac00) wrote :

I think this is the first time I have encountered a three-state-boolean / troolean? :-), at least in Python.

It took me a while to come to a conclusion on whether this is clever or awkward, and in the end I think it's both :-)

The issue is... it is impossible to know the three states with high certainty without looking the param up.

This leaves two possible solutions:
- two booleans a la `include_condemned_files` and `include_non_condemned_files`
- enum a la `file_status`: `only_condemned_files`, `only_non_condemned_files`, `all_files`

I think I like the enum much better.

I am sure you can come up with better naming.
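
For comparison, a minimal self-contained sketch of the enum option (FileStatus and get_files are placeholder names, not from the branch; the real method is IArchiveFileSet.getByArchive and builds Storm clauses rather than filtering lists):

    # Hypothetical sketch of the enum alternative; names are placeholders.
    import enum
    from typing import Iterable, List

    class FileStatus(enum.Enum):
        ONLY_CONDEMNED = "only_condemned"          # scheduled_deletion_date set
        ONLY_NON_CONDEMNED = "only_non_condemned"  # scheduled_deletion_date unset
        ALL = "all"                                # no condemnation filter

    def get_files(files: Iterable, status: FileStatus = FileStatus.ALL) -> List:
        if status is FileStatus.ONLY_CONDEMNED:
            return [f for f in files if f.scheduled_deletion_date is not None]
        if status is FileStatus.ONLY_NON_CONDEMNED:
            return [f for f in files if f.scheduled_deletion_date is None]
        return list(files)

Call sites would then read get_files(files, FileStatus.ONLY_CONDEMNED) rather than passing a bare True/False.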

Revision history for this message
Jürgen Gmach (jugmac00) wrote :

Or...

`file_status`:

ACTIVE
MARKED_FOR_DELETION
ANY

TBH, I am not sure whether the enum is the best way...

Revision history for this message
Colin Watson (cjwatson) wrote :

I definitely don't like the two-contradictory-booleans approach.

I'm not especially keen on the enum either, but I guess I can look at that (tomorrow).

Revision history for this message
Jürgen Gmach (jugmac00) wrote :

The third option would be to have three public methods.

And the longer I reason about this... when there is no clear better way, just go with the most convenient one ¯\_(ツ)_/¯

review: Approve
Revision history for this message
Colin Watson (cjwatson) wrote :

I gave the enum approach a go, and while it was workable, it felt as though it was making things more verbose without really making them any clearer. So I think I'll just stick with the optional boolean. (But this is an entirely internal interface with relatively few callers, so it's easy to change if we change our minds.)
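
For reference, a self-contained sketch of the tri-state behaviour that the diff below gives the `condemned` parameter (the real code appends Storm clauses on ArchiveFile.scheduled_deletion_date rather than filtering objects in Python):

    # Sketch of the optional-boolean filter; mirrors the semantics in the diff
    # below but filters plain objects instead of building query clauses.
    from typing import Iterable, List, Optional

    def filter_condemned(files: Iterable, condemned: Optional[bool] = None) -> List:
        if condemned is None:   # default: no filtering on condemnation
            return list(files)
        if condemned:           # True: only files already scheduled for deletion
            return [f for f in files if f.scheduled_deletion_date is not None]
        return [f for f in files if f.scheduled_deletion_date is None]  # False

In the diff, the publisher's pruning pass uses condemned=True, while ArchiveAPI.translatePath uses condemned=False so that path lookups ignore condemned duplicates.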

Preview Diff

1diff --git a/lib/lp/archivepublisher/publishing.py b/lib/lp/archivepublisher/publishing.py
2index f714c77..58d948a 100644
3--- a/lib/lp/archivepublisher/publishing.py
4+++ b/lib/lp/archivepublisher/publishing.py
5@@ -1221,10 +1221,7 @@ class Publisher:
6 )
7 uncondemned_files = set()
8 for db_file in archive_file_set.getByArchive(
9- self.archive,
10- container=container,
11- only_condemned=True,
12- eager_load=True,
13+ self.archive, container=container, condemned=True, eager_load=True
14 ):
15 stripped_path = strip_dists(db_file.path)
16 if stripped_path in current_files:
17diff --git a/lib/lp/soyuz/interfaces/archivefile.py b/lib/lp/soyuz/interfaces/archivefile.py
18index b72c54e..e41548b 100644
19--- a/lib/lp/soyuz/interfaces/archivefile.py
20+++ b/lib/lp/soyuz/interfaces/archivefile.py
21@@ -89,7 +89,7 @@ class IArchiveFileSet(Interface):
22 container=None,
23 path=None,
24 sha256=None,
25- only_condemned=False,
26+ condemned=None,
27 eager_load=False,
28 ):
29 """Get files in an archive.
30@@ -101,8 +101,10 @@ class IArchiveFileSet(Interface):
31 directory is this path.
32 :param sha256: If not None, return only files with this SHA-256
33 checksum.
34- :param only_condemned: If True, return only files with a
35- scheduled_deletion_date set.
36+ :param condemned: If True, return only files with a
37+ scheduled_deletion_date set; if False, return only files without
38+ a scheduled_deletion_date set; if None (the default), return
39+ both.
40 :param eager_load: If True, preload related `LibraryFileAlias` and
41 `LibraryFileContent` rows.
42 :return: An iterable of matched files.
43diff --git a/lib/lp/soyuz/model/archivefile.py b/lib/lp/soyuz/model/archivefile.py
44index 933e6d7..fe4ba4c 100644
45--- a/lib/lp/soyuz/model/archivefile.py
46+++ b/lib/lp/soyuz/model/archivefile.py
47@@ -101,7 +101,7 @@ class ArchiveFileSet:
48 path=None,
49 path_parent=None,
50 sha256=None,
51- only_condemned=False,
52+ condemned=None,
53 eager_load=False,
54 ):
55 """See `IArchiveFileSet`."""
56@@ -126,8 +126,11 @@ class ArchiveFileSet:
57 LibraryFileContent.sha256 == sha256,
58 ]
59 )
60- if only_condemned:
61- clauses.append(ArchiveFile.scheduled_deletion_date != None)
62+ if condemned is not None:
63+ if condemned:
64+ clauses.append(ArchiveFile.scheduled_deletion_date != None)
65+ else:
66+ clauses.append(ArchiveFile.scheduled_deletion_date == None)
67 archive_files = IStore(ArchiveFile).find(ArchiveFile, *clauses)
68
69 def eager_load(rows):
70diff --git a/lib/lp/soyuz/tests/test_archivefile.py b/lib/lp/soyuz/tests/test_archivefile.py
71index ca295ad..6e8f819 100644
72--- a/lib/lp/soyuz/tests/test_archivefile.py
73+++ b/lib/lp/soyuz/tests/test_archivefile.py
74@@ -92,7 +92,11 @@ class TestArchiveFile(TestCaseWithFactory):
75 )
76 self.assertContentEqual(
77 [archive_files[0]],
78- archive_file_set.getByArchive(archives[0], only_condemned=True),
79+ archive_file_set.getByArchive(archives[0], condemned=True),
80+ )
81+ self.assertContentEqual(
82+ [archive_files[1]],
83+ archive_file_set.getByArchive(archives[0], condemned=False),
84 )
85 self.assertContentEqual(
86 archive_files[2:], archive_file_set.getByArchive(archives[1])
87@@ -115,7 +119,11 @@ class TestArchiveFile(TestCaseWithFactory):
88 )
89 self.assertContentEqual(
90 [archive_files[2]],
91- archive_file_set.getByArchive(archives[1], only_condemned=True),
92+ archive_file_set.getByArchive(archives[1], condemned=True),
93+ )
94+ self.assertContentEqual(
95+ [archive_files[3]],
96+ archive_file_set.getByArchive(archives[1], condemned=False),
97 )
98 self.assertContentEqual(
99 [archive_files[0]],
100diff --git a/lib/lp/soyuz/xmlrpc/archive.py b/lib/lp/soyuz/xmlrpc/archive.py
101index 7c78ab8..d5e5f69 100644
102--- a/lib/lp/soyuz/xmlrpc/archive.py
103+++ b/lib/lp/soyuz/xmlrpc/archive.py
104@@ -162,7 +162,9 @@ class ArchiveAPI(LaunchpadXMLRPCView):
105 ) -> Optional[str]:
106 archive_file = (
107 getUtility(IArchiveFileSet)
108- .getByArchive(archive=archive, path=path.as_posix())
109+ .getByArchive(
110+ archive=archive, path=path.as_posix(), condemned=False
111+ )
112 .one()
113 )
114 if archive_file is None:
115diff --git a/lib/lp/soyuz/xmlrpc/tests/test_archive.py b/lib/lp/soyuz/xmlrpc/tests/test_archive.py
116index 39bb7c6..3d725ce 100644
117--- a/lib/lp/soyuz/xmlrpc/tests/test_archive.py
118+++ b/lib/lp/soyuz/xmlrpc/tests/test_archive.py
119@@ -3,6 +3,8 @@
120
121 """Tests for the internal Soyuz archive API."""
122
123+from datetime import timedelta
124+
125 from fixtures import FakeLogger
126 from zope.component import getUtility
127 from zope.security.proxy import removeSecurityProxy
128@@ -13,6 +15,7 @@ from lp.services.features.testing import FeatureFixture
129 from lp.services.macaroons.interfaces import IMacaroonIssuer
130 from lp.soyuz.enums import ArchiveRepositoryFormat, PackagePublishingStatus
131 from lp.soyuz.interfaces.archive import NAMED_AUTH_TOKEN_FEATURE_FLAG
132+from lp.soyuz.interfaces.archivefile import IArchiveFileSet
133 from lp.soyuz.xmlrpc.archive import ArchiveAPI
134 from lp.testing import TestCaseWithFactory, person_logged_in
135 from lp.testing.layers import LaunchpadFunctionalLayer
136@@ -383,38 +386,48 @@ class TestArchiveAPI(TestCaseWithFactory):
137 def test_translatePath_non_pool_found(self):
138 archive = removeSecurityProxy(self.factory.makeArchive())
139 self.factory.makeArchiveFile(archive=archive)
140- archive_file = self.factory.makeArchiveFile(archive=archive)
141+ path = "dists/focal/InRelease"
142+ archive_files = [
143+ self.factory.makeArchiveFile(archive=archive, path=path)
144+ for _ in range(2)
145+ ]
146+ getUtility(IArchiveFileSet).scheduleDeletion(
147+ [archive_files[0]], timedelta(days=1)
148+ )
149 self.assertEqual(
150- archive_file.library_file.getURL(),
151- self.archive_api.translatePath(
152- archive.reference, archive_file.path
153- ),
154+ archive_files[1].library_file.getURL(),
155+ self.archive_api.translatePath(archive.reference, path),
156 )
157 self.assertLogs(
158 "%s: %s (non-pool) -> LFA %d"
159 % (
160 archive.reference,
161- archive_file.path,
162- archive_file.library_file.id,
163+ path,
164+ archive_files[1].library_file.id,
165 )
166 )
167
168 def test_translatePath_non_pool_found_private(self):
169 archive = removeSecurityProxy(self.factory.makeArchive(private=True))
170 self.factory.makeArchiveFile(archive=archive)
171- archive_file = self.factory.makeArchiveFile(archive=archive)
172+ path = "dists/focal/InRelease"
173+ archive_files = [
174+ self.factory.makeArchiveFile(archive=archive, path=path)
175+ for _ in range(2)
176+ ]
177+ getUtility(IArchiveFileSet).scheduleDeletion(
178+ [archive_files[0]], timedelta(days=1)
179+ )
180 self.assertStartsWith(
181- archive_file.library_file.getURL() + "?token=",
182- self.archive_api.translatePath(
183- archive.reference, archive_file.path
184- ),
185+ archive_files[1].library_file.getURL() + "?token=",
186+ self.archive_api.translatePath(archive.reference, path),
187 )
188 self.assertLogs(
189 "%s: %s (non-pool) -> LFA %d"
190 % (
191 archive.reference,
192- archive_file.path,
193- archive_file.library_file.id,
194+ path,
195+ archive_files[1].library_file.id,
196 )
197 )
198
