Merge lp:~cjwatson/launchpad/inrelease-by-hash into lp:launchpad
Proposed by Colin Watson

Status: Merged
Merged at revision: 18605
Proposed branch: lp:~cjwatson/launchpad/inrelease-by-hash
Merge into: lp:launchpad
Prerequisite: lp:~cjwatson/launchpad/refactor-archive-signing
Diff against target: 723 lines (+258/-115), 3 files modified:
  lib/lp/archivepublisher/publishing.py (+52/-19)
  lib/lp/archivepublisher/tests/test_publisher.py (+185/-83)
  lib/lp/soyuz/model/archivefile.py (+21/-13)
To merge this branch: bzr merge lp:~cjwatson/launchpad/inrelease-by-hash

| Reviewer | Review Type | Date Requested | Status |
|---|---|---|---|
| William Grant | code | | Approve |

Review via email: mp+336675@code.launchpad.net
Commit message
Add Release, Release.gpg, and InRelease to by-hash directories.
Description of the change
Like most of the by-hash stuff, this has lots of fiddly details and will want some careful QA on dogfood. But at its core it's reasonably straightforward: now that the signed files are generated early enough, we just add them to the set of files being considered by _updateByHash. I arranged to add these files to by-hash before they're renamed into place, which entailed introducing the concept of the "real file name" in a few places.
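To illustrate the "real file name" idea, here is a minimal standalone sketch — not code from this branch, and `stage_by_hash`, `suite_dir`, and `extra_files` are hypothetical names. The publisher writes `Release.new`, `Release.gpg.new`, and `InRelease.new` first, and `_updateByHash` receives a mapping from each published name to its staged on-disk name, so by-hash entries are recorded under the final name while hashing the not-yet-renamed content:

```python
import hashlib
import os


def stage_by_hash(suite_dir, extra_files):
    """Sketch: hash staged files under their final published names.

    `extra_files` maps published name to staged on-disk name, e.g.
    {"Release": "Release.new", "Release.gpg": "Release.gpg.new",
     "InRelease": "InRelease.new"}.
    """
    entries = {}
    for name, real_name in extra_files.items():
        real_path = os.path.join(suite_dir, real_name)
        if not os.path.exists(real_path):
            # Unsigned archives have no Release.gpg/InRelease to stage.
            continue
        with open(real_path, "rb") as f:
            sha256 = hashlib.sha256(f.read()).hexdigest()
        # Keyed by the published name; the content (and hence the hash)
        # comes from the staged file that will be renamed into place later.
        entries[name] = sha256
    return entries
```

Once by-hash entries exist for the staged content, the later rename of each `.new` file into place is safe: clients following by-hash URLs can already fetch the exact bytes the new Release file references.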
Revision history for this message

William Grant (wgrant):
review: Approve (code)
Preview Diff
=== modified file 'lib/lp/archivepublisher/publishing.py'
--- lib/lp/archivepublisher/publishing.py 2018-03-27 23:02:02 +0000
+++ lib/lp/archivepublisher/publishing.py 2018-03-27 23:27:31 +0000
@@ -772,8 +772,8 @@
             for pocket in self.archive.getPockets():
                 ds_pocket = (distroseries.name, pocket)
                 suite = distroseries.getSuite(pocket)
-                release_path = os.path.join(
-                    self._config.distsroot, suite, "Release")
+                suite_path = os.path.join(self._config.distsroot, suite)
+                release_path = os.path.join(suite_path, "Release")
 
                 if is_careful:
                     if not self.isAllowed(distroseries, pocket):
@@ -803,7 +803,11 @@
                     # We aren't publishing a new Release file for this
                     # suite, probably because it's immutable, but we still
                    # need to prune by-hash files from it.
-                    self._updateByHash(suite, "Release")
+                    extra_by_hash_files = {
+                        filename: filename
+                        for filename in ("Release", "Release.gpg", "InRelease")
+                        if file_exists(os.path.join(suite_path, filename))}
+                    self._updateByHash(suite, "Release", extra_by_hash_files)
 
     def _allIndexFiles(self, distroseries):
         """Return all index files on disk for a distroseries.
@@ -1025,7 +1029,7 @@
             return self.distro.displayname
         return "LP-PPA-%s" % get_ppa_reference(self.archive)
 
-    def _updateByHash(self, suite, release_file_name):
+    def _updateByHash(self, suite, release_file_name, extra_files):
         """Update by-hash files for a suite.
 
         This takes Release file data which references a set of on-disk
@@ -1034,6 +1038,16 @@
         directories to be in sync with ArchiveFile. Any on-disk by-hash
         entries that ceased to be current sufficiently long ago are removed.
         """
+        extra_data = {}
+        for filename, real_filename in extra_files.items():
+            hashes = self._readIndexFileHashes(
+                suite, filename, real_file_name=real_filename)
+            if hashes is None:
+                continue
+            for archive_hash in archive_hashes:
+                extra_data.setdefault(archive_hash.apt_name, []).append(
+                    hashes[archive_hash.deb822_name])
+
         release_path = os.path.join(
             self._config.distsroot, suite, release_file_name)
         with open(release_path) as release_file:
@@ -1052,12 +1066,13 @@
         # Gather information on entries in the current Release file, and
         # make sure nothing there is condemned.
         current_files = {}
-        current_sha256_checksums = set()
-        for current_entry in release_data["SHA256"]:
+        for current_entry in (
+                release_data["SHA256"] + extra_data.get("SHA256", [])):
             path = os.path.join(suite_dir, current_entry["name"])
+            real_name = current_entry.get("real_name", current_entry["name"])
+            real_path = os.path.join(suite_dir, real_name)
             current_files[path] = (
-                int(current_entry["size"]), current_entry["sha256"])
-            current_sha256_checksums.add(current_entry["sha256"])
+                int(current_entry["size"]), current_entry["sha256"], real_path)
         uncondemned_files = set()
         for db_file in archive_file_set.getByArchive(
                 self.archive, container=container, only_condemned=True,
@@ -1117,15 +1132,16 @@
         # XXX cjwatson 2016-03-15: This should possibly use bulk creation,
        # although we can only avoid about a third of the queries since the
        # librarian client has no bulk upload methods.
-        for path, (size, sha256) in current_files.items():
-            full_path = os.path.join(self._config.distsroot, path)
+        for path, (size, sha256, real_path) in current_files.items():
+            full_path = os.path.join(self._config.distsroot, real_path)
             if (os.path.exists(full_path) and
                     not by_hashes.known(path, "SHA256", sha256)):
                 with open(full_path, "rb") as fileobj:
                     db_file = archive_file_set.newFromFile(
                         self.archive, container, os.path.join("dists", path),
                         fileobj, size, filenameToContentType(path))
-                by_hashes.add(path, db_file.library_file, copy_from_path=path)
+                by_hashes.add(
+                    path, db_file.library_file, copy_from_path=real_path)
 
         # Finally, remove any files from disk that aren't recorded in the
         # database and aren't active.
@@ -1173,6 +1189,9 @@
         # special games with timestamps here, as it will interfere with the
         # "staging" mechanism used to update these files.
         extra_files = set()
+        # Extra by-hash files are not listed in the Release file, but we
+        # still want to include them in by-hash directories.
+        extra_by_hash_files = {}
         for component in all_components:
             self._writeSuiteSource(
                 distroseries, pocket, component, core_files)
@@ -1239,9 +1258,7 @@
 
         self._writeReleaseFile(suite, release_file)
         core_files.add("Release")
-
-        if distroseries.publish_by_hash:
-            self._updateByHash(suite, "Release.new")
+        extra_by_hash_files["Release"] = "Release.new"
 
         signable_archive = ISignableArchive(self.archive)
         if signable_archive.can_sign:
@@ -1250,11 +1267,16 @@
             signable_archive.signRepository(
                 suite, pubconf=self._config, suffix=".new", log=self.log)
             core_files.add("Release.gpg")
+            extra_by_hash_files["Release.gpg"] = "Release.gpg.new"
             core_files.add("InRelease")
+            extra_by_hash_files["InRelease"] = "InRelease.new"
         else:
             # Skip signature if the archive is not set up for signing.
             self.log.debug("No signing key available, skipping signature.")
 
+        if distroseries.publish_by_hash:
+            self._updateByHash(suite, "Release.new", extra_by_hash_files)
+
         for name in ("Release", "Release.gpg", "InRelease"):
             if name in core_files:
                 os.rename(
@@ -1366,7 +1388,8 @@
             # Schedule this for inclusion in the Release file.
             all_series_files.add(os.path.join(component, "i18n", "Index"))
 
-    def _readIndexFileHashes(self, suite, file_name, subpath=None):
+    def _readIndexFileHashes(self, suite, file_name, subpath=None,
+                             real_file_name=None):
         """Read an index file and return its hashes.
 
         :param suite: Suite name.
@@ -1374,6 +1397,11 @@
         :param subpath: Optional subpath within the suite root. Generated
             indexes will not include this path. If omitted, filenames are
             assumed to be relative to the suite root.
+        :param real_file_name: The actual filename to open when reading
+            data (`file_name` will still be the name used in the returned
+            dictionary). If this is passed, then the returned hash
+            component dictionaries will include it in additional "real_name"
+            items.
         :return: A dictionary mapping hash field names to dictionaries of
             their components as defined by debian.deb822.Release (e.g.
             {"md5sum": {"md5sum": ..., "size": ..., "name": ...}}), or None
@@ -1381,7 +1409,8 @@
         """
         open_func = open
         full_name = os.path.join(
-            self._config.distsroot, suite, subpath or '.', file_name)
+            self._config.distsroot, suite, subpath or '.',
+            real_file_name or file_name)
         if not os.path.exists(full_name):
             if os.path.exists(full_name + '.gz'):
                 open_func = gzip.open
@@ -1405,9 +1434,13 @@
                 for hashobj in hashes.values():
                     hashobj.update(chunk)
                 size += len(chunk)
-        return {
-            alg: {alg: hashobj.hexdigest(), "name": file_name, "size": size}
-            for alg, hashobj in hashes.items()}
+        ret = {}
+        for alg, hashobj in hashes.items():
+            digest = hashobj.hexdigest()
+            ret[alg] = {alg: digest, "name": file_name, "size": size}
+            if real_file_name:
+                ret[alg]["real_name"] = real_file_name
+        return ret
 
     def deleteArchive(self):
         """Delete the archive.
 
=== modified file 'lib/lp/archivepublisher/tests/test_publisher.py'
--- lib/lp/archivepublisher/tests/test_publisher.py 2018-03-27 23:02:02 +0000
+++ lib/lp/archivepublisher/tests/test_publisher.py 2018-03-27 23:27:31 +0000
@@ -17,9 +17,11 @@
     datetime,
     timedelta,
     )
+from fnmatch import fnmatch
 from functools import partial
 import gzip
 import hashlib
+from itertools import product
 from operator import attrgetter
 import os
 import shutil
@@ -29,10 +31,12 @@
 import time
 
 from debian.deb822 import Release
+from fixtures import MonkeyPatch
 try:
     import lzma
 except ImportError:
     from backports import lzma
+import mock
 import pytz
 from testscenarios import (
     load_tests_apply_scenarios,
@@ -66,6 +70,7 @@
     IArchiveSigningKey,
     )
 from lp.archivepublisher.publishing import (
+    BY_HASH_STAY_OF_EXECUTION,
     ByHash,
     ByHashes,
     DirectoryHash,
@@ -2547,6 +2552,22 @@
 class TestUpdateByHash(TestPublisherBase):
     """Tests for handling of by-hash files."""
 
+    def setUpMockTime(self):
+        """Start simulating the advance of time in the publisher."""
+        self.times = [datetime.now(pytz.UTC)]
+        mock_datetime = mock.patch('lp.archivepublisher.publishing.datetime')
+        mocked_datetime = mock_datetime.start()
+        self.addCleanup(mock_datetime.stop)
+        mocked_datetime.utcnow = lambda: self.times[-1].replace(tzinfo=None)
+        self.useFixture(MonkeyPatch(
+            'lp.soyuz.model.archivefile._now', lambda: self.times[-1]))
+
+    def advanceTime(self, delta=None, absolute=None):
+        if delta is not None:
+            self.times.append(self.times[-1] + delta)
+        else:
+            self.times.append(absolute)
+
     def runSteps(self, publisher, step_a=False, step_a2=False, step_c=False,
                  step_d=False):
         """Run publisher steps."""
@@ -2559,6 +2580,33 @@
         if step_d:
             publisher.D_writeReleaseFiles(False)
 
+    @classmethod
+    def _makeScheduledDeletionDateMatcher(cls, condemned_at):
+        if condemned_at is None:
+            return Is(None)
+        else:
+            return Equals(
+                condemned_at + timedelta(days=BY_HASH_STAY_OF_EXECUTION))
+
+    def assertHasSuiteFiles(self, patterns, *properties):
+        def is_interesting(path):
+            return any(
+                fnmatch(path, 'dists/breezy-autotest/%s' % pattern)
+                for pattern in patterns)
+
+        files = [
+            archive_file
+            for archive_file in getUtility(IArchiveFileSet).getByArchive(
+                self.ubuntutest.main_archive)
+            if is_interesting(archive_file.path)]
+        matchers = []
+        for path, condemned_at in properties:
+            matchers.append(MatchesStructure(
+                path=Equals('dists/breezy-autotest/%s' % path),
+                scheduled_deletion_date=self._makeScheduledDeletionDateMatcher(
+                    condemned_at)))
+        self.assertThat(files, MatchesSetwise(*matchers))
+
     def test_disabled(self):
         # The publisher does not create by-hash directories if it is
         # disabled in the series configuration.
@@ -2611,14 +2659,18 @@
 
         suite_path = partial(
             os.path.join, self.config.distsroot, 'breezy-autotest')
-        contents = set()
+        top_contents = set()
+        with open(suite_path('Release'), 'rb') as f:
+            top_contents.add(f.read())
+        main_contents = set()
         for name in ('Release', 'Sources.gz', 'Sources.bz2'):
             with open(suite_path('main', 'source', name), 'rb') as f:
-                contents.add(f.read())
+                main_contents.add(f.read())
 
+        self.assertThat(suite_path('by-hash'), ByHashHasContents(top_contents))
         self.assertThat(
             suite_path('main', 'source', 'by-hash'),
-            ByHashHasContents(contents))
+            ByHashHasContents(main_contents))
 
         archive_files = getUtility(IArchiveFileSet).getByArchive(
             self.ubuntutest.main_archive)
@@ -2640,8 +2692,11 @@
 
         suite_path = partial(
             os.path.join, self.config.distsroot, 'breezy-autotest')
+        top_contents = set()
         main_contents = set()
         universe_contents = set()
+        with open(suite_path('Release'), 'rb') as f:
+            top_contents.add(f.read())
         for name in ('Release', 'Sources.gz', 'Sources.bz2'):
             with open(suite_path('main', 'source', name), 'rb') as f:
                 main_contents.add(f.read())
@@ -2652,10 +2707,13 @@
         self.runSteps(publisher, step_a=True, step_c=True, step_d=True)
         flush_database_caches()
 
+        with open(suite_path('Release'), 'rb') as f:
+            top_contents.add(f.read())
         for name in ('Release', 'Sources.gz', 'Sources.bz2'):
             with open(suite_path('main', 'source', name), 'rb') as f:
                 main_contents.add(f.read())
 
+        self.assertThat(suite_path('by-hash'), ByHashHasContents(top_contents))
         self.assertThat(
             suite_path('main', 'source', 'by-hash'),
             ByHashHasContents(main_contents))
@@ -2666,7 +2724,8 @@
         archive_files = getUtility(IArchiveFileSet).getByArchive(
             self.ubuntutest.main_archive)
         self.assertContentEqual(
-            ['dists/breezy-autotest/main/source/Sources.bz2',
+            ['dists/breezy-autotest/Release',
+             'dists/breezy-autotest/main/source/Sources.bz2',
              'dists/breezy-autotest/main/source/Sources.gz'],
             [archive_file.path for archive_file in archive_files
              if archive_file.scheduled_deletion_date is not None])
@@ -2680,11 +2739,11 @@
             self.ubuntutest.main_archive)
         suite_path = partial(
             os.path.join, self.config.distsroot, 'breezy-autotest')
-        get_contents_files = lambda: [
-            archive_file
-            for archive_file in getUtility(IArchiveFileSet).getByArchive(
-                self.ubuntutest.main_archive)
-            if archive_file.path.startswith('dists/breezy-autotest/Contents-')]
+        self.setUpMockTime()
+
+        def get_release_contents():
+            with open(suite_path('Release')) as f:
+                return f.read()
 
         # Create the first file.
         with open_for_writing(suite_path('Contents-i386'), 'w') as f:
@@ -2693,72 +2752,93 @@
             self.breezy_autotest, PackagePublishingPocket.RELEASE)
         self.runSteps(publisher, step_a=True, step_c=True, step_d=True)
         flush_database_caches()
-        matchers = [
-            MatchesStructure(
-                path=Equals('dists/breezy-autotest/Contents-i386'),
-                scheduled_deletion_date=Is(None))]
-        self.assertThat(get_contents_files(), MatchesSetwise(*matchers))
+        self.assertHasSuiteFiles(
+            ('Contents-*', 'Release'),
+            ('Contents-i386', None), ('Release', None))
+        releases = [get_release_contents()]
         self.assertThat(
-            suite_path('by-hash'), ByHashHasContents(['A Contents file\n']))
+            suite_path('by-hash'),
+            ByHashHasContents(['A Contents file\n'] + releases))
 
         # Add a second identical file.
         with open_for_writing(suite_path('Contents-hppa'), 'w') as f:
             f.write('A Contents file\n')
+        self.advanceTime(delta=timedelta(hours=1))
         self.runSteps(publisher, step_d=True)
         flush_database_caches()
-        matchers.append(
-            MatchesStructure(
-                path=Equals('dists/breezy-autotest/Contents-hppa'),
-                scheduled_deletion_date=Is(None)))
-        self.assertThat(get_contents_files(), MatchesSetwise(*matchers))
+        self.assertHasSuiteFiles(
+            ('Contents-*', 'Release'),
+            ('Contents-i386', None), ('Contents-hppa', None),
+            ('Release', self.times[1]), ('Release', None))
+        releases.append(get_release_contents())
         self.assertThat(
-            suite_path('by-hash'), ByHashHasContents(['A Contents file\n']))
+            suite_path('by-hash'),
+            ByHashHasContents(['A Contents file\n'] + releases))
 
         # Delete the first file, but allow it its stay of execution.
         os.unlink(suite_path('Contents-i386'))
+        self.advanceTime(delta=timedelta(hours=1))
         self.runSteps(publisher, step_d=True)
         flush_database_caches()
-        matchers[0] = matchers[0].update(scheduled_deletion_date=Not(Is(None)))
-        self.assertThat(get_contents_files(), MatchesSetwise(*matchers))
+        self.assertHasSuiteFiles(
+            ('Contents-*', 'Release'),
+            ('Contents-i386', self.times[2]), ('Contents-hppa', None),
+            ('Release', self.times[1]), ('Release', self.times[2]),
+            ('Release', None))
+        releases.append(get_release_contents())
         self.assertThat(
-            suite_path('by-hash'), ByHashHasContents(['A Contents file\n']))
+            suite_path('by-hash'),
+            ByHashHasContents(['A Contents file\n'] + releases))
 
         # A no-op run leaves the scheduled deletion date intact.
+        self.advanceTime(delta=timedelta(hours=1))
+        self.runSteps(publisher, step_d=True)
+        flush_database_caches()
+        self.assertHasSuiteFiles(
+            ('Contents-*', 'Release'),
+            ('Contents-i386', self.times[2]), ('Contents-hppa', None),
+            ('Release', self.times[1]), ('Release', self.times[2]),
+            ('Release', self.times[3]), ('Release', None))
+        releases.append(get_release_contents())
+        self.assertThat(
+            suite_path('by-hash'),
+            ByHashHasContents(['A Contents file\n'] + releases))
+
+        # Arrange for the first file to be pruned, and delete the second
+        # file. This also puts us past the stay of execution of the first
+        # two Release files.
         i386_file = getUtility(IArchiveFileSet).getByArchive(
             self.ubuntutest.main_archive,
             path='dists/breezy-autotest/Contents-i386').one()
-        i386_date = i386_file.scheduled_deletion_date
-        self.runSteps(publisher, step_d=True)
-        flush_database_caches()
-        matchers[0] = matchers[0].update(
-            scheduled_deletion_date=Equals(i386_date))
-        self.assertThat(get_contents_files(), MatchesSetwise(*matchers))
-        self.assertThat(
-            suite_path('by-hash'), ByHashHasContents(['A Contents file\n']))
-
-        # Arrange for the first file to be pruned, and delete the second
-        # file.
-        now = datetime.now(pytz.UTC)
-        removeSecurityProxy(i386_file).scheduled_deletion_date = (
-            now - timedelta(hours=1))
+        self.advanceTime(
+            absolute=i386_file.scheduled_deletion_date + timedelta(minutes=5))
         os.unlink(suite_path('Contents-hppa'))
         self.runSteps(publisher, step_d=True)
         flush_database_caches()
-        matchers = [matchers[1].update(scheduled_deletion_date=Not(Is(None)))]
-        self.assertThat(get_contents_files(), MatchesSetwise(*matchers))
+        self.assertHasSuiteFiles(
+            ('Contents-*', 'Release'),
+            ('Contents-hppa', self.times[4]),
+            ('Release', self.times[3]), ('Release', self.times[4]),
+            ('Release', None))
+        releases.append(get_release_contents())
         self.assertThat(
-            suite_path('by-hash'), ByHashHasContents(['A Contents file\n']))
+            suite_path('by-hash'),
+            ByHashHasContents(['A Contents file\n'] + releases[2:]))
 
-        # Arrange for the second file to be pruned.
+        # Arrange for the second file to be pruned. This also puts us past
+        # the stay of execution of the first two remaining Release files.
         hppa_file = getUtility(IArchiveFileSet).getByArchive(
             self.ubuntutest.main_archive,
             path='dists/breezy-autotest/Contents-hppa').one()
-        removeSecurityProxy(hppa_file).scheduled_deletion_date = (
-            now - timedelta(hours=1))
+        self.advanceTime(
+            absolute=hppa_file.scheduled_deletion_date + timedelta(minutes=5))
         self.runSteps(publisher, step_d=True)
         flush_database_caches()
-        self.assertContentEqual([], get_contents_files())
-        self.assertThat(suite_path('by-hash'), Not(PathExists()))
+        self.assertHasSuiteFiles(
+            ('Contents-*', 'Release'),
+            ('Release', self.times[5]), ('Release', None))
+        releases.append(get_release_contents())
+        self.assertThat(suite_path('by-hash'), ByHashHasContents(releases[4:]))
 
     def test_reprieve(self):
         # If a newly-modified index file is identical to a
@@ -2771,6 +2851,7 @@
         publisher = Publisher(
             self.logger, self.config, self.disk_pool,
             self.ubuntutest.main_archive)
+        self.setUpMockTime()
 
         # Publish empty index files.
         publisher.markPocketDirty(
@@ -2795,15 +2876,8 @@
             ByHashHasContents(main_contents))
 
         # Make the empty Sources file ready to prune.
-        old_archive_files = []
-        for archive_file in getUtility(IArchiveFileSet).getByArchive(
-                self.ubuntutest.main_archive):
-            if ('main/source' in archive_file.path and
-                    archive_file.scheduled_deletion_date is not None):
-                old_archive_files.append(archive_file)
-        self.assertEqual(1, len(old_archive_files))
-        removeSecurityProxy(old_archive_files[0]).scheduled_deletion_date = (
-            datetime.now(pytz.UTC) - timedelta(hours=1))
+        self.advanceTime(
+            delta=timedelta(days=BY_HASH_STAY_OF_EXECUTION, hours=1))
 
         # Delete the source package so that Sources is empty again. The
         # empty file is reprieved and the non-empty one is condemned.
@@ -2824,6 +2898,7 @@
             ]))
 
     def setUpPruneableSuite(self):
+        self.setUpMockTime()
         self.breezy_autotest.publish_by_hash = True
         self.breezy_autotest.advertise_by_hash = True
         publisher = Publisher(
@@ -2832,47 +2907,50 @@
 
         suite_path = partial(
             os.path.join, self.config.distsroot, 'breezy-autotest')
-        main_contents = set()
-        for sourcename in ('foo', 'bar'):
+        top_contents = []
+        main_contents = []
+        for sourcename in ('foo', 'bar', 'baz'):
             self.getPubSource(
                 sourcename=sourcename, filecontent='Source: %s\n' % sourcename)
             self.runSteps(publisher, step_a=True, step_c=True, step_d=True)
+            with open(suite_path('Release'), 'rb') as f:
+                top_contents.append(f.read())
             for name in ('Release', 'Sources.gz', 'Sources.bz2'):
                 with open(suite_path('main', 'source', name), 'rb') as f:
-                    main_contents.add(f.read())
+                    main_contents.append(f.read())
+            self.advanceTime(delta=timedelta(hours=6))
         transaction.commit()
 
+        # We have two condemned sets of index files and one uncondemned set.
+        # main/source/Release contains a small enough amount of information
+        # that it doesn't change.
+        expected_suite_files = (
+            list(product(
+                ('main/source/Sources.gz', 'main/source/Sources.bz2',
+                 'Release'),
+                (self.times[1], self.times[2], None))) +
+            [('main/source/Release', None)])
+        self.assertHasSuiteFiles(
+            ('main/source/*', 'Release'), *expected_suite_files)
+        self.assertThat(suite_path('by-hash'), ByHashHasContents(top_contents))
         self.assertThat(
             suite_path('main', 'source', 'by-hash'),
             ByHashHasContents(main_contents))
-        old_archive_files = []
-        for archive_file in getUtility(IArchiveFileSet).getByArchive(
-                self.ubuntutest.main_archive):
-            if ('main/source' in archive_file.path and
-                    archive_file.scheduled_deletion_date is not None):
-                old_archive_files.append(archive_file)
-        self.assertEqual(2, len(old_archive_files))
-
-        now = datetime.now(pytz.UTC)
-        removeSecurityProxy(old_archive_files[0]).scheduled_deletion_date = (
-            now + timedelta(hours=12))
-        removeSecurityProxy(old_archive_files[1]).scheduled_deletion_date = (
-            now - timedelta(hours=12))
-        old_archive_files[1].library_file.open()
-        try:
-            main_contents.remove(old_archive_files[1].library_file.read())
-        finally:
-            old_archive_files[1].library_file.close()
-        self.assertThat(
-            suite_path('main', 'source', 'by-hash'),
-            Not(ByHashHasContents(main_contents)))
-
-        return main_contents
+
+        # Advance time to the point where the first condemned set of index
+        # files is scheduled for deletion.
+        self.advanceTime(
+            absolute=self.times[1] + timedelta(
+                days=BY_HASH_STAY_OF_EXECUTION, hours=1))
+        del top_contents[0]
+        del main_contents[:3]
+
+        return top_contents, main_contents
 
     def test_prune(self):
         # The publisher prunes files from by-hash that were condemned more
         # than a day ago.
-        main_contents = self.setUpPruneableSuite()
+        top_contents, main_contents = self.setUpPruneableSuite()
         suite_path = partial(
             os.path.join, self.config.distsroot, 'breezy-autotest')
 
@@ -2882,7 +2960,19 @@
         self.logger, self.config, self.disk_pool,
             self.ubuntutest.main_archive)
         self.runSteps(publisher, step_a2=True, step_c=True, step_d=True)
+        transaction.commit()
         self.assertEqual(set(), publisher.dirty_pockets)
+        # The condemned index files are removed, and no new Release file is
+        # generated.
+        expected_suite_files = (
+            list(product(
+                ('main/source/Sources.gz', 'main/source/Sources.bz2'),
+                (self.times[2], None))) +
+            [('main/source/Release', None),
+             ('Release', self.times[2]), ('Release', None)])
+        self.assertHasSuiteFiles(
+            ('main/source/*', 'Release'), *expected_suite_files)
+        self.assertThat(suite_path('by-hash'), ByHashHasContents(top_contents))
         self.assertThat(
             suite_path('main', 'source', 'by-hash'),
             ByHashHasContents(main_contents))
@@ -2890,7 +2980,7 @@
     def test_prune_immutable(self):
         # The publisher prunes by-hash files from immutable suites, but
         # doesn't regenerate the Release file in that case.
-        main_contents = self.setUpPruneableSuite()
+        top_contents, main_contents = self.setUpPruneableSuite()
         suite_path = partial(
             os.path.join, self.config.distsroot, 'breezy-autotest')
         release_path = suite_path('Release')
@@ -2903,8 +2993,20 @@
             self.logger, self.config, self.disk_pool,
             self.ubuntutest.main_archive)
         self.runSteps(publisher, step_a2=True, step_c=True, step_d=True)
+        transaction.commit()
         self.assertEqual(set(), publisher.dirty_pockets)
         self.assertEqual(release_mtime, os.stat(release_path).st_mtime)
+        # The condemned index files are removed, and no new Release file is
+        # generated.
+        expected_suite_files = (
+            list(product(
+                ('main/source/Sources.gz', 'main/source/Sources.bz2'),
+                (self.times[2], None))) +
+            [('main/source/Release', None),
+             ('Release', self.times[2]), ('Release', None)])
+        self.assertHasSuiteFiles(
+            ('main/source/*', 'Release'), *expected_suite_files)
+        self.assertThat(suite_path('by-hash'), ByHashHasContents(top_contents))
         self.assertThat(
             suite_path('main', 'source', 'by-hash'),
             ByHashHasContents(main_contents))
 
=== modified file 'lib/lp/soyuz/model/archivefile.py'
--- lib/lp/soyuz/model/archivefile.py 2016-04-04 10:06:33 +0000
+++ lib/lp/soyuz/model/archivefile.py 2018-03-27 23:27:31 +0000
@@ -1,4 +1,4 @@
-# Copyright 2016 Canonical Ltd. This software is licensed under the
+# Copyright 2016-2018 Canonical Ltd. This software is licensed under the
 # GNU Affero General Public License version 3 (see the file LICENSE).
 
 """A file in an archive."""
@@ -33,6 +33,7 @@
     IMasterStore,
     IStore,
     )
+from lp.services.database.sqlbase import convert_storm_clause_to_string
 from lp.services.database.stormexpr import BulkUpdate
 from lp.services.librarian.interfaces import ILibraryFileAliasSet
 from lp.services.librarian.model import (
@@ -76,6 +77,15 @@
         self.scheduled_deletion_date = None
 
 
+def _now():
+    """Get the current transaction timestamp.
+
+    Tests can override this with a Storm expression or a `datetime` to
+    simulate time changes.
+    """
+    return UTC_NOW
+
+
 @implementer(IArchiveFileSet)
 class ArchiveFileSet:
     """See `IArchiveFileSet`."""
@@ -128,7 +138,7 @@
             ArchiveFile.library_file == LibraryFileAlias.id,
             LibraryFileAlias.content == LibraryFileContent.id,
             ]
-        new_date = UTC_NOW + stay_of_execution
+        new_date = _now() + stay_of_execution
         return_columns = [
             ArchiveFile.container, ArchiveFile.path, LibraryFileContent.sha256]
         return list(IMasterStore(ArchiveFile).execute(Returning(
@@ -162,7 +172,7 @@
     def getContainersToReap(archive, container_prefix=None):
         clauses = [
            ArchiveFile.archive == archive,
-            ArchiveFile.scheduled_deletion_date < UTC_NOW,
+            ArchiveFile.scheduled_deletion_date < _now(),
            ]
         if container_prefix is not None:
             clauses.append(ArchiveFile.container.startswith(container_prefix))
@@ -175,22 +185,20 @@
         # XXX cjwatson 2016-03-30 bug=322972: Requires manual SQL due to
         # lack of support for DELETE FROM ... USING ... in Storm.
         clauses = [
-            "ArchiveFile.archive = ?",
-            "ArchiveFile.scheduled_deletion_date < "
-            "CURRENT_TIMESTAMP AT TIME ZONE 'UTC'",
-            "ArchiveFile.library_file = LibraryFileAlias.id",
-            "LibraryFileAlias.content = LibraryFileContent.id",
+            ArchiveFile.archive == archive,
+            ArchiveFile.scheduled_deletion_date < _now(),
+            ArchiveFile.library_file_id == LibraryFileAlias.id,
+            LibraryFileAlias.contentID == LibraryFileContent.id,
             ]
-        values = [archive.id]
         if container is not None:
-            clauses.append("ArchiveFile.container = ?")
-            values.append(container)
+            clauses.append(ArchiveFile.container == container)
+        where = convert_storm_clause_to_string(And(*clauses))
         return list(IMasterStore(ArchiveFile).execute("""
             DELETE FROM ArchiveFile
             USING LibraryFileAlias, LibraryFileContent
-            WHERE """ + " AND ".join(clauses) + """
+            WHERE """ + where + """
             RETURNING
                 ArchiveFile.container,
                 ArchiveFile.path,
                 LibraryFileContent.sha256
-            """, values))
+            """))