Merge lp:~cjwatson/launchpad/remove-archive-cruft-check into lp:launchpad
- remove-archive-cruft-check
- Merge into devel
Proposed by
Colin Watson
Status: | Merged |
---|---|
Approved by: | Francis J. Lacoste |
Approved revision: | no longer in the source branch. |
Merged at revision: | 14075 |
Proposed branch: | lp:~cjwatson/launchpad/remove-archive-cruft-check |
Merge into: | lp:launchpad |
Diff against target: |
675 lines (+0/-637) 4 files modified
lib/lp/soyuz/doc/soyuz-upload.txt (+0/-30) lib/lp/soyuz/scripts/ftpmaster.py (+0/-404) lib/lp/soyuz/scripts/tests/test_archivecruftchecker.py (+0/-147) scripts/ftpmaster-tools/archive-cruft-check.py (+0/-56) |
To merge this branch: | bzr merge lp:~cjwatson/launchpad/remove-archive-cruft-check |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
j.c.sackett (community) | Approve | ||
Review via email: mp+77524@code.launchpad.net |
Commit message
[r=jcsackett]
Description of the change
Remove archive-cruft-check.
I believe this is qa-untestable. Robert and William expressed general approbation on IRC.
To post a comment you must log in.
Revision history for this message
j.c.sackett (jcsackett) wrote : | # |
... and assumption confirmed.
review:
Approve
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'lib/lp/soyuz/doc/soyuz-upload.txt' |
2 | --- lib/lp/soyuz/doc/soyuz-upload.txt 2011-09-29 09:37:58 +0000 |
3 | +++ lib/lp/soyuz/doc/soyuz-upload.txt 2011-09-29 13:11:28 +0000 |
4 | @@ -615,36 +615,6 @@ |
5 | END |
6 | |
7 | |
8 | -Testing archive-cruft-check-ng behaviour: |
9 | -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ |
10 | - |
11 | -Defining path to the script: |
12 | - |
13 | - >>> script = os.path.join(config.root, "scripts", "ftpmaster-tools", |
14 | - ... "archive-cruft-check.py") |
15 | - >>> process = subprocess.Popen([sys.executable, script, "-vn", |
16 | - ... "-d", "ubuntutest", |
17 | - ... "-s", "breezy-autotest", |
18 | - ... "/var/tmp/archive"], |
19 | - ... stdout=subprocess.PIPE, |
20 | - ... stderr=subprocess.PIPE,) |
21 | - >>> stdout, stderr = process.communicate() |
22 | - >>> process.returncode |
23 | - 0 |
24 | - >>> print stderr |
25 | - INFO Creating lockfile: ... |
26 | - DEBUG Considering Sources: |
27 | - DEBUG Processing /var/tmp/archive/ubuntutest/dists/breezy-autotest/restricted/source/Sources.gz |
28 | - DEBUG Processing /var/tmp/archive/ubuntutest/dists/breezy-autotest/main/source/Sources.gz |
29 | - DEBUG Processing /var/tmp/archive/ubuntutest/dists/breezy-autotest/multiverse/source/Sources.gz |
30 | - DEBUG Processing /var/tmp/archive/ubuntutest/dists/breezy-autotest/universe/source/Sources.gz |
31 | - DEBUG Building not build from source list (NBS): |
32 | - DEBUG Building all superseded by any list (ASBA): |
33 | - DEBUG No NBS found |
34 | - DEBUG Removing lock file: ... |
35 | - <BLANKLINE> |
36 | - |
37 | - |
38 | Nice! That's enough for now.. let's kill the process and clean |
39 | everything up. |
40 | |
41 | |
42 | === modified file 'lib/lp/soyuz/scripts/ftpmaster.py' |
43 | --- lib/lp/soyuz/scripts/ftpmaster.py 2011-09-05 03:12:47 +0000 |
44 | +++ lib/lp/soyuz/scripts/ftpmaster.py 2011-09-29 13:11:28 +0000 |
45 | @@ -6,8 +6,6 @@ |
46 | __metaclass__ = type |
47 | |
48 | __all__ = [ |
49 | - 'ArchiveCruftChecker', |
50 | - 'ArchiveCruftCheckerError', |
51 | 'ChrootManager', |
52 | 'ChrootManagerError', |
53 | 'LpQueryDistro', |
54 | @@ -69,408 +67,6 @@ |
55 | ) |
56 | |
57 | |
58 | -class ArchiveCruftCheckerError(Exception): |
59 | - """ArchiveCruftChecker specific exception. |
60 | - |
61 | - Mostly used to describe errors in the initialization of this object. |
62 | - """ |
63 | - |
64 | - |
65 | -class TagFileNotFound(Exception): |
66 | - """Raised when an archive tag file could not be found.""" |
67 | - |
68 | - |
69 | -class ArchiveCruftChecker: |
70 | - """Perform overall checks to identify and remove obsolete records. |
71 | - |
72 | - Use initialize() method to validate passed parameters and build the |
73 | - infrastructure variables. It will raise ArchiveCruftCheckerError if |
74 | - something goes wrong. |
75 | - """ |
76 | - |
77 | - # XXX cprov 2006-05-15: the default archive path should come |
78 | - # from the config. |
79 | - def __init__(self, logger, distribution_name='ubuntu', suite=None, |
80 | - archive_path='/srv/launchpad.net/ubuntu-archive'): |
81 | - """Store passed arguments. |
82 | - |
83 | - Also Initialize empty variables for storing preliminar results. |
84 | - """ |
85 | - self.distribution_name = distribution_name |
86 | - self.suite = suite |
87 | - self.archive_path = archive_path |
88 | - self.logger = logger |
89 | - # initialize a group of variables to store temporary results |
90 | - # available versions of published sources |
91 | - self.source_versions = {} |
92 | - # available binaries produced by published sources |
93 | - self.source_binaries = {} |
94 | - # 'Not Build From Source' binaries |
95 | - self.nbs = {} |
96 | - # 'All superseded by Any' binaries |
97 | - self.asba = {} |
98 | - # published binary package names |
99 | - self.bin_pkgs = {} |
100 | - # Architecture specific binary packages |
101 | - self.arch_any = {} |
102 | - # proposed NBS (before clean up) |
103 | - self.dubious_nbs = {} |
104 | - # NBS after clean up |
105 | - self.real_nbs = {} |
106 | - # definitive NBS organized for clean up |
107 | - self.nbs_to_remove = [] |
108 | - |
109 | - @property |
110 | - def architectures(self): |
111 | - return dict([(a.architecturetag, a) |
112 | - for a in self.distroseries.architectures]) |
113 | - |
114 | - @property |
115 | - def components(self): |
116 | - return dict([(c.name, c) for c in self.distroseries.components]) |
117 | - |
118 | - @property |
119 | - def components_and_di(self): |
120 | - components_and_di = [] |
121 | - for component in self.components: |
122 | - components_and_di.append(component) |
123 | - components_and_di.append('%s/debian-installer' % (component)) |
124 | - return components_and_di |
125 | - |
126 | - @property |
127 | - def dist_archive(self): |
128 | - return os.path.join( |
129 | - self.archive_path, self.distro.name, 'dists', |
130 | - self.distroseries.name + pocketsuffix[self.pocket]) |
131 | - |
132 | - def gunzipTagFileContent(self, filename): |
133 | - """Gunzip the contents of passed filename. |
134 | - |
135 | - Check filename presence, if not present in the filesystem, |
136 | - raises ArchiveCruftCheckerError. Use an tempfile.mkstemp() |
137 | - to store the uncompressed content. Invoke system available |
138 | - gunzip`, raises ArchiveCruftCheckError if it fails. |
139 | - |
140 | - This method doesn't close the file descriptor used and does not |
141 | - remove the temporary file from the filesystem, those actions |
142 | - are required in the callsite. (apt_pkg.ParseTagFile is lazy) |
143 | - |
144 | - Return a tuple containing: |
145 | - * temp file descriptor |
146 | - * temp filename |
147 | - * the contents parsed by apt_pkg.ParseTagFile() |
148 | - """ |
149 | - if not os.path.exists(filename): |
150 | - raise TagFileNotFound("File does not exist: %s" % filename) |
151 | - |
152 | - temp_fd, temp_filename = tempfile.mkstemp() |
153 | - (result, output) = commands.getstatusoutput( |
154 | - "gunzip -c %s > %s" % (filename, temp_filename)) |
155 | - if result != 0: |
156 | - raise ArchiveCruftCheckerError( |
157 | - "Gunzip invocation failed!\n%s" % output) |
158 | - |
159 | - temp_file = os.fdopen(temp_fd) |
160 | - # XXX cprov 2006-05-15: maybe we need some sort of data integrity |
161 | - # check at this point, and maybe keep the uncrompressed file |
162 | - # for debug purposes, let's see how it behaves in real conditions. |
163 | - parsed_contents = apt_pkg.ParseTagFile(temp_file) |
164 | - |
165 | - return temp_file, temp_filename, parsed_contents |
166 | - |
167 | - def processSources(self): |
168 | - """Process archive sources index. |
169 | - |
170 | - Build source_binaries, source_versions and bin_pkgs lists. |
171 | - """ |
172 | - self.logger.debug("Considering Sources:") |
173 | - for component in self.components: |
174 | - filename = os.path.join( |
175 | - self.dist_archive, "%s/source/Sources.gz" % component) |
176 | - |
177 | - self.logger.debug("Processing %s" % filename) |
178 | - try: |
179 | - temp_fd, temp_filename, parsed_sources = ( |
180 | - self.gunzipTagFileContent(filename)) |
181 | - except TagFileNotFound, warning: |
182 | - self.logger.warn(warning) |
183 | - return |
184 | - try: |
185 | - while parsed_sources.Step(): |
186 | - source = parsed_sources.Section.Find("Package") |
187 | - source_version = parsed_sources.Section.Find("Version") |
188 | - binaries = parsed_sources.Section.Find("Binary") |
189 | - for binary in [ |
190 | - item.strip() for item in binaries.split(',')]: |
191 | - self.bin_pkgs.setdefault(binary, []) |
192 | - self.bin_pkgs[binary].append(source) |
193 | - |
194 | - self.source_binaries[source] = binaries |
195 | - self.source_versions[source] = source_version |
196 | - finally: |
197 | - # close fd and remove temporary file used to store |
198 | - # uncompressed tag file content from the filesystem. |
199 | - temp_fd.close() |
200 | - os.unlink(temp_filename) |
201 | - |
202 | - def buildNBS(self): |
203 | - """Build the group of 'not build from source' binaries""" |
204 | - # Checks based on the Packages files |
205 | - self.logger.debug("Building not build from source list (NBS):") |
206 | - for component in self.components_and_di: |
207 | - for architecture in self.architectures: |
208 | - self.buildArchNBS(component, architecture) |
209 | - |
210 | - def buildArchNBS(self, component, architecture): |
211 | - """Build NBS per architecture. |
212 | - |
213 | - Store results in self.nbs, also build architecture specific |
214 | - binaries group (stored in self.arch_any) |
215 | - """ |
216 | - filename = os.path.join( |
217 | - self.dist_archive, |
218 | - "%s/binary-%s/Packages.gz" % (component, architecture)) |
219 | - |
220 | - self.logger.debug("Processing %s" % filename) |
221 | - try: |
222 | - temp_fd, temp_filename, parsed_packages = ( |
223 | - self.gunzipTagFileContent(filename)) |
224 | - except TagFileNotFound, warning: |
225 | - self.logger.warn(warning) |
226 | - return |
227 | - |
228 | - try: |
229 | - while parsed_packages.Step(): |
230 | - package = parsed_packages.Section.Find('Package') |
231 | - source = parsed_packages.Section.Find('Source', "") |
232 | - version = parsed_packages.Section.Find('Version') |
233 | - architecture = parsed_packages.Section.Find('Architecture') |
234 | - |
235 | - if source == "": |
236 | - source = package |
237 | - |
238 | - if source.find("(") != -1: |
239 | - m = re_extract_src_version.match(source) |
240 | - source = m.group(1) |
241 | - version = m.group(2) |
242 | - |
243 | - if package not in self.bin_pkgs: |
244 | - self.nbs.setdefault(source, {}) |
245 | - self.nbs[source].setdefault(package, {}) |
246 | - self.nbs[source][package][version] = "" |
247 | - |
248 | - if architecture != "all": |
249 | - self.arch_any.setdefault(package, "0") |
250 | - if apt_pkg.VersionCompare( |
251 | - version, self.arch_any[package]) < 1: |
252 | - self.arch_any[package] = version |
253 | - finally: |
254 | - # close fd and remove temporary file used to store uncompressed |
255 | - # tag file content from the filesystem. |
256 | - temp_fd.close() |
257 | - os.unlink(temp_filename) |
258 | - |
259 | - def buildASBA(self): |
260 | - """Build the group of 'all superseded by any' binaries.""" |
261 | - self.logger.debug("Building all superseded by any list (ASBA):") |
262 | - for component in self.components_and_di: |
263 | - for architecture in self.architectures: |
264 | - self.buildArchASBA(component, architecture) |
265 | - |
266 | - def buildArchASBA(self, component, architecture): |
267 | - """Build ASBA per architecture. |
268 | - |
269 | - Store the result in self.asba, require self.arch_any to be built |
270 | - previously. |
271 | - """ |
272 | - filename = os.path.join( |
273 | - self.dist_archive, |
274 | - "%s/binary-%s/Packages.gz" % (component, architecture)) |
275 | - |
276 | - try: |
277 | - temp_fd, temp_filename, parsed_packages = ( |
278 | - self.gunzipTagFileContent(filename)) |
279 | - except TagFileNotFound, warning: |
280 | - self.logger.warn(warning) |
281 | - return |
282 | - |
283 | - try: |
284 | - while parsed_packages.Step(): |
285 | - package = parsed_packages.Section.Find('Package') |
286 | - source = parsed_packages.Section.Find('Source', "") |
287 | - version = parsed_packages.Section.Find('Version') |
288 | - architecture = parsed_packages.Section.Find('Architecture') |
289 | - |
290 | - if source == "": |
291 | - source = package |
292 | - |
293 | - if source.find("(") != -1: |
294 | - m = re_extract_src_version.match(source) |
295 | - source = m.group(1) |
296 | - version = m.group(2) |
297 | - |
298 | - if architecture == "all": |
299 | - if (package in self.arch_any and |
300 | - apt_pkg.VersionCompare( |
301 | - version, self.arch_any[package]) > -1): |
302 | - self.asba.setdefault(source, {}) |
303 | - self.asba[source].setdefault(package, {}) |
304 | - self.asba[source][package].setdefault(version, {}) |
305 | - self.asba[source][package][version][architecture] = "" |
306 | - finally: |
307 | - # close fd and remove temporary file used to store uncompressed |
308 | - # tag file content from the filesystem. |
309 | - temp_fd.close() |
310 | - os.unlink(temp_filename) |
311 | - |
312 | - def addNBS(self, nbs_d, source, version, package): |
313 | - """Add a new entry in given organized nbs_d list |
314 | - |
315 | - Ensure the package is still published in the suite before add. |
316 | - """ |
317 | - result = self.distroseries.getBinaryPackagePublishing(name=package) |
318 | - |
319 | - if len(list(result)) == 0: |
320 | - return |
321 | - |
322 | - nbs_d.setdefault(source, {}) |
323 | - nbs_d[source].setdefault(version, {}) |
324 | - nbs_d[source][version][package] = "" |
325 | - |
326 | - def refineNBS(self): |
327 | - """ Distinguish dubious from real NBS. |
328 | - |
329 | - They are 'dubious' if the version numbers match and 'real' |
330 | - if the versions don't match. |
331 | - It stores results in self.dubious_nbs and self.real_nbs. |
332 | - """ |
333 | - for source in self.nbs.keys(): |
334 | - for package in self.nbs[source].keys(): |
335 | - versions = self.nbs[source][package].keys() |
336 | - versions.sort(apt_pkg.VersionCompare) |
337 | - latest_version = versions.pop() |
338 | - |
339 | - source_version = self.source_versions.get(source, "0") |
340 | - |
341 | - if apt_pkg.VersionCompare(latest_version, |
342 | - source_version) == 0: |
343 | - self.addNBS(self.dubious_nbs, source, latest_version, |
344 | - package) |
345 | - else: |
346 | - self.addNBS(self.real_nbs, source, latest_version, |
347 | - package) |
348 | - |
349 | - def outputNBS(self): |
350 | - """Properly display built NBS entries. |
351 | - |
352 | - Also organize the 'real' NBSs for removal in self.nbs_to_remove |
353 | - attribute. |
354 | - """ |
355 | - output = "Not Built from Source\n" |
356 | - output += "---------------------\n\n" |
357 | - |
358 | - nbs_keys = self.real_nbs.keys() |
359 | - nbs_keys.sort() |
360 | - |
361 | - for source in nbs_keys: |
362 | - proposed_bin = self.source_binaries.get( |
363 | - source, "(source does not exist)") |
364 | - porposed_version = self.source_versions.get(source, "??") |
365 | - output += (" * %s_%s builds: %s\n" |
366 | - % (source, porposed_version, proposed_bin)) |
367 | - output += "\tbut no longer builds:\n" |
368 | - versions = self.real_nbs[source].keys() |
369 | - versions.sort(apt_pkg.VersionCompare) |
370 | - |
371 | - for version in versions: |
372 | - packages = self.real_nbs[source][version].keys() |
373 | - packages.sort() |
374 | - |
375 | - for pkg in packages: |
376 | - self.nbs_to_remove.append(pkg) |
377 | - |
378 | - output += " o %s: %s\n" % ( |
379 | - version, ", ".join(packages)) |
380 | - |
381 | - output += "\n" |
382 | - |
383 | - if self.nbs_to_remove: |
384 | - self.logger.info(output) |
385 | - else: |
386 | - self.logger.debug("No NBS found") |
387 | - |
388 | - def initialize(self): |
389 | - """Initialize and build required lists of obsolete entries in archive. |
390 | - |
391 | - Check integrity of passed parameters and store organised data. |
392 | - The result list is the self.nbs_to_remove which should contain |
393 | - obsolete packages not currently able to be built from again. |
394 | - Another preliminary lists can be inspected in order to have better |
395 | - idea of what was computed. |
396 | - If anything goes wrong mid-process, it raises ArchiveCruftCheckError, |
397 | - otherwise a list of packages to be removes is printed. |
398 | - """ |
399 | - if self.distribution_name is None: |
400 | - self.distro = getUtility(ILaunchpadCelebrities).ubuntu |
401 | - else: |
402 | - try: |
403 | - self.distro = getUtility(IDistributionSet)[ |
404 | - self.distribution_name] |
405 | - except NotFoundError: |
406 | - raise ArchiveCruftCheckerError( |
407 | - "Invalid distribution: '%s'" % self.distribution_name) |
408 | - |
409 | - if not self.suite: |
410 | - self.distroseries = self.distro.currentseries |
411 | - self.pocket = PackagePublishingPocket.RELEASE |
412 | - else: |
413 | - try: |
414 | - self.distroseries, self.pocket = ( |
415 | - self.distro.getDistroSeriesAndPocket(self.suite)) |
416 | - except NotFoundError: |
417 | - raise ArchiveCruftCheckerError( |
418 | - "Invalid suite: '%s'" % self.suite) |
419 | - |
420 | - if not os.path.exists(self.dist_archive): |
421 | - raise ArchiveCruftCheckerError( |
422 | - "Invalid archive path: '%s'" % self.dist_archive) |
423 | - |
424 | - apt_pkg.init() |
425 | - self.processSources() |
426 | - self.buildNBS() |
427 | - self.buildASBA() |
428 | - self.refineNBS() |
429 | - self.outputNBS() |
430 | - |
431 | - def doRemovals(self): |
432 | - """Perform the removal of the obsolete packages found. |
433 | - |
434 | - It iterates over the previously build list (self.nbs_to_remove) |
435 | - and mark them as 'superseded' in the archive DB model. They will |
436 | - get removed later by the archive sanity check run each cycle |
437 | - of the cron.daily. |
438 | - """ |
439 | - for package in self.nbs_to_remove: |
440 | - |
441 | - for distroarchseries in self.distroseries.architectures: |
442 | - binarypackagename = getUtility(IBinaryPackageNameSet)[package] |
443 | - dasbp = distroarchseries.getBinaryPackage(binarypackagename) |
444 | - dasbpr = dasbp.currentrelease |
445 | - try: |
446 | - bpph = dasbpr.current_publishing_record |
447 | - bpph.supersede() |
448 | - # We're blindly removing for all arches, if it's not there |
449 | - # for some, that's fine ... |
450 | - except NotFoundError: |
451 | - pass |
452 | - else: |
453 | - version = bpph.binarypackagerelease.version |
454 | - self.logger.info("Removed %s_%s from %s/%s ... " |
455 | - % (package, version, |
456 | - self.distroseries.name, |
457 | - distroarchseries.architecturetag)) |
458 | - |
459 | - |
460 | class PubBinaryContent: |
461 | """Binary publication container. |
462 | |
463 | |
464 | === removed file 'lib/lp/soyuz/scripts/tests/test_archivecruftchecker.py' |
465 | --- lib/lp/soyuz/scripts/tests/test_archivecruftchecker.py 2011-07-20 14:46:38 +0000 |
466 | +++ lib/lp/soyuz/scripts/tests/test_archivecruftchecker.py 1970-01-01 00:00:00 +0000 |
467 | @@ -1,147 +0,0 @@ |
468 | -# Copyright 2009-2010 Canonical Ltd. This software is licensed under the |
469 | -# GNU Affero General Public License version 3 (see the file LICENSE). |
470 | - |
471 | -"""ArchiveCruftChecker tests. |
472 | - |
473 | -Check how scripts/ftpmaster-tools/archive-cruft-check.py works on a |
474 | -just-published 'ubuntutest' archive. |
475 | -""" |
476 | - |
477 | -__metaclass__ = type |
478 | - |
479 | -import shutil |
480 | -import transaction |
481 | -import unittest |
482 | - |
483 | -from zope.component import getUtility |
484 | - |
485 | -from canonical.config import config |
486 | -from canonical.testing.layers import LaunchpadZopelessLayer |
487 | -from lp.registry.interfaces.distribution import IDistributionSet |
488 | -from lp.registry.interfaces.pocket import PackagePublishingPocket |
489 | -from lp.services.log.logger import BufferLogger |
490 | -from lp.soyuz.scripts.ftpmaster import ( |
491 | - ArchiveCruftChecker, |
492 | - ArchiveCruftCheckerError, |
493 | - ) |
494 | -from lp.soyuz.scripts.publishdistro import PublishDistro |
495 | - |
496 | - |
497 | -# XXX cprov 2006-05-15: {create, remove}TestArchive functions should be |
498 | -# moved to the publisher test domain as soon as we have it. |
499 | -def createTestArchive(): |
500 | - """Creates a fresh test archive based on sampledata.""" |
501 | - script = PublishDistro(test_args=["-C", "-q", "-d", "ubuntutest"]) |
502 | - script.txn = transaction |
503 | - script.main() |
504 | - |
505 | - |
506 | -def removeTestArchive(): |
507 | - # XXX JeroenVermeulen 2011-07-20 bug=813538: Use a temporary |
508 | - # directory so we don't have to commit this horror. |
509 | - """Remove the entire test archive directory from the filesystem.""" |
510 | - shutil.rmtree("/var/tmp/archive/") |
511 | - |
512 | - |
513 | -class TestArchiveCruftChecker(unittest.TestCase): |
514 | - layer = LaunchpadZopelessLayer |
515 | - |
516 | - def setUp(self): |
517 | - """Setup the test environment.""" |
518 | - self.layer.switchDbUser(config.archivepublisher.dbuser) |
519 | - self.log = BufferLogger() |
520 | - self.ubuntutest = getUtility(IDistributionSet)['ubuntutest'] |
521 | - self.breezy_autotest = self.ubuntutest['breezy-autotest'] |
522 | - self.archive_path = "/var/tmp/archive" |
523 | - createTestArchive() |
524 | - |
525 | - def tearDown(self): |
526 | - """Clean up test environment and remove the test archive.""" |
527 | - removeTestArchive() |
528 | - |
529 | - def testInitializeSuccess(self): |
530 | - """Test ArchiveCruftChecker initialization process. |
531 | - |
532 | - Check if the correct attributes are built after initialization. |
533 | - """ |
534 | - checker = ArchiveCruftChecker( |
535 | - self.log, distribution_name='ubuntutest', suite='breezy-autotest', |
536 | - archive_path=self.archive_path) |
537 | - checker.initialize() |
538 | - |
539 | - self.assertEqual(self.ubuntutest, checker.distro) |
540 | - self.assertEqual(self.breezy_autotest, checker.distroseries) |
541 | - self.assertEqual(PackagePublishingPocket.RELEASE, checker.pocket) |
542 | - self.assertEqual(0, len(checker.nbs_to_remove)) |
543 | - self.assertEqual(0, len(checker.real_nbs)) |
544 | - self.assertEqual(0, len(checker.dubious_nbs)) |
545 | - self.assertEqual(0, len(checker.bin_pkgs)) |
546 | - self.assertEqual(0, len(checker.arch_any)) |
547 | - self.assertEqual(0, len(checker.source_versions)) |
548 | - self.assertEqual(0, len(checker.source_binaries)) |
549 | - |
550 | - # The 'dist_archive' is an absolute path to the 'dists' section |
551 | - # based on the given 'archive_path'. |
552 | - self.assertEqual( |
553 | - checker.dist_archive, |
554 | - '/var/tmp/archive/ubuntutest/dists/breezy-autotest') |
555 | - |
556 | - # The 'components' dictionary contains all components selected |
557 | - # for the given distroseries organized as: |
558 | - # {$component_name: IComponent, ...} |
559 | - for component_name, component in checker.components.iteritems(): |
560 | - self.assertEqual(component_name, component.name) |
561 | - checker_components = sorted( |
562 | - [component_name for component_name in checker.components.keys()]) |
563 | - self.assertEqual( |
564 | - checker_components, |
565 | - ['main', 'multiverse', 'restricted', 'universe']) |
566 | - |
567 | - # The 'components_and_di' lists the relative 'dists' paths |
568 | - # for all components subsections of the archive which contain |
569 | - # indexes. |
570 | - expected = [ |
571 | - 'main', |
572 | - 'main/debian-installer', |
573 | - 'multiverse', |
574 | - 'multiverse/debian-installer', |
575 | - 'restricted', |
576 | - 'restricted/debian-installer', |
577 | - 'universe', |
578 | - 'universe/debian-installer', |
579 | - ] |
580 | - self.assertEqual(sorted(checker.components_and_di), expected) |
581 | - |
582 | - def testSuiteDistArchive(self): |
583 | - """Check if 'dist_archive' path considers pocket correctly.""" |
584 | - checker = ArchiveCruftChecker( |
585 | - self.log, distribution_name='ubuntutest', |
586 | - suite='breezy-autotest-security', |
587 | - archive_path=self.archive_path) |
588 | - checker.initialize() |
589 | - |
590 | - self.assertEqual( |
591 | - checker.dist_archive, |
592 | - '/var/tmp/archive/ubuntutest/dists/breezy-autotest-security') |
593 | - |
594 | - def testInitializeFailure(self): |
595 | - """ArchiveCruftCheck initialization failures. |
596 | - |
597 | - * An unknown suite; |
598 | - * An unknown distribution; |
599 | - * The absence of the distribution in the given archive path. |
600 | - """ |
601 | - checker = ArchiveCruftChecker( |
602 | - self.log, distribution_name='ubuntu', suite='miserable', |
603 | - archive_path=self.archive_path) |
604 | - self.assertRaises(ArchiveCruftCheckerError, checker.initialize) |
605 | - |
606 | - checker = ArchiveCruftChecker( |
607 | - self.log, distribution_name='foobuntu', suite='breezy-autotest', |
608 | - archive_path=self.archive_path) |
609 | - self.assertRaises(ArchiveCruftCheckerError, checker.initialize) |
610 | - |
611 | - checker = ArchiveCruftChecker( |
612 | - self.log, distribution_name='ubuntu', suite='breezy-autotest', |
613 | - archive_path=self.archive_path) |
614 | - self.assertRaises(ArchiveCruftCheckerError, checker.initialize) |
615 | |
616 | === removed file 'scripts/ftpmaster-tools/archive-cruft-check.py' |
617 | --- scripts/ftpmaster-tools/archive-cruft-check.py 2011-09-18 05:45:56 +0000 |
618 | +++ scripts/ftpmaster-tools/archive-cruft-check.py 1970-01-01 00:00:00 +0000 |
619 | @@ -1,56 +0,0 @@ |
620 | -#!/usr/bin/python -S |
621 | -# |
622 | -# Copyright 2009 Canonical Ltd. This software is licensed under the |
623 | -# GNU Affero General Public License version 3 (see the file LICENSE). |
624 | - |
625 | -# pylint: disable-msg=W0403 |
626 | - |
627 | -"""Archive Cruft checker. |
628 | - |
629 | -A kind of archive garbage collector, supersede NBS binaries (not build |
630 | -from source). |
631 | -""" |
632 | - |
633 | -import _pythonpath |
634 | - |
635 | -from canonical.config import config |
636 | -from lp.services.scripts.base import LaunchpadScript, LaunchpadScriptFailure |
637 | -from lp.soyuz.scripts.ftpmaster import ( |
638 | - ArchiveCruftChecker, ArchiveCruftCheckerError) |
639 | - |
640 | - |
641 | -class ArchiveCruftCheckerScript(LaunchpadScript): |
642 | - |
643 | - usage = "Usage: archive-cruft-check.py [options] <ARCHIVE_PATH>" |
644 | - |
645 | - def add_my_options(self): |
646 | - self.parser.add_option( |
647 | - "-d", "--distro", dest="distro", help="remove from DISTRO") |
648 | - self.parser.add_option( |
649 | - "-n", "--no-action", dest="action", default=True, |
650 | - action="store_false", help="don't do anything") |
651 | - self.parser.add_option( |
652 | - "-s", "--suite", dest="suite", help="only act on SUITE") |
653 | - |
654 | - def main(self): |
655 | - if len(self.args) != 1: |
656 | - self.parser.error('ARCHIVEPATH is require') |
657 | - archive_path = self.args[0] |
658 | - |
659 | - checker = ArchiveCruftChecker( |
660 | - self.logger, distribution_name=self.options.distro, |
661 | - suite=self.options.suite, archive_path=archive_path) |
662 | - |
663 | - try: |
664 | - checker.initialize() |
665 | - except ArchiveCruftCheckerError, info: |
666 | - raise LaunchpadScriptFailure(info) |
667 | - |
668 | - # XXX cprov 2007-06-26 bug=121784: Disabling by distro-team request. |
669 | - # if checker.nbs_to_remove and options.action: |
670 | - # checker.doRemovals() |
671 | - # ztm.commit() |
672 | - |
673 | -if __name__ == '__main__': |
674 | - ArchiveCruftCheckerScript( |
675 | - 'archive-cruft-check', config.archivepublisher.dbuser).lock_and_run() |
So, this code looks fine to land, assuming that I'm understanding correctly that only the Ubuntu archive admins ever used this functionality.
I've confirmed that nothing else seems to use any of this code, so I have no worries about it breaking.
Colin, if you can confirm my assumption is correct, I will be happy to mark this as approved.
Likewise, if someone else in the reviewer team is looking at this and can confirm my assumption, feel free to just vote approve on this.