Merge lp:~cyphermox/britney/whitespace into lp:britney

Proposed by Mathieu Trudel-Lapierre
Status: Superseded
Proposed branch: lp:~cyphermox/britney/whitespace
Merge into: lp:britney
Diff against target: 3161 lines (+1355/-656)
15 files modified
INSTALL (+3/-3)
autopkgtest.py (+189/-0)
britney.conf (+36/-27)
britney.py (+582/-557)
britney_nobreakall.conf (+37/-28)
britney_util.py (+375/-0)
consts.py (+37/-0)
excuse.py (+18/-3)
hints.py (+20/-4)
lib/Makefile (+1/-1)
lib/britney-py.c (+16/-14)
lib/dpkg.c (+25/-7)
lib/dpkg.h (+3/-0)
lib/example.py (+10/-9)
migrationitem.py (+3/-3)
To merge this branch: bzr merge lp:~cyphermox/britney/whitespace
Reviewer: Colin Watson (status: Pending)
Review via email: mp+196370@code.launchpad.net

This proposal has been superseded by a proposal from 2013-11-23.

Description of the change

Fix whitespace between blocks of package installation checks.

Revision history for this message
Mathieu Trudel-Lapierre (cyphermox) wrote:

Hmm, looks like there's been an issue with the branch I used to make the commit. Setting this back to Work in Progress.

Unmerged revisions

383. By Mathieu Trudel-Lapierre

Update whitespace; make the output easier to read

FAILED/SUCCESS lines would be separated by a blank line from the preceding list of architectures, but not themselves followed by one. This is slightly confusing, as such a line could be read as a heading for the following block of tested packages rather than as the final result of the previous block.
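To illustrate (a rough sketch with invented package names, not output from an actual run), the old formatting ran the result line straight into the next block:

    * amd64: libfoo1, libbar2
    * i386: libfoo1
    FAILED
    Trying: baz/3.1-2
    ...

With a blank line emitted after the FAILED/SUCCESS line, each block ends visibly before the next one starts.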

382. By Colin Watson

Remove arm64 from OUTOFSYNC_ARCHES, now that it's caught up.

381. By Colin Watson

britney_nobreakall.conf: sync up with britney.conf

380. By Colin Watson

autopkgtest.py: set release for adt-britney

379. By Adam Conrad

Move from saucy to trusty

378. By Colin Watson

Render Launchpad links for binary-only entries correctly.

377. By Colin Watson

Ignore previously Architecture: all binaries not (yet?) in unstable when unstable is a partial suite.

376. By Colin Watson

Revert some changes against Debian that are no longer needed now that we have upload dates.

375. By Colin Watson

Explicitly set mindays to 0 for all urgencies.

374. By Colin Watson

Clarify excuse HTML for the mindays == 0 case.

Preview Diff

1=== modified file 'INSTALL'
2--- INSTALL 2011-12-12 09:30:59 +0000
3+++ INSTALL 2013-11-22 19:48:51 +0000
4@@ -4,9 +4,9 @@
5 Requirements:
6 -------------
7
8- * Python 2.6 aptitude install python2.6
9- * Python APT/DPKG bindings aptitude install python2.6-apt libapt-pkg-dev dpkg-dev
10- * Python dev headers aptitude install python2.6-dev
11+ * Python 2.7 aptitude install python2.7
12+ * Python APT/DPKG bindings aptitude install python2.7-apt libapt-pkg-dev dpkg-dev
13+ * Python dev headers aptitude install python2.7-dev
14
15 Compiling:
16 ----------
17
18=== added file 'autopkgtest.py'
19--- autopkgtest.py 1970-01-01 00:00:00 +0000
20+++ autopkgtest.py 2013-11-22 19:48:51 +0000
21@@ -0,0 +1,189 @@
22+# -*- coding: utf-8 -*-
23+
24+# Copyright (C) 2013 Canonical Ltd.
25+# Author: Colin Watson <cjwatson@ubuntu.com>
26+# Partly based on code in auto-package-testing by
27+# Jean-Baptiste Lallement <jean-baptiste.lallement@canonical.com>
28+
29+# This program is free software; you can redistribute it and/or modify
30+# it under the terms of the GNU General Public License as published by
31+# the Free Software Foundation; either version 2 of the License, or
32+# (at your option) any later version.
33+
34+# This program is distributed in the hope that it will be useful,
35+# but WITHOUT ANY WARRANTY; without even the implied warranty of
36+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
37+# GNU General Public License for more details.
38+
39+from __future__ import print_function
40+
41+from collections import defaultdict
42+from contextlib import closing
43+import logging
44+import os
45+import subprocess
46+import tempfile
47+from textwrap import dedent
48+import time
49+
50+import apt_pkg
51+
52+
53+adt_britney = os.path.expanduser("~/auto-package-testing/jenkins/adt-britney")
54+
55+
56+class AutoPackageTest(object):
57+ """autopkgtest integration
58+
59+ Look for autopkgtest jobs to run for each update that is otherwise a
60+ valid candidate, and collect the results. If an update causes any
61+ autopkgtest jobs to be run, then they must all pass before the update is
62+ accepted.
63+ """
64+
65+ def __init__(self, britney, series, debug=False):
66+ self.britney = britney
67+ self.series = series
68+ self.debug = debug
69+ self.read()
70+ self.rc_path = None
71+
72+ def _ensure_rc_file(self):
73+ if self.rc_path:
74+ return
75+ self.rc_path = os.path.expanduser(
76+ "~/proposed-migration/autopkgtest/rc.%s" % self.series)
77+ with open(self.rc_path, "w") as rc_file:
78+ home = os.path.expanduser("~")
79+ print(dedent("""\
80+ release: %s
81+ aptroot: ~/.chdist/%s-proposed-amd64/
82+ apturi: file:%s/mirror/ubuntu
83+ components: main restricted universe multiverse
84+ rsync_host: rsync://10.189.74.2/adt/
85+ datadir: ~/proposed-migration/autopkgtest/data""" %
86+ (self.series, self.series, home)), file=rc_file)
87+
88+ @property
89+ def _request_path(self):
90+ return os.path.expanduser(
91+ "~/proposed-migration/autopkgtest/work/adt.request.%s" %
92+ self.series)
93+
94+ @property
95+ def _result_path(self):
96+ return os.path.expanduser(
97+ "~/proposed-migration/autopkgtest/work/adt.result.%s" %
98+ self.series)
99+
100+ def _parse(self, path):
101+ if os.path.exists(path):
102+ with open(path) as f:
103+ for line in f:
104+ line = line.strip()
105+ if line.startswith("Suite:") or line.startswith("Date:"):
106+ continue
107+ linebits = line.split()
108+ if len(linebits) < 2:
109+ logging.warning(
110+ "Invalid line format: '%s', skipped" % line)
111+ continue
112+ yield linebits
113+
114+ def read(self):
115+ self.pkglist = defaultdict(dict)
116+ self.pkgcauses = defaultdict(lambda: defaultdict(list))
117+ for linebits in self._parse(self._result_path):
118+ src = linebits.pop(0)
119+ ver = linebits.pop(0)
120+ self.pkglist[src][ver] = {
121+ "status": "NEW",
122+ "causes": {},
123+ }
124+ try:
125+ status = linebits.pop(0).upper()
126+ self.pkglist[src][ver]["status"] = status
127+ while True:
128+ trigsrc = linebits.pop(0)
129+ trigver = linebits.pop(0)
130+ self.pkglist[src][ver]["causes"][trigsrc] = trigver
131+ except IndexError:
132+ # End of the list
133+ pass
134+ for src in self.pkglist:
135+ all_vers = sorted(self.pkglist[src], cmp=apt_pkg.version_compare)
136+ for ver in self.pkglist[src]:
137+ status = self.pkglist[src][ver]["status"]
138+ for trigsrc, trigver in \
139+ self.pkglist[src][ver]["causes"].items():
140+ self.pkgcauses[trigsrc][trigver].append((status, src, ver))
141+
142+ def _adt_britney(self, *args):
143+ command = [
144+ adt_britney,
145+ "-c", self.rc_path, "-r", self.series, "-PU",
146+ ]
147+ if self.debug:
148+ command.append("-d")
149+ command.extend(args)
150+ subprocess.check_call(command)
151+
152+ def request(self, packages):
153+ self._ensure_rc_file()
154+ request_path = self._request_path
155+ if os.path.exists(request_path):
156+ os.unlink(request_path)
157+ with closing(tempfile.NamedTemporaryFile(mode="w")) as request_file:
158+ for src, ver in packages:
159+ if src in self.pkglist and ver in self.pkglist[src]:
160+ continue
161+ print("%s %s" % (src, ver), file=request_file)
162+ request_file.flush()
163+ self._adt_britney("request", "-O", request_path, request_file.name)
164+ for linebits in self._parse(request_path):
165+ # Make sure that there's an entry in pkgcauses for each new
166+ # request, so that results() gives useful information without
167+ # relying on the submit/collect cycle. This improves behaviour
168+ # in dry-run mode.
169+ src = linebits.pop(0)
170+ ver = linebits.pop(0)
171+ if self.britney.options.verbose:
172+ print("I: [%s] - Requested autopkgtest for %s_%s (%s)" %
173+ (time.asctime(), src, ver, " ".join(linebits)))
174+ try:
175+ status = linebits.pop(0).upper()
176+ while True:
177+ trigsrc = linebits.pop(0)
178+ trigver = linebits.pop(0)
179+ for status, csrc, cver in self.pkgcauses[trigsrc][trigver]:
180+ if csrc == trigsrc and cver == trigver:
181+ break
182+ else:
183+ self.pkgcauses[trigsrc][trigver].append(
184+ (status, src, ver))
185+ except IndexError:
186+ # End of the list
187+ pass
188+
189+ def submit(self):
190+ self._ensure_rc_file()
191+ request_path = self._request_path
192+ if os.path.exists(request_path):
193+ self._adt_britney("submit", request_path)
194+
195+ def collect(self):
196+ self._ensure_rc_file()
197+ result_path = self._result_path
198+ self._adt_britney("collect", "-O", result_path)
199+ self.read()
200+ if self.britney.options.verbose:
201+ for src in sorted(self.pkglist):
202+ for ver in self.pkglist[src]:
203+ print("I: [%s] - Collected autopkgtest status for %s_%s: "
204+ "%s" %
205+ (time.asctime(), src, ver,
206+ self.pkglist[src][ver]["status"]))
207+
208+ def results(self, trigsrc, trigver):
209+ for status, src, ver in self.pkgcauses[trigsrc][trigver]:
210+ yield status, src, ver
211
212=== modified file 'britney.conf'
213--- britney.conf 2013-01-22 13:02:13 +0000
214+++ britney.conf 2013-11-22 19:48:51 +0000
215@@ -1,24 +1,25 @@
216 # Configuration file for britney
217
218 # Paths for control files
219-TESTING = /srv/release.debian.org/britney/var/data-b2/testing
220-TPU = /srv/release.debian.org/britney/var/data-b2/testing-proposed-updates
221-UNSTABLE = /srv/release.debian.org/britney/var/data-b2/unstable
222+TESTING = data/testing
223+UNSTABLE = data/unstable
224+PARTIAL_UNSTABLE = yes
225
226 # Output
227-NONINST_STATUS = /srv/release.debian.org/britney/var/data-b2/non-installable-status
228-EXCUSES_OUTPUT = /srv/release.debian.org/britney/var/data-b2/output/excuses.html
229-UPGRADE_OUTPUT = /srv/release.debian.org/britney/var/data-b2/output/output.txt
230-HEIDI_OUTPUT = /srv/release.debian.org/britney/var/data-b2/output/HeidiResult
231+NONINST_STATUS = data/non-installable-status
232+EXCUSES_OUTPUT = output/excuses.html
233+UPGRADE_OUTPUT = output/output.txt
234+HEIDI_OUTPUT = output/HeidiResult
235+DELTA_OUTPUT = output/Delta
236
237 # List of release architectures
238-ARCHITECTURES = i386 amd64 armel ia64 mips mipsel powerpc s390 sparc kfreebsd-i386 kfreebsd-amd64 armhf s390x
239+ARCHITECTURES = amd64 arm64 armhf i386 powerpc
240
241 # if you're not in this list, arch: all packages are allowed to break on you
242 NOBREAKALL_ARCHES = i386
243
244 # if you're in this list, your packages may not stay in sync with the source
245-FUCKED_ARCHES =
246+OUTOFSYNC_ARCHES =
247
248 # if you're in this list, your uninstallability count may increase
249 BREAK_ARCHES =
250@@ -27,30 +28,30 @@
251 NEW_ARCHES =
252
253 # priorities and delays
254-MINDAYS_LOW = 10
255-MINDAYS_MEDIUM = 5
256-MINDAYS_HIGH = 2
257+MINDAYS_LOW = 0
258+MINDAYS_MEDIUM = 0
259+MINDAYS_HIGH = 0
260 MINDAYS_CRITICAL = 0
261 MINDAYS_EMERGENCY = 0
262 DEFAULT_URGENCY = low
263
264 # hint permissions
265+HINTS_CJWATSON = ALL
266+HINTS_ADCONRAD = ALL
267+HINTS_KITTERMAN = ALL
268+HINTS_LANEY = ALL
269+HINTS_JRIDDELL = ALL
270+HINTS_STEFANOR = ALL
271+HINTS_STGRABER = ALL
272 HINTS_VORLON = ALL
273-HINTS_ABA = ALL
274-HINTS_HE = ALL
275-HINTS_LUK = ALL
276-HINTS_PKERN = STANDARD force
277-HINTS_ADSB = STANDARD force force-hint
278-HINTS_NEILM = STANDARD
279-HINTS_MEHDI = STANDARD
280-HINTS_JCRISTAU = STANDARD force force-hint
281-HINTS_FAW = HELPERS
282-HINTS_NTHYKIER = STANDARD
283-HINTS_KIBI = STANDARD
284-HINTS_JMW = STANDARD
285-HINTS_FREEZE = block block-all block-udeb
286-HINTS_FREEZE-EXCEPTION = unblock unblock-udeb
287-HINTS_SATBRITNEY = easy
288+HINTS_FREEZE = block block-all
289+
290+HINTS_UBUNTU-TOUCH/DIDROCKS = block unblock
291+HINTS_UBUNTU-TOUCH/EV = block unblock
292+HINTS_UBUNTU-TOUCH/KEN-VANDINE = block unblock
293+HINTS_UBUNTU-TOUCH/LOOL = block unblock
294+HINTS_UBUNTU-TOUCH/MATHIEU-TL = block unblock
295+HINTS_UBUNTU-TOUCH/OGRA = block unblock
296
297 # support for old libraries in testing (smooth update)
298 # use ALL to enable smooth updates for all the sections
299@@ -58,3 +59,11 @@
300 # naming a non-existent section will effectively disable new smooth
301 # updates but still allow removals to occur
302 SMOOTH_UPDATES = badgers
303+
304+REMOVE_OBSOLETE = no
305+
306+# autopkgtest needs to know the series name; set to the empty string to
307+# disable autopkgtest
308+ADT_SERIES = trusty
309+ADT_DEBUG = no
310+ADT_ARCHES = amd64 i386
311
312=== modified file 'britney.py'
313--- britney.py 2013-03-17 10:33:59 +0000
314+++ britney.py 2013-11-22 19:48:51 +0000
315@@ -1,10 +1,10 @@
316-#!/usr/bin/python2.6 -u
317+#!/usr/bin/python2.7 -u
318 # -*- coding: utf-8 -*-
319
320 # Copyright (C) 2001-2008 Anthony Towns <ajt@debian.org>
321 # Andreas Barth <aba@debian.org>
322 # Fabio Tranchitella <kobold@debian.org>
323-# Copyright (C) 2010-2012 Adam D. Barratt <adsb@debian.org>
324+# Copyright (C) 2010-2013 Adam D. Barratt <adsb@debian.org>
325
326 # This program is free software; you can redistribute it and/or modify
327 # it under the terms of the GNU General Public License as published by
328@@ -24,7 +24,7 @@
329 Packages are usually installed into the `testing' distribution after
330 they have undergone some degree of testing in unstable. The goal of
331 this software is to do this task in a smart way, allowing testing
332-to be always fully installable and close to being a release candidate.
333+to always be fully installable and close to being a release candidate.
334
335 Britney's source code is split between two different but related tasks:
336 the first one is the generation of the update excuses, while the
337@@ -95,9 +95,9 @@
338 is ignored: it will be removed and not updated.
339
340 2. For every binary package built from the new source, it checks
341- for unsatisfied dependencies, new binary package and updated
342- binary package (binNMU) excluding the architecture-independent
343- ones and the packages not built from the same source.
344+ for unsatisfied dependencies, new binary packages and updated
345+ binary packages (binNMU), excluding the architecture-independent
346+ ones, and packages not built from the same source.
347
348 3. For every binary package built from the old source, it checks
349 if it is still built from the new source; if this is not true
350@@ -146,7 +146,7 @@
351 If this is not true, then these are called `out-of-date'
352 architectures and the package is ignored.
353
354- 9. The source package must have at least a binary package, otherwise
355+ 9. The source package must have at least one binary package, otherwise
356 it is ignored.
357
358 10. If the suite is unstable, the new source package must have no
359@@ -181,7 +181,6 @@
360 """
361
362 import os
363-import re
364 import sys
365 import string
366 import time
367@@ -191,7 +190,7 @@
368 import apt_pkg
369
370 from functools import reduce, partial
371-from itertools import chain, repeat
372+from itertools import chain, ifilter
373 from operator import attrgetter
374
375 if __name__ == '__main__':
376@@ -210,35 +209,21 @@
377 sys.path.insert(0, idir)
378
379 from excuse import Excuse
380-from migrationitem import MigrationItem, HintItem
381+from migrationitem import MigrationItem
382 from hints import HintCollection
383 from britney import buildSystem
384-
385+from britney_util import (old_libraries_format, same_source, undo_changes,
386+ register_reverses, compute_reverse_tree,
387+ read_nuninst, write_nuninst, write_heidi,
388+ eval_uninst, newly_uninst, make_migrationitem)
389+from consts import (VERSION, SECTION, BINARIES, MAINTAINER, FAKESRC,
390+ SOURCE, SOURCEVER, ARCHITECTURE, DEPENDS, CONFLICTS,
391+ PROVIDES, RDEPENDS, RCONFLICTS, MULTIARCH)
392+from autopkgtest import AutoPackageTest
393
394 __author__ = 'Fabio Tranchitella and the Debian Release Team'
395 __version__ = '2.0'
396
397-# source package
398-VERSION = 0
399-SECTION = 1
400-BINARIES = 2
401-MAINTAINER = 3
402-FAKESRC = 4
403-
404-# binary package
405-SOURCE = 2
406-SOURCEVER = 3
407-ARCHITECTURE = 4
408-# PREDEPENDS = 5 - No longer used by the python code
409-# - The C-code needs it for alignment reasons and still check it
410-# but ignore it if it is None (so keep it None).
411-DEPENDS = 6
412-CONFLICTS = 7
413-PROVIDES = 8
414-RDEPENDS = 9
415-RCONFLICTS = 10
416-
417-
418 class Britney(object):
419 """Britney, the Debian testing updater script
420
421@@ -253,7 +238,7 @@
422
423 HINTS_HELPERS = ("easy", "hint", "remove", "block", "block-udeb", "unblock", "unblock-udeb", "approve")
424 HINTS_STANDARD = ("urgent", "age-days") + HINTS_HELPERS
425- HINTS_ALL = ("force", "force-hint", "block-all") + HINTS_STANDARD
426+ HINTS_ALL = ("force", "force-hint", "force-badtest", "force-skiptest", "block-all") + HINTS_STANDARD
427
428 def __init__(self):
429 """Class constructor
430@@ -261,8 +246,7 @@
431 This method initializes and populates the data lists, which contain all
432 the information needed by the other methods of the class.
433 """
434- # britney's "day" begins at 3pm
435- self.date_now = int(((time.time() / (60*60)) - 15) / 24)
436+ self.date_now = int(time.time())
437
438 # parse the command line arguments
439 self.__parse_arguments()
440@@ -292,7 +276,7 @@
441 if self.options.print_uninst:
442 self.nuninst_arch_report(nuninst, arch)
443 if not self.options.print_uninst:
444- self.write_nuninst(nuninst)
445+ write_nuninst(self.options.noninst_status, nuninst)
446 else:
447 self.__log("Not building the list of non-installable packages, as requested", type="I")
448
449@@ -308,7 +292,12 @@
450 if 'testing' not in self.sources:
451 self.sources['testing'] = self.read_sources(self.options.testing)
452 self.sources['unstable'] = self.read_sources(self.options.unstable)
453- self.sources['tpu'] = self.read_sources(self.options.tpu)
454+ if hasattr(self.options, 'partial_unstable'):
455+ self.merge_sources('testing', 'unstable')
456+ if hasattr(self.options, 'tpu'):
457+ self.sources['tpu'] = self.read_sources(self.options.tpu)
458+ else:
459+ self.sources['tpu'] = {}
460
461 if hasattr(self.options, 'pu'):
462 self.sources['pu'] = self.read_sources(self.options.pu)
463@@ -325,7 +314,10 @@
464 if arch not in self.binaries['testing']:
465 self.binaries['testing'][arch] = self.read_binaries(self.options.testing, "testing", arch)
466 self.binaries['unstable'][arch] = self.read_binaries(self.options.unstable, "unstable", arch)
467- self.binaries['tpu'][arch] = self.read_binaries(self.options.tpu, "tpu", arch)
468+ if hasattr(self.options, 'partial_unstable'):
469+ self.merge_binaries('testing', 'unstable', arch)
470+ if hasattr(self.options, 'tpu'):
471+ self.binaries['tpu'][arch] = self.read_binaries(self.options.tpu, "tpu", arch)
472 if hasattr(self.options, 'pu'):
473 self.binaries['pu'][arch] = self.read_binaries(self.options.pu, "pu", arch)
474 # build the testing system
475@@ -399,7 +391,7 @@
476 # Sort the architecture list
477 allarches = sorted(self.options.architectures.split())
478 arches = [x for x in allarches if x in self.options.nobreakall_arches.split()]
479- arches += [x for x in allarches if x not in arches and x not in self.options.fucked_arches.split()]
480+ arches += [x for x in allarches if x not in arches and x not in self.options.outofsync_arches.split()]
481 arches += [x for x in allarches if x not in arches and x not in self.options.break_arches.split()]
482 arches += [x for x in allarches if x not in arches and x not in self.options.new_arches.split()]
483 arches += [x for x in allarches if x not in arches]
484@@ -413,7 +405,7 @@
485 output. The type parameter controls the urgency of the message, and
486 can be equal to `I' for `Information', `W' for `Warning' and `E' for
487 `Error'. Warnings and errors are always printed, and information is
488- printed only if the verbose logging is enabled.
489+ printed only if verbose logging is enabled.
490 """
491 if self.options.verbose or type in ("E", "W"):
492 print "%s: [%s] - %s" % (type, time.asctime(), msg)
493@@ -479,7 +471,7 @@
494 within the directory specified as `basedir' parameter, replacing
495 ${arch} with the value of the arch parameter. Considering the
496 large amount of memory needed, not all the fields are loaded
497- in memory. The available fields are Version, Source, Pre-Depends,
498+ in memory. The available fields are Version, Source, Multi-Arch,
499 Depends, Conflicts, Provides and Architecture.
500
501 After reading the packages, reverse dependencies are computed
502@@ -540,7 +532,7 @@
503 pkg,
504 version,
505 get_field('Architecture'),
506- None, # Pre-depends - leave as None for the C-code
507+ get_field('Multi-Arch'),
508 deps,
509 ', '.join(final_conflicts_list) or None,
510 get_field('Provides'),
511@@ -555,12 +547,20 @@
512 if "(" in source:
513 dpkg[SOURCEVER] = source[source.find("(")+1:source.find(")")]
514
515+ pkgarch = "%s/%s" % (pkg,arch)
516 # if the source package is available in the distribution, then register this binary package
517 if dpkg[SOURCE] in sources[distribution]:
518- sources[distribution][dpkg[SOURCE]][BINARIES].append(pkg + "/" + arch)
519+ # There may be multiple versions of any arch:all packages
520+ # (in unstable) if some architectures have out-of-date
521+ # binaries. We only want to include the package in the
522+ # source -> binary mapping once. It doesn't matter which
523+ # of the versions we include as only the package name and
524+ # architecture are recorded.
525+ if pkgarch not in sources[distribution][dpkg[SOURCE]][BINARIES]:
526+ sources[distribution][dpkg[SOURCE]][BINARIES].append(pkgarch)
527 # if the source package doesn't exist, create a fake one
528 else:
529- sources[distribution][dpkg[SOURCE]] = [dpkg[SOURCEVER], 'faux', [pkg + "/" + arch], None, True]
530+ sources[distribution][dpkg[SOURCE]] = [dpkg[SOURCEVER], 'faux', [pkgarch], None, True]
531
532 # register virtual packages and real packages that provide them
533 if dpkg[PROVIDES]:
534@@ -576,53 +576,76 @@
535 packages[pkg] = dpkg
536
537 # loop again on the list of packages to register reverse dependencies and conflicts
538- register_reverses = self.register_reverses
539- for pkg in packages:
540- register_reverses(pkg, packages, provides, check_doubles=False)
541+ register_reverses(packages, provides, check_doubles=False)
542
543 # return a tuple with the list of real and virtual packages
544 return (packages, provides)
545
546- def register_reverses(self, pkg, packages, provides, check_doubles=True, parse_depends=apt_pkg.parse_depends):
547- """Register reverse dependencies and conflicts for the specified package
548-
549- This method registers the reverse dependencies and conflicts for
550- a given package using `packages` as the list of packages and `provides`
551- as the list of virtual packages.
552-
553- The method has an optional parameter parse_depends which is there
554- just for performance reasons and is not meant to be overwritten.
555- """
556- # register the list of the dependencies for the depending packages
557- dependencies = []
558- if packages[pkg][DEPENDS]:
559- dependencies.extend(parse_depends(packages[pkg][DEPENDS], False))
560- # go through the list
561- for p in dependencies:
562- for a in p:
563- # register real packages
564- if a[0] in packages and (not check_doubles or pkg not in packages[a[0]][RDEPENDS]):
565- packages[a[0]][RDEPENDS].append(pkg)
566- # also register packages which provide the package (if any)
567- if a[0] in provides:
568- for i in provides.get(a[0]):
569- if i not in packages: continue
570- if not check_doubles or pkg not in packages[i][RDEPENDS]:
571- packages[i][RDEPENDS].append(pkg)
572- # register the list of the conflicts for the conflicting packages
573- if packages[pkg][CONFLICTS]:
574- for p in parse_depends(packages[pkg][CONFLICTS], False):
575- for a in p:
576- # register real packages
577- if a[0] in packages and (not check_doubles or pkg not in packages[a[0]][RCONFLICTS]):
578- packages[a[0]][RCONFLICTS].append(pkg)
579- # also register packages which provide the package (if any)
580- if a[0] in provides:
581- for i in provides[a[0]]:
582- if i not in packages: continue
583- if not check_doubles or pkg not in packages[i][RCONFLICTS]:
584- packages[i][RCONFLICTS].append(pkg)
585-
586+ def merge_sources(self, source, target):
587+ """Merge sources from `source' into partial suite `target'."""
588+ source_sources = self.sources[source]
589+ target_sources = self.sources[target]
590+ for pkg, value in source_sources.items():
591+ if pkg in target_sources:
592+ continue
593+ target_sources[pkg] = list(value)
594+ target_sources[pkg][BINARIES] = list(
595+ target_sources[pkg][BINARIES])
596+
597+ def merge_binaries(self, source, target, arch):
598+ """Merge `arch' binaries from `source' into partial suite `target'."""
599+ source_sources = self.sources[source]
600+ source_binaries, _ = self.binaries[source][arch]
601+ target_sources = self.sources[target]
602+ target_binaries, target_provides = self.binaries[target][arch]
603+ oodsrcs = set()
604+ for pkg, value in source_binaries.items():
605+ if pkg in target_binaries:
606+ continue
607+
608+ # Don't merge binaries rendered stale by new sources in target
609+ # that have built on this architecture.
610+ if value[SOURCE] not in oodsrcs:
611+ source_version = source_sources[value[SOURCE]][VERSION]
612+ target_version = target_sources[value[SOURCE]][VERSION]
613+ if source_version != target_version:
614+ current_arch = value[ARCHITECTURE]
615+ built = False
616+ for b in target_sources[value[SOURCE]][BINARIES]:
617+ binpkg, binarch = b.split('/')
618+ if binarch == arch:
619+ target_value = target_binaries[binpkg]
620+ if current_arch in (
621+ target_value[ARCHITECTURE], "all"):
622+ built = True
623+ break
624+ if built:
625+ continue
626+ oodsrcs.add(value[SOURCE])
627+
628+ if pkg in target_binaries:
629+ for p in target_binaries[pkg][PROVIDES]:
630+ target_provides[p].remove(pkg)
631+ if not target_provides[p]:
632+ del target_provides[p]
633+
634+ target_binaries[pkg] = value
635+
636+ pkg_arch = pkg + "/" + arch
637+ if pkg_arch not in target_sources[value[SOURCE]][BINARIES]:
638+ target_sources[value[SOURCE]][BINARIES].append(pkg_arch)
639+
640+ for p in value[PROVIDES]:
641+ if p not in target_provides:
642+ target_provides[p] = []
643+ target_provides[p].append(pkg)
644+
645+ for pkg, value in target_binaries.items():
646+ value[RDEPENDS] = []
647+ value[RCONFLICTS] = []
648+ register_reverses(
649+ target_binaries, target_provides, check_doubles=False)
650+
651 def read_bugs(self, basedir):
652 """Read the release critial bug summary from the specified directory
653
654@@ -638,31 +661,20 @@
655 bugs = {}
656 filename = os.path.join(basedir, "BugsV")
657 self.__log("Loading RC bugs data from %s" % filename)
658- for line in open(filename):
659- l = line.split()
660- if len(l) != 2:
661- self.__log("Malformed line found in line %s" % (line), type='W')
662- continue
663- pkg = l[0]
664- bugs.setdefault(pkg, [])
665- bugs[pkg] += l[1].split(",")
666+ try:
667+ for line in open(filename):
668+ l = line.split()
669+ if len(l) != 2:
670+ self.__log("Malformed line found in line %s" % (line),
671+ type='W')
672+ continue
673+ pkg = l[0]
674+ bugs.setdefault(pkg, [])
675+ bugs[pkg] += l[1].split(",")
676+ except IOError:
677+ self.__log("%s missing; skipping bug-based processing" % filename)
678 return bugs
679
680- def write_bugs(self, basedir, bugs):
681- """Write the release critical bug summary to the specified directory
682-
683- For a more detailed explanation of the format, please check the method
684- read_bugs.
685- """
686- filename = os.path.join(basedir, "BugsV")
687- self.__log("Writing RC bugs data to %s" % filename)
688- f = open(filename, 'w')
689- for pkg in sorted(bugs.keys()):
690- if not bugs[pkg]:
691- continue
692- f.write("%s %s\n" % (pkg, ','.join(bugs[pkg])))
693- f.close()
694-
695 def __maxver(self, pkg, dist):
696 """Return the maximum version for a given package name
697
698@@ -688,7 +700,7 @@
699 object attribute `bugs'.
700 """
701 # loop on all the package names from testing and unstable bug summaries
702- for pkg in set(self.bugs['testing'].keys() + self.bugs['unstable'].keys()):
703+ for pkg in set(chain(self.bugs['testing'], self.bugs['unstable'])):
704
705 # make sure that the key is present in both dictionaries
706 if pkg not in self.bugs['testing']:
707@@ -716,7 +728,8 @@
708
709 <package-name> <version> <date-of-upload>
710
711- The dates are expressed as days starting from the 1970-01-01.
712+ The dates are expressed as the number of seconds from the Unix epoch
713+ (1970-01-01 00:00:00 UTC).
714
715 The method returns a dictionary where the key is the binary package
716 name and the value is a tuple with two items, the version and the date.
717@@ -742,7 +755,7 @@
718 filename = os.path.join(basedir, "Dates")
719 self.__log("Writing upload data to %s" % filename)
720 f = open(filename, 'w')
721- for pkg in sorted(dates.keys()):
722+ for pkg in sorted(dates):
723 f.write("%s %s %d\n" % ((pkg,) + dates[pkg]))
724 f.close()
725
726@@ -764,31 +777,34 @@
727 urgencies = {}
728 filename = os.path.join(basedir, "Urgency")
729 self.__log("Loading upload urgencies from %s" % filename)
730- for line in open(filename):
731- l = line.split()
732- if len(l) != 3: continue
733-
734- # read the minimum days associated with the urgencies
735- urgency_old = urgencies.get(l[0], self.options.default_urgency)
736- mindays_old = self.MINDAYS.get(urgency_old, self.MINDAYS[self.options.default_urgency])
737- mindays_new = self.MINDAYS.get(l[2], self.MINDAYS[self.options.default_urgency])
738-
739- # if the new urgency is lower (so the min days are higher), do nothing
740- if mindays_old <= mindays_new:
741- continue
742-
743- # if the package exists in testing and it is more recent, do nothing
744- tsrcv = self.sources['testing'].get(l[0], None)
745- if tsrcv and apt_pkg.version_compare(tsrcv[VERSION], l[1]) >= 0:
746- continue
747-
748- # if the package doesn't exist in unstable or it is older, do nothing
749- usrcv = self.sources['unstable'].get(l[0], None)
750- if not usrcv or apt_pkg.version_compare(usrcv[VERSION], l[1]) < 0:
751- continue
752-
753- # update the urgency for the package
754- urgencies[l[0]] = l[2]
755+ try:
756+ for line in open(filename):
757+ l = line.split()
758+ if len(l) != 3: continue
759+
760+ # read the minimum days associated with the urgencies
761+ urgency_old = urgencies.get(l[0], self.options.default_urgency)
762+ mindays_old = self.MINDAYS.get(urgency_old, self.MINDAYS[self.options.default_urgency])
763+ mindays_new = self.MINDAYS.get(l[2], self.MINDAYS[self.options.default_urgency])
764+
765+ # if the new urgency is lower (so the min days are higher), do nothing
766+ if mindays_old <= mindays_new:
767+ continue
768+
769+ # if the package exists in testing and it is more recent, do nothing
770+ tsrcv = self.sources['testing'].get(l[0], None)
771+ if tsrcv and apt_pkg.version_compare(tsrcv[VERSION], l[1]) >= 0:
772+ continue
773+
774+ # if the package doesn't exist in unstable or it is older, do nothing
775+ usrcv = self.sources['unstable'].get(l[0], None)
776+ if not usrcv or apt_pkg.version_compare(usrcv[VERSION], l[1]) < 0:
777+ continue
778+
779+ # update the urgency for the package
780+ urgencies[l[0]] = l[2]
781+ except IOError:
782+ self.__log("%s missing; using default for all packages" % filename)
783
784 return urgencies
785
786@@ -830,7 +846,7 @@
787 elif len(l) == 1:
788 # All current hints require at least one argument
789 self.__log("Malformed hint found in %s: '%s'" % (filename, line), type="W")
790- elif l[0] in ["approve", "block", "block-all", "block-udeb", "unblock", "unblock-udeb", "force", "urgent", "remove"]:
791+ elif l[0] in ["approve", "block", "block-all", "block-udeb", "unblock", "unblock-udeb", "force", "force-badtest", "force-skiptest", "urgent", "remove"]:
792 if l[0] == 'approve': l[0] = 'unblock'
793 for package in l[1:]:
794 hints.add_hint('%s %s' % (l[0], package), who)
795@@ -840,7 +856,7 @@
796 else:
797 hints.add_hint(l, who)
798
799- for x in ["block", "block-all", "block-udeb", "unblock", "unblock-udeb", "force", "urgent", "remove", "age-days"]:
800+ for x in ["block", "block-all", "block-udeb", "unblock", "unblock-udeb", "force", "force-badtest", "force-skiptest", "urgent", "remove", "age-days"]:
801 z = {}
802 for hint in hints[x]:
803 package = hint.package
804@@ -872,37 +888,33 @@
805
806 return hints
807
808- def write_heidi(self, filename):
809- """Write the output HeidiResult
810-
811- This method write the output for Heidi, which contains all the
812- binary packages and the source packages in the form:
813-
814- <pkg-name> <pkg-version> <pkg-architecture> <pkg-section>
815- <src-name> <src-version> source <src-section>
816+
817+ def write_delta(self, filename):
818+ """Write the output delta
819+
820+ This method writes the packages to be upgraded, in the form:
821+ <src-name> <src-version>
822+ or (if the source is to be removed):
823+ <src-name>
824+
825+ The order corresponds to that shown in update_output.
826 """
827- self.__log("Writing Heidi results to %s" % filename)
828- f = open(filename, 'w')
829+ self.__log("Writing delta to %s" % filename)
830+ f = open(filename, "w")
831
832- # local copies
833 sources = self.sources['testing']
834-
835- # write binary packages
836- for arch in sorted(self.options.architectures):
837- binaries = self.binaries['testing'][arch][0]
838- for pkg_name in sorted(binaries):
839- pkg = binaries[pkg_name]
840- pkgv = pkg[VERSION]
841- pkgarch = pkg[ARCHITECTURE] or 'all'
842- pkgsec = pkg[SECTION] or 'faux'
843- f.write('%s %s %s %s\n' % (pkg_name, pkgv, pkgarch, pkgsec))
844-
845- # write sources
846- for src_name in sorted(sources):
847- src = sources[src_name]
848- srcv = src[VERSION]
849- srcsec = src[SECTION] or 'unknown'
850- f.write('%s %s source %s\n' % (src_name, srcv, srcsec))
851+ for name in self.all_selected:
852+ if "/" in name:
853+ pkg_name, arch = name.split('/', 1)
854+ if pkg_name in sources:
855+ f.write('%s %s\n' % (name, sources[pkg_name][VERSION]))
856+ else:
857+ f.write('%s\n' % name)
858+ else:
859+ if name in sources:
860+ f.write('%s %s\n' % (name, sources[name][VERSION]))
861+ else:
862+ f.write('%s\n' % name)
863
864 f.close()
865
866@@ -921,7 +933,7 @@
867 binaries = self.binaries[suite][arch][0]
868 for pkg in binaries:
869 output = "Package: %s\n" % pkg
870- for key, k in ((SECTION, 'Section'), (ARCHITECTURE, 'Architecture'), (SOURCE, 'Source'), (VERSION, 'Version'),
871+ for key, k in ((SECTION, 'Section'), (ARCHITECTURE, 'Architecture'), (MULTIARCH, 'Multi-Arch'), (SOURCE, 'Source'), (VERSION, 'Version'),
872 (DEPENDS, 'Depends'), (PROVIDES, 'Provides'), (CONFLICTS, 'Conflicts')):
873 if not binaries[pkg][key]: continue
874 if key == SOURCE:
875@@ -955,52 +967,9 @@
876 f.write(output + "\n")
877 f.close()
878
879- def write_nuninst(self, nuninst):
880- """Write the non-installable report"""
881- f = open(self.options.noninst_status, 'w')
882- f.write("Built on: " + time.strftime("%Y.%m.%d %H:%M:%S %z", time.gmtime(time.time())) + "\n")
883- f.write("Last update: " + time.strftime("%Y.%m.%d %H:%M:%S %z", time.gmtime(time.time())) + "\n\n")
884- f.write("".join([k + ": " + " ".join(nuninst[k]) + "\n" for k in nuninst]))
885- f.close()
886-
887- def read_nuninst(self):
888- """Read the non-installable report"""
889- f = open(self.options.noninst_status)
890- nuninst = {}
891- for r in f:
892- if ":" not in r: continue
893- arch, packages = r.strip().split(":", 1)
894- if arch.split("+", 1)[0] in self.options.architectures:
895- nuninst[arch] = set(packages.split())
896- return nuninst
897-
898-
899 # Utility methods for package analysis
900 # ------------------------------------
901
902- def same_source(self, sv1, sv2):
903- """Check if two version numbers are built from the same source
904-
905- This method returns a boolean value which is true if the two
906- version numbers specified as parameters are built from the same
907- source. The main use of this code is to detect binary-NMU.
908- """
909- if sv1 == sv2:
910- return 1
911-
912- if sv1 is None or sv2 is None:
913- return 0
914-
915- m = re.match(r'^(.*)\+b\d+$', sv1)
916- if m: sv1 = m.group(1)
917- m = re.match(r'^(.*)\+b\d+$', sv2)
918- if m: sv2 = m.group(1)
919-
920- if sv1 == sv2:
921- return 1
922-
923- return 0
924-
925 def get_dependency_solvers(self, block, arch, distribution):
926 """Find the packages which satisfy a dependency block
927
928@@ -1020,20 +989,29 @@
929
930 # for every package, version and operation in the block
931 for name, version, op in block:
932+ if ":" in name:
933+ name, archqual = name.split(":", 1)
934+ else:
935+ archqual = None
936+
937 # look for the package in unstable
938 if name in binaries[0]:
939 package = binaries[0][name]
940- # check the versioned dependency (if present)
941- if op == '' and version == '' or apt_pkg.check_dep(package[VERSION], op, version):
942- packages.append(name)
943+ # check the versioned dependency and architecture qualifier
944+ # (if present)
945+ if (op == '' and version == '') or apt_pkg.check_dep(package[VERSION], op, version):
946+ if archqual is None or (archqual == 'any' and package[MULTIARCH] == 'allowed'):
947+ packages.append(name)
948
949 # look for the package in the virtual packages list and loop on them
950 for prov in binaries[1].get(name, []):
951 if prov not in binaries[0]: continue
952 package = binaries[0][prov]
953- # A provides only satisfies an unversioned dependency
954- # (per Policy Manual §7.5)
955- if op == '' and version == '':
956+ # A provides only satisfies:
957+ # - an unversioned dependency (per Policy Manual §7.5)
958+ # - a dependency without an architecture qualifier
959+ # (per analysis of apt code)
960+ if op == '' and version == '' and archqual is None:
961 packages.append(prov)
962
963 return (len(packages) > 0, packages)
964@@ -1050,7 +1028,7 @@
965 # retrieve the binary package from the specified suite and arch
966 binary_u = self.binaries[suite][arch][0][pkg]
967
968- # local copies for better performances
969+ # local copies for better performance
970 parse_depends = apt_pkg.parse_depends
971 get_dependency_solvers = self.get_dependency_solvers
972
973@@ -1059,7 +1037,7 @@
974 return
975 deps = binary_u[DEPENDS]
976
977- # for every block of dependency (which is formed as conjunction of disconjunction)
978+ # for every dependency block (formed as conjunction of disjunction)
979 for block, block_txt in zip(parse_depends(deps, False), deps.split(',')):
980 # if the block is satisfied in testing, then skip the block
981 solved, packages = get_dependency_solvers(block, arch, 'testing')
982@@ -1103,7 +1081,7 @@
983 present in the unstable distribution anymore.
984
985 It returns True if the package can be removed, False otherwise.
986- In the former case, a new excuse is appended to the the object
987+ In the former case, a new excuse is appended to the object
988 attribute excuses.
989 """
990 # if the source package is available in unstable, then do nothing
991@@ -1118,7 +1096,7 @@
992
993 # if the package is blocked, skip it
994 for hint in self.hints.search('block', package=pkg, removal=True):
995- excuse.addhtml("Not touching package, as requested by %s (contact debian-release "
996+ excuse.addhtml("Not touching package, as requested by %s (contact #ubuntu-release "
997 "if update is needed)" % hint.user)
998 excuse.addhtml("Not considered")
999 self.excuses.append(excuse)
1000@@ -1128,18 +1106,18 @@
1001 self.excuses.append(excuse)
1002 return True
1003
1004- def should_upgrade_srcarch(self, src, arch, suite):
1005- """Check if a binary package should be upgraded
1006+ def should_upgrade_srcarch(self, src, arch, suite, same_source=same_source):
1007+ """Check if a set of binary packages should be upgraded
1008
1009- This method checks if a binary package should be upgraded; this can
1010- happen also if the binary package is a binary-NMU for the given arch.
1011- The analysis is performed for the source package specified by the
1012- `src' parameter, checking the architecture `arch' for the distribution
1013- `suite'.
1014+ This method checks if the binary packages produced by the source
1015+ package on the given architecture should be upgraded; this can
1016+ happen also if the migration is a binary-NMU for the given arch.
1017
1018- It returns False if the given package doesn't need to be upgraded,
1019+ It returns False if the given packages don't need to be upgraded,
1020 True otherwise. In the former case, a new excuse is appended to
1021- the the object attribute excuses.
1022+ the object attribute excuses.
1023+
1024+ same_source is an optimization to avoid "load global".
1025 """
1026 # retrieve the source packages for testing and suite
1027 source_t = self.sources['testing'][src]
1028@@ -1154,7 +1132,9 @@
1029
1030 # if there is a `remove' hint and the requested version is the same as the
1031 # version in testing, then stop here and return False
1032- for hint in [ x for x in self.hints.search('remove', package=src) if self.same_source(source_t[VERSION], x.version) ]:
1033+ # (as a side effect, a removal may generate such excuses for both the source
1034+ # package and its binary packages on each architecture)
1035+ for hint in [ x for x in self.hints.search('remove', package=src) if same_source(source_t[VERSION], x.version) ]:
1036 excuse.addhtml("Removal request by %s" % (hint.user))
1037 excuse.addhtml("Trying to remove package, not update it")
1038 excuse.addhtml("Not considered")
1039@@ -1166,11 +1146,14 @@
1040 anyworthdoing = False
1041
1042 # for every binary package produced by this source in unstable for this architecture
1043- for pkg in sorted(filter(lambda x: x.endswith("/" + arch), source_u[BINARIES]), key=lambda x: x.split("/")[0]):
1044+ for pkg in sorted(ifilter(lambda x: x.endswith("/" + arch), source_u[BINARIES]), key=lambda x: x.split("/")[0]):
1045 pkg_name = pkg.split("/")[0]
1046
1047 # retrieve the testing (if present) and unstable corresponding binary packages
1048 binary_t = pkg in source_t[BINARIES] and self.binaries['testing'][arch][0][pkg_name] or None
1049+ if hasattr(self.options, 'partial_unstable') and binary_t is not None and binary_t[ARCHITECTURE] == 'all' and pkg_name not in self.binaries[suite][arch][0]:
1050+ excuse.addhtml("Ignoring %s %s (from %s) as it is arch: all and not yet built in unstable" % (pkg_name, binary_t[VERSION], binary_t[SOURCEVER]))
1051+ continue
1052 binary_u = self.binaries[suite][arch][0][pkg_name]
1053
1054 # this is the source version for the new binary package
1055@@ -1182,9 +1165,18 @@
1056 continue
1057
1058 # if the new binary package is not from the same source as the testing one, then skip it
1059- if not self.same_source(source_t[VERSION], pkgsv):
1060+ # this implies that this binary migration is part of a source migration
1061+ if not same_source(source_t[VERSION], pkgsv):
1062+ if binary_t is None or binary_t[VERSION] != binary_u[VERSION]:
1063+ anywrongver = True
1064+ excuse.addhtml("From wrong source: %s %s (%s not %s)" % (pkg_name, binary_u[VERSION], pkgsv, source_t[VERSION]))
1065+ break
1066+
1067+ # if the source package has been updated in unstable and this is a binary migration, skip it
1068+ # (the binaries are now out-of-date)
1069+ if same_source(source_t[VERSION], pkgsv) and source_t[VERSION] != source_u[VERSION]:
1070 anywrongver = True
1071- excuse.addhtml("From wrong source: %s %s (%s not %s)" % (pkg_name, binary_u[VERSION], pkgsv, source_t[VERSION]))
1072+ excuse.addhtml("From wrong source: %s %s (%s not %s)" % (pkg_name, binary_u[VERSION], pkgsv, source_u[VERSION]))
1073 break
1074
1075 # find unsatisfied dependencies for the new binary package
1076@@ -1215,21 +1207,37 @@
1077 # package is not fake, then check what packages should be removed
1078 if not anywrongver and (anyworthdoing or not self.sources[suite][src][FAKESRC]):
1079 srcv = self.sources[suite][src][VERSION]
1080- ssrc = self.same_source(source_t[VERSION], srcv)
1081+ ssrc = same_source(source_t[VERSION], srcv)
1082 # if this is a binary-only migration via *pu, we never want to try
1083 # removing binary packages
1084 if not (ssrc and suite != 'unstable'):
1085 # for every binary package produced by this source in testing for this architecture
1086- for pkg in sorted([x.split("/")[0] for x in self.sources['testing'][src][BINARIES] if x.endswith("/"+arch)]):
1087+ source_data = self.sources['testing'][src]
1088+ _, smoothbins = self.find_upgraded_binaries(src,
1089+ source_data,
1090+ arch,
1091+ suite)
1092+
1093+ for pkg in sorted(x.split("/")[0] for x in source_data[BINARIES] if x.endswith("/"+arch)):
1094 # if the package is architecture-independent, then ignore it
1095- if self.binaries['testing'][arch][0][pkg][ARCHITECTURE] == 'all':
1096+ tpkg_data = self.binaries['testing'][arch][0][pkg]
1097+ if tpkg_data[ARCHITECTURE] == 'all':
1098 excuse.addhtml("Ignoring removal of %s as it is arch: all" % (pkg))
1099 continue
1100 # if the package is not produced by the new source package, then remove it from testing
1101 if pkg not in self.binaries[suite][arch][0]:
1102- tpkgv = self.binaries['testing'][arch][0][pkg][VERSION]
1103- excuse.addhtml("Removed binary: %s %s" % (pkg, tpkgv))
1104- if ssrc: anyworthdoing = True
1105+ excuse.addhtml("Removed binary: %s %s" % (pkg, tpkg_data[VERSION]))
1106+ # the removed binary is only interesting if this is a binary-only migration,
1107+ # as otherwise the updated source will already cause the binary packages
1108+ # to be updated
1109+ if ssrc:
1110+ # Special-case, if the binary is a candidate for a smooth update, we do not consider
1111+ # it "interesting" on its own. This case happens quite often with smooth updatable
1112+ # packages, where the old binary "survives" a full run because it still has
1113+ # reverse dependencies.
1114+ name = pkg + "/" + tpkg_data[ARCHITECTURE]
1115+ if name not in smoothbins:
1116+ anyworthdoing = True
1117
1118 # if there is nothing wrong and there is something worth doing, this is a valid candidate
1119 if not anywrongver and anyworthdoing:
1120@@ -1244,7 +1252,7 @@
1121 # otherwise, return False
1122 return False
1123
1124- def should_upgrade_src(self, src, suite):
1125+ def should_upgrade_src(self, src, suite, same_source=same_source):
1126 """Check if source package should be upgraded
1127
1128 This method checks if a source package should be upgraded. The analysis
1129@@ -1254,6 +1262,8 @@
1130 It returns False if the given package doesn't need to be upgraded,
1131 True otherwise. In the former case, a new excuse is appended to
1132 the object attribute excuses.
1133+
1134+ same_source is an opt to avoid "load global".
1135 """
1136
1137 # retrieve the source packages for testing (if available) and suite
1138@@ -1273,8 +1283,9 @@
1139 source_u[MAINTAINER] and excuse.set_maint(source_u[MAINTAINER].strip())
1140 source_u[SECTION] and excuse.set_section(source_u[SECTION].strip())
1141
1142- # the starting point is that we will update the candidate
1143+ # the starting point is that we will update the candidate and run autopkgtests
1144 update_candidate = True
1145+ run_autopkgtest = True
1146
1147 # if the version in unstable is older, then stop here with a warning in the excuse and return False
1148 if source_t and apt_pkg.version_compare(source_u[VERSION], source_t[VERSION]) < 0:
1149@@ -1286,6 +1297,7 @@
1150 if source_u[FAKESRC]:
1151 excuse.addhtml("%s source package doesn't exist" % (src))
1152 update_candidate = False
1153+ run_autopkgtest = False
1154
1155 # retrieve the urgency for the upload, ignoring it if this is a NEW package (not present in testing)
1156 urgency = self.urgencies.get(src, self.options.default_urgency)
1157@@ -1296,11 +1308,12 @@
1158 # if there is a `remove' hint and the requested version is the same as the
1159 # version in testing, then stop here and return False
1160 for item in self.hints.search('remove', package=src):
1161- if source_t and self.same_source(source_t[VERSION], item.version) or \
1162- self.same_source(source_u[VERSION], item.version):
1163+ if source_t and same_source(source_t[VERSION], item.version) or \
1164+ same_source(source_u[VERSION], item.version):
1165 excuse.addhtml("Removal request by %s" % (item.user))
1166 excuse.addhtml("Trying to remove package, not update it")
1167 update_candidate = False
1168+ run_autopkgtest = False
1169
1170 # check if there is a `block' or `block-udeb' hint for this package, or a `block-all source' hint
1171 blocked = {}
1172@@ -1321,7 +1334,7 @@
1173 unblock_cmd = "un" + block_cmd
1174 unblocks = self.hints.search(unblock_cmd, package=src)
1175
1176- if unblocks and self.same_source(unblocks[0].version, source_u[VERSION]):
1177+ if unblocks and unblocks[0].version is not None and same_source(unblocks[0].version, source_u[VERSION]):
1178 if suite == 'unstable' or block_cmd == 'block-udeb':
1179 excuse.addhtml("Ignoring %s request by %s, due to %s request by %s" %
1180 (block_cmd, blocked[block_cmd].user, unblock_cmd, unblocks[0].user))
1181@@ -1329,10 +1342,14 @@
1182 excuse.addhtml("Approved by %s" % (unblocks[0].user))
1183 else:
1184 if unblocks:
1185- excuse.addhtml("%s request by %s ignored due to version mismatch: %s" %
1186- (unblock_cmd.capitalize(), unblocks[0].user, unblocks[0].version))
1187+ if unblocks[0].version is None:
1188+ excuse.addhtml("%s request by %s ignored due to missing version" %
1189+ (unblock_cmd.capitalize(), unblocks[0].user))
1190+ else:
1191+ excuse.addhtml("%s request by %s ignored due to version mismatch: %s" %
1192+ (unblock_cmd.capitalize(), unblocks[0].user, unblocks[0].version))
1193 if suite == 'unstable' or block_cmd == 'block-udeb':
1194- excuse.addhtml("Not touching package due to %s request by %s (contact debian-release if update is needed)" %
1195+ excuse.addhtml("Not touching package due to %s request by %s (contact #ubuntu-release if update is needed)" %
1196 (block_cmd, blocked[block_cmd].user))
1197 else:
1198 excuse.addhtml("NEEDS APPROVAL BY RM")
1199@@ -1345,14 +1362,14 @@
1200 if suite == 'unstable':
1201 if src not in self.dates:
1202 self.dates[src] = (source_u[VERSION], self.date_now)
1203- elif not self.same_source(self.dates[src][0], source_u[VERSION]):
1204+ elif not same_source(self.dates[src][0], source_u[VERSION]):
1205 self.dates[src] = (source_u[VERSION], self.date_now)
1206
1207- days_old = self.date_now - self.dates[src][1]
1208+ days_old = (self.date_now - self.dates[src][1]) / 60 / 60 / 24
1209 min_days = self.MINDAYS[urgency]
1210
1211 for age_days_hint in [ x for x in self.hints.search('age-days', package=src) if \
1212- self.same_source(source_u[VERSION], x.version) ]:
1213+ same_source(source_u[VERSION], x.version) ]:
1214 excuse.addhtml("Overriding age needed from %d days to %d by %s" % (min_days,
1215 int(age_days_hint.days), age_days_hint.user))
1216 min_days = int(age_days_hint.days)
1217@@ -1360,11 +1377,12 @@
1218 excuse.setdaysold(days_old, min_days)
1219 if days_old < min_days:
1220 urgent_hints = [ x for x in self.hints.search('urgent', package=src) if \
1221- self.same_source(source_u[VERSION], x.version) ]
1222+ same_source(source_u[VERSION], x.version) ]
1223 if urgent_hints:
1224 excuse.addhtml("Too young, but urgency pushed by %s" % (urgent_hints[0].user))
1225 else:
1226 update_candidate = False
1227+ run_autopkgtest = False
1228
1229 if suite in ['pu', 'tpu']:
1230 # o-o-d(ish) checks for (t-)p-u
1231@@ -1391,22 +1409,25 @@
1232 base = 'testing'
1233 else:
1234 base = 'stable'
1235- text = "Not yet built on <a href=\"http://buildd.debian.org/status/logs.php?arch=%s&pkg=%s&ver=%s&suite=%s\" target=\"_blank\">%s</a> (relative to testing)" % (urllib.quote(arch), urllib.quote(src), urllib.quote(source_u[VERSION]), base, arch)
1236+ text = "Not yet built on <a href=\"https://launchpad.net/ubuntu/+source/%s/%s\" target=\"_blank\">%s</a> (relative to testing)" % (urllib.quote(src.split("/")[0]), urllib.quote(source_u[VERSION]), arch)
1237
1238- if arch in self.options.fucked_arches.split():
1239+ if arch in self.options.outofsync_arches.split():
1240 text = text + " (but %s isn't keeping up, so never mind)" % (arch)
1241 else:
1242 update_candidate = False
1243+ if arch in self.options.adt_arches.split():
1244+ run_autopkgtest = False
1245
1246 excuse.addhtml(text)
1247
1248 # at this point, we check the status of the builds on all the supported architectures
1249 # to catch the out-of-date ones
1250 pkgs = {src: ["source"]}
1251+ built_anywhere = False
1252 for arch in self.options.architectures:
1253 oodbins = {}
1254 # for every binary package produced by this source in the suite for this architecture
1255- for pkg in sorted([x.split("/")[0] for x in self.sources[suite][src][BINARIES] if x.endswith("/"+arch)]):
1256+ for pkg in sorted(x.split("/")[0] for x in self.sources[suite][src][BINARIES] if x.endswith("/"+arch)):
1257 if pkg not in pkgs: pkgs[pkg] = []
1258 pkgs[pkg].append(arch)
1259
1260@@ -1415,11 +1436,12 @@
1261 pkgsv = binary_u[SOURCEVER]
1262
1263 # if it wasn't built by the same source, it is out-of-date
1264- if not self.same_source(source_u[VERSION], pkgsv):
1265+ if not same_source(source_u[VERSION], pkgsv):
1266 if pkgsv not in oodbins:
1267 oodbins[pkgsv] = []
1268 oodbins[pkgsv].append(pkg)
1269 continue
1270+ built_anywhere = True
1271
1272 # if the package is architecture-dependent or the current arch is `nobreakall'
1273 # find unsatisfied dependencies for the binary package
1274@@ -1428,36 +1450,42 @@
1275
1276 # if there are out-of-date packages, warn about them in the excuse and set update_candidate
1277 # to False to block the update; if the architecture where the package is out-of-date is
1278- # in the `fucked_arches' list, then do not block the update
1279+ # in the `outofsync_arches' list, then do not block the update
1280 if oodbins:
1281 oodtxt = ""
1282 for v in oodbins.keys():
1283 if oodtxt: oodtxt = oodtxt + "; "
1284- oodtxt = oodtxt + "%s (from <a href=\"http://buildd.debian.org/status/logs.php?" \
1285- "arch=%s&pkg=%s&ver=%s\" target=\"_blank\">%s</a>)" % \
1286- (", ".join(sorted(oodbins[v])), urllib.quote(arch), urllib.quote(src), urllib.quote(v), v)
1287- text = "out of date on <a href=\"http://buildd.debian.org/status/logs.php?" \
1288- "arch=%s&pkg=%s&ver=%s\" target=\"_blank\">%s</a>: %s" % \
1289- (urllib.quote(arch), urllib.quote(src), urllib.quote(source_u[VERSION]), arch, oodtxt)
1290+ oodtxt = oodtxt + "%s (from <a href=\"https://launchpad.net/ubuntu/+source/" \
1291+ "%s/%s\" target=\"_blank\">%s</a>)" % \
1292+ (", ".join(sorted(oodbins[v])), urllib.quote(src.split("/")[0]), urllib.quote(v), v)
1293+ text = "out of date on <a href=\"https://launchpad.net/ubuntu/+source/" \
1294+ "%s/%s\" target=\"_blank\">%s</a>: %s" % \
1295+ (urllib.quote(src.split("/")[0]), urllib.quote(source_u[VERSION]), arch, oodtxt)
1296
1297- if arch in self.options.fucked_arches.split():
1298+ if arch in self.options.outofsync_arches.split():
1299 text = text + " (but %s isn't keeping up, so nevermind)" % (arch)
1300 else:
1301 update_candidate = False
1302+ if arch in self.options.adt_arches.split():
1303+ run_autopkgtest = False
1304
1305- if self.date_now != self.dates[src][1]:
1306- excuse.addhtml(text)
1307+ excuse.addhtml(text)
1308
1309 # if the source package has no binaries, set update_candidate to False to block the update
1310 if len(self.sources[suite][src][BINARIES]) == 0:
1311 excuse.addhtml("%s has no binaries on any arch" % src)
1312 update_candidate = False
1313+ run_autopkgtest = False
1314+ elif not built_anywhere:
1315+ excuse.addhtml("%s has no up-to-date binaries on any arch" % src)
1316+ update_candidate = False
1317+ run_autopkgtest = False
1318
1319 # if the suite is unstable, then we have to check the release-critical bug lists before
1320 # updating testing; if the unstable package has RC bugs that do not apply to the testing
1321- # one, the check fails and we set update_candidate to False to block the update
1322+ # one, the check fails and we set update_candidate to False to block the update
1323 if suite == 'unstable':
1324- for pkg in pkgs.keys():
1325+ for pkg in pkgs:
1326 bugs_t = []
1327 bugs_u = []
1328 if pkg in self.bugs['testing']:
1329@@ -1481,6 +1509,7 @@
1330 excuse.addhtml("Updating %s introduces new bugs: %s" % (pkg, ", ".join(
1331 ["<a href=\"http://bugs.debian.org/%s\">#%s</a>" % (urllib.quote(a), a) for a in new_bugs])))
1332 update_candidate = False
1333+ run_autopkgtest = False
1334
1335 if len(old_bugs) > 0:
1336 excuse.addhtml("Updating %s fixes old bugs: %s" % (pkg, ", ".join(
1337@@ -1490,12 +1519,13 @@
1338 "though it fixes more than it introduces, whine at debian-release)" % pkg)
1339
1340 # check if there is a `force' hint for this package, which allows it to go in even if it is not updateable
1341- forces = [ x for x in self.hints.search('force', package=src) if self.same_source(source_u[VERSION], x.version) ]
1342+ forces = [ x for x in self.hints.search('force', package=src) if same_source(source_u[VERSION], x.version) ]
1343 if forces:
1344 excuse.dontinvalidate = True
1345 if not update_candidate and forces:
1346 excuse.addhtml("Should ignore, but forced by %s" % (forces[0].user))
1347 update_candidate = True
1348+ run_autopkgtest = True
1349
1350 # if the package can be updated, it is a valid candidate
1351 if update_candidate:
1352@@ -1503,6 +1533,7 @@
1353 # else it won't be considered
1354 else:
1355 excuse.addhtml("Not considered")
1356+ excuse.run_autopkgtest = run_autopkgtest
1357
1358 self.excuses.append(excuse)
1359 return update_candidate
1360@@ -1563,12 +1594,14 @@
1361 exclookup[x].is_valid = False
1362 i = i + 1
1363
1364- def write_excuses(self):
1365+ def write_excuses(self, same_source=same_source):
1366 """Produce and write the update excuses
1367
1368 This method handles the update excuses generation: the packages are
1369 looked at to determine whether they are valid candidates. For the details
1370 of this procedure, please refer to the module docstring.
1371+
1372+        same_source is an optimization to avoid "load global".
1373 """
1374
1375 self.__log("Update Excuses generation started", type="I")
1376@@ -1628,7 +1661,7 @@
1377
1378 # check if the version specified in the hint is the same as the considered package
1379 tsrcv = sources['testing'][src][VERSION]
1380- if not self.same_source(tsrcv, item.version): continue
1381+ if not same_source(tsrcv, item.version): continue
1382
1383 # add the removal of the package to upgrade_me and build a new excuse
1384 upgrade_me.append("-%s" % (src))
1385@@ -1644,6 +1677,79 @@
1386 # extract the not considered packages, which are in the excuses but not in upgrade_me
1387 unconsidered = [e.name for e in self.excuses if e.name not in upgrade_me]
1388
1389+ if self.options.adt_series:
1390+ # trigger autopkgtests for valid candidates
1391+ adt_debug = getattr(self.options, "adt_debug", "no") == "yes"
1392+ autopkgtest = AutoPackageTest(
1393+ self, self.options.adt_series, debug=adt_debug)
1394+ autopkgtest_packages = []
1395+ autopkgtest_excuses = []
1396+ for e in self.excuses:
1397+ if not e.run_autopkgtest:
1398+ continue
1399+ # skip removals, binary-only candidates, and proposed-updates
1400+ if e.name.startswith("-") or "/" in e.name or "_" in e.name:
1401+                continue
1402+ if e.ver[1] == "-":
1403+ continue
1404+ autopkgtest_excuses.append(e)
1405+ autopkgtest_packages.append((e.name, e.ver[1]))
1406+ autopkgtest.request(autopkgtest_packages)
1407+ if not self.options.dry_run:
1408+ autopkgtest.submit()
1409+ autopkgtest.collect()
1410+ jenkins_public = (
1411+ "https://jenkins.qa.ubuntu.com/view/%s/view/AutoPkgTest/job" %
1412+ self.options.adt_series.title())
1413+ jenkins_private = (
1414+ "http://10.98.0.1:8080/view/%s/view/AutoPkgTest/job" %
1415+ self.options.adt_series.title())
1416+ for e in autopkgtest_excuses:
1417+ adtpass = True
1418+ for status, adtsrc, adtver in autopkgtest.results(
1419+ e.name, e.ver[1]):
1420+ public_url = "%s/%s-adt-%s/" % (
1421+ jenkins_public, self.options.adt_series,
1422+ adtsrc.replace("+", "-"))
1423+ private_url = "%s/%s-adt-%s/" % (
1424+ jenkins_private, self.options.adt_series,
1425+ adtsrc.replace("+", "-"))
1426+ e.addhtml(
1427+ "autopkgtest for %s %s: %s (Jenkins: "
1428+ "<a href=\"%s\">public</a>, "
1429+ "<a href=\"%s\">private</a>)" %
1430+ (adtsrc, adtver, status, public_url, private_url))
1431+ if status != "PASS":
1432+ hints = self.hints.search(
1433+ 'force-badtest', package=adtsrc)
1434+ hints.extend(
1435+ self.hints.search('force', package=adtsrc))
1436+ forces = [
1437+ x for x in hints
1438+ if same_source(adtver, x.version) ]
1439+ if forces:
1440+ e.addhtml(
1441+ "Should wait for %s %s test, but forced by "
1442+ "%s" % (adtsrc, adtver, forces[0].user))
1443+ else:
1444+ adtpass = False
1445+ if not adtpass and e.is_valid:
1446+ hints = self.hints.search('force-skiptest', package=e.name)
1447+ hints.extend(self.hints.search('force', package=e.name))
1448+ forces = [
1449+ x for x in hints
1450+ if same_source(e.ver[1], x.version) ]
1451+ if forces:
1452+ e.addhtml(
1453+ "Should wait for tests relating to %s %s, but "
1454+ "forced by %s" %
1455+ (e.name, e.ver[1], forces[0].user))
1456+ else:
1457+ upgrade_me.remove(e.name)
1458+ unconsidered.append(e.name)
1459+ e.addhtml("Not considered")
1460+ e.is_valid = False
1461+
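
The force-badtest/force-skiptest branches above consume ordinary britney hints. A minimal sketch of the hint-file lines a release manager could write to trigger them (package names and versions are hypothetical):

    force-badtest libfoo/1.2-1    # ignore a failing autopkgtest for libfoo 1.2-1
    force-skiptest foo/2.0-1      # let foo 2.0-1 in without waiting for its tests
    force foo/2.0-1               # the plain force hint is honoured by both searches
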
1462 # invalidate impossible excuses
1463 for e in self.excuses:
1464 # parts[0] == package name
1465@@ -1679,7 +1785,7 @@
1466 self.invalidate_excuses(upgrade_me, unconsidered)
1467
1468 # sort the list of candidates
1469- self.upgrade_me = sorted([ MigrationItem(x) for x in upgrade_me ])
1470+ self.upgrade_me = sorted( make_migrationitem(x, self.sources) for x in upgrade_me )
1471
1472 # write excuses to the output file
1473 if not self.options.dry_run:
1474@@ -1689,6 +1795,7 @@
1475 f.write("<html><head><title>excuses...</title>")
1476 f.write("<meta http-equiv=\"Content-Type\" content=\"text/html;charset=utf-8\"></head><body>\n")
1477 f.write("<p>Generated: " + time.strftime("%Y.%m.%d %H:%M:%S %z", time.gmtime(time.time())) + "</p>\n")
1478+ f.write("<p>See the <a href=\"https://wiki.ubuntu.com/ProposedMigration\">documentation</a> for help interpreting this page.</p>\n")
1479 f.write("<ul>\n")
1480 for e in self.excuses:
1481 f.write("<li>%s" % e.html())
1482@@ -1700,20 +1807,6 @@
1483 # Upgrade run
1484 # -----------
1485
1486- def newlyuninst(self, nuold, nunew):
1487- """Return a nuninst statstic with only new uninstallable packages
1488-
1489- This method subtracts the uninstallable packages of the statistic
1490- `nunew` from the statistic `nuold`.
1491-
1492- It returns a dictionary with the architectures as keys and the list
1493- of uninstallable packages as values.
1494- """
1495- res = {}
1496- for arch in nuold:
1497- if arch not in nunew: continue
1498- res[arch] = [x for x in nunew[arch] if x not in nuold[arch]]
1499- return res
1500
1501 def get_nuninst(self, requested_arch=None, build=False):
1502 """Return the uninstallability statistic for all the architectures
1503@@ -1726,10 +1819,13 @@
1504
1505 It returns a dictionary with the architectures as keys and the list
1506 of uninstallable packages as values.
1507+
1508+ NB: If build is False, requested_arch is ignored.
1509 """
1510 # if we are not asked to build the nuninst, read it from the cache
1511 if not build:
1512- return self.read_nuninst()
1513+ return read_nuninst(self.options.noninst_status,
1514+ self.options.architectures)
1515
1516 nuninst = {}
1517
1518@@ -1794,20 +1890,6 @@
1519 res.append("%s-%d" % (arch[0], n))
1520 return "%d+%d: %s" % (total, totalbreak, ":".join(res))
1521
1522- def eval_uninst(self, nuninst):
1523- """Return a string which represents the uninstallable packages
1524-
1525- This method returns a string which represents the uninstallable
1526- packages reading the uninstallability statistics `nuninst`.
1527-
1528- An example of the output string is:
1529- * i386: broken-pkg1, broken-pkg2
1530- """
1531- parts = []
1532- for arch in self.options.architectures:
1533- if arch in nuninst and len(nuninst[arch]) > 0:
1534- parts.append(" * %s: %s\n" % (arch,", ".join(sorted(nuninst[arch]))))
1535- return "".join(parts)
1536
1537 def is_nuninst_asgood_generous(self, old, new):
1538 diff = 0
1539@@ -1817,6 +1899,111 @@
1540 return diff <= 0
1541
1542
1543+ def find_upgraded_binaries(self, source_name, source_data,
1544+ architecture, suite):
1545+ # XXX: not the best name - really.
1546+ """Find smooth and non-smooth updatable binaries for upgrades
1547+
1548+ This method will compute the binaries that will be replaced in
1549+ testing and which of them are smooth updatable.
1550+
1551+ Parameters:
1552+ * "source_name" is the name of the source package, whose
1553+ binaries are migrating.
1554+ * "source_data" is the fields of that source package from
1555+ testing.
1556+        * "architecture" is the architecture of the migrating
1557+          binaries (can be "source" for a "source"-migration,
1558+          meaning all binaries regardless of
1559+          architecture).
1560+ * "suite" is the suite from which the binaries are migrating.
1561+
1562+ Returns a tuple (bins, smoothbins). "bins" is a set of binaries
1563+ that are not smooth-updatable (or binaries that could be, but
1564+ there is no reason to let them be smooth updated).
1565+        "smoothbins" is the set of binaries that are to be smooth-updated.
1566+
1567+ Pre-Conditions: The source package must be in testing and this
1568+ should only be used when considering to do an upgrade
1569+ migration from the input suite. (e.g. do not use this for
1570+ removals).
1571+ """
1572+ bins = set()
1573+ smoothbins = set()
1574+ check = []
1575+
1576+ binaries_t = self.binaries['testing']
1577+ # first, build a list of eligible binaries
1578+ for p in source_data[BINARIES]:
1579+ binary, parch = p.split("/")
1580+ if architecture != 'source':
1581+ # for a binary migration, binaries should not be removed:
1582+ # - unless they are for the correct architecture
1583+ if parch != architecture:
1584+ continue
1585+ # - if they are arch:all and the migration is via *pu,
1586+ # as the packages will not have been rebuilt and the
1587+ # source suite will not contain them
1588+ if binaries_t[parch][0][binary][ARCHITECTURE] == 'all' and \
1589+ suite != 'unstable':
1590+ continue
1591+ # do not remove binaries which have been hijacked by other sources
1592+ if binaries_t[parch][0][binary][SOURCE] != source_name:
1593+ continue
1594+ bins.add(p)
1595+
1596+ if suite != 'unstable':
1597+            # We only allow smooth updates from unstable, so if we
1598+            # are not migrating from unstable, just exit now.
1599+ return (bins, smoothbins)
1600+
1601+ for p in bins:
1602+ binary, parch = p.split("/")
1603+ # if a smooth update is possible for the package, skip it
1604+ if binary not in self.binaries[suite][parch][0] and \
1605+ ('ALL' in self.options.smooth_updates or \
1606+ binaries_t[parch][0][binary][SECTION] in self.options.smooth_updates):
1607+
1608+ # if the package has reverse-dependencies which are
1609+ # built from other sources, it's a valid candidate for
1610+ # a smooth update. if not, it may still be a valid
1611+            # candidate if one of its r-deps is itself a candidate,
1612+ # so note it for checking later
1613+ rdeps = binaries_t[parch][0][binary][RDEPENDS]
1614+
1615+ # the list of reverse-dependencies may be outdated
1616+ # if, for example, we're processing a hint and
1617+ # a new version of one of the apparent reverse-dependencies
1618+ # migrated earlier in the hint. walk the list to make
1619+ # sure that at least one of the entries is still
1620+ # valid
1621+ rrdeps = [x for x in rdeps if x not in [y.split("/")[0] for y in bins]]
1622+ if rrdeps:
1623+ for dep in rrdeps:
1624+ if dep in binaries_t[parch][0]:
1625+ bin = binaries_t[parch][0][dep]
1626+ deps = []
1627+ if bin[DEPENDS] is not None:
1628+ deps.extend(apt_pkg.parse_depends(bin[DEPENDS], False))
1629+ if any(binary == entry[0] for deplist in deps for entry in deplist):
1630+ smoothbins.add(p)
1631+ break
1632+ else:
1633+ check.append(p)
1634+
1635+
1636+ # check whether we should perform a smooth update for
1637+ # packages which are candidates but do not have r-deps
1638+ # outside of the current source
1639+ for p in check:
1640+ binary, parch = p.split("/")
1641+ if any(bin for bin in binaries_t[parch][0][binary][RDEPENDS] \
1642+ if bin in [y.split("/")[0] for y in smoothbins]):
1643+ smoothbins.add(p)
1644+
1645+ bins -= smoothbins
1646+ return (bins, smoothbins)
1647+
1648 def doop_source(self, item, hint_undo=[]):
1649 """Apply a change to the testing distribution as requested by `pkg`
1650
1651@@ -1837,85 +2024,24 @@
1652 # local copies for better performances
1653 sources = self.sources
1654 binaries = self.binaries['testing']
1655+ get_reverse_tree = partial(compute_reverse_tree, self.binaries["testing"])
1656 # remove all binary packages (if the source already exists)
1657 if item.architecture == 'source' or not item.is_removal:
1658 if item.package in sources['testing']:
1659 source = sources['testing'][item.package]
1660
1661- bins = []
1662- check = []
1663- smoothbins = []
1664-
1665- # remove all the binaries
1666-
1667- # first, build a list of eligible binaries
1668- for p in source[BINARIES]:
1669- binary, parch = p.split("/")
1670- if item.architecture != 'source':
1671- # for a binary migration, binaries should not be removed:
1672- # - unless they are for the correct architecture
1673- if parch != item.architecture: continue
1674- # - if they are arch:all and the migration is via *pu,
1675- # as the packages will not have been rebuilt and the
1676- # source suite will not contain them
1677- if binaries[parch][0][binary][ARCHITECTURE] == 'all' and \
1678- item.suite != 'unstable':
1679- continue
1680- # do not remove binaries which have been hijacked by other sources
1681- if binaries[parch][0][binary][SOURCE] != item.package: continue
1682- bins.append(p)
1683-
1684+ bins, _ = self.find_upgraded_binaries(item.package,
1685+ source,
1686+ item.architecture,
1687+ item.suite)
1688+
1689+ # remove all the binaries which aren't being smooth updated
1690 for p in bins:
1691 binary, parch = p.split("/")
1692- # if a smooth update is possible for the package, skip it
1693- if item.suite == 'unstable' and \
1694- binary not in self.binaries[item.suite][parch][0] and \
1695- ('ALL' in self.options.smooth_updates or \
1696- binaries[parch][0][binary][SECTION] in self.options.smooth_updates):
1697-
1698- # if the package has reverse-dependencies which are
1699- # built from other sources, it's a valid candidate for
1700- # a smooth update. if not, it may still be a valid
1701- # candidate if one if its r-deps is itself a candidate,
1702- # so note it for checking later
1703- rdeps = binaries[parch][0][binary][RDEPENDS]
1704-
1705- # the list of reverse-dependencies may be outdated
1706- # if, for example, we're processing a hint and
1707- # a new version of one of the apparent reverse-dependencies
1708- # migrated earlier in the hint. walk the list to make
1709- # sure that at least one of the entries is still
1710- # valid
1711- rrdeps = [x for x in rdeps if x not in [y.split("/")[0] for y in bins]]
1712- if len(rrdeps) > 0:
1713- for dep in rrdeps:
1714- if dep in binaries[parch][0]:
1715- bin = binaries[parch][0][dep]
1716- deps = []
1717- if bin[DEPENDS] is not None:
1718- deps.extend(apt_pkg.parse_depends(bin[DEPENDS], False))
1719- if any(binary == entry[0] for deplist in deps for entry in deplist):
1720- smoothbins.append(p)
1721- break
1722- else:
1723- check.append(p)
1724-
1725- # check whether we should perform a smooth update for
1726- # packages which are candidates but do not have r-deps
1727- # outside of the current source
1728- for p in check:
1729- binary, parch = p.split("/")
1730- if any(bin for bin in binaries[parch][0][binary][RDEPENDS] \
1731- if bin in [y.split("/")[0] for y in smoothbins]):
1732- smoothbins.append(p)
1733-
1734- # remove all the binaries which aren't being smooth updated
1735- for p in [ bin for bin in bins if bin not in smoothbins ]:
1736- binary, parch = p.split("/")
1737 # save the old binary for undo
1738 undo['binaries'][p] = binaries[parch][0][binary]
1739 # all the reverse dependencies are affected by the change
1740- affected.update(self.get_reverse_tree(binary, parch, 'testing'))
1741+ affected.update(get_reverse_tree(binary, parch))
1742 # remove the provided virtual packages
1743 for j in binaries[parch][0][binary][PROVIDES]:
1744 key = j + "/" + parch
1745@@ -1939,7 +2065,7 @@
1746 # updates but not supported as a manual hint
1747 elif item.package in binaries[item.architecture][0]:
1748 undo['binaries'][item.package + "/" + item.architecture] = binaries[item.architecture][0][item.package]
1749- affected.update(self.get_reverse_tree(item.package, item.architecture, 'testing'))
1750+ affected.update(get_reverse_tree(item.package, item.architecture))
1751 del binaries[item.architecture][0][item.package]
1752 self.systems[item.architecture].remove_binary(item.package)
1753
1754@@ -1960,14 +2086,10 @@
1755 # save the old binary package
1756 undo['binaries'][p] = binaries[parch][0][binary]
1757 # all the reverse dependencies are affected by the change
1758- affected.update(self.get_reverse_tree(binary, parch, 'testing'))
1759+ affected.update(get_reverse_tree(binary, parch))
1760 # all the reverse conflicts and their dependency tree are affected by the change
1761 for j in binaries[parch][0][binary][RCONFLICTS]:
1762- key = (j, parch)
1763- if key not in affected: affected.add(key)
1764- for p in self.get_full_tree(j, parch, 'testing'):
1765- key = (p, parch)
1766- if key not in affected: affected.add(key)
1767+ affected.update(get_reverse_tree(j, parch))
1768 self.systems[parch].remove_binary(binary)
1769 else:
1770 # the binary isn't in testing, but it may have been at
1771@@ -1985,8 +2107,7 @@
1772 if p in tundo['binaries']:
1773 for rdep in tundo['binaries'][p][RDEPENDS]:
1774 if rdep in binaries[parch][0] and rdep not in source[BINARIES]:
1775- affected.add( (rdep, parch) )
1776- affected.update(self.get_reverse_tree(rdep, parch, 'testing'))
1777+ affected.update(get_reverse_tree(rdep, parch))
1778 # add/update the binary package
1779 binaries[parch][0][binary] = self.binaries[item.suite][parch][0][binary]
1780 self.systems[parch].add_binary(binary, binaries[parch][0][binary][:PROVIDES] + \
1781@@ -2001,13 +2122,15 @@
1782 undo['virtual'][key] = binaries[parch][1][j][:]
1783 binaries[parch][1][j].append(binary)
1784 # all the reverse dependencies are affected by the change
1785- affected.update(self.get_reverse_tree(binary, parch, 'testing'))
1786+ affected.update(get_reverse_tree(binary, parch))
1787
1788 # register reverse dependencies and conflicts for the new binary packages
1789- for p in source[BINARIES]:
1790- binary, parch = p.split("/")
1791- if item.architecture not in ['source', parch]: continue
1792- self.register_reverses(binary, binaries[parch][0] , binaries[parch][1])
1793+ if item.architecture == 'source':
1794+ pkg_iter = (p.split("/")[0] for p in source[BINARIES])
1795+ else:
1796+ ext = "/" + item.architecture
1797+ pkg_iter = (p.split("/")[0] for p in source[BINARIES] if p.endswith(ext))
1798+ register_reverses(binaries[parch][0], binaries[parch][1], iterator=pkg_iter)
1799
1800 # add/update the source package
1801 if item.architecture == 'source':
1802@@ -2016,49 +2139,13 @@
1803 # return the package name, the suite, the list of affected packages and the undo dictionary
1804 return (item, affected, undo)
1805
1806- def get_reverse_tree(self, pkg, arch, suite):
1807- binaries = self.binaries[suite][arch][0]
1808-
1809- rev_deps = set(binaries[pkg][RDEPENDS])
1810- seen = set()
1811- while len(rev_deps) > 0:
1812- # mark all of the current iteration of packages as affected
1813- seen |= rev_deps
1814- # generate the next iteration, which is the reverse-dependencies of
1815- # the current iteration
1816- new_rev_deps = [ binaries[x][RDEPENDS] for x in rev_deps \
1817- if x in binaries ]
1818- # flatten the list-of-lists, filtering out already handled packages
1819- # in the process
1820- rev_deps = set([package for package in chain.from_iterable(new_rev_deps) \
1821- if package not in seen ])
1822- return zip(seen, repeat(arch))
1823-
1824- def get_full_tree(self, pkg, arch, suite):
1825- """Calculate the full dependency tree for the given package
1826-
1827- This method returns the full dependency tree for the package `pkg`,
1828- inside the `arch` architecture for the suite `suite`.
1829- """
1830- packages = [pkg]
1831- binaries = self.binaries[suite][arch][0]
1832- if pkg in binaries:
1833- l = n = 0
1834- while len(packages) > l:
1835- l = len(packages)
1836- for p in packages[n:]:
1837- packages.extend([x for x in binaries[p][RDEPENDS] if x not in packages and x in binaries])
1838- n = l
1839- return packages
1840- else:
1841- return []
1842
1843 def _check_packages(self, binaries, systems, arch, affected, skip_archall, nuninst, pkg):
1844 broken = nuninst[arch + "+all"]
1845 to_check = []
1846
1847 # broken packages (first round)
1848- for p in [x[0] for x in affected if x[1] == arch]:
1849+ for p in (x[0] for x in affected if x[1] == arch):
1850 if p not in binaries[arch][0]:
1851 continue
1852 nuninst_arch = None
1853@@ -2119,7 +2206,7 @@
1854 if lundo is None:
1855 lundo = []
1856 if not hint:
1857- self.output_write("recur: [%s] %s %d/%d\n" % ("", ",".join([x.uvname for x in selected]), len(packages), len(extra)))
1858+ self.output_write("recur: [%s] %s %d/%d\n" % ("", ",".join(x.uvname for x in selected), len(packages), len(extra)))
1859
1860 # loop on the packages (or better, actions)
1861 while packages:
1862@@ -2137,14 +2224,14 @@
1863 defer = False
1864 for p in dependencies.get(pkg, []):
1865 if p in skipped:
1866- deferred.append(pkg)
1867- skipped.append(pkg)
1868+ deferred.append(make_migrationitem(pkg, self.sources))
1869+ skipped.append(make_migrationitem(pkg, self.sources))
1870 defer = True
1871 break
1872 if defer: continue
1873
1874 if not hint:
1875- self.output_write("trying: %s\n" % (pkg))
1876+ self.output_write("trying: %s\n" % (pkg.uvname))
1877
1878 better = True
1879 nuninst = {}
1880@@ -2163,10 +2250,10 @@
1881 skip_archall = True
1882 else: skip_archall = False
1883
1884- nuninst[arch] = set([x for x in nuninst_comp[arch] if x in binaries[arch][0]])
1885- nuninst[arch + "+all"] = set([x for x in nuninst_comp[arch + "+all"] if x in binaries[arch][0]])
1886+ nuninst[arch] = set(x for x in nuninst_comp[arch] if x in binaries[arch][0])
1887+ nuninst[arch + "+all"] = set(x for x in nuninst_comp[arch + "+all"] if x in binaries[arch][0])
1888
1889- check_packages(arch, affected, skip_archall, nuninst, pkg)
1890+ check_packages(arch, affected, skip_archall, nuninst, pkg.uvname)
1891
1892 # if we are processing hints, go ahead
1893 if hint:
1894@@ -2189,36 +2276,37 @@
1895 selected.append(pkg)
1896 packages.extend(extra)
1897 extra = []
1898- self.output_write("accepted: %s\n" % (pkg))
1899+ self.output_write("accepted: %s\n" % (pkg.uvname))
1900 self.output_write(" ori: %s\n" % (self.eval_nuninst(self.nuninst_orig)))
1901 self.output_write(" pre: %s\n" % (self.eval_nuninst(nuninst_comp)))
1902 self.output_write(" now: %s\n" % (self.eval_nuninst(nuninst, nuninst_comp)))
1903 if len(selected) <= 20:
1904- self.output_write(" all: %s\n" % (" ".join([ x.uvname for x in selected ])))
1905+ self.output_write(" all: %s\n" % (" ".join( x.uvname for x in selected )))
1906 else:
1907- self.output_write(" most: (%d) .. %s\n" % (len(selected), " ".join([x.uvname for x in selected][-20:])))
1908+ self.output_write(" most: (%d) .. %s\n" % (len(selected), " ".join(x.uvname for x in selected[-20:])))
1909 for k in nuninst:
1910 nuninst_comp[k] = nuninst[k]
1911 else:
1912- self.output_write("skipped: %s (%d <- %d)\n" % (pkg, len(extra), len(packages)))
1913+ self.output_write("skipped: %s (%d <- %d)\n" % (pkg.uvname, len(extra), len(packages)))
1914 self.output_write(" got: %s\n" % (self.eval_nuninst(nuninst, pkg.architecture != 'source' and nuninst_comp or None)))
1915- self.output_write(" * %s: %s\n" % (arch, ", ".join(sorted([b for b in nuninst[arch] if b not in nuninst_comp[arch]]))))
1916+ self.output_write(" * %s: %s\n" % (arch, ", ".join(sorted(b for b in nuninst[arch] if b not in nuninst_comp[arch]))))
1917
1918- extra.append(pkg)
1919+ extra.append(item)
1920 if not mark_passed:
1921- skipped.append(pkg)
1922+ skipped.append(item)
1923 single_undo = [(undo, item)]
1924 # (local-scope) binaries is actually self.binaries["testing"] so we cannot use it here.
1925- self.undo_changes(single_undo, systems, sources, self.binaries)
1926+ undo_changes(single_undo, systems, sources, self.binaries)
1927
1928 # if we are processing hints, return now
1929 if hint:
1930 return (nuninst_comp, [])
1931
1932- self.output_write(" finish: [%s]\n" % ",".join([ x.uvname for x in selected ]))
1933+ self.output_write(" finish: [%s]\n" % ",".join( x.uvname for x in selected ))
1934 self.output_write("endloop: %s\n" % (self.eval_nuninst(self.nuninst_orig)))
1935 self.output_write(" now: %s\n" % (self.eval_nuninst(nuninst_comp)))
1936- self.output_write(self.eval_uninst(self.newlyuninst(self.nuninst_orig, nuninst_comp)))
1937+ self.output_write(eval_uninst(self.options.architectures,
1938+ newly_uninst(self.nuninst_orig, nuninst_comp)))
1939 self.output_write("\n")
1940
1941 return (nuninst_comp, extra)
1942@@ -2251,7 +2339,7 @@
1943 if init:
1944 if not force:
1945 lundo = []
1946- self.output_write("leading: %s\n" % (",".join([ x.uvname for x in init ])))
1947+ self.output_write("leading: %s\n" % (",".join( x.uvname for x in init )))
1948 for x in init:
1949 if x not in upgrade_me:
1950 self.output_write("failed: %s\n" % (x.uvname))
1951@@ -2281,13 +2369,14 @@
1952 self.output_write("easy: %s\n" % nuninst_end_str)
1953
1954 if not force:
1955- self.output_write(self.eval_uninst(self.newlyuninst(nuninst_start, nuninst_end)) + "\n")
1956+ self.output_write(eval_uninst(self.options.architectures,
1957+ newly_uninst(nuninst_start, nuninst_end)))
1958
1959 if force or self.is_nuninst_asgood_generous(self.nuninst_orig, nuninst_end):
1960 # Result accepted either by force or by being better than the original result.
1961 if recurse:
1962 self.output_write("Apparently successful\n")
1963- self.output_write("final: %s\n" % ",".join(sorted([ x.uvname for x in selected ])))
1964+ self.output_write("final: %s\n" % ",".join(sorted( x.uvname for x in selected )))
1965 self.output_write("start: %s\n" % self.eval_nuninst(nuninst_start))
1966 if not force:
1967 self.output_write(" orig: %s\n" % self.eval_nuninst(self.nuninst_orig))
1968@@ -2296,9 +2385,11 @@
1969 self.output_write(" end: %s\n" % nuninst_end_str)
1970 if force:
1971 self.output_write("force breaks:\n")
1972- self.output_write(self.eval_uninst(self.newlyuninst(nuninst_start, nuninst_end)) + "\n")
1973+ self.output_write(eval_uninst(self.options.architectures,
1974+ newly_uninst(nuninst_start, nuninst_end)))
1975 self.output_write("SUCCESS (%d/%d)\n" % (len(actions or self.upgrade_me), len(extra)))
1976 self.nuninst_orig = nuninst_end
1977+ self.all_selected += [x.uvname for x in selected]
1978 if not actions:
1979 if recurse:
1980 self.upgrade_me = sorted(extra)
1981@@ -2308,76 +2399,10 @@
1982 else:
1983 self.output_write("FAILED\n")
1984 if not lundo: return
1985-
1986- self.undo_changes(lundo, self.systems, self.sources, self.binaries)
1987-
1988-
1989- def undo_changes(self, lundo, systems, sources, binaries):
1990- """Undoes one or more changes to testing
1991-
1992- * lundo is a list of (undo, item)-tuples
1993- * systems is the britney-py.c system
1994- * sources is the table of all source packages for all suites
1995- * binaries is the table of all binary packages for all suites
1996- and architectures
1997- """
1998-
1999- # We do the undo process in "4 steps" and each step must be
2000- # fully completed for each undo-item before starting on the
2001- # next.
2002- #
2003- # see commit:ef71f0e33a7c3d8ef223ec9ad5e9843777e68133 and
2004- # #624716 for the issues we had when we did not do this.
2005-
2006-
2007- # STEP 1
2008- # undo all the changes for sources
2009- for (undo, item) in lundo:
2010- for k in undo['sources'].keys():
2011- if k[0] == '-':
2012- del sources["testing"][k[1:]]
2013- else:
2014- sources["testing"][k] = undo['sources'][k]
2015-
2016- # STEP 2
2017- # undo all new binaries (consequence of the above)
2018- for (undo, item) in lundo:
2019- if not item.is_removal and item.package in sources[item.suite]:
2020- for p in sources[item.suite][item.package][BINARIES]:
2021- binary, arch = p.split("/")
2022- if item.architecture in ['source', arch]:
2023- del binaries["testing"][arch][0][binary]
2024- systems[arch].remove_binary(binary)
2025-
2026-
2027- # STEP 3
2028- # undo all other binary package changes (except virtual packages)
2029- for (undo, item) in lundo:
2030- for p in undo['binaries'].keys():
2031- binary, arch = p.split("/")
2032- if binary[0] == "-":
2033- del binaries['testing'][arch][0][binary[1:]]
2034- systems[arch].remove_binary(binary[1:])
2035- else:
2036- binaries_t_a = binaries['testing'][arch][0]
2037- binaries_t_a[binary] = undo['binaries'][p]
2038- systems[arch].remove_binary(binary)
2039- systems[arch].add_binary(binary, binaries_t_a[binary][:PROVIDES] + \
2040- [", ".join(binaries_t_a[binary][PROVIDES]) or None])
2041-
2042- # STEP 4
2043- # undo all changes to virtual packages
2044- for (undo, item) in lundo:
2045- for p in undo['nvirtual']:
2046- j, arch = p.split("/")
2047- del binaries['testing'][arch][1][j]
2048- for p in undo['virtual']:
2049- j, arch = p.split("/")
2050- if j[0] == '-':
2051- del binaries['testing'][arch][1][j[1:]]
2052- else:
2053- binaries['testing'][arch][1][j] = undo['virtual'][p]
2054-
2055+ lundo.reverse()
2056+
2057+ undo_changes(lundo, self.systems, self.sources, self.binaries)
2058+ self.output_write("\n")
2059
2060
2061
2062@@ -2397,6 +2422,7 @@
2063 self.nuninst_orig = self.get_nuninst()
2064 # nuninst_orig may get updated during the upgrade process
2065 self.nuninst_orig_save = self.get_nuninst()
2066+ self.all_selected = []
2067
2068 if not self.options.actions:
2069 # process `easy' hints
2070@@ -2421,7 +2447,7 @@
2071 allpackages += self.upgrade_me
2072 for a in self.options.break_arches.split():
2073 backup = self.options.break_arches
2074- self.options.break_arches = " ".join([x for x in self.options.break_arches.split() if x != a])
2075+ self.options.break_arches = " ".join(x for x in self.options.break_arches.split() if x != a)
2076 self.upgrade_me = archpackages[a]
2077 self.output_write("info: broken arch run for %s\n" % (a))
2078 self.do_all()
2079@@ -2448,21 +2474,22 @@
2080 # obsolete source packages
2081 # a package is obsolete if none of the binary packages in testing
2082 # are built by it
2083- self.__log("> Removing obsolete source packages from testing", type="I")
2084- removals = []
2085- # local copies for performance
2086- sources = self.sources['testing']
2087- binaries = self.binaries['testing']
2088- used = set(binaries[arch][0][binary][SOURCE]
2089- for arch in binaries
2090- for binary in binaries[arch][0]
2091- )
2092- removals = [ HintItem("-%s/%s" % (source, sources[source][VERSION]))
2093- for source in sources if source not in used
2094- ]
2095- if len(removals) > 0:
2096- self.output_write("Removing obsolete source packages from testing (%d):\n" % (len(removals)))
2097- self.do_all(actions=removals)
2098+ if getattr(self.options, "remove_obsolete", "yes") == "yes":
2099+ self.__log("> Removing obsolete source packages from testing", type="I")
2100+ removals = []
2101+ # local copies for performance
2102+ sources = self.sources['testing']
2103+ binaries = self.binaries['testing']
2104+ used = set(binaries[arch][0][binary][SOURCE]
2105+ for arch in binaries
2106+ for binary in binaries[arch][0]
2107+ )
2108+ removals = [ MigrationItem("-%s/%s" % (source, sources[source][VERSION]))
2109+ for source in sources if source not in used
2110+ ]
2111+ if len(removals) > 0:
2112+ self.output_write("Removing obsolete source packages from testing (%d):\n" % (len(removals)))
2113+ self.do_all(actions=removals)
2114
2115 # smooth updates
2116 if len(self.options.smooth_updates) > 0:
2117@@ -2470,14 +2497,14 @@
2118 removals = self.old_libraries()
2119 if len(removals) > 0:
2120 self.output_write("Removing packages left in testing for smooth updates (%d):\n%s" % \
2121- (len(removals), self.old_libraries_format(removals)))
2122- self.do_all(actions=[ MigrationItem(x) for x in removals ])
2123+ (len(removals), old_libraries_format(removals)))
2124+ self.do_all(actions=removals)
2125 removals = self.old_libraries()
2126 else:
2127 removals = ()
2128
2129 self.output_write("List of old libraries in testing (%d):\n%s" % \
2130- (len(removals), self.old_libraries_format(removals)))
2131+ (len(removals), old_libraries_format(removals)))
2132
2133 # output files
2134 if not self.options.dry_run:
2135@@ -2489,15 +2516,22 @@
2136 self.write_dates(self.options.testing, self.dates)
2137
2138 # write HeidiResult
2139- self.write_heidi(self.options.heidi_output)
2140+ self.__log("Writing Heidi results to %s" % self.options.heidi_output)
2141+ write_heidi(self.options.heidi_output, self.sources["testing"],
2142+ self.binaries["testing"])
2143+
2144+ # write Delta
2145+ if hasattr(self.options, 'delta_output'):
2146+ self.write_delta(self.options.delta_output)
2147
2148 self.printuninstchange()
2149 self.__log("Test completed!", type="I")
2150
2151 def printuninstchange(self):
2152 self.__log("Checking for newly uninstallable packages", type="I")
2153- text = self.eval_uninst(self.newlyuninst(
2154- self.nuninst_orig_save, self.nuninst_orig))
2155+ text = eval_uninst(self.options.architectures, newly_uninst(
2156+ self.nuninst_orig_save, self.nuninst_orig))
2157+
2158 if text != '':
2159 self.output_write("\nNewly uninstallable packages in testing:\n%s" % \
2160 (text))
2161@@ -2511,6 +2545,7 @@
2162 self.__log("> Calculating current uninstallability counters", type="I")
2163 self.nuninst_orig = self.get_nuninst()
2164 self.nuninst_orig_save = self.get_nuninst()
2165+ self.all_selected = []
2166
2167 import readline
2168 from completer import Completer
2169@@ -2564,7 +2599,7 @@
2170 """
2171
2172 if isinstance(pkgvers[0], tuple) or isinstance(pkgvers[0], list):
2173- _pkgvers = [ HintItem('%s/%s' % (p, v)) for (p,v) in pkgvers ]
2174+ _pkgvers = [ MigrationItem('%s/%s' % (p, v)) for (p,v) in pkgvers ]
2175 else:
2176 _pkgvers = pkgvers
2177
2178@@ -2638,7 +2673,7 @@
2179 self.dependencies[e.name] = e.deps
2180
2181 # replace the list of actions with the new one
2182- self.upgrade_me = [ MigrationItem(x) for x in upgrade_me ]
2183+ self.upgrade_me = [ make_migrationitem(x, self.sources) for x in upgrade_me ]
2184
2185 def auto_hinter(self):
2186 """Auto-generate "easy" hints.
2187@@ -2659,7 +2694,7 @@
2188 self.__log("> Processing hints from the auto hinter", type="I")
2189
2190 # consider only excuses which are valid candidates
2191- excuses = dict([(x.name, x) for x in self.excuses if x.name in [y.uvname for y in self.upgrade_me]])
2192+ excuses = dict((x.name, x) for x in self.excuses if x.name in [y.uvname for y in self.upgrade_me])
2193
2194 def find_related(e, hint, circular_first=False):
2195 if e not in excuses:
2196@@ -2693,9 +2728,9 @@
2197 looped = False
2198 for item, ver in items:
2199 # excuses which depend on "item" or are depended on by it
2200- items.extend( [ (x, excuses[x].ver[1]) for x in excuses if \
2201+ items.extend( (x, excuses[x].ver[1]) for x in excuses if \
2202 (item in excuses[x].deps or x in excuses[item].deps) \
2203- and (x, excuses[x].ver[1]) not in items ] )
2204+ and (x, excuses[x].ver[1]) not in items )
2205 if not looped and len(items) > 1:
2206 mincands.append(items[:])
2207 looped = True
2208@@ -2717,15 +2752,17 @@
2209 to_skip.append(i)
2210 for i in range(len(l)):
2211 if i not in to_skip:
2212- self.do_hint("easy", "autohinter", [ HintItem("%s/%s" % (x[0], x[1])) for x in l[i] ])
2213+ self.do_hint("easy", "autohinter", [ MigrationItem("%s/%s" % (x[0], x[1])) for x in l[i] ])
2214
2215- def old_libraries(self):
2216+ def old_libraries(self, same_source=same_source):
2217 """Detect old libraries left in testing for smooth transitions
2218
2219 This method detects old libraries which are in testing but no longer
2220 built from the source package: they are still there because other
2221 packages still depend on them, but they should be removed as soon
2222 as possible.
2223+
2224+        same_source is an optimization to avoid "load global".
2225 """
2226 sources = self.sources['testing']
2227 testing = self.binaries['testing']
2228@@ -2735,22 +2772,10 @@
2229 for pkg_name in testing[arch][0]:
2230 pkg = testing[arch][0][pkg_name]
2231 if pkg_name not in unstable[arch][0] and \
2232- not self.same_source(sources[pkg[SOURCE]][VERSION], pkg[SOURCEVER]):
2233- removals.append("-" + pkg_name + "/" + arch)
2234+ not same_source(sources[pkg[SOURCE]][VERSION], pkg[SOURCEVER]):
2235+ removals.append(MigrationItem("-" + pkg_name + "/" + arch + "/" + pkg[SOURCEVER]))
2236 return removals
2237
2238- def old_libraries_format(self, libs):
2239- """Format old libraries in a smart table"""
2240- libraries = {}
2241- for i in libs:
2242- pkg, arch = i.split("/")
2243- pkg = pkg[1:]
2244- if pkg in libraries:
2245- libraries[pkg].append(arch)
2246- else:
2247- libraries[pkg] = [arch]
2248- return "\n".join([" " + k + ": " + " ".join(libraries[k]) for k in libraries]) + "\n"
2249-
2250 def nuninst_arch_report(self, nuninst, arch):
2251 """Print a report of uninstallable packages for one architecture."""
2252 all = {}
2253@@ -2760,7 +2785,7 @@
2254
2255 print '* %s' % (arch,)
2256
2257- for (src, ver), pkgs in sorted(all.items()):
2258+ for (src, ver), pkgs in sorted(all.iteritems()):
2259 print ' %s (%s): %s' % (src, ver, ' '.join(sorted(pkgs)))
2260
2261 print
2262
2263=== modified file 'britney_nobreakall.conf'
2264--- britney_nobreakall.conf 2013-01-22 13:02:13 +0000
2265+++ britney_nobreakall.conf 2013-11-22 19:48:51 +0000
2266@@ -1,24 +1,25 @@
2267 # Configuration file for britney
2268
2269 # Paths for control files
2270-TESTING = /srv/release.debian.org/britney/var/data-b2/testing
2271-TPU = /srv/release.debian.org/britney/var/data-b2/testing-proposed-updates
2272-UNSTABLE = /srv/release.debian.org/britney/var/data-b2/unstable
2273+TESTING = data/testing
2274+UNSTABLE = data/unstable
2275+PARTIAL_UNSTABLE = yes
2276
2277 # Output
2278-NONINST_STATUS = /srv/release.debian.org/britney/var/data-b2/non-installable-status
2279-EXCUSES_OUTPUT = /srv/release.debian.org/britney/var/data-b2/output/excuses.html
2280-UPGRADE_OUTPUT = /srv/release.debian.org/britney/var/data-b2/output/output.txt
2281-HEIDI_OUTPUT = /srv/release.debian.org/britney/var/data-b2/output/HeidiResult
2282+NONINST_STATUS = data/non-installable-status
2283+EXCUSES_OUTPUT = output/excuses.html
2284+UPGRADE_OUTPUT = output/output.txt
2285+HEIDI_OUTPUT = output/HeidiResult
2286+DELTA_OUTPUT = output/Delta
2287
2288 # List of release architectures
2289-ARCHITECTURES = i386 amd64 armel ia64 mips mipsel powerpc s390 sparc kfreebsd-i386 kfreebsd-amd64 armhf s390x
2290+ARCHITECTURES = amd64 arm64 armhf i386 powerpc
2291
2292 # if you're not in this list, arch: all packages are allowed to break on you
2293-NOBREAKALL_ARCHES = i386 amd64 armel ia64 mips mipsel powerpc s390 sparc kfreebsd-i386 kfreebsd-amd64 armhf s390x
2294+NOBREAKALL_ARCHES = amd64 arm64 armhf i386 powerpc
2295
2296 # if you're in this list, your packages may not stay in sync with the source
2297-FUCKED_ARCHES =
2298+OUTOFSYNC_ARCHES =
2299
2300 # if you're in this list, your uninstallability count may increase
2301 BREAK_ARCHES =
2302@@ -27,32 +28,40 @@
2303 NEW_ARCHES =
2304
2305 # priorities and delays
2306-MINDAYS_LOW = 10
2307-MINDAYS_MEDIUM = 5
2308-MINDAYS_HIGH = 2
2309+MINDAYS_LOW = 0
2310+MINDAYS_MEDIUM = 0
2311+MINDAYS_HIGH = 0
2312 MINDAYS_CRITICAL = 0
2313 MINDAYS_EMERGENCY = 0
2314 DEFAULT_URGENCY = low
2315
2316 # hint permissions
2317+HINTS_CJWATSON = ALL
2318+HINTS_ADCONRAD = ALL
2319+HINTS_KITTERMAN = ALL
2320+HINTS_LANEY = ALL
2321+HINTS_JRIDDELL = ALL
2322+HINTS_STEFANOR = ALL
2323+HINTS_STGRABER = ALL
2324 HINTS_VORLON = ALL
2325-HINTS_ABA = ALL
2326-HINTS_HE = ALL
2327-HINTS_LUK = ALL
2328-HINTS_PKERN = STANDARD force
2329-HINTS_ADSB = STANDARD force force-hint
2330-HINTS_NEILM = STANDARD
2331-HINTS_MEHDI = STANDARD
2332-HINTS_JCRISTAU = STANDARD force force-hint
2333-HINTS_FAW = HELPERS
2334-HINTS_NTHYKIER = STANDARD
2335-HINTS_KIBI = STANDARD
2336-HINTS_JMW = STANDARD
2337-HINTS_FREEZE = block block-all block-udeb
2338-HINTS_FREEZE-EXCEPTION = unblock unblock-udeb
2339-HINTS_SATBRITNEY = easy
2340+HINTS_FREEZE = block block-all
2341+
2342+HINTS_UBUNTU-TOUCH/DIDROCKS = block unblock
2343+HINTS_UBUNTU-TOUCH/EV = block unblock
2344+HINTS_UBUNTU-TOUCH/KEN-VANDINE = block unblock
2345+HINTS_UBUNTU-TOUCH/LOOL = block unblock
2346+HINTS_UBUNTU-TOUCH/MATHIEU-TL = block unblock
2347+HINTS_UBUNTU-TOUCH/OGRA = block unblock
2348
2349 # support for old libraries in testing (smooth update)
2350 # use ALL to enable smooth updates for all the sections
2351 # SMOOTH_UPDATES = libs oldlibs
2352 SMOOTH_UPDATES =
2353+
2354+REMOVE_OBSOLETE = no
2355+
2356+# autopkgtest needs to know the series name; set to the empty string to
2357+# disable autopkgtest
2358+ADT_SERIES = trusty
2359+ADT_DEBUG = no
2360+ADT_ARCHES = amd64 i386
2361
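Britney lowercases these keys onto self.options when the configuration is parsed (assuming the usual britney option handling), so the autopkgtest machinery added to write_excuses is gated roughly as:

    if self.options.adt_series:      # ADT_SERIES = "" leaves this falsy, disabling the run
        adt_debug = getattr(self.options, "adt_debug", "no") == "yes"   # ADT_DEBUG
        ...                          # request/submit/collect, as in the hunk above
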
2362=== added file 'britney_util.py'
2363--- britney_util.py 1970-01-01 00:00:00 +0000
2364+++ britney_util.py 2013-11-22 19:48:51 +0000
2365@@ -0,0 +1,375 @@
2366+# -*- coding: utf-8 -*-
2367+
2368+# Refactored parts from britney.py, which is/was:
2369+# Copyright (C) 2001-2008 Anthony Towns <ajt@debian.org>
2370+# Andreas Barth <aba@debian.org>
2371+# Fabio Tranchitella <kobold@debian.org>
2372+# Copyright (C) 2010-2012 Adam D. Barratt <adsb@debian.org>
2373+# Copyright (C) 2012 Niels Thykier <niels@thykier.net>
2374+#
2375+# New portions
2376+# Copyright (C) 2013 Adam D. Barratt <adsb@debian.org>
2377+
2378+# This program is free software; you can redistribute it and/or modify
2379+# it under the terms of the GNU General Public License as published by
2380+# the Free Software Foundation; either version 2 of the License, or
2381+# (at your option) any later version.
2382+
2383+# This program is distributed in the hope that it will be useful,
2384+# but WITHOUT ANY WARRANTY; without even the implied warranty of
2385+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
2386+# GNU General Public License for more details.
2387+
2388+
2389+import apt_pkg
2390+from functools import partial
2391+from itertools import chain, ifilter, ifilterfalse, izip, repeat
2392+import re
2393+import time
2394+from migrationitem import MigrationItem, UnversionnedMigrationItem
2395+
2396+from consts import (VERSION, BINARIES, PROVIDES, DEPENDS, CONFLICTS,
2397+ RDEPENDS, RCONFLICTS, ARCHITECTURE, SECTION)
2398+
2399+binnmu_re = re.compile(r'^(.*)\+b\d+$')
2400+
2401+def same_source(sv1, sv2, binnmu_re=binnmu_re):
2402+ """Check if two version numbers are built from the same source
2403+
2404+ This method returns a boolean value which is true if the two
2405+ version numbers specified as parameters are built from the same
2406+ source. The main use of this code is to detect binary-NMU.
2407+
2408+ binnmu_re is an optimization to avoid "load global".
2409+ """
2410+ if sv1 == sv2:
2411+ return 1
2412+
2413+ m = binnmu_re.match(sv1)
2414+ if m: sv1 = m.group(1)
2415+ m = binnmu_re.match(sv2)
2416+ if m: sv2 = m.group(1)
2417+
2418+ if sv1 == sv2:
2419+ return 1
2420+
2421+ return 0
2422+
2423+
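
A quick behavioural sketch of same_source (version strings are hypothetical):

    same_source('1.2-1', '1.2-1')       # -> 1, identical versions
    same_source('1.2-1', '1.2-1+b2')    # -> 1, binNMU of the same source upload
    same_source('1.2-1', '1.2-2')       # -> 0, different source upload
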
2424+def ifilter_except(container, iterable=None):
2425+ """Filter out elements in container
2426+
2427+ If given an iterable it returns a filtered iterator, otherwise it
2428+ returns a function to generate filtered iterators. The latter is
2429+ useful if the same filter has to be (re-)used on multiple
2430+    iterators that are not known beforehand.
2431+ """
2432+ if iterable is not None:
2433+ return ifilterfalse(container.__contains__, iterable)
2434+ return partial(ifilterfalse, container.__contains__)
2435+
2436+
2437+def ifilter_only(container, iterable=None):
2438+    """Filter out elements which are not in container
2439+
2440+ If given an iterable it returns a filtered iterator, otherwise it
2441+ returns a function to generate filtered iterators. The latter is
2442+ useful if the same filter has to be (re-)used on multiple
2443+    iterators that are not known beforehand.
2444+ """
2445+ if iterable is not None:
2446+ return ifilter(container.__contains__, iterable)
2447+ return partial(ifilter, container.__contains__)
2448+
2449+
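
A minimal sketch of the two filter helpers (data is hypothetical):

    seen = set(['a'])
    list(ifilter_except(seen, ['a', 'b', 'c']))   # -> ['b', 'c']
    only = ifilter_only(set(['x']))               # no iterable: returns a reusable filter
    list(only(['x', 'y']))                        # -> ['x']
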
2450+def undo_changes(lundo, systems, sources, binaries,
2451+ BINARIES=BINARIES, PROVIDES=PROVIDES):
2452+ """Undoes one or more changes to testing
2453+
2454+ * lundo is a list of (undo, item)-tuples
2455+ * systems is the britney-py.c system
2456+ * sources is the table of all source packages for all suites
2457+ * binaries is the table of all binary packages for all suites
2458+ and architectures
2459+
2460+ The "X=X" parameters are optimizations to avoid "load global"
2461+ in loops.
2462+ """
2463+
2464+ # We do the undo process in "4 steps" and each step must be
2465+ # fully completed for each undo-item before starting on the
2466+ # next.
2467+ #
2468+ # see commit:ef71f0e33a7c3d8ef223ec9ad5e9843777e68133 and
2469+ # #624716 for the issues we had when we did not do this.
2470+
2471+
2472+ # STEP 1
2473+ # undo all the changes for sources
2474+ for (undo, item) in lundo:
2475+ for k in undo['sources']:
2476+ if k[0] == '-':
2477+ del sources["testing"][k[1:]]
2478+ else:
2479+ sources["testing"][k] = undo['sources'][k]
2480+
2481+ # STEP 2
2482+ # undo all new binaries (consequence of the above)
2483+ for (undo, item) in lundo:
2484+ if not item.is_removal and item.package in sources[item.suite]:
2485+ for p in sources[item.suite][item.package][BINARIES]:
2486+ binary, arch = p.split("/")
2487+ if item.architecture in ['source', arch]:
2488+ del binaries["testing"][arch][0][binary]
2489+ systems[arch].remove_binary(binary)
2490+
2491+
2492+ # STEP 3
2493+ # undo all other binary package changes (except virtual packages)
2494+ for (undo, item) in lundo:
2495+ for p in undo['binaries']:
2496+ binary, arch = p.split("/")
2497+ if binary[0] == "-":
2498+ del binaries['testing'][arch][0][binary[1:]]
2499+ systems[arch].remove_binary(binary[1:])
2500+ else:
2501+ binaries_t_a = binaries['testing'][arch][0]
2502+ binaries_t_a[binary] = undo['binaries'][p]
2503+ systems[arch].remove_binary(binary)
2504+ systems[arch].add_binary(binary, binaries_t_a[binary][:PROVIDES] + \
2505+ [", ".join(binaries_t_a[binary][PROVIDES]) or None])
2506+
2507+ # STEP 4
2508+ # undo all changes to virtual packages
2509+ for (undo, item) in lundo:
2510+ for p in undo['nvirtual']:
2511+ j, arch = p.split("/")
2512+ del binaries['testing'][arch][1][j]
2513+ for p in undo['virtual']:
2514+ j, arch = p.split("/")
2515+ if j[0] == '-':
2516+ del binaries['testing'][arch][1][j[1:]]
2517+ else:
2518+ binaries['testing'][arch][1][j] = undo['virtual'][p]
2519+
2520+
2521+def old_libraries_format(libs):
2522+ """Format old libraries in a smart table"""
2523+ libraries = {}
2524+ for i in libs:
2525+ pkg = i.package
2526+ if pkg in libraries:
2527+ libraries[pkg].append(i.architecture)
2528+ else:
2529+ libraries[pkg] = [i.architecture]
2530+ return "\n".join(" " + k + ": " + " ".join(libraries[k]) for k in libraries) + "\n"
2531+
2532+
2533+
2534+def register_reverses(packages, provides, check_doubles=True, iterator=None,
2535+ parse_depends=apt_pkg.parse_depends,
2536+ DEPENDS=DEPENDS, CONFLICTS=CONFLICTS,
2537+ RDEPENDS=RDEPENDS, RCONFLICTS=RCONFLICTS):
2538+ """Register reverse dependencies and conflicts for a given
2539+ sequence of packages
2540+
2541+ This method registers the reverse dependencies and conflicts for a
2542+ given sequence of packages. "packages" is a table of real
2543+ packages and "provides" is a table of virtual packages.
2544+
2545+ iterator is the sequence of packages for which the reverse
2546+ relations should be updated.
2547+
2548+ The "X=X" parameters are optimizations to avoid "load global" in
2549+ the loops.
2550+ """
2551+ if iterator is None:
2552+ iterator = packages.iterkeys()
2553+ else:
2554+ iterator = ifilter_only(packages, iterator)
2555+
2556+ for pkg in iterator:
2557+ # register the list of the dependencies for the depending packages
2558+ dependencies = []
2559+ pkg_data = packages[pkg]
2560+ if pkg_data[DEPENDS]:
2561+ dependencies.extend(parse_depends(pkg_data[DEPENDS], False))
2562+ # go through the list
2563+ for p in dependencies:
2564+ for a in p:
2565+ dep = a[0]
2566+ # register real packages
2567+ if dep in packages and (not check_doubles or pkg not in packages[dep][RDEPENDS]):
2568+ packages[dep][RDEPENDS].append(pkg)
2569+ # also register packages which provide the package (if any)
2570+ if dep in provides:
2571+ for i in provides[dep]:
2572+ if i not in packages: continue
2573+ if not check_doubles or pkg not in packages[i][RDEPENDS]:
2574+ packages[i][RDEPENDS].append(pkg)
2575+ # register the list of the conflicts for the conflicting packages
2576+ if pkg_data[CONFLICTS]:
2577+ for p in parse_depends(pkg_data[CONFLICTS], False):
2578+ for a in p:
2579+ con = a[0]
2580+ # register real packages
2581+ if con in packages and (not check_doubles or pkg not in packages[con][RCONFLICTS]):
2582+ packages[con][RCONFLICTS].append(pkg)
2583+ # also register packages which provide the package (if any)
2584+ if con in provides:
2585+ for i in provides[con]:
2586+ if i not in packages: continue
2587+ if not check_doubles or pkg not in packages[i][RCONFLICTS]:
2588+ packages[i][RCONFLICTS].append(pkg)
2589+
2590+
2591+def compute_reverse_tree(packages_s, pkg, arch,
2592+ set=set, flatten=chain.from_iterable,
2593+ RDEPENDS=RDEPENDS):
2594+ """Calculate the full dependency tree for the given package
2595+
2596+ This method returns the full dependency tree for the package
2597+ "pkg", inside the "arch" architecture for a given suite flattened
2598+ as an iterable. The first argument "packages_s" is the binary
2599+ package table for that given suite (e.g. Britney().binaries["testing"]).
2600+
2601+ The tree (or graph) is returned as an iterable of (package, arch)
2602+ tuples and the iterable will contain ("pkg", "arch") if it is
2603+ available on that architecture.
2604+
2605+ If "pkg" is not available on that architecture in that suite,
2606+ this returns an empty iterable.
2607+
2608+ The method does not promise any ordering of the returned
2609+ elements and the iterable is not reusable.
2610+
2611+ The flatten=... and the "X=X" parameters are optimizations to
2612+ avoid "load global" in the loops.
2613+ """
2614+ binaries = packages_s[arch][0]
2615+ if pkg not in binaries:
2616+ return frozenset()
2617+ rev_deps = set(binaries[pkg][RDEPENDS])
2618+ seen = set([pkg])
2619+
2620+ binfilt = ifilter_only(binaries)
2621+ revfilt = ifilter_except(seen)
2622+
2623+ while rev_deps:
2624+ # mark all of the current iteration of packages as affected
2625+ seen |= rev_deps
2626+ # generate the next iteration, which is the reverse-dependencies of
2627+ # the current iteration
2628+ rev_deps = set(revfilt(flatten( binaries[x][RDEPENDS] for x in binfilt(rev_deps) )))
2629+ return izip(seen, repeat(arch))
2630+
2631+
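
A minimal sketch of compute_reverse_tree, assuming a hypothetical two-package table in which "foo-tools" depends on "libfoo" (only the RDEPENDS slot matters here; the other fields are padded with None):

    from consts import RDEPENDS

    def _fields(rdeps):
        f = [None] * (RDEPENDS + 1)
        f[RDEPENDS] = rdeps
        return f

    packages_s = {'i386': ({'libfoo': _fields(['foo-tools']),
                            'foo-tools': _fields([])}, {})}
    sorted(compute_reverse_tree(packages_s, 'libfoo', 'i386'))
    # -> [('foo-tools', 'i386'), ('libfoo', 'i386')]
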
2632+def write_nuninst(filename, nuninst):
2633+ """Write the non-installable report
2634+
2635+ Write the non-installable report derived from "nuninst" to the
2636+ file denoted by "filename".
2637+ """
2638+ with open(filename, 'w') as f:
2639+ # Having two fields with (almost) identical dates seems a bit
2640+ # redundant.
2641+ f.write("Built on: " + time.strftime("%Y.%m.%d %H:%M:%S %z", time.gmtime(time.time())) + "\n")
2642+ f.write("Last update: " + time.strftime("%Y.%m.%d %H:%M:%S %z", time.gmtime(time.time())) + "\n\n")
2643+ for k in nuninst:
2644+ f.write("%s: %s\n" % (k, " ".join(nuninst[k])))
2645+
2646+
2647+def read_nuninst(filename, architectures):
2648+ """Read the non-installable report
2649+
2650+ Read the non-installable report from the file denoted by
2651+ "filename" and return it. Only architectures in "architectures"
2652+ will be included in the report.
2653+ """
2654+ nuninst = {}
2655+ with open(filename) as f:
2656+ for r in f:
2657+ if ":" not in r: continue
2658+ arch, packages = r.strip().split(":", 1)
2659+ if arch.split("+", 1)[0] in architectures:
2660+ nuninst[arch] = set(packages.split())
2661+ return nuninst
2662+
2663+
2664+def newly_uninst(nuold, nunew):
2665+    """Return a nuninst statistic with only new uninstallable packages
2666+
2667+    This method subtracts the uninstallable packages of the statistic
2668+    "nuold" from the statistic "nunew".
2669+
2670+ It returns a dictionary with the architectures as keys and the list
2671+ of uninstallable packages as values.
2672+ """
2673+ res = {}
2674+ for arch in ifilter_only(nunew, nuold):
2675+ res[arch] = [x for x in nunew[arch] if x not in nuold[arch]]
2676+ return res
2677+
2678+
2679+def eval_uninst(architectures, nuninst):
2680+ """Return a string which represents the uninstallable packages
2681+
2682+ This method returns a string which represents the uninstallable
2683+ packages reading the uninstallability statistics "nuninst".
2684+
2685+ An example of the output string is:
2686+ * i386: broken-pkg1, broken-pkg2
2687+ """
2688+ parts = []
2689+ for arch in architectures:
2690+ if arch in nuninst and nuninst[arch]:
2691+ parts.append(" * %s: %s\n" % (arch,", ".join(sorted(nuninst[arch]))))
2692+ return "".join(parts)
2693+
2694+
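
The two reporting helpers compose naturally; a small sketch with hypothetical nuninst data:

    old = {'i386': ['a'], 'amd64': ['x']}
    new = {'i386': ['a', 'b'], 'amd64': ['x']}
    newly_uninst(old, new)            # -> {'i386': ['b'], 'amd64': []}
    print eval_uninst(['amd64', 'i386'], newly_uninst(old, new)),
    #  * i386: b
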
2695+def write_heidi(filename, sources_t, packages_t,
2696+ VERSION=VERSION, SECTION=SECTION,
2697+ ARCHITECTURE=ARCHITECTURE, sorted=sorted):
2698+ """Write the output HeidiResult
2699+
2700+    This method writes the output for Heidi, which contains all the
2701+ binary packages and the source packages in the form:
2702+
2703+ <pkg-name> <pkg-version> <pkg-architecture> <pkg-section>
2704+ <src-name> <src-version> source <src-section>
2705+
2706+    The file is written to "filename"; it assumes all sources and
2707+    packages in "sources_t" and "packages_t" to be the ones in
2708+    "testing".
2709+
2710+ The "X=X" parameters are optimizations to avoid "load global" in
2711+ the loops.
2712+ """
2713+ with open(filename, 'w') as f:
2714+
2715+ # write binary packages
2716+ for arch in sorted(packages_t):
2717+ binaries = packages_t[arch][0]
2718+ for pkg_name in sorted(binaries):
2719+ pkg = binaries[pkg_name]
2720+ pkgv = pkg[VERSION]
2721+ pkgarch = pkg[ARCHITECTURE] or 'all'
2722+ pkgsec = pkg[SECTION] or 'faux'
2723+ f.write('%s %s %s %s\n' % (pkg_name, pkgv, pkgarch, pkgsec))
2724+
2725+ # write sources
2726+ for src_name in sorted(sources_t):
2727+ src = sources_t[src_name]
2728+ srcv = src[VERSION]
2729+ srcsec = src[SECTION] or 'unknown'
2730+ f.write('%s %s source %s\n' % (src_name, srcv, srcsec))
2731+
2732+def make_migrationitem(package, sources, VERSION=VERSION):
2733+ """Convert a textual package specification to a MigrationItem
2734+
2735+    sources is the table of source packages for each suite, used to determine
2736+ the version which should be used for the MigrationItem.
2737+ """
2738+
2739+ item = UnversionnedMigrationItem(package)
2740+ return MigrationItem("%s/%s" % (item.uvname, sources[item.suite][item.package][VERSION]))
2741
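A sketch of make_migrationitem with a hypothetical source table, assuming a plain item name resolves to the unstable suite (the field layout follows consts.py below):

    sources = {'unstable': {'foo': ['1.2-1', 'libs', [], 'Maintainer <m@example.com>', False]}}
    item = make_migrationitem('foo', sources)
    item.uvname, item.version   # -> ('foo', '1.2-1')
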
2742=== added file 'consts.py'
2743--- consts.py 1970-01-01 00:00:00 +0000
2744+++ consts.py 2013-11-22 19:48:51 +0000
2745@@ -0,0 +1,37 @@
2746+# -*- coding: utf-8 -*-
2747+
2748+# Constants from britney.py
2749+#
2750+# Assuming constants are copyrightable, then they are:
2751+# Copyright (C) 2001-2008 Anthony Towns <ajt@debian.org>
2752+# Andreas Barth <aba@debian.org>
2753+# Fabio Tranchitella <kobold@debian.org>
2754+# Copyright (C) 2010-2012 Adam D. Barratt <adsb@debian.org>
2755+
2756+# This program is free software; you can redistribute it and/or modify
2757+# it under the terms of the GNU General Public License as published by
2758+# the Free Software Foundation; either version 2 of the License, or
2759+# (at your option) any later version.
2760+
2761+# This program is distributed in the hope that it will be useful,
2762+# but WITHOUT ANY WARRANTY; without even the implied warranty of
2763+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
2764+# GNU General Public License for more details.
2765+
2766+# source package
2767+VERSION = 0
2768+SECTION = 1
2769+BINARIES = 2
2770+MAINTAINER = 3
2771+FAKESRC = 4
2772+
2773+# binary package
2774+SOURCE = 2
2775+SOURCEVER = 3
2776+ARCHITECTURE = 4
2777+MULTIARCH = 5
2778+DEPENDS = 6
2779+CONFLICTS = 7
2780+PROVIDES = 8
2781+RDEPENDS = 9
2782+RCONFLICTS = 10
2783
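These indices address the list records used for sources and binaries throughout britney; for instance, with a hypothetical source entry:

    from consts import VERSION, SECTION, BINARIES
    src = ['1.2-1', 'libs', ['libfoo/i386'], 'Maintainer <m@example.com>', False]
    src[VERSION], src[SECTION], src[BINARIES]   # -> ('1.2-1', 'libs', ['libfoo/i386'])
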
2784=== modified file 'excuse.py'
2785--- excuse.py 2012-12-25 18:28:14 +0000
2786+++ excuse.py 2013-11-22 19:48:51 +0000
2787@@ -50,6 +50,7 @@
2788 self.section = None
2789 self._is_valid = False
2790 self._dontinvalidate = False
2791+ self.run_autopkgtest = False
2792
2793 self.invalid_deps = []
2794 self.deps = {}
2795@@ -120,14 +121,28 @@
2796
2797 def html(self):
2798 """Render the excuse in HTML"""
2799- res = "<a id=\"%s\" name=\"%s\">%s</a> (%s to %s)\n<ul>\n" % \
2800- (self.name, self.name, self.name, self.ver[0], self.ver[1])
2801+ lp_pkg = "https://launchpad.net/ubuntu/+source/%s" % self.name.split("/")[0]
2802+ if self.ver[0] == "-":
2803+ lp_old = self.ver[0]
2804+ else:
2805+ lp_old = "<a href=\"%s/%s\">%s</a>" % (
2806+ lp_pkg, self.ver[0], self.ver[0])
2807+ if self.ver[1] == "-":
2808+ lp_new = self.ver[1]
2809+ else:
2810+ lp_new = "<a href=\"%s/%s\">%s</a>" % (
2811+ lp_pkg, self.ver[1], self.ver[1])
2812+ res = (
2813+ "<a id=\"%s\" name=\"%s\" href=\"%s\">%s</a> (%s to %s)\n<ul>\n" %
2814+ (self.name, self.name, lp_pkg, self.name, lp_old, lp_new))
2815 if self.maint:
2816 res = res + "<li>Maintainer: %s\n" % (self.maint)
2817 if self.section and string.find(self.section, "/") > -1:
2818 res = res + "<li>Section: %s\n" % (self.section)
2819 if self.daysold != None:
2820- if self.daysold < self.mindays:
2821+ if self.mindays == 0:
2822+ res = res + ("<li>%d days old\n" % self.daysold)
2823+ elif self.daysold < self.mindays:
2824 res = res + ("<li>Too young, only %d of %d days old\n" %
2825 (self.daysold, self.mindays))
2826 else:
2827
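With the hunk above, each excuse heading links to Launchpad. For a
hypothetical source foo moving from 1.0-1 to 1.0-2, the rendered heading
is roughly:

    <a id="foo" name="foo" href="https://launchpad.net/ubuntu/+source/foo">foo</a>
    (<a href="https://launchpad.net/ubuntu/+source/foo/1.0-1">1.0-1</a> to
    <a href="https://launchpad.net/ubuntu/+source/foo/1.0-2">1.0-2</a>)
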
2828=== modified file 'hints.py'
2829--- hints.py 2012-01-06 18:39:57 +0000
2830+++ hints.py 2013-11-22 19:48:51 +0000
2831@@ -1,6 +1,6 @@
2832 # -*- coding: utf-8 -*-
2833
2834-# Copyright (C) 2011 Adam D. Barratt <adsb@debian.org>
2835+# Copyright (C) 2013 Adam D. Barratt <adsb@debian.org>
2836
2837 # This program is free software; you can redistribute it and/or modify
2838 # it under the terms of the GNU General Public License as published by
2839@@ -12,7 +12,7 @@
2840 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
2841 # GNU General Public License for more details.
2842
2843-from migrationitem import HintItem
2844+from migrationitem import MigrationItem
2845
2846 class HintCollection(object):
2847 def __init__(self):
2848@@ -36,6 +36,8 @@
2849 self._hints.append(Hint(hint, user))
2850
2851 class Hint(object):
2852+ NO_VERSION = [ 'block', 'block-all', 'block-udeb' ]
2853+
2854 def __init__(self, hint, user):
2855 self._hint = hint
2856 self._user = user
2857@@ -57,7 +59,16 @@
2858 if isinstance(self._packages, str):
2859 self._packages = self._packages.split(' ')
2860
2861- self._packages = [HintItem(x) for x in self._packages]
2862+ self._packages = [MigrationItem(x) for x in self._packages]
2863+
2864+ self.check()
2865+
2866+ def check(self):
2867+ for package in self.packages:
2868+ if self.type in self.__class__.NO_VERSION:
2869+ assert package.version is None, package
2870+ else:
2871+ assert package.version is not None, package
2872
2873 def set_active(self, active):
2874 self._active = active
2875@@ -66,7 +77,12 @@
2876 return self._hint
2877
2878 def __eq__(self, other):
2879- return str(self) == str(other)
2880+ if self.type != other.type:
2881+ return False
2882+ elif self.type == 'age-days' and self.days != other.days:
2883+ return False
2884+ else:
2885+ return frozenset(self.packages) == frozenset(other.packages)
2886
2887 @property
2888 def type(self):
2889
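The new check() codifies which hint types carry versions. With
hypothetical hint strings:

    block foo           # ok: block hints take no version
    block-udeb bar      # ok
    urgent foo/1.0-2    # ok: other hints require a version
    block foo/1.0-2     # assertion failure in Hint.check()
    urgent foo          # assertion failure in Hint.check()
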
2890=== modified file 'lib/Makefile'
2891--- lib/Makefile 2011-12-14 08:52:10 +0000
2892+++ lib/Makefile 2013-11-22 19:48:51 +0000
2893@@ -11,7 +11,7 @@
2894 rm -f freelist aptvercmp checklib libajdpkg.a
2895
2896 checklib : checklib.o dpkg.o dpkg-lib.o memory3.o freelist.o assert.o
2897- $(CC) $(CFLAGS) -o checklib -lapt-pkg $^ # -lccmalloc -ldl
2898+ $(CC) $(CFLAGS) -o checklib $^ -lapt-pkg # -lccmalloc -ldl
2899
2900 aptvercmp : dpkg-lib.cpp
2901 $(CXX) $(CFLAGS) -DTESTBIN -o aptvercmp dpkg-lib.cpp -lapt-pkg
2902
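The reordering matters because linkers resolve libraries left to right
(and GNU ld with --as-needed discards libraries no earlier object needs):
a library must follow the objects that reference it, as in

    $(CC) $(CFLAGS) -o checklib $^ -lapt-pkg
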
2903=== modified file 'lib/britney-py.c'
2904--- lib/britney-py.c 2012-03-05 11:55:41 +0000
2905+++ lib/britney-py.c 2013-11-22 19:48:51 +0000
2906@@ -1,4 +1,4 @@
2907-#include <python2.6/Python.h>
2908+#include <python2.7/Python.h>
2909
2910 #include "dpkg.h"
2911
2912@@ -85,6 +85,7 @@
2913 pkg->package = strdup(pkg_name);
2914 pkg->priority = 0;
2915 pkg->details = NULL;
2916+ pkg->depends[1] = NULL;
2917 pkg->depends[2] = NULL;
2918 pkg->depends[3] = NULL;
2919
2920@@ -107,15 +108,15 @@
2921 pyString = PyList_GetItem(value, 5);
2922 if (pyString == NULL) return NULL;
2923 if (pyString != Py_None) {
2924+ pkg->multiarch = PyString_AsString(pyString);
2925+ } else pkg->multiarch = NULL;
2926+
2927+ pyString = PyList_GetItem(value, 6);
2928+ if (pyString == NULL) return NULL;
2929+ if (pyString != Py_None) {
2930 pkg->depends[0] = read_dep_andor(PyString_AsString(pyString));
2931 } else pkg->depends[0] = NULL;
2932
2933- pyString = PyList_GetItem(value, 6);
2934- if (pyString == NULL) return NULL;
2935- if (pyString != Py_None) {
2936- pkg->depends[1] = read_dep_andor(PyString_AsString(pyString));
2937- } else pkg->depends[1] = NULL;
2938-
2939 pyString = PyList_GetItem(value, 7);
2940 if (pyString == NULL) return NULL;
2941 if (pyString != Py_None) {
2942@@ -204,7 +205,7 @@
2943 # SOURCE = 2
2944 # SOURCEVER = 3
2945 # ARCHITECTURE = 4
2946- # PREDEPENDS = 5
2947+ # MULTIARCH = 5
2948 # DEPENDS = 6
2949 # CONFLICTS = 7
2950 # PROVIDES = 8
2951@@ -223,6 +224,7 @@
2952 pkg->package = strdup(PyString_AsString(key));
2953 pkg->priority = 0;
2954 pkg->details = NULL;
2955+ pkg->depends[1] = NULL;
2956 pkg->depends[2] = NULL;
2957 pkg->depends[3] = NULL;
2958
2959@@ -245,15 +247,15 @@
2960 pyString = PyList_GetItem(value, 5);
2961 if (pyString == NULL) continue;
2962 if (pyString != Py_None) {
2963+ pkg->multiarch = PyString_AsString(pyString);
2964+ } else pkg->multiarch = NULL;
2965+
2966+ pyString = PyList_GetItem(value, 6);
2967+ if (pyString == NULL) continue;
2968+ if (pyString != Py_None) {
2969 pkg->depends[0] = read_dep_andor(PyString_AsString(pyString));
2970 } else pkg->depends[0] = NULL;
2971
2972- pyString = PyList_GetItem(value, 6);
2973- if (pyString == NULL) continue;
2974- if (pyString != Py_None) {
2975- pkg->depends[1] = read_dep_andor(PyString_AsString(pyString));
2976- } else pkg->depends[1] = NULL;
2977-
2978 pyString = PyList_GetItem(value, 7);
2979 if (pyString == NULL) continue;
2980 if (pyString != Py_None) {
2981
2982=== modified file 'lib/dpkg.c'
2983--- lib/dpkg.c 2012-03-05 12:13:21 +0000
2984+++ lib/dpkg.c 2013-11-22 19:48:51 +0000
2985@@ -24,7 +24,8 @@
2986 static deplist *read_deplist(char **buf, char sep, char end);
2987 static dependency *read_dependency(char **buf, char *end);
2988 static void add_virtualpackage(virtualpkgtbl *vpkgs, char *package,
2989- char *version, dpkg_collected_package *cpkg);
2990+ char *version, char *multiarch,
2991+ dpkg_collected_package *cpkg);
2992 static void remove_virtualpackage(virtualpkgtbl *vpkgs, char *pkgname,
2993 dpkg_collected_package *cpkg);
2994 static char *read_packagename(char **buf, char *end);
2995@@ -177,9 +178,9 @@
2996 add_packagetbl(pkgs->packages, cpkg->pkg->package, cpkg);
2997
2998 add_virtualpackage(pkgs->virtualpkgs, cpkg->pkg->package,
2999- cpkg->pkg->version, cpkg);
3000+ cpkg->pkg->version, cpkg->pkg->multiarch, cpkg);
3001 for (v = cpkg->pkg->provides; v != NULL; v = v->next) {
3002- add_virtualpackage(pkgs->virtualpkgs, v->value, NULL, cpkg);
3003+ add_virtualpackage(pkgs->virtualpkgs, v->value, NULL, NULL, cpkg);
3004 }
3005 }
3006
3007@@ -246,7 +247,8 @@
3008 }
3009
3010 static void add_virtualpackage(virtualpkgtbl *vpkgs, char *package,
3011- char *version, dpkg_collected_package *cpkg)
3012+ char *version, char *multiarch,
3013+ dpkg_collected_package *cpkg)
3014 {
3015 dpkg_provision value;
3016 virtualpkg *list, **addto;
3017@@ -254,6 +256,7 @@
3018
3019 value.pkg = cpkg;
3020 value.version = version;
3021+ value.multiarch = multiarch;
3022
3023 list = lookup_virtualpkgtbl(vpkgs, package);
3024 shouldreplace = (list != NULL);
3025@@ -398,11 +401,11 @@
3026 static dependency *read_dependency(char **buf, char *end) {
3027 dependency *dep;
3028 char *name;
3029- char newend[10];
3030+ char newend[11];
3031 DEBUG_ONLY( char *strend = *buf + strlen(*buf); )
3032
3033 assert(strlen(end) <= 8);
3034- newend[0] = '('; strcpy(newend + 1, end);
3035+ newend[0] = '('; newend[1] = ':'; strcpy(newend + 2, end);
3036
3037 name = my_strdup(read_until_char(buf, newend));
3038 if (name == NULL) return NULL;
3039@@ -411,6 +414,13 @@
3040 if (dep == NULL) die("read_dependency alloc 1:");
3041
3042 dep->package = name;
3043+
3044+ if (**buf == ':') {
3045+ (*buf)++;
3046+ dep->archqual = my_strdup(read_until_char(buf, newend));
3047+ if (dep->archqual == NULL) return NULL;
3048+ } else
3049+ dep->archqual = NULL;
3050
3051 while(isspace(**buf)) (*buf)++;
3052
3053@@ -465,7 +475,7 @@
3054 }
3055
3056 while (isspace(**buf)) (*buf)++;
3057- newend[0] = ')';
3058+ newend[0] = ')'; strcpy(newend + 1, end);
3059 dep->version = my_strdup(read_until_char(buf, newend));
3060 while (isspace(**buf)) (*buf)++;
3061
3062@@ -509,6 +519,14 @@
3063 }
3064 }
3065
3066+ if (dep->archqual != NULL) {
3067+ if (strcmp(dep->archqual, "any") == 0) {
3068+ if (vpkg->value.multiarch == NULL || strcmp(vpkg->value.multiarch, "allowed") != 0)
3069+ add = 0;
3070+ } else
3071+ add = 0;
3072+ }
3073+
3074 if (add) {
3075 insert_l_collpackagelist(addto, vpkg->value.pkg, line);
3076 addto = &(*addto)->next;
3077
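The hunks above teach the dependency parser about architecture
qualifiers: "foo:any" parses into package "foo" with archqual "any", and
only matches a provider built Multi-Arch: allowed. A minimal Python
sketch of the C satisfiability rule (names are illustrative):

    def archqual_satisfiable(archqual, multiarch):
        if archqual is None:       # plain "foo" dependency
            return True
        if archqual == 'any':      # "foo:any" needs Multi-Arch: allowed
            return multiarch == 'allowed'
        return False               # any other qualifier never matches
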
3078=== modified file 'lib/dpkg.h'
3079--- lib/dpkg.h 2011-12-27 10:39:57 +0000
3080+++ lib/dpkg.h 2013-11-22 19:48:51 +0000
3081@@ -33,6 +33,7 @@
3082 typedef struct dependency dependency;
3083 struct dependency {
3084 char *package;
3085+ char *archqual;
3086 dependency_relation op;
3087 char *version;
3088 };
3089@@ -48,6 +49,7 @@
3090 struct dpkg_package {
3091 char *package;
3092 char *version;
3093+ char *multiarch;
3094
3095 char *source;
3096 char *source_ver;
3097@@ -102,6 +104,7 @@
3098 typedef struct dpkg_provision dpkg_provision;
3099 struct dpkg_provision {
3100 char *version;
3101+ char *multiarch;
3102 dpkg_collected_package *pkg;
3103 };
3104
3105
3106=== modified file 'lib/example.py'
3107--- lib/example.py 2006-08-20 19:25:21 +0000
3108+++ lib/example.py 2013-11-22 19:48:51 +0000
3109@@ -8,20 +8,21 @@
3110 # SOURCE = 2
3111 # SOURCEVER = 3
3112 # ARCHITECTURE = 4
3113-# PREDEPENDS = 5
3114-# DEPENDS = 6
3115-# CONFLICTS = 7
3116-# PROVIDES = 8
3117-# RDEPENDS = 9
3118-# RCONFLICTS = 10
3119+# MULTIARCH = 5
3120+# PREDEPENDS = 6
3121+# DEPENDS = 7
3122+# CONFLICTS = 8
3123+# PROVIDES = 9
3124+# RDEPENDS = 10
3125+# RCONFLICTS = 11
3126
3127-packages = {'phpldapadmin': ['1.0', 'web', 'phpldapadmin', '1.0', 'all', '', 'apache2 (>= 2.0)', '', '', [], []],
3128- 'apache2': ['2.0', 'web', 'apache2', '2.0', 'i386', '', '', 'phpldapadmin (<= 1.0~)', '', [], []],
3129+packages = {'phpldapadmin': ['1.0', 'web', 'phpldapadmin', '1.0', 'all', None, '', 'apache2 (>= 2.0)', '', '', [], []],
3130+ 'apache2': ['2.0', 'web', 'apache2', '2.0', 'i386', None, '', '', 'phpldapadmin (<= 1.0~)', '', [], []],
3131 }
3132
3133 system = britney.buildSystem('i386', packages)
3134 print system.is_installable('phpldapadmin'), system.packages
3135 system.remove_binary('apache2')
3136 print system.is_installable('phpldapadmin'), system.packages
3137-system.add_binary('apache2', ['2.0', 'web', 'apache2', '2.0', 'i386', '', '', 'phpldapadmin (<= 1.0~)', '', [], []])
3138+system.add_binary('apache2', ['2.0', 'web', 'apache2', '2.0', 'i386', None, '', '', 'phpldapadmin (<= 1.0~)', '', [], []])
3139 print system.is_installable('phpldapadmin'), system.packages
3140
3141=== modified file 'migrationitem.py'
3142--- migrationitem.py 2011-11-27 18:18:53 +0000
3143+++ migrationitem.py 2013-11-22 19:48:51 +0000
3144@@ -23,7 +23,7 @@
3145 def get_architectures(cls):
3146 return cls._architectures
3147
3148- def __init__(self, name = None, versionned = False):
3149+ def __init__(self, name = None, versionned = True):
3150 self._name = None
3151 self._uvname = None
3152 self._package = None
3153@@ -141,6 +141,6 @@
3154 def uvname(self):
3155 return self._uvname
3156
3157-class HintItem(MigrationItem):
3158+class UnversionnedMigrationItem(MigrationItem):
3159 def __init__(self, name = None):
3160- MigrationItem.__init__(self, name = name, versionned = True)
3161+ MigrationItem.__init__(self, name = name, versionned = False)
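
The rename flips the default: a plain MigrationItem is now versionned
unless stated otherwise, and hints use the same class as everything else.
A short sketch with a hypothetical package:

    from migrationitem import MigrationItem, UnversionnedMigrationItem

    item = MigrationItem('foo/1.0-2')        # versionned by default
    bare = UnversionnedMigrationItem('foo')  # bare name, no version
    # make_migrationitem() (britney_util.py above) turns a bare name back
    # into a versionned item by looking the version up in "sources".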
