Merge lp:~canonical-platform-qa/britney/tests into lp:~ubuntu-release/britney/britney2-ubuntu
- tests
- Merge into britney2-ubuntu
Status: | Merged |
---|---|
Approved by: | Colin Watson |
Approved revision: | 408 |
Merged at revision: | 397 |
Proposed branch: | lp:~canonical-platform-qa/britney/tests |
Merge into: | lp:~ubuntu-release/britney/britney2-ubuntu |
Diff against target: |
451 lines (+446/-0) 1 file modified
tests/autopkgtest.py (+446/-0) |
To merge this branch: | bzr merge lp:~canonical-platform-qa/britney/tests |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Colin Watson (community) | Approve | ||
Review via email: mp+207982@code.launchpad.net |
Commit message
Description of the change
Add tests and reproduce some important bugs
This reproduces two bugs which we've recently encountered, and an additional
one which came up when writing the tests:
* If a new source builds an existing binary, britney ignores all autopkgtests.
This is what caused the "break trusty" disaster when uploading gccgo-4.9
which built an empty/broken libgcc1.
* Britney requests autopkgtest runs for uninstallable packages, causing
needless test failures and manual intervention to re-try packages once they
become installable again.
* Britney does not cross-check the version number that a test was run with,
and just applies the test result to the currently pending version. This
hasn't demonstrably caused any ill effect in practice as adt-britney should
already ensure that the requested version was tested. It might be a good
idea to verify this anyway though, for robustness.
Simply run the tests with "tests/autopkgtest.py". This requires building lib/ and creating the britneymodule.so -> lib/britneymodule.so symlink first.
- 399. By Martin Pitt
-
use symbolic constants instead of True/False for considered status
- 400. By Martin Pitt
-
some more tests
- 401. By Martin Pitt
-
add --debug option to mock adt-britney
- 402. By Martin Pitt
-
run britney in verbose mode
- 403. By Martin Pitt
-
merge with trunk
- 404. By Martin Pitt
-
merge trunk
- 405. By Martin Pitt
-
add test for uninstallable binary built from new source package
- 406. By Martin Pitt
-
fix source package name in case of new_source tests
Martin Pitt (pitti) wrote : | # |
- 407. By Jean-Baptiste Lallement
-
merged trunk
- 408. By Martin Pitt
-
Mark test_result_from_older_version as XFAIL
Colin Watson (cjwatson) : | # |
Preview Diff
1 | === added directory 'tests' | |||
2 | === added file 'tests/autopkgtest.py' | |||
3 | --- tests/autopkgtest.py 1970-01-01 00:00:00 +0000 | |||
4 | +++ tests/autopkgtest.py 2014-05-12 12:05:05 +0000 | |||
5 | @@ -0,0 +1,446 @@ | |||
6 | 1 | #!/usr/bin/python | ||
7 | 2 | # (C) 2014 Canonical Ltd. | ||
8 | 3 | # | ||
9 | 4 | # This program is free software; you can redistribute it and/or modify | ||
10 | 5 | # it under the terms of the GNU General Public License as published by | ||
11 | 6 | # the Free Software Foundation; either version 2 of the License, or | ||
12 | 7 | # (at your option) any later version. | ||
13 | 8 | |||
14 | 9 | import tempfile | ||
15 | 10 | import shutil | ||
16 | 11 | import os | ||
17 | 12 | import sys | ||
18 | 13 | import subprocess | ||
19 | 14 | import unittest | ||
20 | 15 | |||
# Architectures for which the fake archive pre-creates per-arch Packages files.
architectures = ['amd64', 'arm64', 'armhf', 'i386', 'powerpc', 'ppc64el']

# Root of the britney checkout (parent of the tests/ directory).
my_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Symbolic names for the "considered" flag asserted by the tests.
NOT_CONSIDERED = False
VALID_CANDIDATE = True


class TestData:
    def __init__(self):
        '''Construct local test package indexes.

        The archive is initially empty. You can create new packages with
        add() and add_src(). self.path contains the path of the archive, and
        self.apt_source provides an apt source "deb" line.

        It is kept in a temporary directory which gets removed when the
        TestData object gets deleted.
        '''
        self.path = tempfile.mkdtemp(prefix='testarchive.')
        self.apt_source = 'deb file://%s /' % self.path
        # Index directories keyed by suite flag:
        # False -> testing/release, True -> unstable/proposed.
        self.dirs = {False: os.path.join(self.path, 'data', 'testing'),
                     True: os.path.join(self.path, 'data', 'unstable')}
        os.makedirs(self.dirs[False])
        os.mkdir(self.dirs[True])
        # Track what was added, to catch accidental duplicate records.
        self.added_sources = {False: set(), True: set()}
        self.added_binaries = {False: set(), True: set()}

        # pre-create all files for all architectures
        for arch in architectures:
            for dir in self.dirs.values():
                with open(os.path.join(dir, 'Packages_' + arch), 'w'):
                    pass
        for dir in self.dirs.values():
            for fname in ['Dates', 'Blocks']:
                with open(os.path.join(dir, fname), 'w'):
                    pass
            for dname in ['Hints']:
                os.mkdir(os.path.join(dir, dname))

        os.mkdir(os.path.join(self.path, 'output'))

        # create temporary home dir for proposed-migration autopkgtest status
        self.home = os.path.join(self.path, 'home')
        os.environ['HOME'] = self.home
        os.makedirs(os.path.join(self.home, 'proposed-migration',
                                 'autopkgtest', 'work'))

    def __del__(self):
        # Best-effort cleanup of the temporary archive directory.
        shutil.rmtree(self.path)

    def add(self, name, unstable, fields=None, add_src=True):
        '''Add a binary package to the index file.

        You need to specify at least the package name and in which list to put
        it (unstable==True for unstable/proposed, or False for
        testing/release). fields specifies all additional entries, e. g.
        {'Depends': 'foo, bar', 'Conflicts': 'baz'}. There are defaults for
        most fields.

        Unless add_src is set to False, this will also automatically create a
        source record, based on fields['Source'] and name.
        '''
        # Copy the caller's dict (and avoid the mutable-default pitfall):
        # the setdefault() calls below would otherwise leak defaults back
        # into the caller's dict / a shared default object.
        fields = dict(fields) if fields else {}

        assert (name not in self.added_binaries[unstable])
        self.added_binaries[unstable].add(name)

        fields.setdefault('Architecture', architectures[0])
        fields.setdefault('Version', '1')
        fields.setdefault('Priority', 'optional')
        fields.setdefault('Section', 'devel')
        fields.setdefault('Description', 'test pkg')
        if fields['Architecture'] == 'all':
            # arch:all packages appear in every per-architecture index
            for a in architectures:
                self._append(name, unstable, 'Packages_' + a, fields)
        else:
            self._append(name, unstable, 'Packages_' + fields['Architecture'],
                         fields)

        if add_src:
            src = fields.get('Source', name)
            if src not in self.added_sources[unstable]:
                self.add_src(src, unstable, {'Version': fields['Version'],
                                             'Section': fields['Section']})

    def add_src(self, name, unstable, fields=None):
        '''Add a source package to the index file.

        You need to specify at least the package name and in which list to put
        it (unstable==True for unstable/proposed, or False for
        testing/release). fields specifies all additional entries, which can be
        Version (default: 1), Section (default: devel), and Extra-Source-Only.
        '''
        # Copy for the same reason as in add(): never mutate caller state.
        fields = dict(fields) if fields else {}

        assert (name not in self.added_sources[unstable])
        self.added_sources[unstable].add(name)

        fields.setdefault('Version', '1')
        fields.setdefault('Section', 'devel')
        self._append(name, unstable, 'Sources', fields)

    def _append(self, name, unstable, file_name, fields):
        '''Append one Deb822-style stanza for "name" to the given index.'''
        with open(os.path.join(self.dirs[unstable], file_name), 'a') as f:
            f.write('''Package: %s
Maintainer: Joe <joe@example.com>
''' % name)

            for k, v in fields.items():
                f.write('%s: %s\n' % (k, v))
            f.write('\n')
class Test(unittest.TestCase):
    '''Integration tests for britney's autopkgtest handling.

    Each test constructs a temporary archive (TestData), installs a mock
    adt-britney script that replays a canned request/result file, runs the
    real britney.py against it, and asserts on the generated excuses.html.
    '''

    def setUp(self):
        self.data = TestData()

        # add a bunch of packages to testing to avoid repetition
        self.data.add('libc6', False)
        self.data.add('libgreen1', False, {'Source': 'green',
                                           'Depends': 'libc6 (>= 0.9)'})
        self.data.add('green', False, {'Depends': 'libc6 (>= 0.9), libgreen1',
                                       'Conflicts': 'blue'})
        self.data.add('lightgreen', False, {'Depends': 'libgreen1'})
        self.data.add('darkgreen', False, {'Depends': 'libgreen1'})
        self.data.add('blue', False, {'Depends': 'libc6 (>= 0.9)',
                                      'Conflicts': 'green'})
        self.data.add('justdata', False, {'Architecture': 'all'})

        # the britney checkout under test must be present one level up
        self.britney = os.path.join(my_dir, 'britney.py')
        self.britney_conf = os.path.join(my_dir, 'britney.conf')
        assert os.path.exists(self.britney)
        assert os.path.exists(self.britney_conf)

        # fake adt-britney script; this default stub only logs its arguments,
        # tests that need results replace it via make_adt_britney()
        self.adt_britney = os.path.join(self.data.home, 'auto-package-testing',
                                        'jenkins', 'adt-britney')
        os.makedirs(os.path.dirname(self.adt_britney))

        with open(self.adt_britney, 'w') as f:
            f.write('''#!/bin/sh -e
echo "$@" >> /%s/adt-britney.log ''' % self.data.path)
        os.chmod(self.adt_britney, 0o755)

    def tearDown(self):
        # drops the last reference so TestData.__del__() removes the tempdir
        del self.data

    def make_adt_britney(self, request):
        '''Install a mock adt-britney that replays the given result lines.

        "request" is the canned results file contents; the mock maps the
        PASS/FAIL/RUNNING states in it to whatever state is appropriate for
        the subcommand being invoked (request -> NEW, submit -> RUNNING,
        collect -> verbatim).
        '''
        with open(self.adt_britney, 'w') as f:
            # NOTE: this is a generated Python script, run with the same
            # interpreter as the test suite; %(rq)s is substituted with the
            # canned request text above.
            f.write('''#!%(py)s
import argparse, shutil,sys

def request():
    if args.req:
        shutil.copy(args.req, '%(path)s/adt-britney.requestarg')
    with open(args.output, 'w') as f:
        f.write("""%(rq)s""".replace('PASS', 'NEW').replace('FAIL', 'NEW').replace('RUNNING', 'NEW'))

def submit():
    with open(args.req, 'w') as f:
        f.write("""%(rq)s""".replace('PASS', 'RUNNING').
                replace('FAIL', 'RUNNING'))

def collect():
    with open(args.output, 'w') as f:
        f.write("""%(rq)s""")

p = argparse.ArgumentParser()
p.add_argument('-c', '--config')
p.add_argument('-a', '--arch')
p.add_argument('-r', '--release')
p.add_argument('-P', '--use-proposed', action='store_true')
p.add_argument('-d', '--debug', action='store_true')
p.add_argument('-U', '--no-update', action='store_true')
sp = p.add_subparsers()

prequest = sp.add_parser('request')
prequest.add_argument('-O', '--output')
prequest.add_argument('req', nargs='?')
prequest.set_defaults(func=request)

psubmit = sp.add_parser('submit')
psubmit.add_argument('req')
psubmit.set_defaults(func=submit)

pcollect = sp.add_parser('collect')
pcollect.add_argument('-O', '--output')
pcollect.add_argument('-n', '--new-only', action='store_true', default=False)
pcollect.set_defaults(func=collect)

args = p.parse_args()
args.func()
''' % {'py': sys.executable, 'path': self.data.path, 'rq': request})

    def run_britney(self, args=[]):
        '''Run britney.

        Assert that it succeeds and does not produce anything on stderr.
        Return (excuses.html, britney_out).
        '''
        britney = subprocess.Popen([self.britney, '-v', '-c', self.britney_conf],
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   cwd=self.data.path,
                                   universal_newlines=True)
        (out, err) = britney.communicate()
        self.assertEqual(britney.returncode, 0, out + err)
        self.assertEqual(err, '')

        with open(os.path.join(self.data.path, 'output', 'excuses.html')) as f:
            excuses = f.read()

        return (excuses, out)

    def test_no_request_for_uninstallable(self):
        '''Does not request a test for an uninstallable package'''

        self.do_test(
            # uninstallable unstable version
            [('green', {'Version': '1.1~beta', 'Depends': 'libc6 (>= 0.9), libgreen1 (>= 2)'})],
            'green 1.1~beta RUNNING green 1.1~beta\n',
            NOT_CONSIDERED,
            [r'\bgreen\b.*>1</a> to .*>1.1~beta<',
             'green/amd64 unsatisfiable Depends: libgreen1 \(>= 2\)'],
            # autopkgtest should not be triggered for uninstallable pkg
            ['autopkgtest'])

    def test_request_for_installable_running(self):
        '''Requests a test for an installable package, test still running'''

        self.do_test(
            [('green', {'Version': '1.1~beta', 'Depends': 'libc6 (>= 0.9), libgreen1'})],
            'green 1.1~beta RUNNING green 1.1~beta\n',
            NOT_CONSIDERED,
            [r'\bgreen\b.*>1</a> to .*>1.1~beta<',
             '<li>autopkgtest for green 1.1~beta: RUNNING'])

    def test_request_for_installable_fail(self):
        '''Requests a test for an installable package, test fail'''

        self.do_test(
            [('green', {'Version': '1.1~beta', 'Depends': 'libc6 (>= 0.9), libgreen1'})],
            'green 1.1~beta FAIL green 1.1~beta\n',
            NOT_CONSIDERED,
            [r'\bgreen\b.*>1</a> to .*>1.1~beta<',
             '<li>autopkgtest for green 1.1~beta: FAIL'])

    def test_request_for_installable_pass(self):
        '''Requests a test for an installable package, test pass'''

        self.do_test(
            [('green', {'Version': '1.1~beta', 'Depends': 'libc6 (>= 0.9), libgreen1'})],
            'green 1.1~beta PASS green 1.1~beta\n',
            VALID_CANDIDATE,
            [r'\bgreen\b.*>1</a> to .*>1.1~beta<',
             '<li>autopkgtest for green 1.1~beta: PASS'])

    def test_multi_rdepends_with_tests_running(self):
        '''Multiple reverse dependencies with tests (still running)'''

        self.do_test(
            [('libgreen1', {'Version': '2', 'Source': 'green', 'Depends': 'libc6'})],
            'lightgreen 1 PASS green 2\n'
            'darkgreen 1 RUNNING green 2\n',
            NOT_CONSIDERED,
            [r'\bgreen\b.*>1</a> to .*>2<',
             '<li>autopkgtest for lightgreen 1: PASS',
             '<li>autopkgtest for darkgreen 1: RUNNING'])

    def test_multi_rdepends_with_tests_fail(self):
        '''Multiple reverse dependencies with tests (fail)'''

        self.do_test(
            [('libgreen1', {'Version': '2', 'Source': 'green', 'Depends': 'libc6'})],
            'lightgreen 1 PASS green 2\n'
            'darkgreen 1 FAIL green 2\n',
            NOT_CONSIDERED,
            [r'\bgreen\b.*>1</a> to .*>2<',
             '<li>autopkgtest for lightgreen 1: PASS',
             '<li>autopkgtest for darkgreen 1: FAIL'])

    def test_multi_rdepends_with_tests_pass(self):
        '''Multiple reverse dependencies with tests (pass)'''

        self.do_test(
            [('libgreen1', {'Version': '2', 'Source': 'green', 'Depends': 'libc6'})],
            'lightgreen 1 PASS green 2\n'
            'darkgreen 1 PASS green 2\n',
            VALID_CANDIDATE,
            [r'\bgreen\b.*>1</a> to .*>2<',
             '<li>autopkgtest for lightgreen 1: PASS',
             '<li>autopkgtest for darkgreen 1: PASS'])

    def test_multi_rdepends_with_some_tests_running(self):
        '''Multiple reverse dependencies with some tests (running)'''

        # add a third reverse dependency to libgreen1 which does not have a test
        self.data.add('mint', False, {'Depends': 'libgreen1'})

        self.do_test(
            [('libgreen1', {'Version': '2', 'Source': 'green', 'Depends': 'libc6'})],
            'lightgreen 1 RUNNING green 2\n'
            'darkgreen 1 RUNNING green 2\n',
            NOT_CONSIDERED,
            [r'\bgreen\b.*>1</a> to .*>2<',
             '<li>autopkgtest for lightgreen 1: RUNNING',
             '<li>autopkgtest for darkgreen 1: RUNNING'])

    def test_multi_rdepends_with_some_tests_fail(self):
        '''Multiple reverse dependencies with some tests (fail)'''

        # add a third reverse dependency to libgreen1 which does not have a test
        self.data.add('mint', False, {'Depends': 'libgreen1'})

        self.do_test(
            [('libgreen1', {'Version': '2', 'Source': 'green', 'Depends': 'libc6'})],
            'lightgreen 1 PASS green 2\n'
            'darkgreen 1 FAIL green 2\n',
            NOT_CONSIDERED,
            [r'\bgreen\b.*>1</a> to .*>2<',
             '<li>autopkgtest for lightgreen 1: PASS',
             '<li>autopkgtest for darkgreen 1: FAIL'])

    def test_multi_rdepends_with_some_tests_pass(self):
        '''Multiple reverse dependencies with some tests (pass)'''

        # add a third reverse dependency to libgreen1 which does not have a test
        self.data.add('mint', False, {'Depends': 'libgreen1'})

        self.do_test(
            [('libgreen1', {'Version': '2', 'Source': 'green', 'Depends': 'libc6'})],
            'lightgreen 1 PASS green 2\n'
            'darkgreen 1 PASS green 2\n',
            VALID_CANDIDATE,
            [r'\bgreen\b.*>1</a> to .*>2<',
             '<li>autopkgtest for lightgreen 1: PASS',
             '<li>autopkgtest for darkgreen 1: PASS'])

    def test_binary_from_new_source_package_running(self):
        '''building an existing binary for a new source package (running)'''

        self.do_test(
            [('libgreen1', {'Version': '2', 'Source': 'newgreen', 'Depends': 'libc6'})],
            'lightgreen 1 PASS newgreen 2\n'
            'darkgreen 1 RUNNING newgreen 2\n',
            NOT_CONSIDERED,
            [r'\bnewgreen\b.*\(- to .*>2<',
             '<li>autopkgtest for lightgreen 1: PASS',
             '<li>autopkgtest for darkgreen 1: RUNNING'])

    def test_binary_from_new_source_package_fail(self):
        '''building an existing binary for a new source package (fail)'''

        self.do_test(
            [('libgreen1', {'Version': '2', 'Source': 'newgreen', 'Depends': 'libc6'})],
            'lightgreen 1 PASS newgreen 2\n'
            'darkgreen 1 FAIL newgreen 2\n',
            NOT_CONSIDERED,
            [r'\bnewgreen\b.*\(- to .*>2<',
             '<li>autopkgtest for lightgreen 1: PASS',
             '<li>autopkgtest for darkgreen 1: FAIL'])

    def test_binary_from_new_source_package_pass(self):
        '''building an existing binary for a new source package (pass)'''

        self.do_test(
            [('libgreen1', {'Version': '2', 'Source': 'newgreen', 'Depends': 'libc6'})],
            'lightgreen 1 PASS newgreen 2\n'
            'darkgreen 1 PASS newgreen 2\n',
            VALID_CANDIDATE,
            [r'\bnewgreen\b.*\(- to .*>2<',
             '<li>autopkgtest for lightgreen 1: PASS',
             '<li>autopkgtest for darkgreen 1: PASS'])

    def test_binary_from_new_source_package_uninst(self):
        '''building an existing binary for a new source package (uninstallable)'''

        self.do_test(
            [('libgreen1', {'Version': '2', 'Source': 'newgreen', 'Depends': 'libc6, nosuchpkg'})],
            'darkgreen 1 FAIL newgreen 2\n',
            NOT_CONSIDERED,
            [r'\bnewgreen\b.*\(- to .*>2<',
             'libgreen1/amd64 unsatisfiable Depends: nosuchpkg'],
            # autopkgtest should not be triggered for uninstallable pkg
            ['autopkgtest'])

    # known-broken: britney does not cross-check the tested version yet
    @unittest.expectedFailure
    def test_result_from_older_version(self):
        '''test result from older version than the uploaded one'''

        self.do_test(
            [('green', {'Version': '1.1~beta', 'Depends': 'libc6 (>= 0.9), libgreen1'})],
            'green 1.1~alpha PASS green 1.1~beta\n',
            NOT_CONSIDERED,
            [r'\bgreen\b.*>1</a> to .*>1.1~beta<',
             # it's not entirely clear what precisely it should say here
             '<li>autopkgtest for green 1.1~beta: RUNNING'])

    def do_test(self, unstable_add, adt_request, considered, expect=None,
                no_expect=None):
        '''Run britney once and assert on the resulting excuses.

        unstable_add: list of (package, fields) tuples added to unstable
        adt_request: canned adt-britney result contents (see make_adt_britney)
        considered: NOT_CONSIDERED or VALID_CANDIDATE
        expect / no_expect: regexes that must (not) match excuses.html
        '''
        for (pkg, fields) in unstable_add:
            self.data.add(pkg, True, fields)

        self.make_adt_britney(adt_request)

        (excuses, out) = self.run_britney()
        #print('-------\nexcuses: %s\n-----' % excuses)
        #print('-------\nout: %s\n-----' % out)
        #print('run:\n%s -c %s\n' % (self.britney, self.britney_conf))
        #subprocess.call(['bash', '-i'], cwd=self.data.path)
        if considered:
            self.assertIn('Valid candidate', excuses)
        else:
            self.assertIn('Not considered', excuses)

        if expect:
            for re in expect:
                # assertRegexpMatches is the pre-3.2 spelling of assertRegex
                self.assertRegexpMatches(excuses, re)
        if no_expect:
            for re in no_expect:
                self.assertNotRegexpMatches(excuses, re)

    def shell(self):
        # debugging helper, not an automatic test: sets up an archive and
        # drops you into an interactive shell inside it
        # uninstallable unstable version
        self.data.add('yellow', True, {'Version': '1.1~beta',
                                       'Depends': 'libc6 (>= 0.9), nosuchpkg'})

        self.make_adt_britney('yellow 1.1~beta RUNNING yellow 1.1~beta\n')

        print('run:\n%s -c %s\n' % (self.britney, self.britney_conf))
        subprocess.call(['bash', '-i'], cwd=self.data.path)
449 | 444 | |||
450 | 445 | |||
451 | 446 | unittest.main() |
> * If a new source builds an existing binary, britney ignores all autopkgtests.
> This is what caused the "break trusty" disaster when uploading gccgo-4.9
> which built an empty/broken libgcc1.
While the actual bug is still there somewhere (it did happen, after all), these tests don't reproduce that unfortunately. Turned out the failures were due to a copy&paste error, fixed in r406. They pass now.