Merge lp:~cprov/britney/integration-tests into lp:~canonical-ci-engineering/britney/queued-announce-and-collect
- integration-tests
- Merge into queued-announce-and-collect
Proposed by
Celso Providelo
Status: | Merged |
---|---|
Merged at revision: | 436 |
Proposed branch: | lp:~cprov/britney/integration-tests |
Merge into: | lp:~canonical-ci-engineering/britney/queued-announce-and-collect |
Diff against target: |
392 lines (+245/-21) 4 files modified
britney.conf (+4/-0) britney.py (+7/-6) testclient.py (+3/-3) tests/test_testclient.py (+231/-12) |
To merge this branch: | bzr merge lp:~cprov/britney/integration-tests |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Thomi Richards (community) | Approve | ||
Review via email: mp+260046@code.launchpad.net |
Commit message
Adding integration tests for britney/
Description of the change
Adding integration tests for testclient features. The downside here is that it depends on a local rabbit installation for running the tests (kombu 'memory://' cannot be shared across processes and we are calling `britney` for tests).
To post a comment you must log in.
- 439. By Celso Providelo
-
Skip tests if no local rabbitmq is available.
Revision history for this message
Thomi Richards (thomir-deactivatedaccount) wrote : | # |
review:
Approve
Revision history for this message
Celso Providelo (cprov) wrote : | # |
Thomi,
Thanks for the review, comments addressed.
- 440. By Celso Providelo
-
Addressing review comments.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'britney.conf' | |||
2 | --- britney.conf 2015-03-05 14:57:03 +0000 | |||
3 | +++ britney.conf 2015-05-25 23:49:18 +0000 | |||
4 | @@ -70,3 +70,7 @@ | |||
5 | 70 | BOOTTEST_DEBUG = yes | 70 | BOOTTEST_DEBUG = yes |
6 | 71 | BOOTTEST_ARCHES = armhf amd64 | 71 | BOOTTEST_ARCHES = armhf amd64 |
7 | 72 | BOOTTEST_FETCH = yes | 72 | BOOTTEST_FETCH = yes |
8 | 73 | |||
9 | 74 | TESTCLIENT_ENABLE = yes | ||
10 | 75 | TESTCLIENT_AMQP_URIS = amqp://guest:guest@162.213.32.181:5672// | ||
11 | 76 | TESTCLIENT_REQUIRED_TESTS = | ||
12 | 73 | 77 | ||
13 | === modified file 'britney.py' | |||
14 | --- britney.py 2015-05-22 02:51:53 +0000 | |||
15 | +++ britney.py 2015-05-25 23:49:18 +0000 | |||
16 | @@ -2019,22 +2019,23 @@ | |||
17 | 2019 | self.hints.search('force-badtest', package=excuse.name)) | 2019 | self.hints.search('force-badtest', package=excuse.name)) |
18 | 2020 | forces = [x for x in hints | 2020 | forces = [x for x in hints |
19 | 2021 | if same_source(excuse.ver[1], x.version)] | 2021 | if same_source(excuse.ver[1], x.version)] |
21 | 2022 | for test in testclient.getTests(excuse.name): | 2022 | for test in testclient.getTests(excuse.name, excuse.ver[1]): |
22 | 2023 | label = TestClient.EXCUSE_LABELS.get( | 2023 | label = TestClient.EXCUSE_LABELS.get( |
24 | 2024 | test.status, 'UNKNOWN STATUS') | 2024 | test.get('status'), 'UNKNOWN STATUS') |
25 | 2025 | excuse.addhtml( | 2025 | excuse.addhtml( |
26 | 2026 | "%s result: %s (<a href=\"%s\">results</a>)" % ( | 2026 | "%s result: %s (<a href=\"%s\">results</a>)" % ( |
28 | 2027 | test.name, label, test.result_url)) | 2027 | test.get('name').capitalize(), label, |
29 | 2028 | test.get('url'))) | ||
30 | 2028 | if forces: | 2029 | if forces: |
31 | 2029 | excuse.addhtml( | 2030 | excuse.addhtml( |
32 | 2030 | "Should wait for %s %s %s, but forced by " | 2031 | "Should wait for %s %s %s, but forced by " |
33 | 2031 | "%s" % (excuse.name, excuse.ver[1], | 2032 | "%s" % (excuse.name, excuse.ver[1], |
34 | 2032 | test.name, forces[0].user)) | 2033 | test.name, forces[0].user)) |
35 | 2033 | continue | 2034 | continue |
37 | 2034 | if test.name not in required_tests: | 2035 | if test.get('name') not in required_tests: |
38 | 2035 | continue | 2036 | continue |
41 | 2036 | if test.status not in TestClient.VALID_STATUSES: | 2037 | if test.get('status') not in TestClient.VALID_STATUSES: |
42 | 2037 | excuse.addreason(test.name) | 2038 | excuse.addreason(test.get('name')) |
43 | 2038 | if excuse.is_valid: | 2039 | if excuse.is_valid: |
44 | 2039 | excuse.is_valid = False | 2040 | excuse.is_valid = False |
45 | 2040 | excuse.addhtml("Not considered") | 2041 | excuse.addhtml("Not considered") |
46 | 2041 | 2042 | ||
47 | === modified file 'testclient.py' | |||
48 | --- testclient.py 2015-05-22 13:55:57 +0000 | |||
49 | +++ testclient.py 2015-05-25 23:49:18 +0000 | |||
50 | @@ -68,9 +68,9 @@ | |||
51 | 68 | 68 | ||
52 | 69 | VALID_STATUSES = ('PASS', 'SKIP') | 69 | VALID_STATUSES = ('PASS', 'SKIP') |
53 | 70 | 70 | ||
55 | 71 | LABELS = { | 71 | EXCUSE_LABELS = { |
56 | 72 | "PASS": '<span style="background:#87d96c">Pass</span>', | 72 | "PASS": '<span style="background:#87d96c">Pass</span>', |
58 | 73 | "SKIP": '<span style="background:#ffff00">Skip</span>', | 73 | "SKIP": '<span style="background:#ffff00">Test skipped</span>', |
59 | 74 | "FAIL": '<span style="background:#ff6666">Regression</span>', | 74 | "FAIL": '<span style="background:#ff6666">Regression</span>', |
60 | 75 | "RUNNING": '<span style="background:#99ddff">Test in progress</span>', | 75 | "RUNNING": '<span style="background:#99ddff">Test in progress</span>', |
61 | 76 | } | 76 | } |
62 | @@ -93,7 +93,7 @@ | |||
63 | 93 | """Announce new source candidates. | 93 | """Announce new source candidates. |
64 | 94 | 94 | ||
65 | 95 | Post a message to the EXCHANGE_CANDATIDATES for every new given | 95 | Post a message to the EXCHANGE_CANDATIDATES for every new given |
67 | 96 | excuses (cache announcementes so excuses do not get re-annouced). | 96 | excuses (cache announcements so excuses do not get re-annouced). |
68 | 97 | """ | 97 | """ |
69 | 98 | with nested(json_cached_info(self.cache_path), | 98 | with nested(json_cached_info(self.cache_path), |
70 | 99 | kombu.Connection(self.amqp_uris)) as (cache, connection): | 99 | kombu.Connection(self.amqp_uris)) as (cache, connection): |
71 | 100 | 100 | ||
72 | === modified file 'tests/test_testclient.py' | |||
73 | --- tests/test_testclient.py 2015-05-22 17:39:54 +0000 | |||
74 | +++ tests/test_testclient.py 2015-05-25 23:49:18 +0000 | |||
75 | @@ -15,6 +15,7 @@ | |||
76 | 15 | import kombu | 15 | import kombu |
77 | 16 | from kombu.pools import producers | 16 | from kombu.pools import producers |
78 | 17 | 17 | ||
79 | 18 | |||
80 | 18 | PROJECT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) | 19 | PROJECT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) |
81 | 19 | sys.path.insert(0, PROJECT_DIR) | 20 | sys.path.insert(0, PROJECT_DIR) |
82 | 20 | 21 | ||
83 | @@ -24,13 +25,14 @@ | |||
84 | 24 | make_cache_key, | 25 | make_cache_key, |
85 | 25 | TestClient, | 26 | TestClient, |
86 | 26 | ) | 27 | ) |
87 | 28 | from tests import TestBase | ||
88 | 27 | 29 | ||
89 | 28 | 30 | ||
90 | 29 | class TestJsonCachedInfo(unittest.TestCase): | 31 | class TestJsonCachedInfo(unittest.TestCase): |
91 | 30 | 32 | ||
92 | 31 | def setUp(self): | 33 | def setUp(self): |
93 | 32 | super(TestJsonCachedInfo, self).setUp() | 34 | super(TestJsonCachedInfo, self).setUp() |
95 | 33 | (_dummy, self.test_cache) = tempfile.mkstemp() | 35 | _, self.test_cache = tempfile.mkstemp() |
96 | 34 | self.addCleanup(os.unlink, self.test_cache) | 36 | self.addCleanup(os.unlink, self.test_cache) |
97 | 35 | 37 | ||
98 | 36 | def test_simple(self): | 38 | def test_simple(self): |
99 | @@ -64,24 +66,27 @@ | |||
100 | 64 | self.path = tempfile.mkdtemp(prefix='testclient') | 66 | self.path = tempfile.mkdtemp(prefix='testclient') |
101 | 65 | os.makedirs(os.path.join(self.path, 'testclient/')) | 67 | os.makedirs(os.path.join(self.path, 'testclient/')) |
102 | 66 | self.addCleanup(shutil.rmtree, self.path) | 68 | self.addCleanup(shutil.rmtree, self.path) |
103 | 69 | |||
104 | 67 | os.chdir(self.path) | 70 | os.chdir(self.path) |
105 | 71 | cwd = os.getcwd() | ||
106 | 72 | self.addCleanup(os.chdir, cwd) | ||
107 | 73 | |||
108 | 74 | self.amqp_uris = ['memory://'] | ||
109 | 68 | 75 | ||
110 | 69 | def test_announce(self): | 76 | def test_announce(self): |
111 | 70 | # 'announce' post messages to the EXCHANGE_CANDIDATES exchange and | 77 | # 'announce' post messages to the EXCHANGE_CANDIDATES exchange and |
112 | 71 | # updates its internal cache. | 78 | # updates its internal cache. |
115 | 72 | amqp_uris = ['memory://'] | 79 | testclient = TestClient('vivid', self.amqp_uris) |
114 | 73 | testclient = TestClient('vivid', amqp_uris) | ||
116 | 74 | test_excuses = [ | 80 | test_excuses = [ |
117 | 75 | make_excuse('foo', '1.0'), | 81 | make_excuse('foo', '1.0'), |
118 | 76 | make_excuse('bar', '2.0'), | 82 | make_excuse('bar', '2.0'), |
119 | 77 | ] | 83 | ] |
120 | 78 | 84 | ||
122 | 79 | with kombu.Connection(amqp_uris) as connection: | 85 | with kombu.Connection(self.amqp_uris) as connection: |
123 | 80 | exchange = kombu.Exchange( | 86 | exchange = kombu.Exchange( |
124 | 81 | testclient.EXCHANGE_CANDIDATES, type="fanout") | 87 | testclient.EXCHANGE_CANDIDATES, type="fanout") |
125 | 82 | queue = kombu.Queue('testing', exchange) | 88 | queue = kombu.Queue('testing', exchange) |
126 | 83 | with connection.SimpleQueue(queue) as q: | 89 | with connection.SimpleQueue(queue) as q: |
127 | 84 | q.queue.purge() | ||
128 | 85 | testclient.announce(test_excuses) | 90 | testclient.announce(test_excuses) |
129 | 86 | self.assertEqual( | 91 | self.assertEqual( |
130 | 87 | [{'series': 'vivid', | 92 | [{'series': 'vivid', |
131 | @@ -100,8 +105,7 @@ | |||
132 | 100 | def test_collect(self): | 105 | def test_collect(self): |
133 | 101 | # 'collect' collects test results and aggregates them in its | 106 | # 'collect' collects test results and aggregates them in its |
134 | 102 | # internal cache. | 107 | # internal cache. |
137 | 103 | amqp_uris = ['memory://'] | 108 | testclient = TestClient('vivid', self.amqp_uris) |
136 | 104 | testclient = TestClient('vivid', amqp_uris) | ||
138 | 105 | 109 | ||
139 | 106 | result_payloads = [ | 110 | result_payloads = [ |
140 | 107 | {'source_name': 'foo', | 111 | {'source_name': 'foo', |
141 | @@ -131,7 +135,7 @@ | |||
142 | 131 | 'test_url': 'http://ubuntu.com/foo'}, | 135 | 'test_url': 'http://ubuntu.com/foo'}, |
143 | 132 | ] | 136 | ] |
144 | 133 | 137 | ||
146 | 134 | with kombu.Connection(amqp_uris) as connection: | 138 | with kombu.Connection(self.amqp_uris) as connection: |
147 | 135 | with producers[connection].acquire(block=True) as producer: | 139 | with producers[connection].acquire(block=True) as producer: |
148 | 136 | # Just for binding destination queue to the exchange. | 140 | # Just for binding destination queue to the exchange. |
149 | 137 | testclient.collect() | 141 | testclient.collect() |
150 | @@ -159,8 +163,7 @@ | |||
151 | 159 | def test_cleanup(self): | 163 | def test_cleanup(self): |
152 | 160 | # `cleanup` remove cache entries that are not present in the | 164 | # `cleanup` remove cache entries that are not present in the |
153 | 161 | # given excuses list (i.e. not relevant for promotion anymore). | 165 | # given excuses list (i.e. not relevant for promotion anymore). |
156 | 162 | amqp_uris = ['memory://'] | 166 | testclient = TestClient('vivid', self.amqp_uris) |
155 | 163 | testclient = TestClient('vivid', amqp_uris) | ||
157 | 164 | test_excuses = [ | 167 | test_excuses = [ |
158 | 165 | make_excuse('foo', '1.0'), | 168 | make_excuse('foo', '1.0'), |
159 | 166 | make_excuse('bar', '2.0'), | 169 | make_excuse('bar', '2.0'), |
160 | @@ -181,8 +184,7 @@ | |||
161 | 181 | def test_getTests(self): | 184 | def test_getTests(self): |
162 | 182 | # `getTests` yields cached tests results for a given source name | 185 | # `getTests` yields cached tests results for a given source name |
163 | 183 | # and version. | 186 | # and version. |
166 | 184 | amqp_uris = ['memory://'] | 187 | testclient = TestClient('vivid', self.amqp_uris) |
165 | 185 | testclient = TestClient('vivid', amqp_uris) | ||
167 | 186 | 188 | ||
168 | 187 | with json_cached_info(testclient.cache_path) as cache: | 189 | with json_cached_info(testclient.cache_path) as cache: |
169 | 188 | cache[make_cache_key('foo', '1.0')] = [ | 190 | cache[make_cache_key('foo', '1.0')] = [ |
170 | @@ -207,5 +209,222 @@ | |||
171 | 207 | [], list(testclient.getTests('bar', '1.0'))) | 209 | [], list(testclient.getTests('bar', '1.0'))) |
172 | 208 | 210 | ||
173 | 209 | 211 | ||
174 | 212 | def has_local_rabbitmq(): | ||
175 | 213 | """Whether a local rabbitmq server is available with default creds.""" | ||
176 | 214 | with kombu.Connection('amqp://guest:guest@localhost:5672//', | ||
177 | 215 | connect_timeout=.1) as c: | ||
178 | 216 | try: | ||
179 | 217 | c.connect() | ||
180 | 218 | except: | ||
181 | 219 | return False | ||
182 | 220 | return True | ||
183 | 221 | |||
184 | 222 | |||
185 | 223 | @unittest.skipUnless(has_local_rabbitmq(), 'No local rabbitmq') | ||
186 | 224 | class TestTestClientEnd2End(TestBase): | ||
187 | 225 | """End2End tests (calling `britney`) for the TestClient usage.""" | ||
188 | 226 | |||
189 | 227 | def setUp(self): | ||
190 | 228 | super(TestTestClientEnd2End, self).setUp() | ||
191 | 229 | |||
192 | 230 | # XXX cprov 20150525: unfortunately, this test requires a proper | ||
193 | 231 | # amqp transport/server layer (rabbitmq) because kombu 'memory://' | ||
194 | 232 | # cannot be shared across processes (britney & tests). | ||
195 | 233 | self.amqp_uris = ['amqp://guest:guest@localhost:5672//'] | ||
196 | 234 | |||
197 | 235 | self.path = tempfile.mkdtemp(prefix='testclient') | ||
198 | 236 | os.makedirs(os.path.join(self.path, 'testclient/')) | ||
199 | 237 | self.addCleanup(shutil.rmtree, self.path) | ||
200 | 238 | |||
201 | 239 | os.chdir(self.path) | ||
202 | 240 | cwd = os.getcwd() | ||
203 | 241 | self.addCleanup(os.chdir, cwd) | ||
204 | 242 | |||
205 | 243 | # Disable autopkgtests + boottest tests and use local rabbit | ||
206 | 244 | # for this testing context. | ||
207 | 245 | self.overrideConfig({ | ||
208 | 246 | 'ADT_ENABLE': 'no', | ||
209 | 247 | 'BOOTTEST_ENABLE': 'no', | ||
210 | 248 | 'TESTCLIENT_AMQP_URIS': ' '.join(self.amqp_uris), | ||
211 | 249 | }) | ||
212 | 250 | |||
213 | 251 | # We publish a version of 'foo' source to make it 'known'. | ||
214 | 252 | self.data.add('foo', False, {'Architecture': 'amd64'}) | ||
215 | 253 | |||
216 | 254 | def overrideConfig(self, overrides): | ||
217 | 255 | """Overrides briney configuration based on the given key-value map.""" | ||
218 | 256 | with open(self.britney_conf, 'r') as fp: | ||
219 | 257 | original_config = fp.read() | ||
220 | 258 | new_config = [] | ||
221 | 259 | for line in original_config.splitlines(): | ||
222 | 260 | for k, v in overrides.iteritems(): | ||
223 | 261 | if line.startswith(k): | ||
224 | 262 | line = '{} = {}'.format(k, v) | ||
225 | 263 | new_config.append(line) | ||
226 | 264 | with open(self.britney_conf, 'w') as fp: | ||
227 | 265 | fp.write('\n'.join(new_config)) | ||
228 | 266 | self.addCleanup(self.restore_config, original_config) | ||
229 | 267 | |||
230 | 268 | def publishTestResults(self, results): | ||
231 | 269 | """Publish the given list of test results.""" | ||
232 | 270 | with kombu.Connection(self.amqp_uris) as connection: | ||
233 | 271 | results_exchange = kombu.Exchange( | ||
234 | 272 | TestClient.EXCHANGE_RESULTS, type="fanout") | ||
235 | 273 | with producers[connection].acquire(block=True) as producer: | ||
236 | 274 | publisher = connection.ensure( | ||
237 | 275 | producer, producer.publish, max_retries=3) | ||
238 | 276 | for payload in results: | ||
239 | 277 | publisher(payload, exchange=results_exchange) | ||
240 | 278 | |||
241 | 279 | def getAnnouncements(self): | ||
242 | 280 | """Yields announcements payloads.""" | ||
243 | 281 | with kombu.Connection(self.amqp_uris) as connection: | ||
244 | 282 | candidates_exchange = kombu.Exchange( | ||
245 | 283 | TestClient.EXCHANGE_CANDIDATES, type="fanout") | ||
246 | 284 | queue = kombu.Queue('testing', candidates_exchange) | ||
247 | 285 | with connection.SimpleQueue(queue) as q: | ||
248 | 286 | for i in range(len(q)): | ||
249 | 287 | msg = q.get() | ||
250 | 288 | msg.ack() | ||
251 | 289 | yield msg.payload | ||
252 | 290 | |||
253 | 291 | def do_test(self, context, expect=None, no_expect=None): | ||
254 | 292 | """Process the given package context and assert britney results.""" | ||
255 | 293 | for (pkg, fields) in context: | ||
256 | 294 | self.data.add(pkg, True, fields) | ||
257 | 295 | |||
258 | 296 | # Creates a queue for collecting announcements from | ||
259 | 297 | # 'candidates.exchanges'. | ||
260 | 298 | with kombu.Connection(self.amqp_uris) as connection: | ||
261 | 299 | candidates_exchange = kombu.Exchange( | ||
262 | 300 | TestClient.EXCHANGE_CANDIDATES, type="fanout") | ||
263 | 301 | queue = kombu.Queue('testing', candidates_exchange) | ||
264 | 302 | with connection.SimpleQueue(queue) as q: | ||
265 | 303 | q.queue.purge() | ||
266 | 304 | |||
267 | 305 | (excuses, out) = self.run_britney() | ||
268 | 306 | |||
269 | 307 | #print('-------\nexcuses: %s\n-----' % excuses) | ||
270 | 308 | if expect: | ||
271 | 309 | for re in expect: | ||
272 | 310 | self.assertRegexpMatches(excuses, re) | ||
273 | 311 | if no_expect: | ||
274 | 312 | for re in no_expect: | ||
275 | 313 | self.assertNotRegexpMatches(excuses, re) | ||
276 | 314 | |||
277 | 315 | def test_non_required_test(self): | ||
278 | 316 | # Non-required test results are collected as part of the excuse | ||
279 | 317 | # report but do not block source promotion (i.e. the excuse is | ||
280 | 318 | # a 'Valid candidate' even if the test is 'in progress'). | ||
281 | 319 | |||
282 | 320 | # Publish 'in-progress' results for 'bazinga for "foo_1.1"'. | ||
283 | 321 | test_results = [{ | ||
284 | 322 | 'source_name': 'foo', | ||
285 | 323 | 'source_version': '1.1', | ||
286 | 324 | 'series': self.data.series, | ||
287 | 325 | 'test_name': 'bazinga', | ||
288 | 326 | 'test_status': 'RUNNING', | ||
289 | 327 | 'test_url': 'http://bazinga.com/foo', | ||
290 | 328 | }] | ||
291 | 329 | self.publishTestResults(test_results) | ||
292 | 330 | |||
293 | 331 | # Run britney for 'foo_1.1' and valid candidated is recorded. | ||
294 | 332 | context = [ | ||
295 | 333 | ('foo', {'Source': 'foo', 'Version': '1.1', | ||
296 | 334 | 'Architecture': 'amd64'}), | ||
297 | 335 | ] | ||
298 | 336 | self.do_test( | ||
299 | 337 | context, | ||
300 | 338 | [r'\bfoo\b.*>1</a> to .*>1.1<', | ||
301 | 339 | r'<li>Bazinga result: .*>Test in progress.*' | ||
302 | 340 | r'href="http://bazinga.com/foo">results', | ||
303 | 341 | '<li>Valid candidate']) | ||
304 | 342 | |||
305 | 343 | # 'foo_1.1' source candidate was announced. | ||
306 | 344 | self.assertEqual( | ||
307 | 345 | [{'source_name': 'foo', | ||
308 | 346 | 'source_version': '1.1', | ||
309 | 347 | 'series': self.data.series, | ||
310 | 348 | }], list(self.getAnnouncements())) | ||
311 | 349 | |||
312 | 350 | def test_required_test(self): | ||
313 | 351 | # A required-test result is collected and blocks source package | ||
314 | 352 | # promotion while it hasn't passed. | ||
315 | 353 | |||
316 | 354 | # Make 'bazinga' a required test. | ||
317 | 355 | self.overrideConfig({ | ||
318 | 356 | 'TESTCLIENT_REQUIRED_TESTS': 'bazinga', | ||
319 | 357 | }) | ||
320 | 358 | |||
321 | 359 | # Publish 'in-progress' results for 'bazinga for "foo_1.1"'. | ||
322 | 360 | test_results = [{ | ||
323 | 361 | 'source_name': 'foo', | ||
324 | 362 | 'source_version': '1.1', | ||
325 | 363 | 'series': self.data.series, | ||
326 | 364 | 'test_name': 'bazinga', | ||
327 | 365 | 'test_status': 'RUNNING', | ||
328 | 366 | 'test_url': 'http://bazinga.com/foo', | ||
329 | 367 | }] | ||
330 | 368 | self.publishTestResults(test_results) | ||
331 | 369 | |||
332 | 370 | # Run britney for 'foo_1.1' and an unconsidered excuse is recorded. | ||
333 | 371 | context = [ | ||
334 | 372 | ('foo', {'Source': 'foo', 'Version': '1.1', | ||
335 | 373 | 'Architecture': 'amd64'}), | ||
336 | 374 | ] | ||
337 | 375 | self.do_test( | ||
338 | 376 | context, | ||
339 | 377 | [r'\bfoo\b.*>1</a> to .*>1.1<', | ||
340 | 378 | r'<li>Bazinga result: .*>Test in progress.*' | ||
341 | 379 | r'href="http://bazinga.com/foo">results', | ||
342 | 380 | '<li>Not considered']) | ||
343 | 381 | |||
344 | 382 | # 'foo_1.1' source candidate was announced. | ||
345 | 383 | self.assertEqual( | ||
346 | 384 | [{'source_name': 'foo', | ||
347 | 385 | 'source_version': '1.1', | ||
348 | 386 | 'series': self.data.series, | ||
349 | 387 | }], list(self.getAnnouncements())) | ||
350 | 388 | |||
351 | 389 | def test_promoted(self): | ||
352 | 390 | # When all required tests passed (or were skipped) the source | ||
353 | 391 | # candidate can be promoted. | ||
354 | 392 | |||
355 | 393 | # Make 'bazinga' and 'zoing' required test. | ||
356 | 394 | self.overrideConfig({ | ||
357 | 395 | 'TESTCLIENT_REQUIRED_TESTS': 'bazinga zoing', | ||
358 | 396 | }) | ||
359 | 397 | |||
360 | 398 | # Publish 'in-progress' results for 'bazinga for "foo_1.1"'. | ||
361 | 399 | test_results = [{ | ||
362 | 400 | 'source_name': 'foo', | ||
363 | 401 | 'source_version': '1.1', | ||
364 | 402 | 'series': self.data.series, | ||
365 | 403 | 'test_name': 'bazinga', | ||
366 | 404 | 'test_status': 'SKIP', | ||
367 | 405 | 'test_url': 'http://bazinga.com/foo', | ||
368 | 406 | }, { | ||
369 | 407 | 'source_name': 'foo', | ||
370 | 408 | 'source_version': '1.1', | ||
371 | 409 | 'series': self.data.series, | ||
372 | 410 | 'test_name': 'zoing', | ||
373 | 411 | 'test_status': 'PASS', | ||
374 | 412 | 'test_url': 'http://zoing.com/foo', | ||
375 | 413 | }] | ||
376 | 414 | self.publishTestResults(test_results) | ||
377 | 415 | |||
378 | 416 | context = [ | ||
379 | 417 | ('foo', {'Source': 'foo', 'Version': '1.1', | ||
380 | 418 | 'Architecture': 'amd64'}), | ||
381 | 419 | ] | ||
382 | 420 | self.do_test( | ||
383 | 421 | context, | ||
384 | 422 | [r'\bfoo\b.*>1</a> to .*>1.1<', | ||
385 | 423 | r'<li>Bazinga result: .*>Test skipped.*' | ||
386 | 424 | 'href="http://bazinga.com/foo">results', | ||
387 | 425 | r'<li>Zoing result: .*>Pass.*href="http://zoing.com/foo">results', | ||
388 | 426 | '<li>Valid candidate']) | ||
389 | 427 | |||
390 | 428 | |||
391 | 210 | if __name__ == '__main__': | 429 | if __name__ == '__main__': |
392 | 211 | unittest.main() | 430 | unittest.main() |
A few comments, otherwise looks good.