Merge lp:~joetalbott/qa-dashboard/utah_log_parser into lp:qa-dashboard
- utah_log_parser
- Merge into dev
Proposed by
Joe Talbott
Status: | Merged |
---|---|
Approved by: | Chris Johnston |
Approved revision: | 151 |
Merged at revision: | 152 |
Proposed branch: | lp:~joetalbott/qa-dashboard/utah_log_parser |
Merge into: | lp:qa-dashboard |
Diff against target: |
585 lines (+280/-126) 5 files modified
bootspeed/management/commands/fakeup_bootspeed.py (+76/-56) dashboard/management/__init__.py (+9/-7) dashboard/management/commands/jenkins_pull.py (+161/-55) dashboard/utah_utils.py (+29/-6) dashboard/utils.py (+5/-2) |
To merge this branch: | bzr merge lp:~joetalbott/qa-dashboard/utah_log_parser |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Chris Johnston | Approve | ||
Review via email: mp+135544@code.launchpad.net |
Commit message
Add support for parsing utah logs to jenkins_pull.
Description of the change
This branch adds utah log parsing to the jenkins_pull command.
This branch syncs my current work and contains a few new minor features.
* jenkins_pull now supports '-w <jobname>' to skip all jenkins jobs until the named one is encountered. This greatly speeds up debugging initial pulls.
* jenkins_pull now supports '... [jobname] [jobname] ...' at the end of the command line to process only certain jobs whose names are supplied.
* adds proper logging support and uses the verbosity command-line option: passing '-v N' with N > 1 turns on debug logging.
* skips processing UTAH jobs as regular jenkins jobs.
To post a comment you must log in.
Revision history for this message
Chris Johnston (cjohnston) wrote : | # |
Revision history for this message
Chris Johnston (cjohnston) : | # |
review:
Approve
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'bootspeed/management/commands/fakeup_bootspeed.py' |
2 | --- bootspeed/management/commands/fakeup_bootspeed.py 2012-11-15 02:13:32 +0000 |
3 | +++ bootspeed/management/commands/fakeup_bootspeed.py 2012-11-28 17:34:20 +0000 |
4 | @@ -18,60 +18,80 @@ |
5 | variants = ['server', 'desktop', 'mini', 'alternate'] |
6 | build_numbers = ['20120909', '20120910', '20120911'] |
7 | |
8 | - machine, created = Machine.objects.get_or_create( |
9 | - name='Dell Optiplex 242', |
10 | - mac_address='01:02:03:0a:0b', |
11 | - ) |
12 | - |
13 | - releases = ['quantal'] |
14 | - arches = ['amd64', 'i386'] |
15 | - variants = ['server'] |
16 | - build_numbers = ['2012{:0>2}{:0>2}'.format(y,x) for y in [4,5,6,7,8,9] for x in range(1, 31)] |
17 | - build_numbers.append('20120917.1') |
18 | - build_numbers.append('20120917.2') |
19 | - |
20 | - """ |
21 | - '20120701', |
22 | - '20120914', |
23 | - '20120915', |
24 | - '20120916', |
25 | - '20120917', |
26 | - '20120918', |
27 | - ] |
28 | - """ |
29 | - |
30 | - for release in releases: |
31 | - for arch in arches: |
32 | - for variant in variants: |
33 | - for build_number in build_numbers: |
34 | - image, created = Image.objects.get_or_create( |
35 | - release=release, |
36 | - variant=variant, |
37 | - arch=arch, |
38 | - build_number=build_number, |
39 | - md5='fake{}{}{}{}'.format(build_number, release, variant, arch), |
40 | - ) |
41 | - |
42 | - now = datetime.datetime.now() |
43 | - for i in range(100): |
44 | - for j in range(10): |
45 | - kernel_init=random.random() * 20 |
46 | - kernel=random.random() * 20 |
47 | - plumbing=random.random() * 20 |
48 | - xorg=random.random() * 20 |
49 | - desktop=random.random() * 20 |
50 | - |
51 | - boot = kernel_init + kernel + plumbing + xorg + desktop |
52 | - ran_at = now - datetime.timedelta(days=i) |
53 | - |
54 | - imageresult = ImageResult.objects.create( |
55 | - image=image, |
56 | - machine=machine, |
57 | - kernel=kernel, |
58 | - kernel_init=kernel_init, |
59 | - plumbing=plumbing, |
60 | - xorg=xorg, |
61 | - desktop=desktop, |
62 | - boot=boot, |
63 | - ran_at=ran_at, |
64 | + machines = [ |
65 | + { |
66 | + 'name': 'Dell Optiplex 242', |
67 | + 'mac': '01:02:03:0a:0b', |
68 | + }, |
69 | + { |
70 | + 'name': 'Nexus 7', |
71 | + 'mac': '01:02:03:0a:0c', |
72 | + }, |
73 | + { |
74 | + 'name': 'Dell 242', |
75 | + 'mac': '01:02:03:0a:0d', |
76 | + }, |
77 | + { |
78 | + 'name': 'HP 242', |
79 | + 'mac': '01:02:03:0a:0e', |
80 | + }, |
81 | + ] |
82 | + |
83 | + for m in machines: |
84 | + machine, created = Machine.objects.get_or_create( |
85 | + name=m['name'], |
86 | + mac_address=m['mac'] |
87 | + ) |
88 | + |
89 | + releases = ['quantal'] |
90 | + arches = ['amd64', 'i386'] |
91 | + variants = ['server'] |
92 | + build_numbers = ['2012{:0>2}{:0>2}'.format(y,x) for y in [4,5,6,7,8,9] for x in range(1, 31)] |
93 | + build_numbers.append('20120917.1') |
94 | + build_numbers.append('20120917.2') |
95 | + |
96 | + """ |
97 | + '20120701', |
98 | + '20120914', |
99 | + '20120915', |
100 | + '20120916', |
101 | + '20120917', |
102 | + '20120918', |
103 | + ] |
104 | + """ |
105 | + |
106 | + for release in releases: |
107 | + for arch in arches: |
108 | + for variant in variants: |
109 | + for build_number in build_numbers: |
110 | + image, created = Image.objects.get_or_create( |
111 | + release=release, |
112 | + variant=variant, |
113 | + arch=arch, |
114 | + build_number=build_number, |
115 | + md5='fake{}{}{}{}'.format(build_number, release, variant, arch), |
116 | ) |
117 | + |
118 | + now = datetime.datetime.now() |
119 | + for i in range(10): |
120 | + for j in range(10): |
121 | + kernel_init=random.random() * 20 |
122 | + kernel=random.random() * 20 |
123 | + plumbing=random.random() * 20 |
124 | + xorg=random.random() * 20 |
125 | + desktop=random.random() * 20 |
126 | + |
127 | + boot = kernel_init + kernel + plumbing + xorg + desktop |
128 | + ran_at = now - datetime.timedelta(days=i) |
129 | + |
130 | + imageresult = ImageResult.objects.create( |
131 | + image=image, |
132 | + machine=machine, |
133 | + kernel=kernel, |
134 | + kernel_init=kernel_init, |
135 | + plumbing=plumbing, |
136 | + xorg=xorg, |
137 | + desktop=desktop, |
138 | + boot=boot, |
139 | + ran_at=ran_at, |
140 | + ) |
141 | |
142 | === modified file 'dashboard/management/__init__.py' |
143 | --- dashboard/management/__init__.py 2012-08-29 17:33:22 +0000 |
144 | +++ dashboard/management/__init__.py 2012-11-28 17:34:20 +0000 |
145 | @@ -11,13 +11,15 @@ |
146 | try_count = 0 |
147 | |
148 | while try_count < 5: |
149 | - try: |
150 | - f = urllib2.urlopen(req) |
151 | - response = f.read() |
152 | - f.close() |
153 | - break |
154 | - except urllib2.HTTPError: |
155 | - try_count += 1 |
156 | + try: |
157 | + f = urllib2.urlopen(req) |
158 | + response = f.read() |
159 | + f.close() |
160 | + break |
161 | + except urllib2.HTTPError: |
162 | + try_count += 1 |
163 | + except urllib2.URLError as e: |
164 | + try_count += 1 |
165 | |
166 | if response != '' and as_json: |
167 | return json.loads(response) |
168 | |
169 | === modified file 'dashboard/management/commands/jenkins_pull.py' |
170 | --- dashboard/management/commands/jenkins_pull.py 2012-09-19 17:54:56 +0000 |
171 | +++ dashboard/management/commands/jenkins_pull.py 2012-11-28 17:34:20 +0000 |
172 | @@ -23,12 +23,57 @@ |
173 | DEBUG_LIMITS, |
174 | ) |
175 | |
176 | +from dashboard.utah_utils import process_smoke_log |
177 | + |
178 | #l = logging.getLogger('django.db.backends') |
179 | #l.setLevel(logging.DEBUG) |
180 | #l.addHandler(logging.StreamHandler()) |
181 | |
182 | from dashboard.utils import regexes |
183 | |
184 | +def _get_utah_logs(artifacts): |
185 | + """ |
186 | + Check if there are files in 'clientlogs'. |
187 | + |
188 | + Returns a list of utah logs found. |
189 | + |
190 | + """ |
191 | + |
192 | + logs = [] |
193 | + |
194 | + for artifact in artifacts: |
195 | + logging.debug(artifact['relativePath']) |
196 | + if 'clientlogs' in artifact['relativePath']: |
197 | + logs.append(artifact) |
198 | + |
199 | + return logs |
200 | + |
201 | +def _process_utah_logs(logs, jenkins_build=None, jenkins_url=None, |
202 | + name=None): |
203 | + |
204 | + runs = [] |
205 | + builds = [] |
206 | + results = [] |
207 | + |
208 | + for log in logs: |
209 | + logging.debug("log: {}".format(log)) |
210 | + run, build, result = process_smoke_log( |
211 | + log['relativePath'], |
212 | + jenkins_build=jenkins_build, |
213 | + jenkins_url=jenkins_url, |
214 | + name=name, |
215 | + ) |
216 | + |
217 | + if run: |
218 | + runs.append(run) |
219 | + if build: |
220 | + builds.append(build) |
221 | + if result: |
222 | + results.append(result) |
223 | + |
224 | + return runs, builds, results |
225 | + |
226 | + |
227 | class Command(BaseCommand): |
228 | option_list = BaseCommand.option_list + ( |
229 | make_option('-I', '--initial-run', |
230 | @@ -36,8 +81,12 @@ |
231 | dest='initial_run', |
232 | default=False, |
233 | help='Initial run processes all data (takes longer)'), |
234 | + make_option('-w', '--wait-for', |
235 | + dest='waitfor', |
236 | + help='Skip all jobs until the named job is found'), |
237 | ) |
238 | |
239 | + args = "[job name] [job name]..." |
240 | help = 'Pull data from jenkins' |
241 | |
242 | def get_build_number(self, build_url): |
243 | @@ -55,48 +104,87 @@ |
244 | return None |
245 | |
246 | def handle(self, *args, **options): |
247 | + verbosity = int(options.get('verbosity')) |
248 | + log_level = logging.DEBUG if verbosity > 1 else logging.INFO |
249 | + |
250 | + logging.basicConfig(level=log_level) |
251 | + |
252 | + job_list = list(args) |
253 | + logging.debug("job_list: {}".format(job_list)) |
254 | + |
255 | starttime = datetime.now() |
256 | initial_run = options.get('initial_run') |
257 | + waitfor = options.get('waitfor') |
258 | + |
259 | + logging.debug("waitfor: {}".format(waitfor)) |
260 | |
261 | # for the initial run include older releases as well. |
262 | if initial_run: |
263 | regexes['smoke'] = re.compile(ur"^(lucid|natty|oneiric|precise|quantal)-(desktop|server|alternate)") |
264 | |
265 | + #regexes['smoke'] = re.compile(ur"^(raring)-(desktop)-i386_default") |
266 | + #regexes['smoke'] = re.compile(ur"^(quantal)-(desktop|server|alternate)-") |
267 | + #regexes['utah-smoke'] = re.compile(ur"^(raring)-(desktop)-i386-smoke-default") |
268 | + |
269 | + |
270 | jobs_top = jenkins_get('https://jenkins.qa.ubuntu.com/api/json') |
271 | jobs = jobs_top['jobs'] |
272 | |
273 | |
274 | job_count = 0 # XXX: DEBUG |
275 | + waiting = waitfor is not None |
276 | + logging.info("waiting for job {}".format(waitfor)) |
277 | for job in jobs: |
278 | + utah_job = False |
279 | name = job['name'] |
280 | url = job['url'] |
281 | - self.stdout.write('Name: %s\n' % name) |
282 | - if regexes['bootspeed'].match(name): |
283 | - self.stdout.write("Bootspeed test %s\n" % name) |
284 | - elif regexes['upgrade'].match(name): |
285 | - self.stdout.write("Upgrade test %s\n" % name) |
286 | - elif regexes['smoke'].match(name): |
287 | - self.stdout.write("Smoke test %s\n" % name) |
288 | + |
289 | + # skip jobs until the one we're waiting for |
290 | + if waiting: |
291 | + if name == waitfor: |
292 | + logging.info("found job {}".format(waitfor)) |
293 | + waiting = False |
294 | + else: |
295 | + continue |
296 | + |
297 | + # if passed a list of jobs skip those that don't match |
298 | + if len(job_list) > 0 and name not in job_list: |
299 | + continue |
300 | + |
301 | + if regexes['smoke'].match(name) or regexes['utah-smoke'].match(name): |
302 | + logging.info("Smoke test %s" % name) |
303 | flavor = 'ubuntu' |
304 | if regexes['smoke-ec2'].match(name): |
305 | if regexes['smoke-ec2-daily'].match(name): |
306 | m = regexes['smoke-ec2-daily-data'].match(name) |
307 | release, variant = m.group(1,2) |
308 | else: |
309 | - m = regexes['smoke-data'].match(name) |
310 | + # first check if it's a utah based job |
311 | + m = regexes['utah-smoke-data'].match(name) |
312 | orig_name = name |
313 | - release, variant, arch, name = m.group(1,2,3,4) |
314 | + if m: |
315 | + release, variant, arch, name = m.group(1,2,3,4) |
316 | + logging.debug(("Got a UTAH job: ", release, variant, arch, name)) |
317 | + utah_job = True |
318 | + else: |
319 | + m = regexes['smoke-data'].match(name) |
320 | + if m: |
321 | + release, variant, arch, name = m.group(1,2,3,4) |
322 | + else: |
323 | + logging.warn("Strange smoke name: {}" |
324 | + .format(name)) |
325 | + continue |
326 | |
327 | # XXX: debugging limit |
328 | job_count += 1 |
329 | if DEBUG_LIMITS and job_count > JOB_COUNT_MAX: |
330 | break |
331 | |
332 | - self.stdout.write("Release: %s, variant: %s, arch: %s, name: %s\n" % (release, variant, arch, name)) |
333 | + logging.info("Release: %s, variant: %s, arch: %s, name: %s" % (release, variant, arch, name)) |
334 | |
335 | # Ignore static validation per Gema's request via IRC. |
336 | if name == "static_validation": |
337 | - self.stdout.write("Skipping static validation test: {}".format(orig_name)) |
338 | + logging.info("Skipping static validation test: {}".format(orig_name)) |
339 | continue |
340 | |
341 | |
342 | @@ -131,6 +219,17 @@ |
343 | if building: |
344 | continue |
345 | |
346 | + if utah_job: # A utah job |
347 | + utah_logs = _get_utah_logs(artifacts) |
348 | + |
349 | + if len(utah_logs) > 0: |
350 | + runs, builds, results = _process_utah_logs( |
351 | + utah_logs, |
352 | + jenkins_build=build_number, |
353 | + jenkins_url=url, |
354 | + name=name, |
355 | + ) |
356 | + |
357 | # XXX: find out what TZ jenkins uses and use correct one here |
358 | if not initial_run and datetime.now() - build_date > timedelta(days=7): |
359 | continue |
360 | @@ -154,45 +253,56 @@ |
361 | if build_no is None: |
362 | build_no = build_date.strftime("%Y%m%d ?") |
363 | |
364 | - self.stdout.write("Build #%s, date: %s, build_no: %s\n" % (build_number, build_date, build_no)) |
365 | - |
366 | - run = None |
367 | - try: |
368 | - run = Run.objects.get(release=release, flavor=flavor, build_no=build_no) |
369 | - except Run.DoesNotExist: |
370 | - run = Run.objects.create(release=release, flavor=flavor, build_no=build_no, ran_at=build_date, test_type=0) |
371 | - |
372 | - build = None |
373 | - try: |
374 | - build = run.build_set.get(variant=variant, arch=arch, flavor=flavor) |
375 | - except Build.DoesNotExist: |
376 | - build = run.build_set.create(variant=variant, arch=arch, flavor=flavor) |
377 | + logging.info("Build #%s, date: %s, build_no: %s" % (build_number, build_date, build_no)) |
378 | |
379 | result = None |
380 | - try: |
381 | - result = build.result_set.get(name=name, jenkins_build=build_number) |
382 | - # break |
383 | - except Result.DoesNotExist: |
384 | - fail_count, skip_count, pass_count, total_count = (0, 0, 0, 0) |
385 | - for action in actions: |
386 | + if not utah_job: |
387 | + logging.debug("Processing results as jenkins job") |
388 | + # Start adding objects here. |
389 | + |
390 | + run, new_run = Run.objects.get_or_create( |
391 | + release=release, |
392 | + flavor=flavor, |
393 | + build_no=build_no, |
394 | + defaults={ |
395 | + 'ran_at': build_date, |
396 | + 'test_type': 0, |
397 | + }, |
398 | + ) |
399 | + |
400 | + build, new_build = run.build_set.get_or_create( |
401 | + variant=variant, |
402 | + flavor=flavor, |
403 | + arch=arch, |
404 | + ) |
405 | + |
406 | + try: |
407 | + result = build.result_set.get(name=name, jenkins_build=build_number) |
408 | + # break |
409 | + except Result.DoesNotExist: |
410 | + fail_count, skip_count, pass_count, total_count = (0, 0, 0, 0) |
411 | + for action in actions: |
412 | + try: |
413 | + fail_count, skip_count, total_count = (action['failCount'], action['skipCount'], action['totalCount']) |
414 | + pass_count = total_count - fail_count - skip_count |
415 | + break |
416 | + except KeyError: |
417 | + continue |
418 | + |
419 | + result = build.result_set.create(name=name, jenkins_build=build_number, ran_at=build_date, jenkins_url=build_url, fail_count=fail_count, skip_count=skip_count, pass_count=pass_count, total_count=total_count) |
420 | + |
421 | + for bug_no in lp_bugs: |
422 | + bug = None |
423 | try: |
424 | - fail_count, skip_count, total_count = (action['failCount'], action['skipCount'], action['totalCount']) |
425 | - pass_count = total_count - fail_count - skip_count |
426 | - break |
427 | - except KeyError: |
428 | - continue |
429 | - |
430 | - result = build.result_set.create(name=name, jenkins_build=build_number, ran_at=build_date, jenkins_url=build_url, fail_count=fail_count, skip_count=skip_count, pass_count=pass_count, total_count=total_count) |
431 | - |
432 | - for bug_no in lp_bugs: |
433 | - bug = None |
434 | - try: |
435 | - bug = Bug.objects.get(bug_no = bug_no) |
436 | - except Bug.DoesNotExist: |
437 | - bug = Bug.objects.create(bug_no=bug_no, status='unknown') |
438 | - if bug not in list(result.bugs.all()): |
439 | - result.bugs.add(bug) |
440 | - result.save() |
441 | + bug = Bug.objects.get(bug_no = bug_no) |
442 | + except Bug.DoesNotExist: |
443 | + bug = Bug.objects.create(bug_no=bug_no, status='unknown') |
444 | + if bug not in list(result.bugs.all()): |
445 | + result.bugs.add(bug) |
446 | + result.save() |
447 | + |
448 | + if result: |
449 | + results = [result] |
450 | |
451 | for artifact in artifacts: |
452 | path = artifact['relativePath'] |
453 | @@ -207,15 +317,11 @@ |
454 | 'remote_url': remote_url, |
455 | 'path': path, |
456 | } |
457 | - try: |
458 | - res_log = result.resultlog_set.get(**art_args) |
459 | - except ResultLog.DoesNotExist: |
460 | - result.resultlog_set.create(**art_args) |
461 | - |
462 | - elif regexes['kernel_sru'].match(name): |
463 | - self.stdout.write("Kernel SRU test %s\n" %name) |
464 | + for result in results: |
465 | + res_log, new_res = result.resultlog_set.get_or_create( |
466 | + **art_args) |
467 | |
468 | |
469 | endtime = datetime.now() |
470 | |
471 | - self.stdout.write("time: {}\n".format(endtime - starttime)) |
472 | + logging.info("time: {}".format(endtime - starttime)) |
473 | |
474 | === modified file 'dashboard/utah_utils.py' |
475 | --- dashboard/utah_utils.py 2012-11-19 21:06:18 +0000 |
476 | +++ dashboard/utah_utils.py 2012-11-28 17:34:20 +0000 |
477 | @@ -4,7 +4,7 @@ |
478 | |
479 | import logging |
480 | |
481 | -from utah.parser import UTAHParser |
482 | +from utah.parser import UTAHParser, ParserError |
483 | from dashboard.models import ( |
484 | Build, |
485 | Run, |
486 | @@ -13,11 +13,15 @@ |
487 | |
488 | FLAVOR='ubuntu' |
489 | |
490 | -def process_smoke_log(logfile, jenkins_build=None, jenkins_url=None): |
491 | +def process_smoke_log(logfile, jenkins_build=None, jenkins_url=None, |
492 | + name=None): |
493 | """ |
494 | Parse a utah client log for smoke test results. |
495 | """ |
496 | |
497 | + run = None |
498 | + build = None |
499 | + result = None |
500 | logfile_path = logfile |
501 | |
502 | if jenkins_url is not None and jenkins_build is not None: |
503 | @@ -35,10 +39,24 @@ |
504 | jenkins_url = "http://jenkins.qa.ubuntu.com/" |
505 | |
506 | parser = UTAHParser() |
507 | - data = parser.parse(logfile_path) |
508 | + |
509 | + try: |
510 | + data = parser.parse(logfile_path) |
511 | + except ParserError as e: |
512 | + logging.error(e) |
513 | + return run, build, result |
514 | + |
515 | + if data is None: |
516 | + logging.warn("Unable to parse {}".format(logfile_path)) |
517 | + return run, build, result |
518 | |
519 | build_number = data['build_number'] |
520 | - name = data['name'] |
521 | + |
522 | + # Use the name from the logs if there is one, if there isn't one |
523 | + # and one is passed in use that. |
524 | + if name is None or data['name'] != 'unnamed': |
525 | + name = data['name'] |
526 | + |
527 | arch = data['arch'] |
528 | ran_at = data['ran_at'] |
529 | release = data['release'] |
530 | @@ -61,6 +79,9 @@ |
531 | build_no=build_number, |
532 | release=release, |
533 | flavor=FLAVOR, |
534 | + defaults={ |
535 | + 'ran_at': ran_at, |
536 | + }, |
537 | ) |
538 | |
539 | logging.debug("run: {}".format(run)) |
540 | @@ -69,8 +90,8 @@ |
541 | build, new_build = Build.objects.get_or_create( |
542 | run=run, |
543 | flavor=FLAVOR, |
544 | + variant=install_type, |
545 | arch=arch, |
546 | - variant=install_type, |
547 | ) |
548 | |
549 | logging.debug("build: {}".format(build)) |
550 | @@ -86,8 +107,10 @@ |
551 | #XXX: need to be provided from outside |
552 | skip_count=0, |
553 | jenkins_build=jenkins_build, |
554 | - jenkins_url=jenkins_url, |
555 | + jenkins_url="{}{}".format(jenkins_url, jenkins_build), |
556 | ) |
557 | |
558 | logging.debug("result: {}".format(result)) |
559 | logging.debug("new_result: {}".format(new_result)) |
560 | + |
561 | + return run, build, result |
562 | |
563 | === modified file 'dashboard/utils.py' |
564 | --- dashboard/utils.py 2012-09-19 17:54:14 +0000 |
565 | +++ dashboard/utils.py 2012-11-28 17:34:20 +0000 |
566 | @@ -14,14 +14,17 @@ |
567 | 'bootspeed': re.compile(ur".*-bootspeed-.*"), |
568 | 'upgrade': re.compile(ur".*-upgrade-.*"), |
569 | 'smoke': re.compile(ur"^(precise|quantal)-(desktop|server|alternate)"), |
570 | + 'utah-smoke': re.compile(ur"^(raring)-(desktop|server)-.*-smoke-.*"), |
571 | + 'utah-smoke-data': re.compile(ur"^(raring)-(desktop|server)-([^-]*)-smoke-(.*)"), |
572 | + |
573 | 'smoke-ec2': re.compile(ur"(.*)-(.*)-ec2.*"), |
574 | 'smoke-ec2-daily': re.compile(ur".*-daily$"), |
575 | 'smoke-ec2-daily-data': re.compile(ur"^(.*)-(.*).*"), |
576 | - 'smoke-data': re.compile(ur"^(.*)-(.*)-([^_]*)_(.*)"), |
577 | + 'smoke-data': re.compile(ur"^(precise|quantal)-(.*)-([^_]*)_(.*)"), |
578 | 'smoke-buildid': re.compile(r"^[0-9]{8}(\.[0-9]+)?$"), |
579 | 'smoke-lpbug': re.compile(ur"LP:#[0-9]+$"), |
580 | 'smoke-build_desc': re.compile(ur"[^,\s]+"), |
581 | - 'kernel_sru': re.compile(ur"^(sru_kernel)(|_backport)-(lucid|maveric|natty|oneiric|precise|quantal)(|_lts_hwe)-(.*)-(.*)-(.*).*"), |
582 | + 'kernel_sru': re.compile(ur"^(sru_kernel)(|_backport)-(lucid|maveric|natty|oneiric|precise|quantal|raring)(|_lts_hwe)-(.*)-(.*)-(.*).*"), |
583 | 'kernel_sru-version': re.compile(ur"^([^,\s]+)(\s+|,)"), |
584 | 'lpbug': re.compile(ur"LP:#[0-9]+$"), |
585 | 'buildid': re.compile(r"^[0-9]{8}(\.[0-9]+)?$"), |
This is looking good.