Merge lp:~joetalbott/utah/dashboard_integration_parser into lp:utah

Proposed by Joe Talbott
Status: Merged
Approved by: Max Brustkern
Approved revision: 738
Merged at revision: 749
Proposed branch: lp:~joetalbott/utah/dashboard_integration_parser
Merge into: lp:utah
Diff against target: 331 lines (+290/-2)
4 files modified
tests/test_parser.py (+42/-0)
utah/client/common.py (+4/-2)
utah/parser.py (+108/-0)
utah/publish.py (+136/-0)
To merge this branch: bzr merge lp:~joetalbott/utah/dashboard_integration_parser
Reviewer Review Type Date Requested Status
Max Brustkern (community) Approve
Joe Talbott (community) Needs Resubmitting
Review via email: mp+134377@code.launchpad.net

Description of the change

This branch factors the result parser out into its own module so that we can package it separately and use it in the QA Dashboard.

I'm not sure how having two merge proposals based on each other will work, so this might supersede my dashboard_integration MP.
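
For context, a rough sketch of how the dashboard side could use the factored-out module once it is packaged (the log path, URL, and token below are placeholders for illustration, not part of this branch):

    from utah.parser import UTAHParser
    from utah.publish import publish

    # Parse a client result log into a dict validated against the schema.
    parser = UTAHParser()
    results = parser.parse('/tmp/utah.yaml')
    print("{} passed, {} failed, {} errors".format(
        results['passes'], results['failures'], results['errors']))

    # Or parse and push the counts to the dashboard API in one call
    # (example URL and token are made up).
    print(publish('/tmp/utah.yaml',
                  url='http://dashboard.example.com/api/results/',
                  token='SECRET_TOKEN'))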

738. By Joe Talbott

Don't use relative import paths.

Noticed-By: Max Brustkern

Revision history for this message
Joe Talbott (joetalbott) wrote :

Updated to not use relative import paths.
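
For reference, the change is only the import form: the parser is now imported absolutely, as in

    from utah.parser import UTAHParser

rather than a relative form such as

    from parser import UTAHParser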

review: Needs Resubmitting
Revision history for this message
Max Brustkern (nuclearbob) wrote :

Looks good. I'll merge it and get to the packaging later.

review: Approve

Preview Diff

=== added file 'tests/test_parser.py'
--- tests/test_parser.py 1970-01-01 00:00:00 +0000
+++ tests/test_parser.py 2012-11-14 22:20:26 +0000
@@ -0,0 +1,42 @@
import jsonschema
import os
import tempfile
import unittest

from utah.parser import UTAHParser

MALFORMED_CONTENT = """
---
 bad:
 good:
"""

class TestParser(unittest.TestCase):
    def setUp(self):
        self.logfile = '/tmp/utah.yaml'
        self.fakelog = '/tmp/fakeutah.yaml'
        self.assertTrue(os.path.exists(self.logfile))
        self.assertFalse(os.path.exists(self.fakelog))
        self.parser = UTAHParser()
        self.tmpfile = tempfile.TemporaryFile()

        self.tmpfile.write(MALFORMED_CONTENT)

    def tearDown(self):
        self.tmpfile.close()

    def test_missing_file(self):
        with self.assertRaises(IOError):
            data = self.parser.parse(self.fakelog)

    def test_malformed_file(self):
        with self.assertRaises(jsonschema.ValidationError):
            data = self.parser.parse(self.tmpfile)

    def test_parser(self):
        data = self.parser.parse(self.logfile)

        print("data: {}".format(data))
        self.assertIn('passes', data)
        self.assertIn('errors', data)
        self.assertIn('failures', data)
=== modified file 'utah/client/common.py'
--- utah/client/common.py 2012-11-07 18:00:47 +0000
+++ utah/client/common.py 2012-11-14 22:20:26 +0000
@@ -54,6 +54,8 @@
 CMD_TS_SETUP = 'testsuite_setup'
 CMD_TS_CLEANUP = 'testsuite_cleanup'
 
+DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
+
 
 def do_nothing(_obj=None):
     """
@@ -114,7 +116,7 @@
 
     return make_result(command=command,
                        retcode=ERROR_TIMEOUT,
-                       start_time=str(start_time),
+                       start_time=start_time.strftime(DATE_FORMAT),
                        time_delta=str(time_delta),
                        cmd_type=cmd_type,
                        user=run_as,
@@ -126,7 +128,7 @@
                        retcode=p.returncode,
                        stdout=stdout,
                        stderr=stderr,
-                       start_time=str(start_time),
+                       start_time=start_time.strftime(DATE_FORMAT),
                        time_delta=str(time_delta),
                        cmd_type=cmd_type,
                        user=run_as,
 
=== added file 'utah/parser.py'
--- utah/parser.py 1970-01-01 00:00:00 +0000
+++ utah/parser.py 2012-11-14 22:20:26 +0000
@@ -0,0 +1,108 @@
import os
import jsonschema
import yaml

class UTAHParser(object):
    """
    UTAH Client result parser class.
    """

    COMMAND_SCHEMA = {
        'type': 'object',
        'properties': {
            'cmd_type': {'type': 'string'},
            'command': {'type': 'string'},
            'returncode': {'type': 'integer'},
            'start_time': {'type': 'string'},
            'stderr': {'type': 'string'},
            'stdout': {'type': 'string'},
            'testcase': {'type': 'string'},
            'testsuite': {'type': 'string'},
            'time_delta': {'type': 'string'},
            'user': {'type': 'string'},
        },
    }

    CLIENT_OUTPUT_SCHEMA = {
        'type': 'object',
        'properties': {
            'runlist': {
                'type': 'string',
                'required': True,
            },
            'commands': {
                'type': 'array',
                'items': {'type': COMMAND_SCHEMA},
                'required': True,
            },
            'fetch_errors': {
                'type': 'integer',
                'required': True,
            },
            'errors': {
                'type': 'integer',
                'required': True,
            },
            'failures': {
                'type': 'integer',
                'required': True,
            },
            'passes': {
                'type': 'integer',
                'required': True,
            },
            'uname': {
                'type': 'array',
                'items': {'type': 'string'},
                'required': True,
            },
            'media-info': {
                'type': 'string',
                'required': True,
            },
            'install_type': {
                'type': 'string',
                'required': True,
            },
        },
    }

    def parse(self, logdata):
        """
        Parse utah client results.

        logdata should be either a filename or a handle to a file
        object.

        Returns the parsed yaml data.

        """

        if isinstance(logdata, str):
            return self._parse_logfile(logdata)
        else:
            return self._parse_stream(logdata)

    def _parse_stream(self, stream):
        """ Parse client output from stream. """
        data = yaml.load(stream)

        jsonschema.validate(data, self.CLIENT_OUTPUT_SCHEMA)

        return data

    def _parse_logfile(self, logfile):
        """ Parse client output log. """

        data = None

        with open(logfile, 'r') as fp:
            data = self._parse_stream(fp)

        return data

if __name__ == "__main__":
    parser = UTAHParser()
    data = parser.parse("/tmp/utah.yaml")
    print(data)
=== added file 'utah/publish.py'
--- utah/publish.py 1970-01-01 00:00:00 +0000
+++ utah/publish.py 2012-11-14 22:20:26 +0000
@@ -0,0 +1,136 @@
import argparse
import datetime
import jsonschema
import os
import urllib
import urllib2
import yaml

from utah.client import exceptions
from utah.parser import UTAHParser


COMMAND_SCHEMA = {
    'type': 'object',
    'properties': {
        'cmd_type': {'type': 'string'},
        'command': {'type': 'string'},
        'returncode': {'type': 'integer'},
        'start_time': {'type': 'string'},
        'stderr': {'type': 'string'},
        'stdout': {'type': 'string'},
        'testcase': {'type': 'string'},
        'testsuite': {'type': 'string'},
        'time_delta': {'type': 'string'},
        'user': {'type': 'string'},
    },
}

CLIENT_OUTPUT_SCHEMA = {
    'type': 'object',
    'properties': {
        'runlist': {
            'type': 'string',
            'required': True,
        },
        'commands': {
            'type': 'array',
            'items': {'type': COMMAND_SCHEMA},
            'required': True,
        },
        'fetch_errors': {
            'type': 'integer',
            'required': True,
        },
        'errors': {
            'type': 'integer',
            'required': True,
        },
        'failures': {
            'type': 'integer',
            'required': True,
        },
        'passes': {
            'type': 'integer',
            'required': True,
        },
        'uname': {
            'type': 'array',
            'items': {'type': 'string'},
            'required': True,
        },
        'media-info': {
            'type': 'string',
            'required': True,
        },
        'install_type': {
            'type': 'string',
            'required': True,
        },
    },
}


def _parse_logfile(logfile):
    """ Parse client output log for publishing. """

    utah_parser = UTAHParser()
    return utah_parser.parse(logfile)


def _publish_results(url, data=None):
    """ Publish results to the QA Dashboard. """

    if data is None:
        return "Not publishing null data"

    try:
        u = urllib2.urlopen(url, urllib.urlencode(data))

        return u.read()
    except urllib2.URLError as exception:
        return "ERROR: bad url {}: {}".format(url, exception)


def publish(logfile, url, token):
    """ Publish result from logfile to url using token. """

    data = {}

    # Defaults
    data['flavor'] = 'ubuntu'
    data['skip_count'] = 0

    log_data = _parse_logfile(logfile)

    if log_data is None:
        return "Failed to parse logfile: {}".format(logfile)

    data['token'] = token
    data['fail_count'] = log_data['failures']
    data['pass_count'] = log_data['passes']
    data['error_count'] = log_data['errors']
    data['build_no'] = log_data['build_number']
    data['build_name'] = "{}-{}-{}_{}".format(
        log_data['release'], log_data['install_type'],
        log_data['arch'], log_data['name'])
    data['ran_at'] = log_data['ran_at']

    return _publish_results(url, data)

if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description='utah results publisher'
    )
    parser.add_argument('logfile', metavar='LOGFILE', type=str,
                        help="utah client results log")
    parser.add_argument('url', metavar='URL', type=str,
                        help="QA Dashboard API URL")
    parser.add_argument('token', metavar='API_TOKEN', type=str,
                        help="QA Dashboard API token")

    args = parser.parse_args()

    print publish(args.logfile,
                  url=args.url,
                  token=args.token)
