Merge lp:~joetalbott/utah/dashboard_integration_parser into lp:utah

Proposed by Joe Talbott
Status: Merged
Approved by: Max Brustkern
Approved revision: 738
Merged at revision: 749
Proposed branch: lp:~joetalbott/utah/dashboard_integration_parser
Merge into: lp:utah
Diff against target: 331 lines (+290/-2)
4 files modified
tests/test_parser.py (+42/-0)
utah/client/common.py (+4/-2)
utah/parser.py (+108/-0)
utah/publish.py (+136/-0)
To merge this branch: bzr merge lp:~joetalbott/utah/dashboard_integration_parser
Reviewer Review Type Date Requested Status
Max Brustkern (community) Approve
Joe Talbott (community) Needs Resubmitting
Review via email: mp+134377@code.launchpad.net

Description of the change

This branch factors the result parser out into its own module so that we can package it separately and use it in the QA Dashboard.

I'm not sure how having two merge proposals based on each other will work, so this might supersede my dashboard_integration MP.

To post a comment you must log in.
738. By Joe Talbott

Don't use relative import paths.

Noticed-By: Max Brustkern

Revision history for this message
Joe Talbott (joetalbott) wrote :

Updated to not use relative import paths.

review: Needs Resubmitting
Revision history for this message
Max Brustkern (nuclearbob) wrote :

Looks good. I'll merge it and get to the packaging later.

review: Approve

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1=== added file 'tests/test_parser.py'
2--- tests/test_parser.py 1970-01-01 00:00:00 +0000
3+++ tests/test_parser.py 2012-11-14 22:20:26 +0000
4@@ -0,0 +1,42 @@
5+import jsonschema
6+import os
7+import tempfile
8+import unittest
9+
10+from utah.parser import UTAHParser
11+
12+MALFORMED_CONTENT = """
13+---
14+ bad:
15+ good:
16+"""
17+
18+class TestParser(unittest.TestCase):
19+ def setUp(self):
20+ self.logfile = '/tmp/utah.yaml'
21+ self.fakelog = '/tmp/fakeutah.yaml'
22+ self.assertTrue(os.path.exists(self.logfile))
23+ self.assertFalse(os.path.exists(self.fakelog))
24+ self.parser = UTAHParser()
25+ self.tmpfile = tempfile.TemporaryFile()
26+
27+ self.tmpfile.write(MALFORMED_CONTENT)
28+
29+ def tearDown(self):
30+ self.tmpfile.close()
31+
32+ def test_missing_file(self):
33+ with self.assertRaises(IOError):
34+ data = self.parser.parse(self.fakelog)
35+
36+ def test_malformed_file(self):
37+ with self.assertRaises(jsonschema.ValidationError):
38+ data = self.parser.parse(self.tmpfile)
39+
40+ def test_parser(self):
41+ data = self.parser.parse(self.logfile)
42+
43+ print("data: {}".format(data))
44+ self.assertIn('passes', data)
45+ self.assertIn('errors', data)
46+ self.assertIn('failures', data)
47
48=== modified file 'utah/client/common.py'
49--- utah/client/common.py 2012-11-07 18:00:47 +0000
50+++ utah/client/common.py 2012-11-14 22:20:26 +0000
51@@ -54,6 +54,8 @@
52 CMD_TS_SETUP = 'testsuite_setup'
53 CMD_TS_CLEANUP = 'testsuite_cleanup'
54
55+DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
56+
57
58 def do_nothing(_obj=None):
59 """
60@@ -114,7 +116,7 @@
61
62 return make_result(command=command,
63 retcode=ERROR_TIMEOUT,
64- start_time=str(start_time),
65+ start_time=start_time.strftime(DATE_FORMAT),
66 time_delta=str(time_delta),
67 cmd_type=cmd_type,
68 user=run_as,
69@@ -126,7 +128,7 @@
70 retcode=p.returncode,
71 stdout=stdout,
72 stderr=stderr,
73- start_time=str(start_time),
74+ start_time=start_time.strftime(DATE_FORMAT),
75 time_delta=str(time_delta),
76 cmd_type=cmd_type,
77 user=run_as,
78
79=== added file 'utah/parser.py'
80--- utah/parser.py 1970-01-01 00:00:00 +0000
81+++ utah/parser.py 2012-11-14 22:20:26 +0000
82@@ -0,0 +1,108 @@
83+import os
84+import jsonschema
85+import yaml
86+
87+class UTAHParser(object):
88+ """
89+ UTAH Client result parser class.
90+ """
91+
92+ COMMAND_SCHEMA = {
93+ 'type': 'object',
94+ 'properties': {
95+ 'cmd_type': {'type': 'string'},
96+ 'command': {'type': 'string'},
97+ 'returncode': {'type': 'integer'},
98+ 'start_time': {'type': 'string'},
99+ 'stderr': {'type': 'string'},
100+ 'stdout': {'type': 'string'},
101+ 'testcase': {'type': 'string'},
102+ 'testsuite': {'type': 'string'},
103+ 'time_delta': {'type': 'string'},
104+ 'user': {'type': 'string'},
105+ },
106+ }
107+
108+ CLIENT_OUTPUT_SCHEMA = {
109+ 'type': 'object',
110+ 'properties': {
111+ 'runlist': {
112+ 'type': 'string',
113+ 'required': True,
114+ },
115+ 'commands': {
116+ 'type': 'array',
117+ 'items': {'type': COMMAND_SCHEMA},
118+ 'required': True,
119+ },
120+ 'fetch_errors': {
121+ 'type': 'integer',
122+ 'required': True,
123+ },
124+ 'errors': {
125+ 'type': 'integer',
126+ 'required': True,
127+ },
128+ 'failures': {
129+ 'type': 'integer',
130+ 'required': True,
131+ },
132+ 'passes': {
133+ 'type': 'integer',
134+ 'required': True,
135+ },
136+ 'uname': {
137+ 'type': 'array',
138+ 'items': {'type': 'string'},
139+ 'required': True,
140+ },
141+ 'media-info': {
142+ 'type': 'string',
143+ 'required': True,
144+ },
145+ 'install_type': {
146+ 'type': 'string',
147+ 'required': True,
148+ },
149+ },
150+ }
151+
152+ def parse(self, logdata):
153+ """
154+ Parse utah client results.
155+
156+ logdata should be either a filename or a handle to a file
157+ object.
158+
159+ Returns the parsed yaml data.
160+
161+ """
162+
163+ if isinstance(logdata, str):
164+ return self._parse_logfile(logdata)
165+ else:
166+ return self._parse_stream(logdata)
167+
168+ def _parse_stream(self, stream):
169+ """ Parse client output from stream. """
170+ data = yaml.load(stream)
171+
172+ jsonschema.validate(data, self.CLIENT_OUTPUT_SCHEMA)
173+
174+ return data
175+
176+ def _parse_logfile(self, logfile):
177+ """ Parse client output log. """
178+
179+ data = None
180+
181+ with open(logfile, 'r') as fp:
182+ data = self._parse_stream(fp)
183+
184+ return data
185+
186+if __name__ == "__main__":
187+ parser = UTAHParser()
188+ data = parser.parse("/tmp/utah.yaml")
189+ print(data)
190+
191
192=== added file 'utah/publish.py'
193--- utah/publish.py 1970-01-01 00:00:00 +0000
194+++ utah/publish.py 2012-11-14 22:20:26 +0000
195@@ -0,0 +1,136 @@
196+import argparse
197+import datetime
198+import jsonschema
199+import os
200+import urllib
201+import urllib2
202+import yaml
203+
204+from utah.client import exceptions
205+from utah.parser import UTAHParser
206+
207+
208+COMMAND_SCHEMA = {
209+ 'type': 'object',
210+ 'properties': {
211+ 'cmd_type': {'type': 'string'},
212+ 'command': {'type': 'string'},
213+ 'returncode': {'type': 'integer'},
214+ 'start_time': {'type': 'string'},
215+ 'stderr': {'type': 'string'},
216+ 'stdout': {'type': 'string'},
217+ 'testcase': {'type': 'string'},
218+ 'testsuite': {'type': 'string'},
219+ 'time_delta': {'type': 'string'},
220+ 'user': {'type': 'string'},
221+ },
222+}
223+
224+CLIENT_OUTPUT_SCHEMA = {
225+ 'type': 'object',
226+ 'properties': {
227+ 'runlist': {
228+ 'type': 'string',
229+ 'required': True,
230+ },
231+ 'commands': {
232+ 'type': 'array',
233+ 'items': {'type': COMMAND_SCHEMA},
234+ 'required': True,
235+ },
236+ 'fetch_errors': {
237+ 'type': 'integer',
238+ 'required': True,
239+ },
240+ 'errors': {
241+ 'type': 'integer',
242+ 'required': True,
243+ },
244+ 'failures': {
245+ 'type': 'integer',
246+ 'required': True,
247+ },
248+ 'passes': {
249+ 'type': 'integer',
250+ 'required': True,
251+ },
252+ 'uname': {
253+ 'type': 'array',
254+ 'items': {'type': 'string'},
255+ 'required': True,
256+ },
257+ 'media-info': {
258+ 'type': 'string',
259+ 'required': True,
260+ },
261+ 'install_type': {
262+ 'type': 'string',
263+ 'required': True,
264+ },
265+ },
266+}
267+
268+
269+def _parse_logfile(logfile):
270+ """ Parse client output log for publishing. """
271+
272+ utah_parser = UTAHParser()
273+ return utah_parser.parse(logfile)
274+
275+
276+def _publish_results(url, data=None):
277+ """ Publish results to the QA Dashboard. """
278+
279+ if data is None:
280+ return "Not publishing null data"
281+
282+ try:
283+ u = urllib2.urlopen(url, urllib.urlencode(data))
284+
285+ return u.read()
286+ except urllib2.URLError as exception:
287+ return "ERROR: bad url {}: {}".format(url, exception)
288+
289+
290+def publish(logfile, url, token):
291+ """ Publish result from logfile to url using token. """
292+
293+ data = {}
294+
295+ # Defaults
296+ data['flavor'] = 'ubuntu'
297+ data['skip_count'] = 0
298+
299+ log_data = _parse_logfile(logfile)
300+
301+ if log_data is None:
302+ return "Failed to parse logfile: {}".format(logfile)
303+
304+ data['token'] = token
305+ data['fail_count'] = log_data['failures']
306+ data['pass_count'] = log_data['passes']
307+ data['error_count'] = log_data['errors']
308+ data['build_no'] = log_data['build_number']
309+ data['build_name'] = "{}-{}-{}_{}".format(
310+ log_data['release'], log_data['install_type'],
311+ log_data['arch'], log_data['name'])
312+ data['ran_at'] = log_data['ran_at']
313+
314+ return _publish_results(url, data)
315+
316+if __name__ == "__main__":
317+ parser = argparse.ArgumentParser(
318+ description='utah results publisher'
319+ )
320+ parser.add_argument('logfile', metavar='LOGFILE', type=str,
321+ help="utah client results log")
322+ parser.add_argument('url', metavar='URL', type=str,
323+ help="QA Dashboard API URL")
324+ parser.add_argument('token', metavar='API_TOKEN', type=str,
325+ help="QA Dashboard API token")
326+
327+ args = parser.parse_args()
328+
329+ print publish(args.logfile,
330+ url=args.url,
331+ token=args.token)

Subscribers

People subscribed via source and target branches