Merge lp:~milo/launchpad-work-items-tracker/bug996948 into lp:launchpad-work-items-tracker
- bug996948
- Merge into trunk
Status: | Superseded |
---|---|
Proposed branch: | lp:~milo/launchpad-work-items-tracker/bug996948 |
Merge into: | lp:launchpad-work-items-tracker |
Diff against target: |
3380 lines (+2497/-125) (has conflicts) 32 files modified
all-projects (+64/-2) burndown-chart (+5/-0) collect (+160/-66) collect_jira (+229/-0) collect_roadmap (+301/-0) css/status.css (+58/-0) generate-all (+53/-14) html-report (+102/-0) jira.py (+55/-0) kanban-papyrs-to-jira (+397/-0) lpworkitems/collect.py (+20/-20) lpworkitems/collect_roadmap.py (+71/-0) lpworkitems/database.py (+74/-2) lpworkitems/error_collector.py (+10/-0) lpworkitems/factory.py (+32/-1) lpworkitems/models.py (+32/-14) lpworkitems/models_roadmap.py (+60/-0) lpworkitems/tests/test_collect.py (+0/-1) lpworkitems/tests/test_collect_roadmap.py (+69/-0) lpworkitems/tests/test_factory.py (+1/-1) lpworkitems/tests/test_models.py (+17/-2) report_tools.py (+162/-1) roadmap-bp-chart (+249/-0) roadmap_health.py (+102/-0) templates/base.html (+13/-0) templates/body.html (+4/-1) templates/roadmap_card.html (+71/-0) templates/roadmap_lane.html (+60/-0) templates/util.html (+9/-0) tests.py (+1/-0) themes/linaro/templates/footer.html (+10/-0) utils.py (+6/-0) Text conflict in collect Text conflict in report_tools.py |
To merge this branch: | bzr merge lp:~milo/launchpad-work-items-tracker/bug996948 |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Linaro Infrastructure | Pending | ||
Review via email:
|
This proposal has been superseded by a proposal from 2012-10-09.
Commit message
Description of the change
Merge proposal in order to fix the integrity error we receive from status.l.o.
Changes done:
- catch the integrity error and print a warning with the name of the duplicated Blueprint
- skip the Blueprint and continue, otherwise the script will fail later in the code due to a variable not being set
- 344. By Milo Casagrande
-
Used error logger, added URL to Blueprint.
Unmerged revisions
- 344. By Milo Casagrande
-
Used error logger, added URL to Blueprint.
- 343. By Milo Casagrande
-
Catch integrity error, and print warning.
- 342. By James Tunnicliffe
-
Removed template-driven escaping from some pre-defined HTML so it renders correctly
- 341. By James Tunnicliffe
-
Fixed errors parsing meta values containing a colon
- 340. By Milo Casagrande
-
Merged lp:~lool/launchpad-work-items-tracker/fix-line-wrap-breakage.
- 339. By Milo Casagrande
- 338. By Данило Шеган
-
Merge HTML escaping fix from trunk r301 (by Martin Pitt).
- 337. By Данило Шеган
-
Fix problem with graph generation for milestones in the far future. Patch by Loic.
- 336. By Milo Casagrande
- 335. By James Tunnicliffe
-
Merge in migration tools
Preview Diff
1 | === modified file 'all-projects' |
2 | --- all-projects 2012-08-27 15:32:43 +0000 |
3 | +++ all-projects 2012-10-09 09:20:30 +0000 |
4 | @@ -14,9 +14,27 @@ |
5 | import sys |
6 | import datetime |
7 | |
8 | +import report_tools |
9 | + |
10 | |
11 | def collect(source_dir, db_file, config_file, extra_args): |
12 | - args = [os.path.join(source_dir, "collect")] |
13 | + return run_collect_script(source_dir, db_file, config_file, extra_args, |
14 | + "collect") |
15 | + |
16 | + |
17 | +def collect_jira(source_dir, db_file, config_file, extra_args): |
18 | + return run_collect_script(source_dir, db_file, config_file, extra_args, |
19 | + "collect_jira") |
20 | + |
21 | + |
22 | +def collect_roadmap(source_dir, db_file, config_file, extra_args): |
23 | + return run_collect_script(source_dir, db_file, config_file, extra_args, |
24 | + "collect_roadmap") |
25 | + |
26 | + |
27 | +def run_collect_script(source_dir, db_file, config_file, extra_args, |
28 | + collect_script): |
29 | + args = [os.path.join(source_dir, collect_script)] |
30 | args.extend(["-d", db_file]) |
31 | args.extend(["-c", config_file]) |
32 | args += extra_args |
33 | @@ -56,9 +74,14 @@ |
34 | |
35 | |
36 | def main(): |
37 | - parser = optparse.OptionParser(usage="%prog <database dir> <www root dir> [www root url]") |
38 | + parser = optparse.OptionParser( |
39 | + usage="%prog <database dir> <www root dir> [www root url]") |
40 | parser.add_option("--config-dir", dest="config_dir", default="config") |
41 | + parser.add_option("--roadmap-config-file", dest="roadmap_config_file", |
42 | + default="roadmap-config") |
43 | parser.add_option("--debug", dest="debug", action="store_true") |
44 | + parser.add_option("--kanban-token-file", dest="kanban_token_file") |
45 | + parser.add_option("--papyrs-token-file", dest="papyrs_token_file") |
46 | opts, args = parser.parse_args() |
47 | |
48 | if os.environ.get("DEBUG", None) is not None: |
49 | @@ -88,6 +111,11 @@ |
50 | os.path.join(source_dir, opts.config_dir, "*%s" % valid_config_suffix)) |
51 | |
52 | for config_file in filenames: |
53 | + # read roadmap config to find where to get cards from |
54 | + cfg = report_tools.load_config(config_file) |
55 | + # default to kanbantool |
56 | + cards_source = cfg.get('cards_source', 'kanban') |
57 | + |
58 | project_name = os.path.basename(config_file)[:-len(valid_config_suffix)] |
59 | project_output_dir = os.path.join(output_dir, project_name) |
60 | db_file = os.path.join(db_dir, "%s.db" % project_name) |
61 | @@ -111,6 +139,40 @@ |
62 | if not collect(source_dir, db_file, config_file, extra_collect_args): |
63 | sys.stderr.write("collect failed for %s" % project_name) |
64 | continue |
65 | + |
66 | + if cards_source == 'jira': |
67 | + extra_collect_jira_args = [] |
68 | + if not collect_jira(source_dir, db_file, opts.roadmap_config_file, |
69 | + extra_collect_jira_args): |
70 | + sys.stderr.write("collect_jira failed for %s" % project_name) |
71 | + continue |
72 | + elif cards_source == 'kanban': |
73 | + extra_collect_roadmap_args = [] |
74 | + extra_collect_roadmap_args.extend(["--board", '10721']) |
75 | + if opts.kanban_token_file is not None: |
76 | + with open(opts.kanban_token_file) as token_file: |
77 | + token = token_file.read() |
78 | + extra_collect_roadmap_args.extend(["--kanbantoken", token]) |
79 | + else: |
80 | + sys.stderr.write("No Kanbantool API token given to " |
81 | + "collect_roadmap for %s" % project_name) |
82 | + if opts.papyrs_token_file is not None: |
83 | + with open(opts.papyrs_token_file) as token_file: |
84 | + token = token_file.read() |
85 | + extra_collect_roadmap_args.extend(["--papyrstoken", token]) |
86 | + else: |
87 | + sys.stderr.write("No Papyrs API token given to " |
88 | + "collect_roadmap for %s" % project_name) |
89 | + |
90 | + if not collect_roadmap(source_dir, db_file, |
91 | + opts.roadmap_config_file, |
92 | + extra_collect_roadmap_args): |
93 | + sys.stderr.write("collect_roadmap failed for %s" % |
94 | + project_name) |
95 | + else: |
96 | + sys.stderr.write("Unknown cards source %s" % cards_source) |
97 | + continue |
98 | + |
99 | publish_new_db(project_name, project_output_dir, db_file) |
100 | generate_reports(project_output_dir, config_file, db_file, |
101 | source_dir, extra_generate_args, debug=opts.debug) |
102 | |
103 | === modified file 'burndown-chart' |
104 | --- burndown-chart 2012-08-24 05:29:13 +0000 |
105 | +++ burndown-chart 2012-10-09 09:20:30 +0000 |
106 | @@ -312,6 +312,11 @@ |
107 | opts.team or 'all', opts.group or 'none', milestone_collection and milestone_collection.display_name or 'none') |
108 | sys.exit(0) |
109 | |
110 | +if date_to_ordinal(end_date) - date_to_ordinal(start_date) > 366: |
111 | + print 'WARNING: date range %s - %s is over a year, not generating a chart' % ( |
112 | + start_date, end_date) |
113 | + sys.exit(0) |
114 | + |
115 | # title |
116 | if opts.team: |
117 | title = '/20' + opts.team |
118 | |
119 | === modified file 'collect' |
120 | --- collect 2012-07-11 22:36:56 +0000 |
121 | +++ collect 2012-10-09 09:20:30 +0000 |
122 | @@ -6,13 +6,27 @@ |
123 | # Copyright (C) 2010, 2011 Canonical Ltd. |
124 | # License: GPL-3 |
125 | |
126 | -import urllib, re, sys, optparse, smtplib, pwd, os, urlparse |
127 | import logging |
128 | +import optparse |
129 | +import os |
130 | +import pwd |
131 | +import re |
132 | +import smtplib |
133 | +import sys |
134 | +import urllib |
135 | +import urlparse |
136 | +import sqlite3 |
137 | + |
138 | from email.mime.text import MIMEText |
139 | |
140 | from launchpadlib.launchpad import Launchpad, EDGE_SERVICE_ROOT |
141 | |
142 | -from lpworkitems.collect import CollectorStore, PersonCache, WorkitemParser, bug_wi_states |
143 | +from lpworkitems.collect import ( |
144 | + CollectorStore, |
145 | + PersonCache, |
146 | + WorkitemParser, |
147 | + bug_wi_states, |
148 | + ) |
149 | from lpworkitems.database import get_store |
150 | from lpworkitems.error_collector import ( |
151 | BlueprintURLError, |
152 | @@ -26,6 +40,7 @@ |
153 | TeamParticipation, |
154 | Workitem, |
155 | ) |
156 | +from utils import unicode_or_None |
157 | import report_tools |
158 | |
159 | |
160 | @@ -58,20 +73,16 @@ |
161 | """Get a link to the Launchpad API object on the website.""" |
162 | api_link = item.self_link |
163 | parts = urlparse.urlparse(api_link) |
164 | - link = parts.scheme + "://" + parts.netloc.replace("api.", "") + "/" + parts.path.split("/", 2)[2] |
165 | + link = parts.scheme + "://" + parts.netloc.replace("api.", "") + \ |
166 | + "/" + parts.path.split("/", 2)[2] |
167 | return link.decode("utf-8") |
168 | |
169 | |
170 | -def unicode_or_None(attr): |
171 | - if attr is None: |
172 | - return attr |
173 | - if isinstance(attr, unicode): |
174 | - return attr |
175 | - return attr.decode("utf-8") |
176 | - |
177 | - |
178 | import simplejson |
179 | + |
180 | _orig_loads = simplejson.loads |
181 | + |
182 | + |
183 | def loads(something): |
184 | return _orig_loads(unicode_or_None(something)) |
185 | simplejson.loads = loads |
186 | @@ -96,7 +107,10 @@ |
187 | ''' |
188 | model_bp = Blueprint.from_launchpad(bp) |
189 | if model_bp.milestone_name not in collector.valid_milestone_names(): |
190 | - data_error(web_link(bp), 'milestone "%s" is unknown/invalid' % model_bp.milestone_name, True) |
191 | + data_error( |
192 | + web_link(bp), |
193 | + 'milestone "%s" is unknown/invalid' % model_bp.milestone_name, |
194 | + True) |
195 | model_bp = collector.store_blueprint(model_bp) |
196 | if model_bp: |
197 | dbg('lp_import_blueprint: added blueprint: %s' % bp.name) |
198 | @@ -116,7 +130,10 @@ |
199 | model_group = BlueprintGroup.from_launchpad(bp) |
200 | model_group.area = area |
201 | if model_group.milestone_name not in collector.valid_milestone_names(): |
202 | - data_error(web_link(bp), 'milestone "%s" is unknown/invalid' % model_group.milestone, True) |
203 | + data_error( |
204 | + web_link(bp), |
205 | + 'milestone "%s" is unknown/invalid' % model_group.milestone, |
206 | + True) |
207 | |
208 | model_group = collector.store_blueprint_group(model_group) |
209 | if model_group is None: |
210 | @@ -126,9 +143,9 @@ |
211 | add_dependencies(collector, model_group.name, deps) |
212 | return model_group |
213 | |
214 | + |
215 | def parse_meta_item(collector, line, bp_name): |
216 | '''Parse a meta information line from a blueprint |
217 | - |
218 | ''' |
219 | |
220 | line = line.strip() |
221 | @@ -136,18 +153,19 @@ |
222 | return |
223 | |
224 | try: |
225 | - (key, value) = line.rsplit(':', 1) |
226 | - key = key.strip() |
227 | + (key, value) = line.split(':', 1) |
228 | + key = key.strip() |
229 | value = value.strip() |
230 | except ValueError: |
231 | dbg("\tMeta line '%s' can not be parsed" % line) |
232 | return |
233 | |
234 | - dbg( "\t\tMeta for %s: key='%s' value='%s'" % (bp_name, key, value) ) |
235 | + dbg("\t\tMeta for %s: key='%s' value='%s'" % (bp_name, key, value)) |
236 | collector.store_meta(key, value, bp_name) |
237 | |
238 | |
239 | -def parse_complexity_item(collector, line, bp_name, bp_url, def_milestone, def_assignee): |
240 | +def parse_complexity_item(collector, line, bp_name, bp_url, def_milestone, |
241 | + def_assignee): |
242 | line = line.strip() |
243 | # remove special characters people tend to type |
244 | line = re.sub('[^\w -.]', '', line) |
245 | @@ -156,9 +174,9 @@ |
246 | |
247 | dbg("\tParsing complexity line '%s'" % line) |
248 | |
249 | - num = None |
250 | - milestone = None |
251 | - assignee = None |
252 | + num = None |
253 | + milestone = None |
254 | + assignee = None |
255 | |
256 | try: |
257 | complexity_list = line.split() |
258 | @@ -176,7 +194,9 @@ |
259 | dbg('\tComplexity: %s MS: %s Who: %s' % (num, milestone, assignee)) |
260 | collector.store_complexity(assignee, num, milestone, bp_name) |
261 | except ValueError: |
262 | - data_error(bp_url, "\tComplexity line '%s' could not be parsed %s" % (line, ValueError)) |
263 | + data_error(bp_url, |
264 | + "\tComplexity line '%s' could not be parsed %s" % |
265 | + (line, ValueError)) |
266 | |
267 | |
268 | def milestone_extract(text, valid_milestones): |
269 | @@ -188,7 +208,9 @@ |
270 | return word |
271 | return None |
272 | |
273 | -def lp_import_blueprint_workitems(collector, bp, distro_release, people_cache=None, projects=None): |
274 | + |
275 | +def lp_import_blueprint_workitems(collector, bp, distro_release, |
276 | + people_cache=None, projects=None): |
277 | '''Collect work items from a Launchpad blueprint. |
278 | |
279 | This includes work items from the whiteboard as well as linked bugs. |
280 | @@ -202,17 +224,26 @@ |
281 | in_complexity_block = False |
282 | work_items = [] |
283 | |
284 | - model_bp = collector.store.find( |
285 | - Blueprint, Blueprint.name == bp.name).one() |
286 | - assert model_bp is not None, "Asked to process workitems of %s when it is not in the db" % bp.name |
287 | - |
288 | - dbg('lp_import_blueprint_workitems(): processing %s (spec milestone: %s, spec assignee: %s, spec implementation: %s)' % ( |
289 | + try: |
290 | + model_bp = collector.store.find(Blueprint, |
291 | + Blueprint.name == bp.name).one() |
292 | + except sqlite3.IntegrityError: |
293 | + logger.warn('Duplicated Blueprint found: %s. It will not be ' |
294 | + 'considered.' % bp.name) |
295 | + return |
296 | + |
297 | + assert model_bp is not None, \ |
298 | + "Asked to process workitems of %s when it is not in the db" % bp.name |
299 | + |
300 | + dbg('lp_import_blueprint_workitems(): processing %s (spec milestone: %s,' \ |
301 | + ' spec assignee: %s, spec implementation: %s)' % ( |
302 | bp.name, model_bp.milestone_name, model_bp.assignee_name, |
303 | model_bp.implementation)) |
304 | |
305 | valid_milestones = collector.valid_milestone_names() |
306 | global error_collector |
307 | parser = WorkitemParser( |
308 | +<<<<<<< TREE |
309 | model_bp, model_bp.milestone_name, collector.lp, people_cache=people_cache, |
310 | error_collector=error_collector) |
311 | |
312 | @@ -228,19 +259,43 @@ |
313 | if workitems_text: |
314 | for l in workitems_text.splitlines(): |
315 | if not in_workitems_block: |
316 | +======= |
317 | + model_bp, model_bp.milestone_name, collector.lp, |
318 | + people_cache=people_cache, error_collector=error_collector) |
319 | + |
320 | + # Get work items from both the whiteboard and the new workitems_text |
321 | + # property. Once the migration is completed and nobody's using the |
322 | + # whiteboard for work items we can change this to pull work items only |
323 | + # from bp.workitems_text. |
324 | + workitems_text = bp.whiteboard |
325 | + if workitems_text is None: |
326 | + workitems_text = '' |
327 | + if getattr(bp, 'workitems_text', '') != '': |
328 | + workitems_text += "\n" + bp.workitems_text |
329 | + if workitems_text: |
330 | + for l in workitems_text.splitlines(): |
331 | + if not in_workitems_block: |
332 | +>>>>>>> MERGE-SOURCE |
333 | m = work_items_re.search(l) |
334 | if m: |
335 | in_workitems_block = True |
336 | - dbg('lp_import_blueprint_workitems(): starting work items block at ' + l) |
337 | + dbg('lp_import_blueprint_workitems():' |
338 | + ' starting work items block at ' + l) |
339 | milestone = milestone_extract(m.group(1), valid_milestones) |
340 | dbg(' ... setting milestone to ' + str(milestone)) |
341 | +<<<<<<< TREE |
342 | parser.milestone_name = milestone or parser.blueprint.milestone_name |
343 | +======= |
344 | + parser.milestone_name = \ |
345 | + milestone or parser.blueprint.milestone_name |
346 | +>>>>>>> MERGE-SOURCE |
347 | continue |
348 | |
349 | if in_workitems_block: |
350 | dbg("\tworkitem (raw): '%s'" % (l.strip())) |
351 | if not l.strip(): |
352 | - dbg('lp_import_blueprint_workitems(): closing work items block with line: ' + l) |
353 | + dbg('lp_import_blueprint_workitems():' |
354 | + ' closing work items block with line: ' + l) |
355 | in_workitems_block = False |
356 | parser.milestone_name = parser.blueprint.milestone_name |
357 | workitem = parser.parse_blueprint_workitem(l) |
358 | @@ -308,7 +363,8 @@ |
359 | member.name, team.name) |
360 | if recursive or team.name in cfg.get('recursive_teams', []): |
361 | _import_teams_recurse( |
362 | - collector, cfg, member, top_level_team_names + [member.name], |
363 | + collector, cfg, member, |
364 | + top_level_team_names + [member.name], |
365 | people_cache=people_cache, recursive=True) |
366 | |
367 | |
368 | @@ -346,12 +402,14 @@ |
369 | blueprint.status = lp_project.summary or name |
370 | collector.store_blueprint(blueprint) |
371 | |
372 | - for task in lp_project.searchTasks(status=bug_wi_states.keys(), **cfg['work_item_bugs']): |
373 | + for task in lp_project.searchTasks(status=bug_wi_states.keys(), |
374 | + **cfg['work_item_bugs']): |
375 | id = task.self_link.split('/')[-1] |
376 | title = task.title.split('"', 1)[1].rstrip('"') |
377 | state = bug_wi_states[task.status] |
378 | if state is None: |
379 | - dbg('lp_import_bug_workitems: ignoring #%s: %s (status: %s)' % (id, title, task.status)) |
380 | + dbg('lp_import_bug_workitems: ignoring #%s: %s (status: %s)' % ( |
381 | + id, title, task.status)) |
382 | continue |
383 | dbg('lp_import_bug_workitems: #%s: %s (%s)' % (id, title, state)) |
384 | |
385 | @@ -384,14 +442,17 @@ |
386 | milestones.extend([ms for ms in project.all_milestones]) |
387 | |
388 | if 'release' in cfg: |
389 | - lp_project = collector.lp.distributions['ubuntu'].getSeries(name_or_version=cfg['release']) |
390 | + lp_project = collector.lp.distributions['ubuntu'].getSeries( |
391 | + name_or_version=cfg['release']) |
392 | projects.append((lp_project, None)) |
393 | add_milestones(lp_project) |
394 | else: |
395 | - assert 'project' in cfg, 'Configuration needs to specify project or release' |
396 | + assert 'project' in cfg, \ |
397 | + 'Configuration needs to specify project or release' |
398 | lp_project = collector.lp.projects[cfg['project']] |
399 | if 'project_series' in cfg: |
400 | - lp_project_series = lp_project.getSeries(name=cfg['project_series']) |
401 | + lp_project_series = lp_project.getSeries( |
402 | + name=cfg['project_series']) |
403 | add_milestones(lp_project_series) |
404 | else: |
405 | lp_project_series = None |
406 | @@ -413,6 +474,10 @@ |
407 | if is_dict and extra_projects[extra_project_name] is not None: |
408 | extra_project_series = extra_project.getSeries( |
409 | name=extra_projects[extra_project_name]) |
410 | + if extra_project_series is None: |
411 | + raise AssertionError( |
412 | + "%s has no series named %s" |
413 | + % (extra_project_name, extra_projects[extra_project_name])) |
414 | add_milestones(extra_project_series) |
415 | else: |
416 | extra_project_series = None |
417 | @@ -453,10 +518,12 @@ |
418 | |
419 | for project, series in projects: |
420 | # XXX: should this be valid_ or all_specifications? |
421 | - project_spec_group_matcher = spec_group_matchers.get(project.name, None) |
422 | + project_spec_group_matcher = spec_group_matchers.get(project.name, |
423 | + None) |
424 | project_bps = project.valid_specifications |
425 | for bp in project_bps: |
426 | - if name_pattern is not None and re.search(name_pattern, bp.name) is None: |
427 | + if name_pattern is not None and \ |
428 | + re.search(name_pattern, bp.name) is None: |
429 | continue |
430 | if project_spec_group_matcher is not None: |
431 | match = re.search(project_spec_group_matcher, bp.name) |
432 | @@ -471,7 +538,8 @@ |
433 | add_blueprint(bp) |
434 | if series is not None: |
435 | for bp in series.valid_specifications: |
436 | - if name_pattern is not None and re.search(name_pattern, bp.name) is None: |
437 | + if name_pattern is not None and \ |
438 | + re.search(name_pattern, bp.name) is None: |
439 | continue |
440 | if project_spec_group_matcher is not None: |
441 | match = re.search(project_spec_group_matcher, bp.name) |
442 | @@ -492,7 +560,6 @@ |
443 | deps[possible_dep] = possible_deps[possible_dep] |
444 | if deps: |
445 | lp_import_spec_group(collector, spec_group, area, deps) |
446 | - |
447 | lp_import_bug_workitems(lp_project, collector, cfg) |
448 | |
449 | |
450 | @@ -540,7 +607,8 @@ |
451 | if in_section: |
452 | result.append([name, status, section]) |
453 | fields = line.strip().split(u'==') |
454 | - assert not fields[0] # should be empty |
455 | + # should be empty |
456 | + assert not fields[0] |
457 | name = fields[1].strip() |
458 | section = [] |
459 | collect = 1 |
460 | @@ -550,7 +618,8 @@ |
461 | in_section = True |
462 | collect = 0 |
463 | fields = line.strip().split(u'||') |
464 | - assert not fields[0] # should be empty |
465 | + # should be empty |
466 | + assert not fields[0] |
467 | assignee = default_assignee |
468 | istatus = u'todo' |
469 | milestone = None |
470 | @@ -562,11 +631,12 @@ |
471 | desc = fields[which].strip() |
472 | if u'status' in field_off: |
473 | which = field_off[u'status'] |
474 | - status_search = [ fields[which] ] |
475 | + status_search = [fields[which]] |
476 | else: |
477 | status_search = fields[2:] |
478 | for f in status_search: |
479 | - if u'DONE' in f or u'POSTPONED' in f or u'TODO' in f or u'INPROGRESS' in f or u'BLOCKED' in f: |
480 | + if u'DONE' in f or u'POSTPONED' in f or u'TODO' in f or \ |
481 | + u'INPROGRESS' in f or u'BLOCKED' in f: |
482 | ff = f.split() |
483 | if len(ff) == 2: |
484 | assignee = ff[1] |
485 | @@ -615,14 +685,17 @@ |
486 | for url, default_assignee in cfg.get('moin_pages', {}).iteritems(): |
487 | url = unicode_or_None(url) |
488 | default_assignee = unicode_or_None(default_assignee) |
489 | - dbg('moin_import(): processing %s (default assignee: %s)' % (url, default_assignee)) |
490 | - for group, status, items in get_moin_workitems_group(url, default_assignee): |
491 | + dbg('moin_import(): processing %s (default assignee: %s)' % ( |
492 | + url, default_assignee)) |
493 | + for group, status, items in get_moin_workitems_group(url, |
494 | + default_assignee): |
495 | url_clean = url.replace('?action=raw', '') |
496 | name = url_clean.split('://', 1)[1].split('/', 1)[1] |
497 | if group: |
498 | name += u' ' + group |
499 | spec_url = u'%s#%s' % (url_clean, escape_url(group)) |
500 | - dbg(' got group %s: name="%s", url="%s"' % (group, name, spec_url)) |
501 | + dbg(' got group %s: name="%s", url="%s"' % ( |
502 | + group, name, spec_url)) |
503 | else: |
504 | spec_url = url_clean |
505 | dbg(' no group: name="%s", url="%s"' % (name, spec_url)) |
506 | @@ -661,7 +734,8 @@ |
507 | optparser.add_option('-c', '--config', |
508 | help='Path to configuration file', dest='config', metavar='PATH') |
509 | optparser.add_option('-p', '--pattern', metavar='REGEX', |
510 | - help='Regex pattern for blueprint name (optional, mainly for testing)', dest='pattern') |
511 | + help='Regex pattern for blueprint name (optional, mainly for testing)', |
512 | + dest='pattern') |
513 | optparser.add_option('--debug', action='store_true', default=False, |
514 | help='Enable debugging output in parsing routines') |
515 | optparser.add_option('--mail', action='store_true', default=False, |
516 | @@ -679,41 +753,54 @@ |
517 | |
518 | return opts, args |
519 | |
520 | + |
521 | def send_error_mails(cfg): |
522 | '''Send data_errors to contacts. |
523 | |
524 | - Data error contacts are defined in the configuration in the "error_contact" |
525 | - map (which assigns a regexp over spec names to a list of email addresses). |
526 | - If no match is found, the error goes to stderr. |
527 | + Data error contacts are defined in the configuration in the |
528 | + "project_notification_addresses" map (which assigns project names to a list |
529 | + of email addresses). If no address list for a project is found, the error |
530 | + goes to stderr. |
531 | ''' |
532 | global error_collector |
533 | |
534 | # sort errors into address buckets |
535 | - emails = {} # email address -> contents |
536 | + # email address -> contents |
537 | + emails = {} |
538 | |
539 | dbg('mailing %i data errors' % len(error_collector.errors)) |
540 | for error in error_collector.errors: |
541 | - for pattern, addresses in cfg['error_contact'].iteritems(): |
542 | - if (error.get_blueprint_name() is not None |
543 | - and re.search(pattern, error.get_blueprint_name())): |
544 | - dbg('spec %s matches error_contact pattern "%s", mailing to %s' % (error.get_blueprint_name(), |
545 | - pattern, ', '.join(addresses))) |
546 | - for a in addresses: |
547 | - emails.setdefault(a, '') |
548 | - emails[a] += error.format_for_display() + '\n' |
549 | - break |
550 | + project_name = error.get_project_name() |
551 | + if project_name is not None: |
552 | + addresses = cfg['project_notification_addresses'][project_name] |
553 | + dbg('spec %s is targetted to "%s", mailing to %s' % ( |
554 | + error.get_blueprint_name(), project_name, |
555 | + ', '.join(addresses))) |
556 | + for a in addresses: |
557 | + emails.setdefault(a, '') |
558 | + emails[a] += error.format_for_display() + '\n' |
559 | else: |
560 | - print >> sys.stderr, error.format_for_display(), '(no error_contact pattern)' |
561 | + print >> sys.stderr, error.format_for_display(), \ |
562 | + '(no error_contact pattern)' |
563 | |
564 | # send mails |
565 | for addr, contents in emails.iteritems(): |
566 | msg = MIMEText(contents.encode('ascii', 'replace')) |
567 | msg['Subject'] = 'Errors in work item definitions' |
568 | - msg['From'] = 'Launchpad work item tracker <work-items-tracker-hackers@lists.launchpad.net>' |
569 | + msg['From'] = 'Launchpad work item tracker ' + \ |
570 | + '<work-items-tracker-hackers@lists.launchpad.net>' |
571 | msg['To'] = addr |
572 | s = smtplib.SMTP() |
573 | s.connect() |
574 | +<<<<<<< TREE |
575 | s.sendmail('devnull@canonical.com', addr, msg.as_string()) |
576 | +======= |
577 | + s.sendmail(os.environ.get( |
578 | + 'EMAIL', |
579 | + pwd.getpwuid(os.geteuid()).pw_name + '@localhost'), |
580 | + addr, |
581 | + msg.as_string()) |
582 | +>>>>>>> MERGE-SOURCE |
583 | s.quit() |
584 | |
585 | |
586 | @@ -753,22 +840,29 @@ |
587 | bug_status_map[key] = unicode_or_None(val) |
588 | bug_wi_states.update(bug_status_map) |
589 | |
590 | - lock_path = opts.database + ".collect_lock" |
591 | + lock_path = opts.database + ".lock" |
592 | lock_f = open(lock_path, "wb") |
593 | if report_tools.lock_file(lock_f) is None: |
594 | print "Another instance is already running" |
595 | sys.exit(0) |
596 | |
597 | if "beta" in EDGE_SERVICE_ROOT: |
598 | - lp = Launchpad.login_with('ubuntu-work-items', service_root=EDGE_SERVICE_ROOT.replace("edge.", "").replace("beta", "devel")) |
599 | + service_root = EDGE_SERVICE_ROOT |
600 | + service_root = service_root.replace("edge.", "") |
601 | + service_root = service_root.replace("beta", "devel") |
602 | + lp = Launchpad.login_with('ubuntu-work-items', |
603 | + service_root=service_root) |
604 | else: |
605 | - lp = Launchpad.login_with('ubuntu-work-items', service_root="production", version="devel") |
606 | + lp = Launchpad.login_with('ubuntu-work-items', |
607 | + service_root="production", version="devel") |
608 | |
609 | store = get_store(opts.database) |
610 | collector = CollectorStore(store, lp, error_collector) |
611 | |
612 | # reset status for current day |
613 | collector.clear_todays_workitems() |
614 | + # We can delete all blueprints while keeping work items for previous days |
615 | + # because there's no foreign key reference from WorkItem to Blueprint. |
616 | collector.clear_blueprints() |
617 | collector.clear_metas() |
618 | collector.clear_complexitys() |
619 | |
620 | === added file 'collect_jira' |
621 | --- collect_jira 1970-01-01 00:00:00 +0000 |
622 | +++ collect_jira 2012-10-09 09:20:30 +0000 |
623 | @@ -0,0 +1,229 @@ |
624 | +#!/usr/bin/python |
625 | +# |
626 | +# Pull items from cards.linaro.org and put them into a database. |
627 | + |
628 | +import logging |
629 | +import optparse |
630 | +import os |
631 | +import simplejson |
632 | +import sys |
633 | +import urllib2 |
634 | + |
635 | +import jira |
636 | +from lpworkitems.collect_roadmap import ( |
637 | + CollectorStore, |
638 | + ) |
639 | +from lpworkitems.database import get_store |
640 | +from lpworkitems.error_collector import ( |
641 | + ErrorCollector, |
642 | + StderrErrorCollector, |
643 | + ) |
644 | +from lpworkitems.models_roadmap import ( |
645 | + Lane, |
646 | + Card, |
647 | + ) |
648 | +import report_tools |
649 | + |
650 | + |
651 | +# An ErrorCollector to collect the data errors for later reporting |
652 | +error_collector = None |
653 | + |
654 | + |
655 | +logger = logging.getLogger("linarojira") |
656 | + |
657 | +JIRA_API_URL = 'http://cards.linaro.org/rest/api/2' |
658 | +JIRA_PROJECT_KEY = 'CARD' |
659 | +JIRA_ISSUE_BY_KEY_URL = 'http://cards.linaro.org/browse/%s' |
660 | + |
661 | + |
662 | +def dbg(msg): |
663 | + '''Print out debugging message if debugging is enabled.''' |
664 | + logger.debug(msg) |
665 | + |
666 | + |
667 | +def get_json_data(url): |
668 | + data = None |
669 | + try: |
670 | + data = simplejson.load(urllib2.urlopen(url)) |
671 | + except urllib2.HTTPError, e: |
672 | + print "HTTP error for url '%s': %d" % (url, e.code) |
673 | + except urllib2.URLError, e: |
674 | + print "Network error for url '%s': %s" % (url, e.reason.args[1]) |
675 | + except ValueError, e: |
676 | + print "Data error for url '%s': %s" % (url, e.args[0]) |
677 | + |
678 | + return data |
679 | + |
680 | + |
def jira_import(collector, cfg, opts):
    '''Collect roadmap items from JIRA into DB.

    Imports JIRA versions as database Lanes and JIRA issues as database
    Cards via the JIRA REST API, storing everything through
    ``collector``.
    '''
    # import JIRA versions as database Lanes
    result = jira.do_request(opts, 'project/%s/versions' % JIRA_PROJECT_KEY)
    for version in result:
        dbg('Adding lane (name = %s, id = %s)' %
            (version['name'], version['id']))
        model_lane = Lane(unicode(version['name']), int(version['id']))
        # Exactly one lane (named in the config) is the current one.
        model_lane.is_current = (model_lane.name == cfg['current_lane'])
        collector.store_lane(model_lane)

    # find id of "Sponsor" custom field in JIRA
    result = jira.do_request(opts, 'field')
    sponsor_fields = [field for field in result if field['name'] == 'Sponsor']
    assert len(sponsor_fields) == 1, 'Not a single Sponsor field'
    sponsor_field_id = sponsor_fields[0]['id']

    # import JIRA issues as database Cards
    result = jira.do_request(
        opts, 'search', jql='project = %s' % JIRA_PROJECT_KEY,
        fields=['summary', 'fixVersions', 'status', 'components',
                'priority', 'description', 'timetracking', sponsor_field_id])
    for issue in result['issues']:
        fields = issue['fields']
        name = unicode(fields['summary'])
        card_id = int(issue['id'])
        key = unicode(issue['key'])
        fixVersions = fields['fixVersions']
        if len(fixVersions) == 0:
            dbg('Skipping card without lane (name = %s, key = %s)' %
                (name, key))
            continue
        # JIRA allows listing multiple versions in fixVersions
        assert len(fixVersions) == 1
        lane_id = int(fixVersions[0]['id'])

        dbg('Adding card (name = %s, id = %s, lane_id = %s, key = %s)' %
            (name, card_id, lane_id, key))
        model_card = Card(name, card_id, lane_id, key)
        model_card.status = unicode(fields['status']['name'])
        components = fields['components']
        if len(components) == 0:
            dbg('Skipping card without component (name = %s, key = %s)' %
                (name, key))
            # BUG FIX: previously this fell through to the assertion
            # below and crashed on component-less cards; actually skip
            # them as the message says.
            continue
        # JIRA allows listing multiple components
        assert len(components) == 1
        model_card.team = unicode(components[0]['name'])
        model_card.priority = unicode(fields['priority']['name'])
        size_fields = []
        timetracking = fields['timetracking']
        if 'originalEstimate' in timetracking:
            size_fields += [
                'original estimate: %s' % timetracking['originalEstimate']]
        if 'remainingEstimate' in timetracking:
            size_fields += [
                'remaining estimate: %s' % timetracking['remainingEstimate']]
        model_card.size = unicode(', '.join(size_fields))
        model_card.sponsor = u''
        # None if no sponsor is selected
        if fields[sponsor_field_id] is not None:
            sponsors = [s['value'] for s in fields[sponsor_field_id]]
            model_card.sponsor = unicode(', '.join(sorted(sponsors)))
        model_card.url = JIRA_ISSUE_BY_KEY_URL % key
        # XXX need to either download the HTML version or convert this to HTML
        description = fields['description']
        # BUG FIX: unicode(None) stored the literal string u'None' for
        # cards without a description; store an empty string instead.
        if description is None:
            model_card.description = u''
        else:
            model_card.description = unicode(description)
        # acceptance criteria is in the description
        model_card.acceptance_criteria = u''
        collector.store_card(model_card)
754 | + |
755 | +######################################################################## |
756 | +# |
757 | +# Program operations and main |
758 | +# |
759 | +######################################################################## |
760 | + |
761 | + |
def parse_argv():
    '''Parse command-line arguments.

    Return an (options, args) tuple; exits with a usage error when the
    mandatory database or config paths are missing.
    '''
    parser = optparse.OptionParser()
    parser.add_option('-d', '--database',
        help='Path to database', dest='database', metavar='PATH')
    parser.add_option('-c', '--config',
        help='Path to configuration file', dest='config', metavar='PATH')
    parser.add_option('--debug', action='store_true', default=False,
        help='Enable debugging output in parsing routines')
    parser.add_option('--mail', action='store_true', default=False,
        help='Send data errors as email (according to "error_config" map in '
        'config file) instead of printing to stderr', dest='mail')
    # NOTE(review): credentials are hard-coded as option defaults here
    # (and in jira.py) — consider moving them into the config file.
    parser.add_option('--jira-username', default='robot',
        help='JIRA username for authentication', dest='jira_username')
    parser.add_option('--jira-password', default='cuf4moh2',
        help='JIRA password for authentication', dest='jira_password')

    opts, args = parser.parse_args()

    # Mandatory options, checked in declaration order.
    for attr, message in (('database', 'No database given'),
                          ('config', 'No config given')):
        if not getattr(opts, attr):
            parser.error(message)

    return opts, args
790 | + |
791 | + |
def setup_logging(debug):
    """Attach a stderr stream handler to the module logger.

    With ``debug`` both handler and logger drop to DEBUG level and a
    timestamped format is used; otherwise INFO level with bare messages.
    """
    if debug:
        level = logging.DEBUG
        fmt = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    else:
        level = logging.INFO
        fmt = "%(message)s"
    handler = logging.StreamHandler()
    handler.setLevel(level)
    handler.setFormatter(logging.Formatter(fmt))
    logger.setLevel(level)
    logger.addHandler(handler)
805 | + |
806 | + |
def update_todays_blueprint_daily_count_per_state(collector):
    """Refresh today's per-state blueprint counts.

    Drops any counts already recorded for today, then records fresh
    ones reflecting the blueprints' current states.
    """
    collector.clear_todays_blueprint_daily_count_per_state()
    collector.store_roadmap_bp_count_per_state()
812 | + |
813 | + |
def main():
    # Re-wrap stdout/stderr so unicode output does not crash (helper
    # from report_tools).
    report_tools.fix_stdouterr()

    (opts, args) = parse_argv()
    # The JIRA endpoint is fixed for this collector; parse_argv only
    # supplies credentials.
    opts.jira_api_url = JIRA_API_URL

    setup_logging(opts.debug)

    # With --mail, data errors are collected and mailed; otherwise they
    # go straight to stderr.
    global error_collector
    if opts.mail:
        error_collector = ErrorCollector()
    else:
        error_collector = StderrErrorCollector()

    cfg = report_tools.load_config(opts.config)

    # Guard against concurrent collector runs with an advisory lock file
    # next to the database.
    lock_path = opts.database + ".lock"
    lock_f = open(lock_path, "wb")
    if report_tools.lock_file(lock_f) is None:
        print "Another instance is already running"
        sys.exit(0)

    store = get_store(opts.database)
    collector = CollectorStore(store, '', error_collector)

    # Lanes and cards are re-imported from scratch on every run.
    collector.clear_lanes()
    collector.clear_cards()

    jira_import(collector, cfg, opts)

    update_todays_blueprint_daily_count_per_state(collector)

    store.commit()

    # NOTE(review): the lock file is only removed on the success path;
    # an exception above leaves it behind (the flock itself is released
    # on process exit) — confirm this is intended.
    os.unlink(lock_path)


if __name__ == '__main__':
    main()
853 | |
854 | === added file 'collect_roadmap' |
855 | --- collect_roadmap 1970-01-01 00:00:00 +0000 |
856 | +++ collect_roadmap 2012-10-09 09:20:30 +0000 |
857 | @@ -0,0 +1,301 @@ |
858 | +#!/usr/bin/python |
859 | +# |
860 | +# Pull items from the Linaro roadmap in Kanbantool and put them into a database. |
861 | + |
862 | +import logging |
863 | +import optparse |
864 | +import os |
865 | +import simplejson |
866 | +import sys |
867 | +import urllib2 |
868 | + |
869 | +from lpworkitems.collect_roadmap import ( |
870 | + CollectorStore, |
871 | + get_json_item, |
872 | + lookup_kanban_priority, |
873 | + ) |
874 | +from lpworkitems.database import get_store |
875 | +from lpworkitems.error_collector import ( |
876 | + ErrorCollector, |
877 | + StderrErrorCollector, |
878 | + ) |
879 | +from lpworkitems.models_roadmap import ( |
880 | + Lane, |
881 | + Card, |
882 | + ) |
883 | +from utils import unicode_or_None |
884 | +import report_tools |
885 | + |
886 | + |
887 | +# An ErrorCollector to collect the data errors for later reporting |
888 | +error_collector = None |
889 | + |
890 | + |
891 | +logger = logging.getLogger("linaroroadmap") |
892 | + |
893 | + |
def dbg(msg):
    '''Print out debugging message if debugging is enabled.'''
    # Level filtering is handled by the logging configuration installed
    # by setup_logging().
    logger.debug(msg)
897 | + |
898 | + |
def get_kanban_url(item_url, api_token):
    """Build a full Kanbantool API v1 URL for ``item_url``.

    The API token is appended as a query parameter.
    """
    return "https://linaro.kanbantool.com/api/v1/%s?api_token=%s" % (
        item_url, api_token)
902 | + |
903 | + |
904 | +def get_json_data(url): |
905 | + data = None |
906 | + try: |
907 | + data = simplejson.load(urllib2.urlopen(url)) |
908 | + except urllib2.HTTPError, e: |
909 | + print "HTTP error for url '%s': %d" % (url, e.code) |
910 | + except urllib2.URLError, e: |
911 | + print "Network error for url '%s': %s" % (url, e.reason.args[1]) |
912 | + except ValueError, e: |
913 | + print "Data error for url '%s': %s" % (url, e.args[0]) |
914 | + |
915 | + return data |
916 | + |
917 | + |
def kanban_import_lanes(collector, workflow_stages, cfg):
    """Store the board's top-level workflow stages as Lanes.

    ``workflow_stages`` is a flat list of stages forming a tree via
    ``parent_id``.  Direct children of the root node become Lanes (the
    one named by cfg['current_lane'] is flagged current); their own
    children are returned as the list of status stages.
    """
    children_by_parent = {}
    root_node_id = None
    lanes_to_ignore = ['Legend']

    # Group the stages (which may arrive in any order) by parent id and
    # locate the single root node.
    for stage in workflow_stages:
        if stage['name'] in lanes_to_ignore:
            dbg("Ignoring lane %s." % stage['name'])
            continue
        parent_id = stage['parent_id']
        if parent_id is None:
            assert root_node_id is None, 'We have already found the root node.'
            root_node_id = stage['id']
        else:
            children_by_parent.setdefault(parent_id, []).append(stage)

    statuses = []
    for node in children_by_parent[root_node_id]:
        assert node['parent_id'] == root_node_id
        model_lane = Lane(get_json_item(node, 'name'), node['id'])
        model_lane.is_current = (model_lane.name == cfg['current_lane'])
        collector.store_lane(model_lane)
        statuses.extend(children_by_parent.get(node['id'], []))
    return statuses
952 | + |
953 | + |
def kanban_import_cards(collector, tasks, status_list, card_types, papyrs_token):
    """Store Kanbantool tasks as Cards via ``collector``.

    ``status_list`` holds the second-level workflow stages returned by
    kanban_import_lanes; ``card_types`` the board's card type
    definitions.  Cards of ignored types or in ignored lanes are
    skipped; cards with an external Papyrs link also get their
    description and acceptance criteria imported from Papyrs.
    """
    types_to_ignore = ['Summits']
    for task in tasks:
        dbg("Collecting card '%s'." % (task['task']['name']))
        status_id = task['task']['workflow_stage_id']
        assert status_id is not None
        # Resolve the task's status (second-level stage), if any.
        task_status = None
        for status in status_list:
            if status['id'] == status_id:
                task_status = status
                break
        # Resolve the card type name from the board's type table.
        card_type_id = task['task']['card_type_id']
        card_type_name = None
        for card_type in card_types:
            if card_type['id'] == card_type_id:
                card_type_name = card_type['name']
                break
        else:
            dbg("Cannot find type for card '%s'." % (task['task']['name']))
        if card_type_name in types_to_ignore:
            # Use card_type_name rather than the loop variable, which is
            # stale (or unbound for an empty type table) when the lookup
            # above found nothing.
            dbg("Ignoring card '%s' since it\'s type is '%s'." % \
                (task['task']['name'], card_type_name))
        else:
            if task_status is not None:
                lane_id = task_status['parent_id']
                assert lane_id is not None
            else:
                # The task sits directly in a lane, not in a status column.
                lane_id = status_id
            if not collector.lane_is_collected(lane_id):
                dbg("Ignoring card '%s' since it\'s Lane is ignored." % \
                    (task['task']['name']))
                continue
            model_card = Card(get_json_item(task['task'], 'name'),
                              task['task']['id'], lane_id,
                              get_json_item(task['task'], 'external_id'))
            if task_status is not None:
                model_card.status = get_json_item(task_status, 'name')
            model_card.team = unicode_or_None(card_type_name)
            model_card.priority = lookup_kanban_priority(
                task['task']['priority'])
            model_card.size = get_json_item(task['task'], 'size_estimate')
            model_card.sponsor = get_json_item(task['task'],
                                               'custom_field_1')

            external_link = task['task']['custom_field_2']
            # BUG FIX: this used "external_link is not ''", an identity
            # test that is True for any unicode string (including u'')
            # coming from the JSON decoder, so empty links were treated
            # as real Papyrs URLs.  A plain truth test rejects both None
            # and empty strings.
            if external_link:
                model_card.url = unicode_or_None(external_link)
                dbg('Getting Papyrs information from %s.' % external_link)
                papyrs_data = papyrs_import(collector, external_link, papyrs_token)
                model_card.description = get_json_item(papyrs_data,
                                                       'description')
                model_card.acceptance_criteria = get_json_item(
                    papyrs_data, 'acceptance_criteria')
            collector.store_card(model_card)
1008 | + |
1009 | + |
def kanban_import(collector, cfg, board_id, api_token, papyrs_token):
    '''Collect roadmap items from KanbanTool into DB.'''
    # Fetch the board definition first; its workflow stages become the
    # lanes/statuses, its card types classify the tasks.
    board = get_json_data(get_kanban_url('boards/%s.json' % board_id,
                                         api_token))
    assert board is not None, "Could not access board %s." % board_id
    status_list = kanban_import_lanes(
        collector, board['board']['workflow_stages'], cfg)

    tasks = get_json_data(
        get_kanban_url('boards/%s/tasks.json' % board_id, api_token))
    kanban_import_cards(collector, tasks, status_list,
                        board['board']['card_types'], papyrs_token)
1022 | + |
1023 | + |
def papyrs_import(collector, requirement_url, papyrs_token):
    """Fetch description and acceptance criteria from a Papyrs page.

    Returns a dict with 'description' and 'acceptance_criteria' keys,
    each holding the first paragraph of the relevant section (or None
    when unavailable).  ``collector`` is unused but kept for interface
    compatibility with callers.
    """
    description = None
    acceptance_criteria = None

    page = get_json_data(requirement_url + '?json&auth_token=%s' % papyrs_token)
    if page is None:
        return {'description': None,
                'acceptance_criteria': None}

    # page[0] holds the text items; page[1] (extra items) was previously
    # bound to an unused local and is deliberately not read.
    page_text_items = page[0]

    has_found_description = False
    last_heading = ''
    for page_item in page_text_items:
        if page_item['classname'] == 'Heading':
            last_heading = page_item['text']
        if page_item['classname'] == 'Paragraph':
            # The first paragraph on the page is the description; later
            # paragraphs only count when they follow an "Acceptance
            # Criteria" heading.
            if not has_found_description:
                description = page_item['html']
                has_found_description = True
            elif 'Acceptance Criteria' in last_heading:
                acceptance_criteria = page_item['html']

    return {'description': get_first_paragraph(description),
            'acceptance_criteria': get_first_paragraph(acceptance_criteria)}
1050 | + |
1051 | + |
def get_first_paragraph(text):
    """Return ``text`` up to (excluding) the first '<br>' tag.

    None input yields None.  This might break, depending on what type
    of line breaks whoever authors the Papyrs document uses.
    """
    if text is None:
        return None
    return text.partition('<br>')[0]
1059 | + |
1060 | + |
1061 | +######################################################################## |
1062 | +# |
1063 | +# Program operations and main |
1064 | +# |
1065 | +######################################################################## |
1066 | + |
def parse_argv():
    '''Parse CLI arguments.

    Return (options, args) tuple; exits with a usage error when the
    mandatory database or config paths are missing.
    '''
    parser = optparse.OptionParser()
    parser.add_option('-d', '--database',
        help='Path to database', dest='database', metavar='PATH')
    parser.add_option('-c', '--config',
        help='Path to configuration file', dest='config', metavar='PATH')
    parser.add_option('--debug', action='store_true', default=False,
        help='Enable debugging output in parsing routines')
    parser.add_option('--mail', action='store_true', default=False,
        help='Send data errors as email (according to "error_config" map in '
        'config file) instead of printing to stderr', dest='mail')
    parser.add_option('--board',
        help='Board id at Kanbantool', dest='board')
    parser.add_option('--kanbantoken',
        help='Kanbantool API token for authentication', dest='kanban_token')
    parser.add_option('--papyrstoken',
        help='Papyrs API token for authentication', dest='papyrs_token')

    opts, args = parser.parse_args()

    # Mandatory options, checked in declaration order.
    for attr, message in (('database', 'No database given'),
                          ('config', 'No config given')):
        if not getattr(opts, attr):
            parser.error(message)

    return opts, args
1097 | + |
1098 | + |
def setup_logging(debug):
    """Attach a stderr stream handler to the module logger.

    With ``debug`` both handler and logger drop to DEBUG level and a
    timestamped format is used; otherwise INFO level with bare messages.
    (Same helper as in the other collect scripts.)
    """
    if debug:
        level = logging.DEBUG
        fmt = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    else:
        level = logging.INFO
        fmt = "%(message)s"
    handler = logging.StreamHandler()
    handler.setLevel(level)
    handler.setFormatter(logging.Formatter(fmt))
    logger.setLevel(level)
    logger.addHandler(handler)
1112 | + |
1113 | + |
def update_todays_blueprint_daily_count_per_state(collector):
    """Refresh today's per-state blueprint counts.

    Removes any counts already stored for today before recording the
    counts for the blueprints' current states.
    """
    collector.clear_todays_blueprint_daily_count_per_state()
    collector.store_roadmap_bp_count_per_state()
1119 | + |
1120 | + |
def main():
    # Re-wrap stdout/stderr so unicode output does not crash (helper
    # from report_tools).
    report_tools.fix_stdouterr()

    (opts, args) = parse_argv()

    setup_logging(opts.debug)

    # With --mail, data errors are collected and mailed; otherwise they
    # go straight to stderr.
    global error_collector
    if opts.mail:
        error_collector = ErrorCollector()
    else:
        error_collector = StderrErrorCollector()

    cfg = report_tools.load_config(opts.config)

    # Guard against concurrent collector runs with an advisory lock file
    # next to the database.
    lock_path = opts.database + ".lock"
    lock_f = open(lock_path, "wb")
    if report_tools.lock_file(lock_f) is None:
        print "Another instance is already running"
        sys.exit(0)

    store = get_store(opts.database)
    collector = CollectorStore(store, '', error_collector)

    # Lanes and cards are re-imported from scratch on every run.
    collector.clear_lanes()
    collector.clear_cards()

    kanban_import(collector, cfg, opts.board, opts.kanban_token, opts.papyrs_token)

    update_todays_blueprint_daily_count_per_state(collector)

    store.commit()

    # NOTE(review): the lock file is only removed on the success path;
    # an exception above leaves it behind (the flock itself is released
    # on process exit) — confirm this is intended.
    os.unlink(lock_path)


if __name__ == '__main__':
    main()
1159 | |
1160 | === modified file 'css/status.css' |
1161 | --- css/status.css 2011-05-18 20:51:50 +0000 |
1162 | +++ css/status.css 2012-10-09 09:20:30 +0000 |
1163 | @@ -46,6 +46,64 @@ |
1164 | font-size: 1.2em; |
1165 | } |
1166 | |
1167 | + |
1168 | +.roadmap_progress_text { |
1169 | + position: absolute; |
1170 | + top:0; left:0; |
1171 | + |
1172 | + padding-top: 5px; |
1173 | + |
1174 | + color: #ffffff; |
1175 | + text-align: center; |
1176 | + width: 100%; |
1177 | +} |
1178 | + |
1179 | +.roadmap_wrap { |
1180 | + border: 1px solid black; |
1181 | + position: relative; |
1182 | + margin-top: 2px; |
1183 | + margin-bottom: 3px; |
1184 | + margin-left: auto; |
1185 | + margin-right: auto; |
1186 | + background-color: #bdbdbd; |
1187 | +} |
1188 | + |
1189 | +.roadmap_wrap, .roadmap_value { |
1190 | + width: 300px; |
1191 | + height: 28px; |
1192 | +} |
1193 | + |
1194 | +table .roadmap_wrap, table .roadmap_value { |
1195 | + border: 0px; |
1196 | + width: 155px; |
1197 | + height: 1.4em; |
1198 | + background-color: #ffffff; |
1199 | +} |
1200 | + |
1201 | +.roadmap_value { |
1202 | + float: left; |
1203 | +} |
1204 | + |
1205 | +.roadmap_value .Completed { |
1206 | + background-color: green; |
1207 | + height: inherit |
1208 | +} |
1209 | + |
1210 | +.roadmap_value .Blocked { |
1211 | + background-color: red; |
1212 | + height: inherit |
1213 | +} |
1214 | + |
1215 | +.roadmap_value .InProgress { |
1216 | + background-color: gray; |
1217 | + height: inherit |
1218 | +} |
1219 | + |
1220 | +.roadmap_value .Planned { |
1221 | + background-color: orange; |
1222 | + height: inherit |
1223 | +} |
1224 | + |
1225 | .progress_wrap { |
1226 | position: relative; |
1227 | border: 1px solid black; |
1228 | |
1229 | === modified file 'generate-all' |
1230 | --- generate-all 2011-09-09 07:14:11 +0000 |
1231 | +++ generate-all 2012-10-09 09:20:30 +0000 |
1232 | @@ -6,7 +6,7 @@ |
1233 | # Copyright (C) 2010, 2011 Canonical Ltd. |
1234 | # License: GPL-3 |
1235 | |
1236 | -import optparse, os.path, sys |
1237 | +import optparse, os.path, sys, errno |
1238 | |
1239 | import report_tools |
1240 | |
1241 | @@ -47,13 +47,7 @@ |
1242 | burnup_chart_teams = [] |
1243 | primary_team = None |
1244 | |
1245 | -lock_path = opts.database + ".generate_lock" |
1246 | -lock_f = open(lock_path, "wb") |
1247 | -if report_tools.lock_file(lock_f) is None: |
1248 | - print "Another instance is already running" |
1249 | - sys.exit(0) |
1250 | - |
1251 | -lock_path = opts.database + ".generate_lock" |
1252 | +lock_path = opts.database + ".lock" |
1253 | lock_f = open(lock_path, "wb") |
1254 | if report_tools.lock_file(lock_f) is None: |
1255 | print "Another instance is already running" |
1256 | @@ -80,14 +74,60 @@ |
1257 | usersubdir = os.path.join(opts.output_dir, 'u') |
1258 | try: |
1259 | os.mkdir(usersubdir) |
1260 | -except OSError: |
1261 | - None |
1262 | +except OSError as exc: |
1263 | + if exc.errno == errno.EEXIST: |
1264 | + pass |
1265 | + else: |
1266 | + raise |
1267 | |
1268 | groupssubdir = os.path.join(opts.output_dir, 'group') |
1269 | try: |
1270 | os.mkdir(groupssubdir) |
1271 | -except OSError: |
1272 | - None |
1273 | +except OSError as exc: |
1274 | + if exc.errno == errno.EEXIST: |
1275 | + pass |
1276 | + else: |
1277 | + raise |
1278 | + |
1279 | +lanessubdir = os.path.join(opts.output_dir, '..', 'lane') |
1280 | +try: |
1281 | + os.mkdir(lanessubdir) |
1282 | +except OSError as exc: |
1283 | + if exc.errno == errno.EEXIST: |
1284 | + pass |
1285 | + else: |
1286 | + raise |
1287 | + |
1288 | +cardssubdir = os.path.join(opts.output_dir, '..', 'card') |
1289 | +try: |
1290 | + os.mkdir(cardssubdir) |
1291 | +except OSError as exc: |
1292 | + if exc.errno == errno.EEXIST: |
1293 | + pass |
1294 | + else: |
1295 | + raise |
1296 | + |
1297 | +# roadmap lanes |
1298 | +for lane in report_tools.lanes(store): |
1299 | + basename = os.path.join(lanessubdir, lane.name) |
1300 | + report_tools.roadmap_pages(my_path, opts.database, basename, opts.config, |
1301 | + lane, root=opts.root) |
1302 | + |
1303 | +# roadmap cards |
1304 | +for card in report_tools.cards(store): |
1305 | + if card.roadmap_id != '': |
1306 | + page_name = card.roadmap_id |
1307 | + else: |
1308 | + page_name = str(card.card_id) |
1309 | + basename = os.path.join(cardssubdir, page_name) |
1310 | + report_tools.roadmap_cards(my_path, opts.database, basename, opts.config, |
1311 | + card, root=opts.root) |
1312 | + |
1313 | +# roadmap front page |
1314 | +basename = os.path.join(lanessubdir, 'index') |
1315 | +lane = report_tools.current_lane(store) |
1316 | +report_tools.roadmap_pages(my_path, opts.database, basename, opts.config, |
1317 | + lane, root=opts.root) |
1318 | |
1319 | for u in users: |
1320 | for m in milestones: |
1321 | @@ -156,11 +196,10 @@ |
1322 | basename = os.path.join(opts.output_dir, status) |
1323 | report_tools.workitem_list(my_path, opts.database, basename, opts.config, status, root=opts.root) |
1324 | |
1325 | -# front page |
1326 | +# cycle front page |
1327 | basename = os.path.join(opts.output_dir, 'index') |
1328 | report_tools.status_overview(my_path, opts.database, basename, opts.config, root=opts.root) |
1329 | |
1330 | - |
1331 | def copy_files(source_dir): |
1332 | for filename in os.listdir(source_dir): |
1333 | dest = open(os.path.join(opts.output_dir, filename), 'w') |
1334 | |
1335 | === modified file 'html-report' |
1336 | --- html-report 2012-06-20 19:54:52 +0000 |
1337 | +++ html-report 2012-10-09 09:20:30 +0000 |
1338 | @@ -10,6 +10,13 @@ |
1339 | |
1340 | from report_tools import escape_url |
1341 | import report_tools |
1342 | +from roadmap_health import ( |
1343 | + card_health_checks, |
1344 | +) |
1345 | +from lpworkitems.models import ( |
1346 | + ROADMAP_STATUSES_MAP, |
1347 | + ROADMAP_ORDERED_STATUSES, |
1348 | +) |
1349 | |
1350 | |
1351 | class WorkitemTarget(object): |
1352 | @@ -453,6 +460,97 @@ |
1353 | print report_tools.fill_template( |
1354 | "workitem_list.html", data, theme=opts.theme) |
1355 | |
    def roadmap_page(self, store, opts):
        """Render the roadmap lane page (roadmap_lane.html) to stdout.

        Groups the lane's cards by status and annotates each card with
        its blueprint status counts and per-status percentages, plus an
        aggregate completion percentage for the whole lane.
        """
        if opts.lane is None:
            # NOTE(review): this prints an error marker but execution
            # continues with opts.lane == None — confirm whether an
            # early return is intended here.
            print "<h1>Error, no lane specified.</h1>"
        if not opts.title:
            title = opts.lane
        else:
            title = opts.title

        data = self.template_data(store, opts)
        lane = report_tools.lane(store, opts.lane)
        lanes = report_tools.lanes(store)
        statuses = []
        # Blueprint counts aggregated across the whole lane.
        bp_status_totals = {'Completed': 0, 'Total': 0, 'Percentage': 0}
        for status, cards in report_tools.statuses(store, lane):
            cards_with_bps = []
            for card in cards:
                report_tools.check_card_health(store, card_health_checks, card)
                blueprint_status_counts = report_tools.card_bp_status_counts(
                    store, card.roadmap_id)
                total = sum(blueprint_status_counts.values())
                bp_percentages = dict.fromkeys(ROADMAP_ORDERED_STATUSES, 0)
                bp_status_totals['Completed'] += \
                    blueprint_status_counts['Completed']
                for key in ROADMAP_STATUSES_MAP:
                    bp_status_totals['Total'] += blueprint_status_counts[key]
                    # Per-card split by status, as a float percentage.
                    if total > 0:
                        bp_percentages[key] = (
                            100.0 * blueprint_status_counts[key] / total)

                cards_with_bps.append({'card': card,
                                       'bp_statuses': blueprint_status_counts,
                                       'bp_percentages': bp_percentages})
            statuses.append(dict(name=status, cards=cards_with_bps))
        if bp_status_totals['Total'] > 0:
            # NOTE(review): integer (truncating) division here, while the
            # per-card percentages above use float division — confirm the
            # inconsistency is deliberate (whole-number display).
            bp_status_totals['Percentage'] = (100 * bp_status_totals['Completed'] /
                                              bp_status_totals['Total'])

        data.update(dict(statuses=statuses))
        data.update(dict(bp_status_totals=bp_status_totals))
        data.update(dict(status_order=ROADMAP_ORDERED_STATUSES))
        data.update(dict(page_type="roadmap_lane"))
        data.update(dict(lane_title=title))
        data.update(dict(lanes=lanes))
        data.update(dict(chart_url=opts.chart_url))
        print report_tools.fill_template(
            "roadmap_lane.html", data, theme=opts.theme)
1402 | + |
1403 | + |
    def roadmap_card(self, store, opts):
        """Render a single roadmap card page (roadmap_card.html) to stdout.

        Looks the card up by its numeric id, runs the health checks
        against it and summarises its blueprints per status.
        """
        if opts.card is None:
            # NOTE(review): prints an error marker but falls through to
            # int(opts.card) below, which raises on None — confirm an
            # early return is intended here.
            print "<h1>Error, no card specified.</h1>"

        data = self.template_data(store, opts)
        card = report_tools.card(store, int(opts.card)).one()
        health_checks = report_tools.check_card_health(store, card_health_checks, card)
        lane = report_tools.lane(store, None, id=card.lane_id)

        if not opts.title:
            title = card.name
        else:
            title = opts.title

        blueprints = report_tools.card_blueprints_by_status(store, card.roadmap_id)
        bp_status_totals = {'Completed': 0, 'Total': 0, 'Percentage': 0}
        bp_status_totals['Total'] = (len(blueprints['Planned']) +
                                     len(blueprints['Blocked']) +
                                     len(blueprints['In Progress']) +
                                     len(blueprints['Completed']))
        bp_status_totals['Completed'] = len(blueprints['Completed'])
        if bp_status_totals['Total'] > 0:
            # Integer (truncating) percentage for display.
            bp_status_totals['Percentage'] = (100 * bp_status_totals['Completed'] /
                                              bp_status_totals['Total'])

        card_has_blueprints = bp_status_totals['Total'] > 0

        # The card page lists statuses in reverse roadmap order.
        status_order = ROADMAP_ORDERED_STATUSES[:]
        status_order.reverse()

        data.update(dict(page_type="roadmap_card"))
        data.update(dict(card_title=title))
        data.update(dict(card=card))
        data.update(dict(health_checks=health_checks))
        data.update(dict(lane=lane.name))
        data.update(dict(status_order=status_order))
        data.update(dict(blueprints=blueprints))
        data.update(dict(bp_status_totals=bp_status_totals))
        data.update(dict(card_has_blueprints=card_has_blueprints))

        print report_tools.fill_template(
            "roadmap_card.html", data, theme=opts.theme)
1446 | + |
1447 | |
1448 | class WorkitemsOnDate(object): |
1449 | |
1450 | @@ -531,6 +629,10 @@ |
1451 | help="Include all milestones targetted to this date.") |
1452 | optparser.add_option('--theme', dest="theme", |
1453 | help="The theme to use.", default="linaro") |
1454 | + optparser.add_option('--lane', |
1455 | + help='Roadmap lane', dest='lane') |
1456 | + optparser.add_option('--card', |
1457 | + help='Roadmap card', dest='card') |
1458 | |
1459 | (opts, args) = optparser.parse_args() |
1460 | if not opts.database: |
1461 | |
1462 | === added file 'jira.py' |
1463 | --- jira.py 1970-01-01 00:00:00 +0000 |
1464 | +++ jira.py 2012-10-09 09:20:30 +0000 |
1465 | @@ -0,0 +1,55 @@ |
1466 | +#!/usr/bin/python |
1467 | +# -*- coding: UTF-8 -*- |
1468 | + |
1469 | +import base64 |
1470 | +import optparse |
1471 | +import simplejson |
1472 | +import urllib2 |
1473 | + |
1474 | + |
def do_request(opts, relpathname, **kwargs):
    """Call a JIRA REST endpoint and return the decoded JSON response.

    Keyword arguments are serialised as a JSON request body (which
    turns the request into a POST).  HTTP basic authentication is
    applied when both username and password are configured.
    """
    request = urllib2.Request('%s/%s' % (opts.jira_api_url, relpathname))
    if opts.jira_username and opts.jira_password:
        credentials = '%s:%s' % (opts.jira_username, opts.jira_password)
        token = base64.encodestring(credentials).replace('\n', '')
        request.add_header('Authorization', 'Basic %s' % token)
    body = None
    if kwargs:
        request.add_header('Content-Type', 'application/json')
        body = simplejson.dumps(kwargs)
    return simplejson.load(urllib2.urlopen(request, body))
1488 | + |
1489 | + |
def main():
    # Ad-hoc driver for exercising do_request(); the commented-out calls
    # below document other useful JIRA REST endpoints.
    parser = optparse.OptionParser(usage="%prog")
    parser.add_option("--jira-api-url", dest="jira_api_url",
                      default="http://cards.linaro.org/rest/api/2")
    # NOTE(review): credentials hard-coded as option defaults (also
    # present in other scripts of this tree) — confirm whether these
    # should live in a config file instead.
    parser.add_option("--jira-username", dest="jira_username",
                      default="robot")
    parser.add_option("--jira-password", dest="jira_password",
                      default="cuf4moh2")
    opts, args = parser.parse_args()

    # simple search
    print do_request(opts, 'search', maxResults=1, jql='project = CARD',
                     fields=['summary', 'status'])

    # information about a project
    #print do_request(opts, 'project/CARD')

    # on creating issues
    #print do_request(opts, 'issue/createmeta?projectIds=10000')

    # on statuses
    #print do_request(opts, 'status')

    # on fields
    #print do_request(opts, 'field')

    # on a security level
    #print do_request(opts, 'securitylevel/10000')

if __name__ == "__main__":
    main()
1521 | |
1522 | === added file 'kanban-papyrs-to-jira' |
1523 | --- kanban-papyrs-to-jira 1970-01-01 00:00:00 +0000 |
1524 | +++ kanban-papyrs-to-jira 2012-10-09 09:20:30 +0000 |
1525 | @@ -0,0 +1,397 @@ |
1526 | +#!/usr/bin/python |
1527 | +# -*- coding: UTF-8 -*- |
1528 | +# Copyright (C) 2012 Linaro Ltd. |
1529 | +# Author: Loïc Minier <loic.minier@linaro.org> |
1530 | +# License: GPL-3 |
1531 | + |
1532 | +import jira |
1533 | + |
1534 | +from bs4 import BeautifulSoup |
1535 | +import logging |
1536 | +import optparse |
1537 | +import os |
1538 | +import re |
1539 | +import simplejson |
1540 | +import sys |
1541 | +import urllib2 |
1542 | + |
1543 | +logger = logging.getLogger("linaroroadmap") |
1544 | + |
def dbg(msg):
    '''Print out debugging message if debugging is enabled.'''
    # Level filtering is handled by the logging configuration of the
    # "linaroroadmap" logger.
    logger.debug(msg)
1548 | + |
class InMemCollector:
    """Minimal in-memory stand-in for the DB-backed collector.

    Records lanes and cards in plain lists and answers lane-membership
    queries; lets the conversion run without touching a database.
    """

    def __init__(self):
        self.lanes = []
        self.cards = []

    def store_lane(self, lane):
        """Remember a collected lane."""
        self.lanes.append(lane)

    def store_card(self, card):
        """Remember a collected card."""
        self.cards.append(card)

    def lane_is_collected(self, lane_id):
        """Return True if a lane with ``lane_id`` has been stored."""
        return any(lane.lane_id == lane_id for lane in self.lanes)
1565 | + |
1566 | +def kanban_request(opts, relpathname, method='GET', **kwargs): |
1567 | + request = urllib2.Request( |
1568 | + '%s/%s.json?_m=%s' % (opts.kanban_api_url, relpathname, method)) |
1569 | + if opts.kanban_token: |
1570 | + request.add_header('X-KanbanToolToken', opts.kanban_token) |
1571 | + request_data = None |
1572 | + if kwargs.keys(): |
1573 | + request.add_header('Content-Type', 'application/json') |
1574 | + request_data = simplejson.dumps(kwargs) |
1575 | + print request_data |
1576 | + response_data = urllib2.urlopen(request, request_data) |
1577 | + return simplejson.load(response_data) |
1578 | + |
def get_papyrs_page(papyrs_url, token):
    """Fetch a Papyrs page in its JSON representation."""
    return simplejson.load(urllib2.urlopen(
        '%s?json&auth_token=%s' % (papyrs_url, token)))
1582 | + |
1583 | +def get_kanban_boards(opts): |
1584 | + return kanban_request(opts, 'boards') |
1585 | + |
1586 | +def get_kanban_board(opts, board_id): |
1587 | + return kanban_request(opts, 'boards/%s' % board_id) |
1588 | + |
1589 | +def get_kanban_tasks(opts, board_id): |
1590 | + return kanban_request(opts, 'boards/%s/tasks' % board_id) |
1591 | + |
1592 | +def get_kanban_task(opts, board_id, task_id): |
1593 | + return kanban_request(opts, 'boards/%s/tasks/%s' % (board_id, task_id)) |
1594 | + |
1595 | +def put_kanban_task(opts, board_id, task_id, **kwargs): |
1596 | + return kanban_request(opts, 'boards/%s/tasks/%s' % (board_id, task_id), method='PUT', **kwargs) |
1597 | + |
1598 | +def main(): |
1599 | + # TODO: add support for passing a card id or papyrs URL |
1600 | + parser = optparse.OptionParser(usage="%prog") |
1601 | + parser.add_option("--kanban-api-url", dest="kanban_api_url", |
1602 | + default="https://linaro.kanbantool.com/api/v1") |
1603 | + parser.add_option("--jira-api-url", dest="jira_api_url", |
1604 | + default="http://cards.linaro.org/rest/api/2") |
1605 | + # defaults are read-only ~linaro-infrastructure tokens |
1606 | + parser.add_option("--kanban-token", dest="kanban_token", |
1607 | + default="9F209W7Y84TE") |
1608 | + parser.add_option("--papyrs-token", dest="papyrs_token", |
1609 | + default="868e9088b53c") |
1610 | + parser.add_option("--jira-username", dest="jira_username", |
1611 | + default="robot") |
1612 | + parser.add_option("--jira-password", dest="jira_password", |
1613 | + default="cuf4moh2") |
1614 | + parser.add_option("--jira-project", dest="jira_project_name", |
1615 | + default="CARD") |
1616 | + parser.add_option("--jira-issuetype", dest="jira_issuetype_name", |
1617 | + default="Roadmap Card") |
1618 | + parser.add_option("--board-id", dest="board_id", default="10721") |
1619 | + parser.add_option('--debug', action='store_true', default=True, |
1620 | + help='Enable debugging output in parsing routines') |
1621 | + parser.add_option('--board', |
1622 | + help='Board id at Kanban Tool', dest='board', default='10721') |
1623 | + opts, args = parser.parse_args() |
1624 | + |
1625 | + if os.environ.get("DEBUG", None) is not None: |
1626 | + opts.debug = True |
1627 | + |
1628 | + if len(args) != 0: |
1629 | + parser.error("You can not pass any argument") |
1630 | + |
1631 | + if opts.kanban_token is None: |
1632 | + sys.stderr.write("No Kanbantool API token given") |
1633 | + if opts.papyrs_token is None: |
1634 | + sys.stderr.write("No Papyrs API token given") |
1635 | + |
1636 | + # logging setup |
1637 | + logger = logging.getLogger() |
1638 | + ch = logging.StreamHandler() |
1639 | + formatter = logging.Formatter("%(asctime)s %(message)s") |
1640 | + ch.setFormatter(formatter) |
1641 | + logger.addHandler(ch) |
1642 | + if opts.debug: |
1643 | + logger.setLevel(logging.DEBUG) |
1644 | + |
1645 | + boards = get_kanban_boards(opts) |
1646 | + # dump |
1647 | + for board in boards: |
1648 | + board = board['board'] |
1649 | + dbg('Found board "%s" with id %s' % (board['name'], board['id'])) |
1650 | + dbg('') |
1651 | + |
1652 | + assert 1 == len(filter(lambda b: str(b['board']['id']) == opts.board_id, boards)), \ |
1653 | + 'Expected exactly one board with id %s' % opts.board_id |
1654 | + |
1655 | + board = get_kanban_board(opts, opts.board_id) |
1656 | + board = board['board'] |
1657 | + |
1658 | + workflow_stages = board['workflow_stages'] |
1659 | + # ideally order wouldn't matter but the "position" field of our workflow stages |
1660 | + # is bogus (always 1) so we can't use it |
1661 | + leaf_workflow_stages = [] |
1662 | + for workflow_stage in workflow_stages: |
1663 | + childs = filter( |
1664 | + lambda ws: ws['parent_id'] == workflow_stage['id'], workflow_stages) |
1665 | + if not childs: |
1666 | + # build a name list for leaf workflow stages |
1667 | + name = [] |
1668 | + id = workflow_stage['id'] |
1669 | + while True: |
1670 | + ws = filter(lambda ws: ws['id'] == id, workflow_stages)[0] |
1671 | + if ws['name'] is None: |
1672 | + break |
1673 | + name = [ws['name']] + name |
1674 | + id = ws['parent_id'] |
1675 | + leaf_workflow_stages.append((workflow_stage['id'], name)) |
1676 | + # dump |
1677 | + for id, name in leaf_workflow_stages: |
1678 | + pretty_name = "/".join(name) |
1679 | + dbg('Found leaf workflow stage %s with id %s' % (pretty_name, id)) |
1680 | + dbg('') |
1681 | + |
1682 | + card_types = board['card_types'] |
1683 | + # dump |
1684 | + for card_type in card_types: |
1685 | + dbg('Found card type %s with id %s' % (card_type['name'], card_type['id'])) |
1686 | + dbg('') |
1687 | + |
1688 | + def get_leaf_workflow_stage_name(worfklow_stage_id): |
1689 | + return [name |
1690 | + for id, name |
1691 | + in leaf_workflow_stages |
1692 | + if id == worfklow_stage_id][0] |
1693 | + |
1694 | + def get_card_type_name(card_type_id): |
1695 | + return [card_type['name'] |
1696 | + for card_type |
1697 | + in card_types |
1698 | + if card_type['id'] == card_type_id][0] |
1699 | + |
1700 | + def filter_tasks(task): |
1701 | + # ignore tasks in Legend and Deferred workflow stages |
1702 | + lwsn = get_leaf_workflow_stage_name(task['workflow_stage_id']) |
1703 | + if lwsn in (['Legend'], ['Deferred']): |
1704 | + dbg('Ignoring task %s in workflow stage %s' |
1705 | + % (task['external_id'], "/".join(lwsn))) |
1706 | + return False |
1707 | + # ignore tasks with Summits and Unknown card type names |
1708 | + card_type_name = get_card_type_name(task['card_type_id']) |
1709 | + if card_type_name in ('Summits', 'Unknown'): |
1710 | + dbg('Ignoring task %s with card type name %s' |
1711 | + % (task['external_id'], card_type_name)) |
1712 | + return False |
1713 | + return True |
1714 | + |
1715 | + tasks = get_kanban_tasks(opts, opts.board_id) |
1716 | + tasks = [t['task'] for t in tasks if filter_tasks(t['task'])] |
1717 | + # dump |
1718 | + for task in tasks: |
1719 | + dbg('Found task %s with id %s, workflow_stage_id %s, priority %s, ' |
1720 | + 'card_type_id %s, custom_field_2 %s, and external_id %s' |
1721 | + % (task['name'], task['id'], task['workflow_stage_id'], |
1722 | + task['priority'], task['card_type_id'], |
1723 | + task['custom_field_2'], task['external_id'])) |
1724 | + |
1725 | + CARD_TYPE_NAMES_TO_PREFIXES = { |
1726 | + 'LAVA': 'LAVA', |
1727 | + 'Android': 'ANDROID', |
1728 | + 'Linux & Ubuntu': 'LINUX', |
1729 | + 'TCWG': 'TCWG', |
1730 | + 'GWG': 'GWG', |
1731 | + 'MMWG': 'MMWG', |
1732 | + 'KWG': 'KWG', |
1733 | + 'PMWG': 'PMWG', |
1734 | + 'OCTO': 'OCTO', |
1735 | + } |
1736 | + |
1737 | + # check consistency of external_id with external_link and custom_field_2 |
1738 | + # (papyrs URL), and of external_id with card_type name |
1739 | + for task in tasks: |
1740 | + external_id = task['external_id'] |
1741 | + papyrs_url = task['custom_field_2'] |
1742 | + external_link = task['external_link'] |
1743 | + assert papyrs_url == 'https://linaro.papyrs.com/%s' % external_id, \ |
1744 | + 'Incorrect papyrs URL %s for task %s' % (papyrs_url, external_id) |
1745 | + assert external_link == 'http://status.linaro.org/card/%s' % external_id, \ |
1746 | + 'Incorrect external_link %s for task %s' % (external_link, external_id) |
1747 | + card_type_name = get_card_type_name(task['card_type_id']) |
1748 | + prefix = CARD_TYPE_NAMES_TO_PREFIXES[card_type_name] |
1749 | + assert external_id.startswith(prefix), \ |
1750 | + 'Incorrect card type prefix %s for task %s' % (prefix, external_id) |
1751 | + |
1752 | + # verify papyrs pages |
1753 | + #for task in tasks: |
1754 | + for task in []: |
1755 | + external_id = task['external_id'] |
1756 | + papyrs_url = task['custom_field_2'] |
1757 | + dbg('Fetching card %s' % task['name']) |
1758 | + papyrs_json = get_papyrs_page(papyrs_url, opts.papyrs_token) |
1759 | + |
1760 | + try: |
1761 | + # number of columns |
1762 | + ncols = len(papyrs_json) |
1763 | + assert ncols == 2, 'Expected exactly two columns but got %s' % len(ncols) |
1764 | + |
1765 | + # first column |
1766 | + col0 = papyrs_json[0] |
1767 | + p0 = col0[0] |
1768 | + classname = p0['classname'] |
1769 | + assert classname == 'Heading', \ |
1770 | + "First paragraph of first column should be a a heading but is %s" % classname |
1771 | + assert p0['text'] == p0['html'], \ |
1772 | + "Expected text (%s) and HTML (%s) to be identical for first heading" % (p0['text'], p0['html']) |
1773 | + assert p0['text'] == task['name'], \ |
1774 | + 'Mismatch between first heading (%s) and task (%s)' % (p0['text'], task['name']) |
1775 | + for p in col0[1:-2]: |
1776 | + assert p['classname'] in ('Heading', 'Paragraph'), \ |
1777 | + 'Got unexpected classname %s' % p['classname'] |
1778 | + if p['classname'] == 'Heading': |
1779 | + assert p['text'] == p['html'], \ |
1780 | + 'Expected heading HTML (%s) to match text (%s)' % (p['html'], p['text']) |
1781 | + if p['classname'] == 'Paragraph': |
1782 | + soup = BeautifulSoup('<root>%s</root>' % p['html'], 'xml') |
1783 | + for tag in soup.root.find_all(True): |
1784 | + assert tag.name in ('font', 'b', 'a', 'ul', 'ol', 'li', 'br', 'p', 'span', 'div', 'u'), 'Unexpected tag %s' % tag.name |
1785 | + |
1786 | + # second column |
1787 | + pm1 = col0[-1] |
1788 | + assert pm1['classname'] == 'Discuss', \ |
1789 | + 'Expect last classname to be Discuss but got %s' % pm1['classname'] |
1790 | + |
1791 | + col1 = papyrs_json[1] |
1792 | + skip_next_paragraph = False |
1793 | + nattachs = 0 |
1794 | + for p in col1: |
1795 | + if p['classname'] in ('Checklist', 'Twitters', 'Navigation'): |
1796 | + pass |
1797 | + elif p['classname'] == 'Attachment': |
1798 | + nattachs += 1 |
1799 | + elif p['classname'] == 'Heading' and p['text'] == 'Attachments': |
1800 | + pass |
1801 | + elif p['classname'] == 'Heading' and p['text'] == 'Metadata': |
1802 | + skip_next_paragraph = True |
1803 | + elif p['classname'] == 'Paragraph' and skip_next_paragraph: |
1804 | + skip_next_paragraph = False |
1805 | + else: |
1806 | + assert False, 'Unexpected paragraph %s' % p |
1807 | + if nattachs > 0: |
1808 | + dbg('Found %s attachment(s) on card %s' % (nattachs, task['name'])) |
1809 | + except Exception, e: |
1810 | + dbg(e) |
1811 | + |
1812 | + # query jira data |
1813 | + jira_project_result = jira.do_request(opts, 'project/%s' % opts.jira_project_name) |
1814 | + jira_statuses_result = jira.do_request(opts, 'status') |
1815 | + jira_fields_result = jira.do_request(opts, 'field') |
1816 | + # not allowed |
1817 | + #jira_securitylevels_result = jira.do_request(opts, 'securitylevel') |
1818 | + jira_priorities_result = jira.do_request(opts, 'priority') |
1819 | + |
1820 | + def search_jira_id(jira_result, name): |
1821 | + return [r['id'] for r in jira_result if r['name'] == name][0] |
1822 | + |
1823 | + # http://cards.linaro.org/rest/api/2/project/CARD has id 10000 |
1824 | + #jira_project_id = 10000 |
1825 | + jira_project_id = jira_project_result['id'] |
1826 | + # issuetype for "Roadmap Card" http://cards.linaro.org/rest/api/2/issuetype/9 |
1827 | + #jira_issuetype_id = 9 |
1828 | + jira_issuetype_id = search_jira_id(jira_project_result['issueTypes'], opts.jira_issuetype_name) |
1829 | + dbg('Found id %s for %s issueType' % (jira_issuetype_id, opts.jira_issuetype_name)) |
1830 | + for component in jira_project_result['components']: |
1831 | + dbg('Found component %s with id %s' % (component['name'], component['id'])) |
1832 | + for version in jira_project_result['versions']: |
1833 | + dbg('Found version %s with id %s' % (version['name'], version['id'])) |
1834 | + for status in jira_statuses_result: |
1835 | + dbg('Found status %s with id %s' % (status['name'], status['id'])) |
1836 | + |
1837 | + TYPE_TO_COMPONENT = { |
1838 | + 'LAVA': 'LAVA', |
1839 | + 'Android': 'Android', |
1840 | + 'Linux & Ubuntu': 'Linux & Ubuntu', |
1841 | + 'TCWG': 'Toolchain WG', |
1842 | + 'GWG': 'Graphics WG', |
1843 | + 'MMWG': 'Multimedia WG', |
1844 | + 'KWG': 'Kernel WG', |
1845 | + 'PMWG': 'Power Management WG', |
1846 | + 'OCTO': 'OCTO', |
1847 | + } |
1848 | + |
1849 | + STAGE_TO_STATUS = { |
1850 | + 'New/Draft': 'New/Drafting', |
1851 | + 'New/Needs Work': 'New/Drafting', |
1852 | + 'New/TSC Reviewed': 'New/Reviewed', |
1853 | + '2012Q1/Done': 'Approved', |
1854 | + '2012Q1/Ready': 'Approved', |
1855 | + '2012Q2/Forecast': 'Approved', |
1856 | + '2012Q3/Forecast': 'Approved', |
1857 | + '2012H2/Forecast': 'Approved', |
1858 | + '2013/Forecast': 'Approved', |
1859 | + } |
1860 | + |
1861 | + STAGE_TO_VERSION = { |
1862 | + 'New/Draft': None, |
1863 | + 'New/Needs Work': None, |
1864 | + 'New/TSC Reviewed': None, |
1865 | + '2012Q1/Done': '2012Q1', |
1866 | + '2012Q1/Ready': '2012Q1', |
1867 | + '2012Q2/Forecast': '2012Q2', |
1868 | + '2012Q3/Forecast': '2012Q3', |
1869 | + '2012H2/Forecast': '2012H2', |
1870 | + '2013/Forecast': '2013', |
1871 | + } |
1872 | + |
1873 | + PRIORITY_MAP = { |
1874 | + -1: 'Minor', |
1875 | + 0: 'Major', |
1876 | + 1: 'Critical', |
1877 | + } |
1878 | + |
1879 | + # actual copy |
1880 | + for task in tasks: |
1881 | + print task['name'] |
1882 | + print task['external_id'] |
1883 | + external_id = task['external_id'] |
1884 | + papyrs_url = task['custom_field_2'] |
1885 | + papyrs_json = get_papyrs_page(papyrs_url, opts.papyrs_token) |
1886 | + # first column |
1887 | + col0 = papyrs_json[0] |
1888 | + p0 = col0[0] |
1889 | + # assemble HTML of description |
1890 | + html = "" |
1891 | + for p in col0[1:-1]: |
1892 | + if p['classname'] == 'Heading': |
1893 | + html += '<h1>%s</h1>\n' % p['text'] |
1894 | + if p['classname'] == 'Paragraph': |
1895 | + html += '%s\n' % p['html'] |
1896 | + html = '{html}\n%s{html}\n' % html |
1897 | + |
1898 | + stage = "/".join(get_leaf_workflow_stage_name(task['workflow_stage_id'])) |
1899 | + status = STAGE_TO_STATUS[stage] |
1900 | + version = STAGE_TO_VERSION[stage] |
1901 | + type_name = get_card_type_name(task['card_type_id']) |
1902 | + component = TYPE_TO_COMPONENT[type_name] |
1903 | + priority = PRIORITY_MAP[task['priority']] |
1904 | + |
1905 | + fields = {'project': {'id': jira_project_id}, |
1906 | + 'summary': task['name'], |
1907 | + 'issuetype': {'id': jira_issuetype_id}, |
1908 | + 'description': html, |
1909 | + 'components': [{'id': search_jira_id(jira_project_result['components'], component)}], |
1910 | + search_jira_id(jira_fields_result, 'Alias Card ID'): task['external_id'], |
1911 | + # XXX hardcoded default security level; also, can't set security level to Public via API |
1912 | + #'security': {'id': search_jira_id(jira_securitylevels_result, 'Public')}, |
1913 | + 'priority': {'id': search_jira_id(jira_priorities_result, priority)}, |
1914 | + } |
1915 | + #'status': search_jira_id(jira_statuses_result, status), |
1916 | + if version: |
1917 | + fields['fixVersions'] = [{'id': search_jira_id(jira_project_result['versions'], version)}] |
1918 | + dbg('Uploading card %s' % fields) |
1919 | + print jira.do_request(opts, 'issue', fields=fields) |
1920 | + |
1921 | +if __name__ == "__main__": |
1922 | + main() |
1923 | |
1924 | === modified file 'lpworkitems/collect.py' |
1925 | --- lpworkitems/collect.py 2011-12-06 15:20:43 +0000 |
1926 | +++ lpworkitems/collect.py 2012-10-09 09:20:30 +0000 |
1927 | @@ -26,6 +26,7 @@ |
1928 | # "interesting") |
1929 | workitem_precedence = [None, u'done', u'postponed', u'blocked', u'todo', u'inprogress'] |
1930 | |
1931 | + |
1932 | class PersonCache(object): |
1933 | """A cache of Launchpad accounts.""" |
1934 | |
1935 | @@ -91,31 +92,29 @@ |
1936 | project_name = self.lp.load(milestone.target.self_link).name |
1937 | existing_milestone = self.store.find( |
1938 | models.Milestone, |
1939 | - models.Milestone.name==milestone_name).any() |
1940 | + models.Milestone.name == milestone_name).any() |
1941 | if existing_milestone is not None: |
1942 | - # We only store a milestone for the first project that we |
1943 | - # see it in. |
1944 | - # Check that the dates match, otherwise it's very confusing |
1945 | + # TODO: We now allow for the same milestone in different projects |
1946 | + # to have different due dates (within reasonable limits). |
1947 | + # However, the old algorithm, which relied on all due dates matching, |
1948 | + # only stores a single milestone with due date for the first project |
1949 | + # that we see it in (essentially, a random one). This is expected to |
1950 | + # be elaborated shortly. Then this block can be removed completely, |
1951 | + # until then it is left as a reminder. |
1952 | target_date = None |
1953 | if milestone.date_targeted is not None: |
1954 | - target_date = milestone.date_targeted.strftime("%Y-%m-%d") |
1955 | + target_date = milestone.date_targeted.strftime("%Y-%m") |
1956 | existing_target_date = existing_milestone.due_date |
1957 | if isinstance(existing_target_date, datetime.date): |
1958 | - existing_target_date = existing_target_date.strftime("%Y-%m-%d") |
1959 | + existing_target_date = existing_target_date.strftime("%Y-%m") |
1960 | if (target_date and existing_target_date != target_date): |
1961 | - error = MilestoneError( |
1962 | - milestone, |
1963 | - "Milestone %s (%s) has due_date %s but %s already has " |
1964 | - "the due date as %s" % (milestone.name, project_name, |
1965 | - target_date, existing_milestone.project, |
1966 | - existing_target_date)) |
1967 | - self.error_collector.store_error(error) |
1968 | - return |
1969 | - db_milestone = models.Milestone() |
1970 | - db_milestone.name = milestone_name |
1971 | - db_milestone.due_date = milestone.date_targeted |
1972 | - db_milestone.project = project_name |
1973 | - self.store.add(db_milestone) |
1974 | + existing_milestone.due_date = milestone.date_targeted |
1975 | + else: |
1976 | + db_milestone = models.Milestone() |
1977 | + db_milestone.name = milestone_name |
1978 | + db_milestone.due_date = milestone.date_targeted |
1979 | + db_milestone.project = project_name |
1980 | + self.store.add(db_milestone) |
1981 | |
1982 | def store_lp_milestones(self, milestones): |
1983 | if self.store.find(models.Milestone).any() is not None: |
1984 | @@ -270,7 +269,7 @@ |
1985 | if ']' in desc: |
1986 | off = desc.index(']') |
1987 | assignee_name = desc[1:off] |
1988 | - desc = desc[off+1:].strip() |
1989 | + desc = desc[off + 1:].strip() |
1990 | else: |
1991 | self.error_collector.record_blueprint_error( |
1992 | self.blueprint, |
1993 | @@ -314,6 +313,7 @@ |
1994 | def get_workitem_if_tracked(self, task, projects=None, |
1995 | distro_release=None): |
1996 | target = self.lp.load(task.target.self_link) |
1997 | + |
1998 | def get_rtype(obj): |
1999 | return urllib.splittag(obj.resource_type_link)[1] |
2000 | rtype = get_rtype(target) |
2001 | |
2002 | === added file 'lpworkitems/collect_roadmap.py' |
2003 | --- lpworkitems/collect_roadmap.py 1970-01-01 00:00:00 +0000 |
2004 | +++ lpworkitems/collect_roadmap.py 2012-10-09 09:20:30 +0000 |
2005 | @@ -0,0 +1,71 @@ |
2006 | +import datetime |
2007 | + |
2008 | +from lpworkitems import models_roadmap |
2009 | +from utils import unicode_or_None |
2010 | + |
2011 | + |
2012 | +class CollectorStore(object): |
2013 | + |
2014 | + def __init__(self, store, base_url, error_collector): |
2015 | + self.store = store |
2016 | + self.base_url = base_url |
2017 | + self.error_collector = error_collector |
2018 | + |
2019 | + def _clear_all(self, *find_args): |
2020 | + self.store.find(*find_args).remove() |
2021 | + |
2022 | + def clear_lanes(self): |
2023 | + self._clear_all(models_roadmap.Lane) |
2024 | + |
2025 | + def clear_cards(self): |
2026 | + self._clear_all(models_roadmap.Card) |
2027 | + |
2028 | + def store_lane(self, lane): |
2029 | + self.store.add(lane) |
2030 | + |
2031 | + def store_card(self, card): |
2032 | + self.store.add(card) |
2033 | + |
2034 | + def clear_todays_blueprint_daily_count_per_state(self): |
2035 | + self._clear_all( |
2036 | + models_roadmap.BlueprintDailyCountPerState, |
2037 | + models_roadmap.BlueprintDailyCountPerState.day == datetime.date.today()) |
2038 | + |
2039 | + def store_roadmap_bp_count_per_state(self): |
2040 | + query = """ |
2041 | + SELECT implementation, lane_id, count(*) |
2042 | + FROM specs |
2043 | + JOIN meta on spec = specs.name |
2044 | + JOIN card on roadmap_id = value |
2045 | + WHERE key = 'Roadmap id' |
2046 | + GROUP BY implementation, lane_id |
2047 | + """ |
2048 | + day = datetime.date.today() |
2049 | + result = self.store.execute(query) |
2050 | + for status, lane_id, count in result: |
2051 | + obj = models_roadmap.BlueprintDailyCountPerState() |
2052 | + obj.day = day |
2053 | + obj.status = status |
2054 | + obj.lane_id = lane_id |
2055 | + obj.count = count |
2056 | + self.store.add(obj) |
2057 | + |
2058 | + def lane_is_collected(self, lane_id): |
2059 | + return self.store.find(models_roadmap.Lane, models_roadmap. |
2060 | + Lane.lane_id == lane_id).one() is not None |
2061 | + |
2062 | + |
2063 | +def get_json_item(data, item_name): |
2064 | + item = data[item_name] |
2065 | + if item is not None: |
2066 | + item = item.strip() |
2067 | + return unicode_or_None(item) |
2068 | + |
2069 | + |
2070 | +def lookup_kanban_priority(numeric_priority): |
2071 | + priority_lookup = {-1: "low", |
2072 | + 0: "normal", |
2073 | + 1: "high"} |
2074 | + assert numeric_priority in priority_lookup, ( |
2075 | + "Priority '%s' is unknown." % numeric_priority) |
2076 | + return unicode_or_None(priority_lookup[numeric_priority]) |
2077 | |
2078 | === modified file 'lpworkitems/database.py' |
2079 | --- lpworkitems/database.py 2011-06-24 19:09:26 +0000 |
2080 | +++ lpworkitems/database.py 2012-10-09 09:20:30 +0000 |
2081 | @@ -7,13 +7,13 @@ |
2082 | store.execute('''CREATE TABLE version ( |
2083 | db_layout_ref INT NOT NULL |
2084 | )''') |
2085 | - store.execute('''INSERT INTO version VALUES (10)''') |
2086 | + store.execute('''INSERT INTO version VALUES (15)''') |
2087 | |
2088 | store.execute('''CREATE TABLE specs ( |
2089 | name VARCHAR(255) PRIMARY KEY, |
2090 | url VARCHAR(1000) NOT NULL, |
2091 | priority CHAR(20), |
2092 | - implementation CHAR(30), |
2093 | + implementation CHAR(30) NOT NULL, |
2094 | assignee CHAR(50), |
2095 | team CHAR(50), |
2096 | status VARCHAR(5000) NOT NULL, |
2097 | @@ -25,6 +25,13 @@ |
2098 | roadmap_notes VARCHAR(5000) |
2099 | )''') |
2100 | |
2101 | + store.execute('''CREATE TABLE spec_daily_count_per_state ( |
2102 | + status VARCHAR(5000) NOT NULL, |
2103 | + day DATE NOT NULL, |
2104 | + lane_id REFERENCES lane(lane_id), |
2105 | + count INT NOT NULL |
2106 | + )''') |
2107 | + |
2108 | store.execute('''CREATE TABLE work_items ( |
2109 | description VARCHAR(1000) NOT NULL, |
2110 | spec VARCHAR(255) REFERENCES specs(name), |
2111 | @@ -90,6 +97,30 @@ |
2112 | display_name VARCHAR(50) |
2113 | )''') |
2114 | |
2115 | + store.execute('''CREATE TABLE lane ( |
2116 | + name VARCHAR(200) NOT NULL, |
2117 | + lane_id NOT NULL, |
2118 | + is_current BOOLEAN, |
2119 | + cards REFERENCES card(card_id) |
2120 | + )''') |
2121 | + |
2122 | + store.execute('''CREATE TABLE card ( |
2123 | + name VARCHAR(200) NOT NULL, |
2124 | + card_id NOT NULL, |
2125 | + url VARCHAR(200), |
2126 | + is_healthy BOOLEAN, |
2127 | + status VARCHAR(50), |
2128 | + team VARCHAR(50), |
2129 | + priority VARCHAR(50), |
2130 | + size VARCHAR(50), |
2131 | + sponsor VARCHAR(50), |
2132 | + contact VARCHAR(50), |
2133 | + description BLOB, |
2134 | + acceptance_criteria BLOB, |
2135 | + roadmap_id VARCHAR(50), |
2136 | + lane_id REFERENCES lane(lane_id) |
2137 | + )''') |
2138 | + |
2139 | |
2140 | def upgrade_if_needed(store): |
2141 | # upgrade DB layout |
2142 | @@ -177,7 +208,47 @@ |
2143 | )''') |
2144 | store.execute('UPDATE version SET db_layout_ref = 10') |
2145 | ver = 10 |
2146 | + if ver == 10: |
2147 | + store.execute('''CREATE TABLE lane ( |
2148 | + name VARCHAR(200) NOT NULL, |
2149 | + lane_id NOT NULL, |
2150 | + cards REFERENCES card(card_id) |
2151 | + )''') |
2152 | |
2153 | + store.execute('''CREATE TABLE card ( |
2154 | + name VARCHAR(200) NOT NULL, |
2155 | + card_id NOT NULL, |
2156 | + status VARCHAR(50), |
2157 | + lane_id REFERENCES lane(lane_id) |
2158 | + )''') |
2159 | + store.execute('UPDATE version SET db_layout_ref = 11') |
2160 | + ver = 11 |
2161 | + if ver == 11: |
2162 | + store.execute('ALTER TABLE card ADD COLUMN roadmap_id VARCHAR(50)') |
2163 | + store.execute('UPDATE version SET db_layout_ref = 12') |
2164 | + ver = 12 |
2165 | + if ver == 12: |
2166 | + store.execute('ALTER TABLE card ADD COLUMN team VARCHAR(50)') |
2167 | + store.execute('ALTER TABLE card ADD COLUMN priority VARCHAR(50)') |
2168 | + store.execute('ALTER TABLE card ADD COLUMN size VARCHAR(50)') |
2169 | + store.execute('ALTER TABLE card ADD COLUMN sponsor VARCHAR(50)') |
2170 | + store.execute('ALTER TABLE card ADD COLUMN contact VARCHAR(50)') |
2171 | + store.execute('ALTER TABLE card ADD COLUMN description BLOB') |
2172 | + store.execute('ALTER TABLE card ADD COLUMN acceptance_criteria BLOB') |
2173 | + store.execute('ALTER TABLE lane ADD COLUMN is_current BOOLEAN') |
2174 | + store.execute('UPDATE version SET db_layout_ref = 13') |
2175 | + if ver == 13: |
2176 | + store.execute('ALTER TABLE card ADD COLUMN url VARCHAR(200)') |
2177 | + store.execute('ALTER TABLE card ADD COLUMN is_healthy BOOLEAN') |
2178 | + store.execute('UPDATE version SET db_layout_ref = 14') |
2179 | + if ver == 14: |
2180 | + store.execute('''CREATE TABLE spec_daily_count_per_state ( |
2181 | + status VARCHAR(5000) NOT NULL, |
2182 | + day DATE NOT NULL, |
2183 | + lane_id REFERENCES lane(lane_id), |
2184 | + count INT NOT NULL |
2185 | + )''') |
2186 | + store.execute('UPDATE version SET db_layout_ref = 15') |
2187 | |
2188 | def get_store(dbpath): |
2189 | '''Open/initialize database. |
2190 | @@ -205,5 +276,6 @@ |
2191 | store.execute('''CREATE INDEX work_items_date_idx ON work_items (date)''') |
2192 | store.execute('''CREATE INDEX work_items_status_idx ON work_items (status)''') |
2193 | |
2194 | + |
2195 | def create_v6_indexes(store): |
2196 | store.execute('''CREATE INDEX work_items_assignee_milestone_idx on work_items(assignee,milestone)''') |
2197 | |
2198 | === modified file 'lpworkitems/error_collector.py' |
2199 | --- lpworkitems/error_collector.py 2011-06-08 19:30:24 +0000 |
2200 | +++ lpworkitems/error_collector.py 2012-10-09 09:20:30 +0000 |
2201 | @@ -48,6 +48,10 @@ |
2202 | """Get the name of the blueprint, or None if not a blueprint.""" |
2203 | return None |
2204 | |
2205 | + def get_project_name(self): |
2206 | + """Get the name of the project, or None if not a project.""" |
2207 | + return None |
2208 | + |
2209 | def format_for_display(self): |
2210 | """Produce a string representation of the Error. |
2211 | |
2212 | @@ -84,6 +88,9 @@ |
2213 | def get_blueprint_name(self): |
2214 | return self.blueprint.name |
2215 | |
2216 | + def get_project_name(self): |
2217 | + return self.blueprint.url.split('/')[-3] |
2218 | + |
2219 | |
2220 | class BlueprintURLError(Error): |
2221 | """A deprecated class for backwards-compatibility. |
2222 | @@ -101,6 +108,9 @@ |
2223 | def get_blueprint_name(self): |
2224 | return self.blueprint_url.split('/')[-1] |
2225 | |
2226 | + def get_project_name(self): |
2227 | + return self.blueprint_url.split('/')[-3] |
2228 | + |
2229 | |
2230 | class MilestoneError(Error): |
2231 | |
2232 | |
2233 | === modified file 'lpworkitems/factory.py' |
2234 | --- lpworkitems/factory.py 2011-06-14 22:00:21 +0000 |
2235 | +++ lpworkitems/factory.py 2012-10-09 09:20:30 +0000 |
2236 | @@ -11,6 +11,7 @@ |
2237 | TeamStructure, |
2238 | Workitem, |
2239 | ) |
2240 | +from lpworkitems.models_roadmap import BlueprintDailyCountPerState, Card |
2241 | |
2242 | |
2243 | class Factory(object): |
2244 | @@ -63,6 +64,8 @@ |
2245 | url = self.getUniqueUnicode(prefix=name+"_url") |
2246 | if status is None: |
2247 | status = self.getUniqueUnicode(prefix=name+"_status") |
2248 | + if implementation is None: |
2249 | + implementation = u'Unknown' |
2250 | blueprint.name = name |
2251 | blueprint.url = url |
2252 | blueprint.status = status |
2253 | @@ -109,8 +112,11 @@ |
2254 | self.store.add(workitem) |
2255 | return workitem |
2256 | |
2257 | - def make_meta(self, store=True): |
2258 | + def make_meta(self, key=None, value=None, blueprint=None, store=True): |
2259 | meta = Meta() |
2260 | + meta.key = key |
2261 | + meta.value = value |
2262 | + meta.blueprint = blueprint |
2263 | if store: |
2264 | self.store.add(meta) |
2265 | return meta |
2266 | @@ -155,3 +161,28 @@ |
2267 | if store: |
2268 | self.store.add(person) |
2269 | return person |
2270 | + |
2271 | + def make_blueprint_daily_count_per_state(self, status=None, count=1, |
2272 | + day=None, store=True): |
2273 | + if status is None: |
2274 | + status = self.getUniqueUnicode() |
2275 | + if day is None: |
2276 | + day = datetime.date.today() |
2277 | + obj = BlueprintDailyCountPerState() |
2278 | + obj.day = day |
2279 | + obj.status = status |
2280 | + obj.count = count |
2281 | + obj.lane_id = 1 |
2282 | + if store: |
2283 | + self.store.add(obj) |
2284 | + return obj |
2285 | + |
2286 | + def make_card(self, store=True): |
2287 | + name = self.getUniqueUnicode() |
2288 | + card_id = self.getUniqueInteger() |
2289 | + lane_id = self.getUniqueInteger() |
2290 | + roadmap_id = self.getUniqueUnicode() |
2291 | + card = Card(name, card_id, lane_id, roadmap_id) |
2292 | + if store: |
2293 | + self.store.add(card) |
2294 | + return card |
2295 | |
2296 | === modified file 'lpworkitems/models.py' |
2297 | --- lpworkitems/models.py 2011-12-06 15:20:43 +0000 |
2298 | +++ lpworkitems/models.py 2012-10-09 09:20:30 +0000 |
2299 | @@ -1,15 +1,20 @@ |
2300 | import datetime |
2301 | import re |
2302 | - |
2303 | -from storm.locals import Date, Reference, ReferenceSet, Unicode |
2304 | - |
2305 | - |
2306 | -def unicode_or_None(attr): |
2307 | - if attr is None: |
2308 | - return attr |
2309 | - if isinstance(attr, unicode): |
2310 | - return attr |
2311 | - return attr.decode("utf-8") |
2312 | +from utils import unicode_or_None |
2313 | + |
2314 | +from storm.locals import Date, Int, Reference, ReferenceSet, Unicode |
2315 | + |
2316 | +ROADMAP_STATUSES_MAP = { |
2317 | + u'Completed': [u'Implemented'], |
2318 | + u'Blocked': [u'Needs Infrastructure', u'Blocked', u'Deferred'], |
2319 | + u'In Progress': [u'Deployment', u'Needs Code Review', |
2320 | + u'Beta Available', u'Good progress', |
2321 | + u'Slow progress', u'Started'], |
2322 | + u'Planned': [u'Unknown', u'Not started', u'Informational']} |
2323 | + |
2324 | +ROADMAP_ORDERED_STATUSES = ['Completed', 'In Progress', 'Blocked', 'Planned'] |
2325 | +assert set(ROADMAP_ORDERED_STATUSES) == set(ROADMAP_STATUSES_MAP.keys()), ( |
2326 | + 'The roadmap statuses are incorrect: %s' % ROADMAP_ORDERED_STATUSES) |
2327 | |
2328 | |
2329 | def fill_blueprint_info_from_launchpad(model_bp, lp_bp): |
2330 | @@ -55,6 +60,14 @@ |
2331 | project = Unicode() |
2332 | |
2333 | |
2334 | +def get_roadmap_status_for_bp_implementation_status(implementation): |
2335 | + for key in ROADMAP_STATUSES_MAP: |
2336 | + if implementation in ROADMAP_STATUSES_MAP[key]: |
2337 | + return key |
2338 | + # XXX: Is None the appropriate return value here? |
2339 | + return None |
2340 | + |
2341 | + |
2342 | class Blueprint(object): |
2343 | |
2344 | __storm_table__ = "specs" |
2345 | @@ -88,6 +101,15 @@ |
2346 | lp_bp.whiteboard, "Roadmap\s+Notes") |
2347 | return model_bp |
2348 | |
2349 | + @property |
2350 | + def roadmap_status(self): |
2351 | + return get_roadmap_status_for_bp_implementation_status( |
2352 | + self.implementation) |
2353 | + |
2354 | + |
2355 | +def current_date(): |
2356 | + return datetime.date.today() |
2357 | + |
2358 | |
2359 | class Person(object): |
2360 | |
2361 | @@ -115,10 +137,6 @@ |
2362 | superteam_name = Unicode(name="team") |
2363 | |
2364 | |
2365 | -def current_date(): |
2366 | - return datetime.date.today() |
2367 | - |
2368 | - |
2369 | class Meta(object): |
2370 | |
2371 | __storm_table__ = "meta" |
2372 | |
2373 | === added file 'lpworkitems/models_roadmap.py' |
2374 | --- lpworkitems/models_roadmap.py 1970-01-01 00:00:00 +0000 |
2375 | +++ lpworkitems/models_roadmap.py 2012-10-09 09:20:30 +0000 |
2376 | @@ -0,0 +1,60 @@ |
2377 | +import datetime |
2378 | +import re |
2379 | +from utils import unicode_or_None |
2380 | + |
2381 | +from storm.locals import Date, Reference, ReferenceSet, Unicode, Int, Bool |
2382 | + |
2383 | +from lpworkitems import models |
2384 | + |
2385 | + |
2386 | +class Card(object): |
2387 | + |
2388 | + __storm_table__ = "card" |
2389 | + |
2390 | + name = Unicode() |
2391 | + status = Unicode() |
2392 | + card_id = Int(primary=True) |
2393 | + lane_id = Int() |
2394 | + roadmap_id = Unicode() |
2395 | + team = Unicode() |
2396 | + priority = Unicode() |
2397 | + size = Unicode() |
2398 | + sponsor = Unicode() |
2399 | + contact = Unicode() |
2400 | + description = Unicode() |
2401 | + acceptance_criteria = Unicode() |
2402 | + url = Unicode() |
2403 | + is_healthy = Bool() |
2404 | + |
2405 | + def __init__(self, name, card_id, lane_id, roadmap_id): |
2406 | + self.lane_id = lane_id |
2407 | + self.card_id = card_id |
2408 | + self.name = name |
2409 | + self.roadmap_id = roadmap_id |
2410 | + |
2411 | + |
2412 | +class Lane(object): |
2413 | + |
2414 | + __storm_table__ = "lane" |
2415 | + |
2416 | + name = Unicode() |
2417 | + lane_id = Int(primary=True) |
2418 | + is_current = Bool() |
2419 | + cards = ReferenceSet(lane_id, Card.lane_id) |
2420 | + |
2421 | + def __init__(self, name, lane_id): |
2422 | + self.lane_id = lane_id |
2423 | + self.name = name |
2424 | + |
2425 | + |
2426 | +def current_date(): |
2427 | + return datetime.date.today() |
2428 | + |
2429 | + |
2430 | +class BlueprintDailyCountPerState(object): |
2431 | + __storm_table__ = 'spec_daily_count_per_state' |
2432 | + __storm_primary__ = 'status', 'day' |
2433 | + day = Date(default_factory=current_date) |
2434 | + status = Unicode() |
2435 | + lane_id = Int() |
2436 | + count = Int() |
2437 | |
2438 | === modified file 'lpworkitems/tests/test_collect.py' |
2439 | --- lpworkitems/tests/test_collect.py 2011-06-14 22:00:21 +0000 |
2440 | +++ lpworkitems/tests/test_collect.py 2012-10-09 09:20:30 +0000 |
2441 | @@ -184,7 +184,6 @@ |
2442 | self.store.find( |
2443 | Milestone, Milestone.name == name).one()) |
2444 | |
2445 | - |
2446 | def test_store_blueprint_stores_blueprint(self): |
2447 | blueprint = self.factory.make_blueprint(store=False) |
2448 | ret = self.collector.store_blueprint(blueprint) |
2449 | |
2450 | === added file 'lpworkitems/tests/test_collect_roadmap.py' |
2451 | --- lpworkitems/tests/test_collect_roadmap.py 1970-01-01 00:00:00 +0000 |
2452 | +++ lpworkitems/tests/test_collect_roadmap.py 2012-10-09 09:20:30 +0000 |
2453 | @@ -0,0 +1,69 @@ |
2454 | +import datetime |
2455 | + |
2456 | +from lpworkitems.collect_roadmap import ( |
2457 | + CollectorStore, |
2458 | + get_json_item, |
2459 | + ) |
2460 | +from lpworkitems.models_roadmap import BlueprintDailyCountPerState |
2461 | +from lpworkitems.error_collector import ( |
2462 | + ErrorCollector, |
2463 | + ) |
2464 | +from lpworkitems.testing import TestCaseWithFakeLaunchpad |
2465 | + |
2466 | + |
2467 | +class CollectorTests(TestCaseWithFakeLaunchpad): |
2468 | + |
2469 | + def setUp(self): |
2470 | + super(CollectorTests, self).setUp() |
2471 | + self.error_collector = ErrorCollector() |
2472 | + self.collector = CollectorStore( |
2473 | + self.store, self.lp, self.error_collector) |
2474 | + |
2475 | + def assertClears(self, cls, fn): |
2476 | + self.assertTrue(self.store.find(cls).count() > 0) |
2477 | + fn() |
2478 | + self.assertEqual(0, self.store.find(cls).count()) |
2479 | + |
2480 | + def test_clear_todays_blueprint_daily_count_per_state(self): |
2481 | + self.factory.make_blueprint_daily_count_per_state( |
2482 | + day=datetime.date.today()) |
2483 | + self.assertClears( |
2484 | + BlueprintDailyCountPerState, |
2485 | + self.collector.clear_todays_blueprint_daily_count_per_state) |
2486 | + |
2487 | + def test_store_roadmap_bp_count_per_state(self): |
2488 | + bp = self.factory.make_blueprint() |
2489 | + card = self.factory.make_card() |
2490 | + meta = self.factory.make_meta( |
2491 | + key=u'Roadmap id', value=card.roadmap_id, blueprint=bp) |
2492 | + self.collector.store_roadmap_bp_count_per_state() |
2493 | + self.assertEqual( |
2494 | + 1, self.store.find(BlueprintDailyCountPerState).count()) |
2495 | + entry = self.store.find(BlueprintDailyCountPerState).one() |
2496 | + self.assertEqual(1, entry.count) |
2497 | + self.assertEqual(card.lane_id, entry.lane_id) |
2498 | + self.assertEqual(bp.implementation, entry.status) |
2499 | + |
2500 | + # XXX Add tests for the roadmap classes. |
2501 | + |
2502 | + |
2503 | +class RoadmapUtilsTests(TestCaseWithFakeLaunchpad): |
2504 | + |
2505 | + def setUp(self): |
2506 | + super(RoadmapUtilsTests, self).setUp() |
2507 | + self.json_data = {"data": "Text", |
2508 | + "whitespace": " Text ", |
2509 | + "none": None |
2510 | + } |
2511 | + |
2512 | + def test_get_json_data_unicode(self): |
2513 | + item = get_json_item(self.json_data, 'data') |
2514 | + self.assertEquals(item, u'Text') |
2515 | + |
2516 | + def test_get_json_data_whitespace(self): |
2517 | + item = get_json_item(self.json_data, 'whitespace') |
2518 | + self.assertEquals(item, u'Text') |
2519 | + |
2520 | + def test_get_json_data_none(self): |
2521 | + item = get_json_item(self.json_data, 'none') |
2522 | + self.assertEquals(item, None) |
2523 | |
2524 | === modified file 'lpworkitems/tests/test_factory.py' |
2525 | --- lpworkitems/tests/test_factory.py 2011-06-04 18:48:23 +0000 |
2526 | +++ lpworkitems/tests/test_factory.py 2012-10-09 09:20:30 +0000 |
2527 | @@ -162,7 +162,7 @@ |
2528 | implementation = u"Implemented" |
2529 | self.assert_with_and_without( |
2530 | self.factory.make_blueprint, "implementation", implementation, |
2531 | - Equals(None)) |
2532 | + Equals("Unknown")) |
2533 | |
2534 | def test_uses_assignee_name(self): |
2535 | assignee_name = self.factory.getUniqueUnicode( |
2536 | |
2537 | === modified file 'lpworkitems/tests/test_models.py' |
2538 | --- lpworkitems/tests/test_models.py 2011-12-06 15:20:43 +0000 |
2539 | +++ lpworkitems/tests/test_models.py 2012-10-09 09:20:30 +0000 |
2540 | @@ -6,8 +6,11 @@ |
2541 | extract_last_path_segment_from_url, |
2542 | extract_user_name_from_url, |
2543 | get_whiteboard_section, |
2544 | - ) |
2545 | -from lpworkitems.testing import TestCaseWithFakeLaunchpad |
2546 | + ROADMAP_STATUSES_MAP, |
2547 | + ) |
2548 | +from lpworkitems.testing import ( |
2549 | + TestCaseWithFakeLaunchpad, |
2550 | + ) |
2551 | |
2552 | |
2553 | class GetWhiteboardSectionTests(TestCase): |
2554 | @@ -42,6 +45,18 @@ |
2555 | |
2556 | class BlueprintTests(TestCaseWithFakeLaunchpad): |
2557 | |
2558 | + def test_roadmap_status(self): |
2559 | + roadmap_status = "Completed" |
2560 | + bp_implementation = ROADMAP_STATUSES_MAP[roadmap_status][0] |
2561 | + bp_status = self.factory.make_blueprint( |
2562 | + implementation=bp_implementation) |
2563 | + self.assertEqual(roadmap_status, bp_status.roadmap_status) |
2564 | + |
2565 | + def test_roadmap_status_unknown_status(self): |
2566 | + blueprint = self.factory.make_blueprint( |
2567 | + implementation=u"Not Expected") |
2568 | + self.assertEqual(None, blueprint.roadmap_status) |
2569 | + |
2570 | def test_from_launchpad_sets_name(self): |
2571 | name = self.factory.getUniqueUnicode(prefix="lpblueprint") |
2572 | lp_bp = self.lp.make_blueprint(name=name) |
2573 | |
2574 | === modified file 'report_tools.py' |
2575 | --- report_tools.py 2012-07-17 06:00:48 +0000 |
2576 | +++ report_tools.py 2012-10-09 09:20:30 +0000 |
2577 | @@ -3,12 +3,25 @@ |
2578 | # Tools for generating reports |
2579 | |
2580 | import datetime |
2581 | -import urllib, sys, os.path, re |
2582 | +<<<<<<< TREE |
2583 | +import urllib, sys, os.path, re |
2584 | +======= |
2585 | +import urllib, sys, os.path, re |
2586 | +from storm.locals import create_database, Store |
2587 | +>>>>>>> MERGE-SOURCE |
2588 | from subprocess import Popen |
2589 | from cgi import escape |
2590 | from lpworkitems import database |
2591 | import errno |
2592 | import fcntl |
2593 | +from lpworkitems.models_roadmap import ( |
2594 | + Lane, |
2595 | + Card, |
2596 | +) |
2597 | +from lpworkitems.models import ( |
2598 | + Meta, ROADMAP_STATUSES_MAP, |
2599 | + get_roadmap_status_for_bp_implementation_status, |
2600 | +) |
2601 | |
2602 | valid_states = [u'todo', u'blocked', u'inprogress', u'done', u'postponed'] |
2603 | state_labels = [u'Todo', u'Blocked', u'In Progress', u'Done', u'Postponed'] |
2604 | @@ -180,6 +193,52 @@ |
2605 | fh.close() |
2606 | |
2607 | |
2608 | +def roadmap_pages(my_path, database, basename, config, lane, root=None): |
2609 | + cfg = load_config(config) |
2610 | + fh = open(basename + '.html', 'w') |
2611 | + chart_path, _ = os.path.split(basename) |
2612 | + chart_name = os.path.join(chart_path, 'current_quarter.svg') |
2613 | + try: |
2614 | + args = [os.path.join(my_path, 'html-report'), '-d', database] |
2615 | + args += ['--report-type', 'roadmap_page'] |
2616 | + args += ['--lane', lane.name] |
2617 | + if root: |
2618 | + args += ['--root', root] |
2619 | + if lane.is_current: |
2620 | + args += ['--chart', chart_name] |
2621 | + report_args(args, theme=get_theme(cfg)) |
2622 | + proc = Popen(args, stdout=fh) |
2623 | + print basename + '.html' |
2624 | + proc.wait() |
2625 | + finally: |
2626 | + fh.close() |
2627 | + |
2628 | + if lane.is_current: |
2629 | + args = [os.path.join(my_path, 'roadmap-bp-chart'), '-d', database, |
2630 | + '-o', chart_name] |
2631 | + args += ['--inverted'] |
2632 | + proc = Popen(args) |
2633 | + print chart_name |
2634 | + proc.wait() |
2635 | + |
2636 | + |
2637 | +def roadmap_cards(my_path, database, basename, config, card, root=None): |
2638 | + cfg = load_config(config) |
2639 | + fh = open(basename + '.html', 'w') |
2640 | + try: |
2641 | + args = [os.path.join(my_path, 'html-report'), '-d', database] |
2642 | + args += ['--report-type', 'roadmap_card'] |
2643 | + args += ['--card', '%s' % card.card_id] |
2644 | + if root: |
2645 | + args += ['--root', root] |
2646 | + report_args(args, theme=get_theme(cfg)) |
2647 | + proc = Popen(args, stdout=fh) |
2648 | + print basename + '.html' |
2649 | + proc.wait() |
2650 | + finally: |
2651 | + fh.close() |
2652 | + |
2653 | + |
2654 | def run_reports(my_path, database, basename, config, milestone=None, team=None, |
2655 | user=None, trend_starts=None, trend_override=None, burnup=False, root=None, date=None): |
2656 | |
2657 | @@ -281,6 +340,34 @@ |
2658 | return escape(html, True) |
2659 | |
2660 | |
2661 | +def blueprints_over_time(store): |
2662 | + '''Calculate blueprint development over time for the current lane. |
2663 | + |
2664 | + We do not need to care about teams or groups since this is intended for the |
2665 | + roadmap overview. |
2666 | + |
2667 | + Return date -> state -> count mapping. states are |
2668 | + {planned,inprogress,completed,blocked}. |
2669 | + ''' |
2670 | + data = {} |
2671 | + result = store.execute(""" |
2672 | + SELECT status, day, count |
2673 | + FROM spec_daily_count_per_state |
2674 | + JOIN lane on lane.lane_id = spec_daily_count_per_state.lane_id |
2675 | + WHERE lane.is_current = 1 |
2676 | + """) |
2677 | + for status, day, count in result: |
2678 | + roadmap_status = get_roadmap_status_for_bp_implementation_status( |
2679 | + status) |
2680 | + assert roadmap_status is not None |
2681 | + if day not in data: |
2682 | + data[day] = {} |
2683 | + if roadmap_status not in data[day]: |
2684 | + data[day][roadmap_status] = 0 |
2685 | + data[day][roadmap_status] += count |
2686 | + return data |
2687 | + |
2688 | + |
2689 | def workitems_over_time(store, team=None, group=None, milestone_collection=None): |
2690 | '''Calculate work item development over time. |
2691 | |
2692 | @@ -876,6 +963,80 @@ |
2693 | return rv |
2694 | |
2695 | |
2696 | +def lanes(store): |
2697 | + return store.find(Lane) |
2698 | + |
2699 | + |
2700 | +def lane(store, name, id=None): |
2701 | + if id is None: |
2702 | + return store.find(Lane, Lane.name == unicode(name)).one() |
2703 | + else: |
2704 | + return store.find(Lane, Lane.lane_id == id).one() |
2705 | + |
2706 | + |
2707 | +def current_lane(store): |
2708 | + return store.find(Lane, Lane.is_current).one() |
2709 | + |
2710 | + |
2711 | +def lane_cards(store, lane): |
2712 | + return lane.cards |
2713 | + |
2714 | + |
2715 | +def statuses(store, lane): |
2716 | + result = [] |
2717 | + for status in store.find(Card.status, |
2718 | + Card.lane_id == lane.lane_id).config(distinct=True): |
2719 | + result.append((status, store.find(Card, |
2720 | + Card.lane_id == lane.lane_id, |
2721 | + Card.status == status))) |
2722 | + return result |
2723 | + |
2724 | + |
2725 | +def cards(store): |
2726 | + return store.find(Card) |
2727 | + |
2728 | + |
2729 | +def card(store, card_id): |
2730 | + return store.find(Card, Card.card_id == card_id) |
2731 | + |
2732 | + |
2733 | +def card_blueprints(store, roadmap_id): |
2734 | + metas = store.find(Meta, |
2735 | + Meta.key == u'Roadmap id', |
2736 | + Meta.value == roadmap_id) |
2737 | + return [meta.blueprint for meta in metas] |
2738 | + |
2739 | + |
2740 | +def card_blueprints_by_status(store, roadmap_id): |
2741 | + blueprints = card_blueprints(store, roadmap_id) |
2742 | + bp_by_status = {} |
2743 | + for key in ROADMAP_STATUSES_MAP: |
2744 | + bp_by_status[key] = [] |
2745 | + for bp in blueprints: |
2746 | + bp_by_status[bp.roadmap_status].append(bp) |
2747 | + return bp_by_status |
2748 | + |
2749 | + |
2750 | +def card_bp_status_counts(store, roadmap_id): |
2751 | + blueprints = card_blueprints(store, roadmap_id) |
2752 | + total_by_status = dict([(key, 0) for key in ROADMAP_STATUSES_MAP]) |
2753 | + for bp in blueprints: |
2754 | + total_by_status[bp.roadmap_status] += 1 |
2755 | + return total_by_status |
2756 | + |
2757 | + |
2758 | +def check_card_health(store, card_health_checks, card): |
2759 | + performed_checks = [] |
2760 | + card.is_healthy = True |
2761 | + for check in card_health_checks: |
2762 | + result = check.execute(card, store) |
2763 | + if result == check.NOT_OK: |
2764 | + card.is_healthy = False |
2765 | + performed_checks.append({'name': check.name, |
2766 | + 'result': result}) |
2767 | + return performed_checks |
2768 | + |
2769 | + |
2770 | def subteams(store, team): |
2771 | result = store.execute('SELECT name from team_structure where team = ?', (unicode(team),)) |
2772 | return [i[0] for i in result] |
2773 | |
2774 | === added file 'roadmap-bp-chart' |
2775 | --- roadmap-bp-chart 1970-01-01 00:00:00 +0000 |
2776 | +++ roadmap-bp-chart 2012-10-09 09:20:30 +0000 |
2777 | @@ -0,0 +1,249 @@ |
2778 | +#!/usr/bin/python |
2779 | +# |
2780 | +# Create a blueprint tracking chart from a blueprint database. |
2781 | +# |
2782 | +# Copyright (C) 2010, 2011 Canonical Ltd. |
2783 | +# License: GPL-3 |
2784 | + |
2785 | +import optparse, datetime, sys |
2786 | +import report_tools |
2787 | + |
2788 | +from pychart import * |
2789 | + |
2790 | +def date_to_ordinal(s): |
2791 | + '''Turn yyyy-mm-dd strings to ordinals''' |
2792 | + return report_tools.date_to_python(s).toordinal() |
2793 | + |
2794 | + |
2795 | +def ordinal_to_date(ordinal): |
2796 | + '''Turn an ordinal date into a string''' |
2797 | + d = datetime.date.fromordinal(int(ordinal)) |
2798 | + return d.strftime('%Y-%m-%d') |
2799 | + |
2800 | +def format_date(ordinal): |
2801 | + d = datetime.date.fromordinal(int(ordinal)) |
2802 | + return '/a60{}' + d.strftime('%b %d, %y') |
2803 | + |
2804 | +def do_chart(data, start_date, end_date, trend_start, title, filename, only_weekdays, inverted): |
2805 | + #set up default values |
2806 | + format = 'svg' |
2807 | + height = 450 |
2808 | + width = 1000 |
2809 | + legend_x = 700 |
2810 | + legend_y = 200 |
2811 | + title_x = 300 |
2812 | + title_y = 350 |
2813 | + |
2814 | + if inverted: |
2815 | + legend_x=200 |
2816 | + |
2817 | + # Tell pychart to use colors |
2818 | + theme.use_color = True |
2819 | + theme.default_font_size = 12 |
2820 | + theme.reinitialize() |
2821 | + |
2822 | + # turn into pychart data model and calculate maximum number of WIs |
2823 | + max_items = 1 # start at 1 to avoid zero div |
2824 | + lastactive = 0 |
2825 | + pcdata = [] |
2826 | + |
2827 | + for date in xrange(date_to_ordinal(start_date), date_to_ordinal(end_date)+1): |
2828 | + if (not only_weekdays or datetime.date.fromordinal(date).weekday() < 5): |
2829 | + end_date = ordinal_to_date(date) |
2830 | + i = data.get(ordinal_to_date(date), {}) |
2831 | + count = i.get('Completed', 0) + i.get('Planned', 0) + i.get('Blocked', 0) + i.get('In Progress', 0) |
2832 | + if max_items < count: |
2833 | + max_items = count |
2834 | + pcdata.append((date, i.get('Planned', 0),0, |
2835 | + i.get('Blocked', 0),0, |
2836 | + i.get('In Progress', 0),0, |
2837 | + i.get('Completed',0),0, count)) |
2838 | + if count > 0: |
2839 | + lastactive = len(pcdata) - 1 |
2840 | + |
2841 | + # add some extra space to look nicer |
2842 | + max_items = int(max_items * 1.05) |
2843 | + |
2844 | + x_interval = len(pcdata)/20 |
2845 | + if max_items > 500: |
2846 | + y_interval = max_items/200*10 |
2847 | + elif max_items < 20: |
2848 | + y_interval = 1 |
2849 | + else: |
2850 | + y_interval = max_items/20 |
2851 | + |
2852 | + # create the chart object |
2853 | + chart_object.set_defaults(area.T, size=(width, height), |
2854 | + y_range=(0, None), x_coord=category_coord.T(pcdata, 0)) |
2855 | + |
2856 | + # tell the chart object it will use a bar chart, and will |
2857 | + # use the data list for it's model |
2858 | + chart_object.set_defaults(bar_plot.T, data=pcdata) |
2859 | + |
2860 | + # create the chart area |
2861 | + # tell it to start at coords 0,0 |
2862 | + # tell it the labels, and the tics, etc.. |
2863 | + # HACK: to prevent 0 div |
2864 | + if max_items == 0: |
2865 | + max_items = 1 |
2866 | + ar = area.T(legend=legend.T(loc=(legend_x,legend_y)), loc=(0,0), |
2867 | + x_axis=axis.X(label='Date', tic_interval=x_interval,format=format_date), |
2868 | + y_axis=axis.Y(label='Blueprints', tic_interval=y_interval), |
2869 | + y_range=(0, max_items)) |
2870 | + |
2871 | + #initialize the blar_plot fill styles |
2872 | + bar_plot.fill_styles.reset() |
2873 | + |
2874 | + # create each set of data to plot |
2875 | + # note that index zero is the label col |
2876 | + # for each column of data, tell it what to use for the legend and |
2877 | + # what color to make the bar, no lines, and |
2878 | + # what plot to stack on |
2879 | + |
2880 | + tlabel = '' |
2881 | + |
2882 | + if inverted: |
2883 | + plot1 = bar_plot.T(label='Completed' + tlabel, hcol=7) |
2884 | + plot1.fill_style = fill_style.Plain(bgcolor=color.seagreen) |
2885 | + |
2886 | + plot3 = bar_plot.T(label='In Progress' + tlabel, hcol=5, stack_on = plot1) |
2887 | + plot3.fill_style = fill_style.Plain(bgcolor=color.gray65) |
2888 | + |
2889 | + plot5 = bar_plot.T(label='Blocked' + tlabel, hcol=3, stack_on = plot3) |
2890 | + plot5.fill_style = fill_style.Plain(bgcolor=color.red1) |
2891 | + |
2892 | + plot7 = bar_plot.T(label='Planned' + tlabel, hcol=1, stack_on = plot5) |
2893 | + plot7.fill_style = fill_style.Plain(bgcolor=color.darkorange1) |
2894 | + else: |
2895 | + plot1 = bar_plot.T(label='Planned' + tlabel, hcol=1) |
2896 | + plot1.fill_style = fill_style.Plain(bgcolor=color.darkorange1) |
2897 | + |
2898 | + plot3 = bar_plot.T(label='Blocked' + tlabel, hcol=3, stack_on = plot1) |
2899 | + plot3.fill_style = fill_style.Plain(bgcolor=color.red1) |
2900 | + |
2901 | + plot5 = bar_plot.T(label='In Progress' + tlabel, hcol=5, stack_on = plot3) |
2902 | + plot5.fill_style = fill_style.Plain(bgcolor=color.gray65) |
2903 | + |
2904 | + plot7 = bar_plot.T(label='Completed' + tlabel, hcol=7, stack_on = plot5) |
2905 | + plot7.fill_style = fill_style.Plain(bgcolor=color.seagreen) |
2906 | + |
2907 | + |
2908 | + plot1.line_style = None |
2909 | + plot3.line_style = None |
2910 | + plot5.line_style = None |
2911 | + plot7.line_style = None |
2912 | + |
2913 | + plot11 = bar_plot.T(label='total', hcol=9) |
2914 | + plot11.fill_style = None |
2915 | + plot11.line_style = line_style.gray30 |
2916 | + |
2917 | + # create the canvas with the specified filename and file format |
2918 | + can = canvas.init(filename,format) |
2919 | + |
2920 | + # add the data to the area and draw it |
2921 | + ar.add_plot(plot1, plot3, plot5, plot7) |
2922 | + ar.draw() |
2923 | + |
2924 | + # title |
2925 | + tb = text_box.T(loc=(title_x, title_y), text=title, line_style=None) |
2926 | + tb.fill_style = None |
2927 | + tb.draw() |
2928 | + |
2929 | +# |
2930 | +# main |
2931 | +# |
2932 | + |
2933 | +# argv parsing |
2934 | +optparser = optparse.OptionParser() |
2935 | +optparser.add_option('-d', '--database', |
2936 | + help='Path to database', dest='database', metavar='PATH') |
2937 | +optparser.add_option('-t', '--team', |
2938 | + help='Restrict report to a particular team', dest='team') |
2939 | +optparser.add_option('-m', '--milestone', |
2940 | + help='Restrict report to a particular milestone', dest='milestone') |
2941 | +optparser.add_option('-o', '--output', |
2942 | + help='Output file', dest='output') |
2943 | +optparser.add_option('--trend-start', type='int', |
2944 | + help='Explicitly set start of trend line', dest='trendstart') |
2945 | +optparser.add_option('-u', '--user', |
2946 | + help='Run for this user', dest='user') |
2947 | +optparser.add_option('--only-weekdays', action='store_true', |
2948 | + help='Skip Saturdays and Sundays in the resulting graph', dest='only_weekdays') |
2949 | +optparser.add_option('--inverted', action='store_true', |
2950 | + help='Generate an inverted burndown chart', dest='inverted') |
2951 | +optparser.add_option('-s', '--start-date', |
2952 | + help='Explicitly set the start date of the burndown data', dest='start_date') |
2953 | +optparser.add_option('-e', '--end-date', |
2954 | + help='Explicitly set the end date of the burndown data', dest='end_date') |
2955 | +optparser.add_option('--no-foreign', action='store_true', default=False, |
2956 | + help='Do not show foreign totals separate', dest='noforeign') |
2957 | +optparser.add_option('--group', |
2958 | + help='Run for this group', dest='group') |
2959 | +optparser.add_option('--date', |
2960 | + help='Run for this date', dest='date') |
2961 | + |
2962 | +(opts, args) = optparser.parse_args() |
2963 | +if not opts.database: |
2964 | + optparser.error('No database given') |
2965 | +if not opts.output: |
2966 | + optparser.error('No output file given') |
2967 | + |
2968 | +if opts.user and opts.team: |
2969 | + optparser.error('team and user options are mutually exclusive') |
2970 | +if opts.user and opts.group: |
2971 | + optparser.error('user and group options are mutually exclusive') |
2972 | +if opts.team and opts.group: |
2973 | + optparser.error('team and group options are mutually exclusive') |
2974 | +if opts.milestone and opts.date: |
2975 | + optparser.error('milestone and date options are mutually exclusive') |
2976 | + |
2977 | +# The typing allows polymorphic behavior |
2978 | +if opts.user: |
2979 | + opts.team = report_tools.user_string(opts.user) |
2980 | +elif opts.team: |
2981 | + opts.team = report_tools.team_string(opts.team) |
2982 | + |
2983 | +store = report_tools.get_store(opts.database) |
2984 | + |
2985 | +milestone_collection = None |
2986 | +if opts.milestone: |
2987 | + milestone_collection = report_tools.get_milestone(store, opts.milestone) |
2988 | +elif opts.date: |
2989 | + milestone_collection = report_tools.MilestoneGroup( |
2990 | + report_tools.date_to_python(opts.date)) |
2991 | + |
2992 | + |
2993 | +# get date -> state -> count mapping |
2994 | +data = report_tools.blueprints_over_time(store) |
2995 | + |
2996 | +if len(data) == 0: |
2997 | + print 'WARNING: no blueprints, not generating chart (team: %s, group: %s, due date: %s)' % ( |
2998 | + opts.team or 'all', opts.group or 'none', milestone_collection and milestone_collection.display_name or 'none') |
2999 | + sys.exit(0) |
3000 | + |
3001 | +# calculate start/end date if no dates are given |
3002 | +if opts.start_date is None: |
3003 | + start_date = sorted(data.keys())[0] |
3004 | +else: |
3005 | + start_date=opts.start_date |
3006 | + |
3007 | +if opts.end_date is None: |
3008 | + if milestone_collection is not None: |
3009 | + end_date = milestone_collection.due_date_str |
3010 | + else: |
3011 | + end_date=report_tools.milestone_due_date(store) |
3012 | +else: |
3013 | + end_date=opts.end_date |
3014 | + |
3015 | +if not start_date or not end_date or date_to_ordinal(start_date) > date_to_ordinal(end_date): |
3016 | + print 'WARNING: empty date range, not generating chart (team: %s, group: %s, due date: %s)' % ( |
3017 | + opts.team or 'all', opts.group or 'none', milestone_collection and milestone_collection.display_name or 'none') |
3018 | + sys.exit(0) |
3019 | + |
3020 | +# title |
3021 | +title = '/20all quarters' |
3022 | + |
3023 | +if milestone_collection is not None: |
3024 | + title += ' (%s)' % milestone_collection.name |
3025 | + |
3026 | +do_chart(data, start_date, end_date, opts.trendstart, title, opts.output, opts.only_weekdays, opts.inverted) |
3027 | |
3028 | === added file 'roadmap_health.py' |
3029 | --- roadmap_health.py 1970-01-01 00:00:00 +0000 |
3030 | +++ roadmap_health.py 2012-10-09 09:20:30 +0000 |
3031 | @@ -0,0 +1,102 @@ |
3032 | +from report_tools import ( |
3033 | + card_blueprints, |
3034 | + card_blueprints_by_status, |
3035 | +) |
3036 | + |
3037 | +card_health_checks = [] |
3038 | + |
3039 | + |
3040 | +def register_health_check(cls): |
3041 | + card_health_checks.append(cls) |
3042 | + return cls |
3043 | + |
3044 | + |
3045 | +class HealthCheck(object): |
3046 | + NOT_APPLICABLE = 'n/a' |
3047 | + OK = 'OK' |
3048 | + NOT_OK = 'Not OK' |
3049 | + name = 'Base check, not to be used' |
3050 | + |
3051 | + @classmethod |
3052 | + def applicable(cls, card, store=None): |
3053 | + raise NotImplementedError() |
3054 | + |
3055 | + @classmethod |
3056 | + def check(cls, card, store=None): |
3057 | + raise NotImplementedError() |
3058 | + |
3059 | + @classmethod |
3060 | + def execute(cls, card, store=None): |
3061 | + if cls.applicable(card, store): |
3062 | + if cls.check(card, store): |
3063 | + return cls.OK |
3064 | + else: |
3065 | + return cls.NOT_OK |
3066 | + else: |
3067 | + return cls.NOT_APPLICABLE |
3068 | + |
3069 | + |
3070 | +@register_health_check |
3071 | +class DescriptionHealthCheck(HealthCheck): |
3072 | + name = 'Has description' |
3073 | + |
3074 | + @classmethod |
3075 | + def applicable(cls, card, store=None): |
3076 | + return True |
3077 | + |
3078 | + @classmethod |
3079 | + def check(cls, card, store=None): |
3080 | + return card.description is not None |
3081 | + |
3082 | + |
3083 | +@register_health_check |
3084 | +class CriteriaHealthCheck(HealthCheck): |
3085 | + name = 'Has acceptance criteria' |
3086 | + |
3087 | + @classmethod |
3088 | + def applicable(cls, card, store=None): |
3089 | + return True |
3090 | + |
3091 | + @classmethod |
3092 | + def check(cls, card, store=None): |
3093 | + return card.acceptance_criteria is not None |
3094 | + |
3095 | + |
3096 | +@register_health_check |
3097 | +class BlueprintsHealthCheck(HealthCheck): |
3098 | + name = 'Has blueprints' |
3099 | + |
3100 | + @classmethod |
3101 | + def applicable(cls, card, store): |
3102 | + return card.status == 'Ready' |
3103 | + |
3104 | + @classmethod |
3105 | + def check(cls, card, store): |
3106 | + return len(card_blueprints(store, card.roadmap_id)) > 0 |
3107 | + |
3108 | + |
3109 | +@register_health_check |
3110 | +class BlueprintsBlockedHealthCheck(HealthCheck): |
3111 | + name = 'Has no Blocked blueprints' |
3112 | + |
3113 | + @classmethod |
3114 | + def applicable(cls, card, store): |
3115 | + return card.status != 'Ready' |
3116 | + |
3117 | + @classmethod |
3118 | + def check(cls, card, store): |
3119 | + blueprints = card_blueprints_by_status(store, card.roadmap_id) |
3120 | + return len(blueprints['Blocked']) == 0 |
3121 | + |
3122 | + |
3123 | +@register_health_check |
3124 | +class RoadmapIdHealthCheck(HealthCheck): |
3125 | + name = 'Has a roadmap id' |
3126 | + |
3127 | + @classmethod |
3128 | + def applicable(cls, card, store=None): |
3129 | + return True |
3130 | + |
3131 | + @classmethod |
3132 | + def check(cls, card, store=None): |
3133 | + return card.roadmap_id != '' |
3134 | |
3135 | === modified file 'templates/base.html' |
3136 | --- templates/base.html 2011-06-02 15:00:45 +0000 |
3137 | +++ templates/base.html 2012-10-09 09:20:30 +0000 |
3138 | @@ -127,6 +127,19 @@ |
3139 | |
3140 | }); |
3141 | </script> |
3142 | + <script type="text/javascript"> |
3143 | + |
3144 | + var _gaq = _gaq || []; |
3145 | + _gaq.push(['_setAccount', 'UA-16756069-4']); |
3146 | + _gaq.push(['_trackPageview']); |
3147 | + |
3148 | + (function() { |
3149 | + var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true; |
3150 | + ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js'; |
3151 | + var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s); |
3152 | + })(); |
3153 | + |
3154 | + </script> |
3155 | </head> |
3156 | |
3157 | ${next.body()} |
3158 | |
3159 | === modified file 'templates/body.html' |
3160 | --- templates/body.html 2011-05-27 20:07:25 +0000 |
3161 | +++ templates/body.html 2012-10-09 09:20:30 +0000 |
3162 | @@ -12,7 +12,7 @@ |
3163 | % if page_type == "overview": |
3164 | active |
3165 | % endif |
3166 | - " title="Overview" id="overview_nav"><a href="${util.url('')}">Overview</a></li> |
3167 | + " title="Roadmap" id="overview_nav"><a href="${root}../lane/">Roadmap</a></li> |
3168 | <li class="link |
3169 | % if page_type == "about": |
3170 | active |
3171 | @@ -39,6 +39,9 @@ |
3172 | <div id="content_pane"> |
3173 | <div id="main_content"> |
3174 | ${next.body()} |
3175 | + |
3176 | +<%namespace name="footer" file="footer.html"/> |
3177 | +${footer.body()} |
3178 | </div> |
3179 | </div> |
3180 | </div> |
3181 | |
3182 | === added file 'templates/footer.html' |
3183 | === added file 'templates/roadmap_card.html' |
3184 | --- templates/roadmap_card.html 1970-01-01 00:00:00 +0000 |
3185 | +++ templates/roadmap_card.html 2012-10-09 09:20:30 +0000 |
3186 | @@ -0,0 +1,71 @@ |
3187 | +<%inherit file="body.html"/> |
3188 | +<%namespace name="util" file="util.html"/> |
3189 | +<%namespace name="terminology" file="terminology.html"/> |
3190 | + |
3191 | +<%! |
3192 | +import report_tools |
3193 | +%> |
3194 | + |
3195 | +<%def name="title()"> |
3196 | +${card_title} |
3197 | +</%def> |
3198 | + |
3199 | +<h1>${title()}</h1> |
3200 | +<div style="float: right"> |
3201 | +<h3>Health check</h3> |
3202 | + |
3203 | +<table>${'<tr colspan="2"><td><font color="#FF0000"><b>Needs attention!</b></font></td></tr>' if not card.is_healthy else ''} |
3204 | +% for result in health_checks: |
3205 | + <tr ${'bgcolor="#FFAAAA"' if result['result'] == 'Not OK' else 'bgcolor="#FFFFFF"'}> |
3206 | + <td>${result['name']}</td> |
3207 | + <td>${result['result']}</td> |
3208 | + </tr> |
3209 | +% endfor |
3210 | +</table> |
3211 | +</div> |
3212 | +<h2>${card.status} in <a href="../lane/${lane}.html">${lane}</a></h2> |
3213 | +<p> |
3214 | +<ul> |
3215 | + <li>Card ID: <a href="${card.url}">${card.roadmap_id}</a> |
3216 | + <li>Sponsor: ${card.sponsor} |
3217 | + <li>Contact: ${card.contact} |
3218 | + <li>Priority: ${card.priority} |
3219 | + <li>Size: ${card.size} |
3220 | + <li>Team: ${card.team} |
3221 | +</ul> |
3222 | + |
3223 | +<div style="clear:both; text-align: center">Overall blueprint completion</div> |
3224 | +% if card_has_blueprints: |
3225 | +${util.roadmap_progress_bar(bp_status_totals)} |
3226 | +% else: |
3227 | +<center><i>Progress graph pending linked blueprints.</i></center> |
3228 | +% endif |
3229 | + |
3230 | +<h3>Description</h3> ${card.description if card.description is not None else '<i>No description could be found.</i>'} |
3231 | +<p><a href="${card.url}">Read the full description</a>. |
3232 | +<h3>Acceptance criteria</h3> ${card.acceptance_criteria if card.acceptance_criteria is not None else '<i>No acceptance criteria could be found.</i>'} |
3233 | +<p><a href="${card.url}">Read the full acceptance criteria</a>. |
3234 | +<p> |
3235 | +% if card_has_blueprints: |
3236 | +<table> |
3237 | +<thead> |
3238 | + <tr><th>Title</th> |
3239 | + <th>Assignee</th> |
3240 | + <th>Priority</th> |
3241 | + <th>Status</th> |
3242 | + <th>Expected milestone</th> |
3243 | + </tr> |
3244 | +</thead> |
3245 | +% for status in status_order: |
3246 | +% for bp in sorted(blueprints[status], key=lambda bp: bp.milestone_name): |
3247 | + <tr><td><a href="${bp.url}">${bp.name}</a></td> |
3248 | + <td>${bp.assignee_name}</td> |
3249 | + <td>${bp.priority}</td> |
3250 | + <td>${status} |
3251 | + <td>${bp.milestone_name}</td> |
3252 | + </tr> |
3253 | +% endfor |
3254 | +% endfor |
3255 | +</table> |
3256 | +% endif |
3257 | + |
3258 | |
3259 | === added file 'templates/roadmap_lane.html' |
3260 | --- templates/roadmap_lane.html 1970-01-01 00:00:00 +0000 |
3261 | +++ templates/roadmap_lane.html 2012-10-09 09:20:30 +0000 |
3262 | @@ -0,0 +1,60 @@ |
3263 | +<%inherit file="body.html"/> |
3264 | +<%namespace name="util" file="util.html"/> |
3265 | +<%namespace name="terminology" file="terminology.html"/> |
3266 | + |
3267 | +<%! |
3268 | +import report_tools |
3269 | +%> |
3270 | + |
3271 | +<%def name="title()"> |
3272 | +Progress for ${lane_title} |
3273 | +</%def> |
3274 | + |
3275 | +<p style="text-align: right; color: green; font-size: 13pt; float: right"> |
3276 | +Lane: <select name="laneselect" onchange="window.location=this.value;"> |
3277 | +% for lane in lanes: |
3278 | +<option value="${lane.name}.html"${' selected="selected"' if lane.name == lane_title else ''}>${lane.name}${' (current)' if lane.is_current else ''}</option> |
3279 | +% endfor |
3280 | +</select> |
3281 | +<h1>${title()}</h1> |
3282 | +${util.roadmap_progress_bar(bp_status_totals)} |
3283 | +<p> |
3284 | +<table width="100%"> |
3285 | +<thead><tr><th>Card</th><th>Status</th><th>Team</th><th>Priority</th><th>Blueprints</th><th>Health</th></tr></thead> |
3286 | +% for status in statuses: |
3287 | +% for card_dict in status['cards']: |
3288 | + <tr> |
3289 | + <td> |
3290 | + <a href="../card/${card_dict['card'].roadmap_id if card_dict['card'].roadmap_id != '' else card_dict['card'].card_id}.html">${card_dict['card'].name}</a> |
3291 | + </td> |
3292 | + <td>${status['name']}</td> |
3293 | + <td>${card_dict['card'].team}</td><td align=right>${card_dict['card'].priority}</td> |
3294 | + <td> |
3295 | + <div class="roadmap_wrap" title=" |
3296 | +% for bp_status in status_order: |
3297 | +${bp_status}: ${card_dict['bp_statuses'][bp_status]} |
3298 | +% endfor |
3299 | +"> |
3300 | +% for bp_status in status_order: |
3301 | + <div class="roadmap_value" style="width:${card_dict['bp_percentages'][bp_status]}%"> |
3302 | + <div class="${bp_status.replace(' ', '')}"> </div> |
3303 | + </div> |
3304 | +% endfor |
3305 | + </div> |
3306 | + </td> |
3307 | + <td> |
3308 | + ${'<font color="#FF0000">Needs attention!</font>' if not card_dict['card'].is_healthy else '' | n} |
3309 | + </td> |
3310 | +% endfor |
3311 | +% endfor |
3312 | +</table> |
3313 | + |
3314 | +% if chart_url != 'burndown.svg': |
3315 | +<!-- The cli option defaults to burndown.svg! :( --> |
3316 | +<div class="overview_graph"> |
3317 | +<h3>Blueprint progress</h3><p><a href="current_quarter.svg">(enlarge)</a></p> |
3318 | +<object |
3319 | + height="500" width="833" |
3320 | + data="current_quarter.svg" type="image/svg+xml">Blueprint progress</object> |
3321 | +</div> |
3322 | +% endif |
3323 | |
3324 | === modified file 'templates/util.html' |
3325 | --- templates/util.html 2011-06-01 20:55:59 +0000 |
3326 | +++ templates/util.html 2012-10-09 09:20:30 +0000 |
3327 | @@ -23,6 +23,15 @@ |
3328 | </div> |
3329 | </%def> |
3330 | |
3331 | +<%def name="roadmap_progress_bar(item)"> |
3332 | +<div class="roadmap_wrap" title="${item['Completed']} blueprints complete of ${item['Total']}"> |
3333 | + <div class="roadmap_value" style="width:${item['Percentage']}%"> |
3334 | + <div class="Completed"> </div> |
3335 | + </div> |
3336 | + <div class="roadmap_progress_text">${item['Percentage']} % complete of ${item['Total']}</div> |
3337 | +</div> |
3338 | +</%def> |
3339 | + |
3340 | <%def name="url(end)">${root}${end}</%def> |
3341 | |
3342 | <%def name="burndown_chart(chart_url, large=False)"> |
3343 | |
3344 | === modified file 'tests.py' |
3345 | --- tests.py 2011-06-01 03:36:17 +0000 |
3346 | +++ tests.py 2012-10-09 09:20:30 +0000 |
3347 | @@ -168,6 +168,7 @@ |
3348 | loader = TestLoader() |
3349 | suite = loader.loadTestsFromName(__name__) |
3350 | suite.addTests(loader.loadTestsFromName("lpworkitems.tests.test_collect")) |
3351 | + suite.addTests(loader.loadTestsFromName("lpworkitems.tests.test_collect_roadmap")) |
3352 | suite.addTests(loader.loadTestsFromName("lpworkitems.tests.test_error_collector")) |
3353 | suite.addTests(loader.loadTestsFromName("lpworkitems.tests.test_factory")) |
3354 | suite.addTests(loader.loadTestsFromName("lpworkitems.tests.test_fake_launchpad")) |
3355 | |
3356 | === added file 'themes/linaro/templates/footer.html' |
3357 | --- themes/linaro/templates/footer.html 1970-01-01 00:00:00 +0000 |
3358 | +++ themes/linaro/templates/footer.html 2012-10-09 09:20:30 +0000 |
3359 | @@ -0,0 +1,10 @@ |
3360 | +<%! |
3361 | + import datetime |
3362 | +%> |
3363 | +<div id="footer"> |
3364 | + Last updated: ${datetime.datetime.utcnow().strftime("%a %d %B %Y, %H:%M UTC")} | |
3365 | + <a href="https://bugs.launchpad.net/launchpad-work-items-tracker">Bugs</a> | |
3366 | + <a href="https://code.launchpad.net/~linaro-infrastructure/launchpad-work-items-tracker/linaro">Code</a> | |
3367 | + <a href="https://code.launchpad.net/~linaro-infrastructure/launchpad-work-items-tracker/linaro-config">Config</a> | |
3368 | + <a href="/update.log.txt">Update log</a> (<a href="/update.log.txt.1">yesterday</a>) |
3369 | +</div> |
3370 | |
3371 | === added file 'utils.py' |
3372 | --- utils.py 1970-01-01 00:00:00 +0000 |
3373 | +++ utils.py 2012-10-09 09:20:30 +0000 |
3374 | @@ -0,0 +1,6 @@ |
3375 | +def unicode_or_None(attr): |
3376 | + if attr is None: |
3377 | + return attr |
3378 | + if isinstance(attr, unicode): |
3379 | + return attr |
3380 | + return attr.decode("utf-8") |