Merge lp:~milo/launchpad-work-items-tracker/bug996948 into lp:launchpad-work-items-tracker
- bug996948
- Merge into trunk
Status: | Superseded |
---|---|
Proposed branch: | lp:~milo/launchpad-work-items-tracker/bug996948 |
Merge into: | lp:launchpad-work-items-tracker |
Diff against target: |
3380 lines (+2497/-125) (has conflicts) 32 files modified
all-projects (+64/-2) burndown-chart (+5/-0) collect (+160/-66) collect_jira (+229/-0) collect_roadmap (+301/-0) css/status.css (+58/-0) generate-all (+53/-14) html-report (+102/-0) jira.py (+55/-0) kanban-papyrs-to-jira (+397/-0) lpworkitems/collect.py (+20/-20) lpworkitems/collect_roadmap.py (+71/-0) lpworkitems/database.py (+74/-2) lpworkitems/error_collector.py (+10/-0) lpworkitems/factory.py (+32/-1) lpworkitems/models.py (+32/-14) lpworkitems/models_roadmap.py (+60/-0) lpworkitems/tests/test_collect.py (+0/-1) lpworkitems/tests/test_collect_roadmap.py (+69/-0) lpworkitems/tests/test_factory.py (+1/-1) lpworkitems/tests/test_models.py (+17/-2) report_tools.py (+162/-1) roadmap-bp-chart (+249/-0) roadmap_health.py (+102/-0) templates/base.html (+13/-0) templates/body.html (+4/-1) templates/roadmap_card.html (+71/-0) templates/roadmap_lane.html (+60/-0) templates/util.html (+9/-0) tests.py (+1/-0) themes/linaro/templates/footer.html (+10/-0) utils.py (+6/-0) Text conflict in collect Text conflict in report_tools.py |
To merge this branch: | bzr merge lp:~milo/launchpad-work-items-tracker/bug996948 |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Linaro Infrastructure | Pending | ||
Review via email: mp+128653@code.launchpad.net |
This proposal has been superseded by a proposal from 2012-10-09.
Commit message
Description of the change
Merge proposal in order to fix the integrity error we receive from status.l.o.
Changes done:
- catch the integrity error and print a warning with the name of the duplicated Blueprint
- skip the Blueprint and continue; otherwise the script will fail later in the code due to a variable not being set
- 344. By Milo Casagrande
-
Used error logger, added URL to Blueprint.
Unmerged revisions
- 344. By Milo Casagrande
-
Used error logger, added URL to Blueprint.
- 343. By Milo Casagrande
-
Catch integrity error, and print warning.
- 342. By James Tunnicliffe
-
Removed template-driven escaping from some pre-defined HTML so it renders correctly
- 341. By James Tunnicliffe
-
Fixed errors parsing meta values containing a colon
- 340. By Milo Casagrande
-
Merged lp:~lool/launchpad-work-items-tracker/fix-line-wrap-breakage.
- 339. By Milo Casagrande
- 338. By Данило Шеган
-
Merge HTML escaping fix from trunk r301 (by Martin Pitt).
- 337. By Данило Шеган
-
Fix problem with graph generation for milestones in the far future. Patch by Loic.
- 336. By Milo Casagrande
- 335. By James Tunnicliffe
-
Merge in migration tools
Preview Diff
1 | === modified file 'all-projects' | |||
2 | --- all-projects 2012-08-27 15:32:43 +0000 | |||
3 | +++ all-projects 2012-10-09 09:20:30 +0000 | |||
4 | @@ -14,9 +14,27 @@ | |||
5 | 14 | import sys | 14 | import sys |
6 | 15 | import datetime | 15 | import datetime |
7 | 16 | 16 | ||
8 | 17 | import report_tools | ||
9 | 18 | |||
10 | 17 | 19 | ||
11 | 18 | def collect(source_dir, db_file, config_file, extra_args): | 20 | def collect(source_dir, db_file, config_file, extra_args): |
13 | 19 | args = [os.path.join(source_dir, "collect")] | 21 | return run_collect_script(source_dir, db_file, config_file, extra_args, |
14 | 22 | "collect") | ||
15 | 23 | |||
16 | 24 | |||
17 | 25 | def collect_jira(source_dir, db_file, config_file, extra_args): | ||
18 | 26 | return run_collect_script(source_dir, db_file, config_file, extra_args, | ||
19 | 27 | "collect_jira") | ||
20 | 28 | |||
21 | 29 | |||
22 | 30 | def collect_roadmap(source_dir, db_file, config_file, extra_args): | ||
23 | 31 | return run_collect_script(source_dir, db_file, config_file, extra_args, | ||
24 | 32 | "collect_roadmap") | ||
25 | 33 | |||
26 | 34 | |||
27 | 35 | def run_collect_script(source_dir, db_file, config_file, extra_args, | ||
28 | 36 | collect_script): | ||
29 | 37 | args = [os.path.join(source_dir, collect_script)] | ||
30 | 20 | args.extend(["-d", db_file]) | 38 | args.extend(["-d", db_file]) |
31 | 21 | args.extend(["-c", config_file]) | 39 | args.extend(["-c", config_file]) |
32 | 22 | args += extra_args | 40 | args += extra_args |
33 | @@ -56,9 +74,14 @@ | |||
34 | 56 | 74 | ||
35 | 57 | 75 | ||
36 | 58 | def main(): | 76 | def main(): |
38 | 59 | parser = optparse.OptionParser(usage="%prog <database dir> <www root dir> [www root url]") | 77 | parser = optparse.OptionParser( |
39 | 78 | usage="%prog <database dir> <www root dir> [www root url]") | ||
40 | 60 | parser.add_option("--config-dir", dest="config_dir", default="config") | 79 | parser.add_option("--config-dir", dest="config_dir", default="config") |
41 | 80 | parser.add_option("--roadmap-config-file", dest="roadmap_config_file", | ||
42 | 81 | default="roadmap-config") | ||
43 | 61 | parser.add_option("--debug", dest="debug", action="store_true") | 82 | parser.add_option("--debug", dest="debug", action="store_true") |
44 | 83 | parser.add_option("--kanban-token-file", dest="kanban_token_file") | ||
45 | 84 | parser.add_option("--papyrs-token-file", dest="papyrs_token_file") | ||
46 | 62 | opts, args = parser.parse_args() | 85 | opts, args = parser.parse_args() |
47 | 63 | 86 | ||
48 | 64 | if os.environ.get("DEBUG", None) is not None: | 87 | if os.environ.get("DEBUG", None) is not None: |
49 | @@ -88,6 +111,11 @@ | |||
50 | 88 | os.path.join(source_dir, opts.config_dir, "*%s" % valid_config_suffix)) | 111 | os.path.join(source_dir, opts.config_dir, "*%s" % valid_config_suffix)) |
51 | 89 | 112 | ||
52 | 90 | for config_file in filenames: | 113 | for config_file in filenames: |
53 | 114 | # read roadmap config to find where to get cards from | ||
54 | 115 | cfg = report_tools.load_config(config_file) | ||
55 | 116 | # default to kanbantool | ||
56 | 117 | cards_source = cfg.get('cards_source', 'kanban') | ||
57 | 118 | |||
58 | 91 | project_name = os.path.basename(config_file)[:-len(valid_config_suffix)] | 119 | project_name = os.path.basename(config_file)[:-len(valid_config_suffix)] |
59 | 92 | project_output_dir = os.path.join(output_dir, project_name) | 120 | project_output_dir = os.path.join(output_dir, project_name) |
60 | 93 | db_file = os.path.join(db_dir, "%s.db" % project_name) | 121 | db_file = os.path.join(db_dir, "%s.db" % project_name) |
61 | @@ -111,6 +139,40 @@ | |||
62 | 111 | if not collect(source_dir, db_file, config_file, extra_collect_args): | 139 | if not collect(source_dir, db_file, config_file, extra_collect_args): |
63 | 112 | sys.stderr.write("collect failed for %s" % project_name) | 140 | sys.stderr.write("collect failed for %s" % project_name) |
64 | 113 | continue | 141 | continue |
65 | 142 | |||
66 | 143 | if cards_source == 'jira': | ||
67 | 144 | extra_collect_jira_args = [] | ||
68 | 145 | if not collect_jira(source_dir, db_file, opts.roadmap_config_file, | ||
69 | 146 | extra_collect_jira_args): | ||
70 | 147 | sys.stderr.write("collect_jira failed for %s" % project_name) | ||
71 | 148 | continue | ||
72 | 149 | elif cards_source == 'kanban': | ||
73 | 150 | extra_collect_roadmap_args = [] | ||
74 | 151 | extra_collect_roadmap_args.extend(["--board", '10721']) | ||
75 | 152 | if opts.kanban_token_file is not None: | ||
76 | 153 | with open(opts.kanban_token_file) as token_file: | ||
77 | 154 | token = token_file.read() | ||
78 | 155 | extra_collect_roadmap_args.extend(["--kanbantoken", token]) | ||
79 | 156 | else: | ||
80 | 157 | sys.stderr.write("No Kanbantool API token given to " | ||
81 | 158 | "collect_roadmap for %s" % project_name) | ||
82 | 159 | if opts.papyrs_token_file is not None: | ||
83 | 160 | with open(opts.papyrs_token_file) as token_file: | ||
84 | 161 | token = token_file.read() | ||
85 | 162 | extra_collect_roadmap_args.extend(["--papyrstoken", token]) | ||
86 | 163 | else: | ||
87 | 164 | sys.stderr.write("No Papyrs API token given to " | ||
88 | 165 | "collect_roadmap for %s" % project_name) | ||
89 | 166 | |||
90 | 167 | if not collect_roadmap(source_dir, db_file, | ||
91 | 168 | opts.roadmap_config_file, | ||
92 | 169 | extra_collect_roadmap_args): | ||
93 | 170 | sys.stderr.write("collect_roadmap failed for %s" % | ||
94 | 171 | project_name) | ||
95 | 172 | else: | ||
96 | 173 | sys.stderr.write("Unknown cards source %s" % cards_source) | ||
97 | 174 | continue | ||
98 | 175 | |||
99 | 114 | publish_new_db(project_name, project_output_dir, db_file) | 176 | publish_new_db(project_name, project_output_dir, db_file) |
100 | 115 | generate_reports(project_output_dir, config_file, db_file, | 177 | generate_reports(project_output_dir, config_file, db_file, |
101 | 116 | source_dir, extra_generate_args, debug=opts.debug) | 178 | source_dir, extra_generate_args, debug=opts.debug) |
102 | 117 | 179 | ||
103 | === modified file 'burndown-chart' | |||
104 | --- burndown-chart 2012-08-24 05:29:13 +0000 | |||
105 | +++ burndown-chart 2012-10-09 09:20:30 +0000 | |||
106 | @@ -312,6 +312,11 @@ | |||
107 | 312 | opts.team or 'all', opts.group or 'none', milestone_collection and milestone_collection.display_name or 'none') | 312 | opts.team or 'all', opts.group or 'none', milestone_collection and milestone_collection.display_name or 'none') |
108 | 313 | sys.exit(0) | 313 | sys.exit(0) |
109 | 314 | 314 | ||
110 | 315 | if date_to_ordinal(end_date) - date_to_ordinal(start_date) > 366: | ||
111 | 316 | print 'WARNING: date range %s - %s is over a year, not generating a chart' % ( | ||
112 | 317 | start_date, end_date) | ||
113 | 318 | sys.exit(0) | ||
114 | 319 | |||
115 | 315 | # title | 320 | # title |
116 | 316 | if opts.team: | 321 | if opts.team: |
117 | 317 | title = '/20' + opts.team | 322 | title = '/20' + opts.team |
118 | 318 | 323 | ||
119 | === modified file 'collect' | |||
120 | --- collect 2012-07-11 22:36:56 +0000 | |||
121 | +++ collect 2012-10-09 09:20:30 +0000 | |||
122 | @@ -6,13 +6,27 @@ | |||
123 | 6 | # Copyright (C) 2010, 2011 Canonical Ltd. | 6 | # Copyright (C) 2010, 2011 Canonical Ltd. |
124 | 7 | # License: GPL-3 | 7 | # License: GPL-3 |
125 | 8 | 8 | ||
126 | 9 | import urllib, re, sys, optparse, smtplib, pwd, os, urlparse | ||
127 | 10 | import logging | 9 | import logging |
128 | 10 | import optparse | ||
129 | 11 | import os | ||
130 | 12 | import pwd | ||
131 | 13 | import re | ||
132 | 14 | import smtplib | ||
133 | 15 | import sys | ||
134 | 16 | import urllib | ||
135 | 17 | import urlparse | ||
136 | 18 | import sqlite3 | ||
137 | 19 | |||
138 | 11 | from email.mime.text import MIMEText | 20 | from email.mime.text import MIMEText |
139 | 12 | 21 | ||
140 | 13 | from launchpadlib.launchpad import Launchpad, EDGE_SERVICE_ROOT | 22 | from launchpadlib.launchpad import Launchpad, EDGE_SERVICE_ROOT |
141 | 14 | 23 | ||
143 | 15 | from lpworkitems.collect import CollectorStore, PersonCache, WorkitemParser, bug_wi_states | 24 | from lpworkitems.collect import ( |
144 | 25 | CollectorStore, | ||
145 | 26 | PersonCache, | ||
146 | 27 | WorkitemParser, | ||
147 | 28 | bug_wi_states, | ||
148 | 29 | ) | ||
149 | 16 | from lpworkitems.database import get_store | 30 | from lpworkitems.database import get_store |
150 | 17 | from lpworkitems.error_collector import ( | 31 | from lpworkitems.error_collector import ( |
151 | 18 | BlueprintURLError, | 32 | BlueprintURLError, |
152 | @@ -26,6 +40,7 @@ | |||
153 | 26 | TeamParticipation, | 40 | TeamParticipation, |
154 | 27 | Workitem, | 41 | Workitem, |
155 | 28 | ) | 42 | ) |
156 | 43 | from utils import unicode_or_None | ||
157 | 29 | import report_tools | 44 | import report_tools |
158 | 30 | 45 | ||
159 | 31 | 46 | ||
160 | @@ -58,20 +73,16 @@ | |||
161 | 58 | """Get a link to the Launchpad API object on the website.""" | 73 | """Get a link to the Launchpad API object on the website.""" |
162 | 59 | api_link = item.self_link | 74 | api_link = item.self_link |
163 | 60 | parts = urlparse.urlparse(api_link) | 75 | parts = urlparse.urlparse(api_link) |
165 | 61 | link = parts.scheme + "://" + parts.netloc.replace("api.", "") + "/" + parts.path.split("/", 2)[2] | 76 | link = parts.scheme + "://" + parts.netloc.replace("api.", "") + \ |
166 | 77 | "/" + parts.path.split("/", 2)[2] | ||
167 | 62 | return link.decode("utf-8") | 78 | return link.decode("utf-8") |
168 | 63 | 79 | ||
169 | 64 | 80 | ||
170 | 65 | def unicode_or_None(attr): | ||
171 | 66 | if attr is None: | ||
172 | 67 | return attr | ||
173 | 68 | if isinstance(attr, unicode): | ||
174 | 69 | return attr | ||
175 | 70 | return attr.decode("utf-8") | ||
176 | 71 | |||
177 | 72 | |||
178 | 73 | import simplejson | 81 | import simplejson |
179 | 82 | |||
180 | 74 | _orig_loads = simplejson.loads | 83 | _orig_loads = simplejson.loads |
181 | 84 | |||
182 | 85 | |||
183 | 75 | def loads(something): | 86 | def loads(something): |
184 | 76 | return _orig_loads(unicode_or_None(something)) | 87 | return _orig_loads(unicode_or_None(something)) |
185 | 77 | simplejson.loads = loads | 88 | simplejson.loads = loads |
186 | @@ -96,7 +107,10 @@ | |||
187 | 96 | ''' | 107 | ''' |
188 | 97 | model_bp = Blueprint.from_launchpad(bp) | 108 | model_bp = Blueprint.from_launchpad(bp) |
189 | 98 | if model_bp.milestone_name not in collector.valid_milestone_names(): | 109 | if model_bp.milestone_name not in collector.valid_milestone_names(): |
191 | 99 | data_error(web_link(bp), 'milestone "%s" is unknown/invalid' % model_bp.milestone_name, True) | 110 | data_error( |
192 | 111 | web_link(bp), | ||
193 | 112 | 'milestone "%s" is unknown/invalid' % model_bp.milestone_name, | ||
194 | 113 | True) | ||
195 | 100 | model_bp = collector.store_blueprint(model_bp) | 114 | model_bp = collector.store_blueprint(model_bp) |
196 | 101 | if model_bp: | 115 | if model_bp: |
197 | 102 | dbg('lp_import_blueprint: added blueprint: %s' % bp.name) | 116 | dbg('lp_import_blueprint: added blueprint: %s' % bp.name) |
198 | @@ -116,7 +130,10 @@ | |||
199 | 116 | model_group = BlueprintGroup.from_launchpad(bp) | 130 | model_group = BlueprintGroup.from_launchpad(bp) |
200 | 117 | model_group.area = area | 131 | model_group.area = area |
201 | 118 | if model_group.milestone_name not in collector.valid_milestone_names(): | 132 | if model_group.milestone_name not in collector.valid_milestone_names(): |
203 | 119 | data_error(web_link(bp), 'milestone "%s" is unknown/invalid' % model_group.milestone, True) | 133 | data_error( |
204 | 134 | web_link(bp), | ||
205 | 135 | 'milestone "%s" is unknown/invalid' % model_group.milestone, | ||
206 | 136 | True) | ||
207 | 120 | 137 | ||
208 | 121 | model_group = collector.store_blueprint_group(model_group) | 138 | model_group = collector.store_blueprint_group(model_group) |
209 | 122 | if model_group is None: | 139 | if model_group is None: |
210 | @@ -126,9 +143,9 @@ | |||
211 | 126 | add_dependencies(collector, model_group.name, deps) | 143 | add_dependencies(collector, model_group.name, deps) |
212 | 127 | return model_group | 144 | return model_group |
213 | 128 | 145 | ||
214 | 146 | |||
215 | 129 | def parse_meta_item(collector, line, bp_name): | 147 | def parse_meta_item(collector, line, bp_name): |
216 | 130 | '''Parse a meta information line from a blueprint | 148 | '''Parse a meta information line from a blueprint |
217 | 131 | |||
218 | 132 | ''' | 149 | ''' |
219 | 133 | 150 | ||
220 | 134 | line = line.strip() | 151 | line = line.strip() |
221 | @@ -136,18 +153,19 @@ | |||
222 | 136 | return | 153 | return |
223 | 137 | 154 | ||
224 | 138 | try: | 155 | try: |
227 | 139 | (key, value) = line.rsplit(':', 1) | 156 | (key, value) = line.split(':', 1) |
228 | 140 | key = key.strip() | 157 | key = key.strip() |
229 | 141 | value = value.strip() | 158 | value = value.strip() |
230 | 142 | except ValueError: | 159 | except ValueError: |
231 | 143 | dbg("\tMeta line '%s' can not be parsed" % line) | 160 | dbg("\tMeta line '%s' can not be parsed" % line) |
232 | 144 | return | 161 | return |
233 | 145 | 162 | ||
235 | 146 | dbg( "\t\tMeta for %s: key='%s' value='%s'" % (bp_name, key, value) ) | 163 | dbg("\t\tMeta for %s: key='%s' value='%s'" % (bp_name, key, value)) |
236 | 147 | collector.store_meta(key, value, bp_name) | 164 | collector.store_meta(key, value, bp_name) |
237 | 148 | 165 | ||
238 | 149 | 166 | ||
240 | 150 | def parse_complexity_item(collector, line, bp_name, bp_url, def_milestone, def_assignee): | 167 | def parse_complexity_item(collector, line, bp_name, bp_url, def_milestone, |
241 | 168 | def_assignee): | ||
242 | 151 | line = line.strip() | 169 | line = line.strip() |
243 | 152 | # remove special characters people tend to type | 170 | # remove special characters people tend to type |
244 | 153 | line = re.sub('[^\w -.]', '', line) | 171 | line = re.sub('[^\w -.]', '', line) |
245 | @@ -156,9 +174,9 @@ | |||
246 | 156 | 174 | ||
247 | 157 | dbg("\tParsing complexity line '%s'" % line) | 175 | dbg("\tParsing complexity line '%s'" % line) |
248 | 158 | 176 | ||
252 | 159 | num = None | 177 | num = None |
253 | 160 | milestone = None | 178 | milestone = None |
254 | 161 | assignee = None | 179 | assignee = None |
255 | 162 | 180 | ||
256 | 163 | try: | 181 | try: |
257 | 164 | complexity_list = line.split() | 182 | complexity_list = line.split() |
258 | @@ -176,7 +194,9 @@ | |||
259 | 176 | dbg('\tComplexity: %s MS: %s Who: %s' % (num, milestone, assignee)) | 194 | dbg('\tComplexity: %s MS: %s Who: %s' % (num, milestone, assignee)) |
260 | 177 | collector.store_complexity(assignee, num, milestone, bp_name) | 195 | collector.store_complexity(assignee, num, milestone, bp_name) |
261 | 178 | except ValueError: | 196 | except ValueError: |
263 | 179 | data_error(bp_url, "\tComplexity line '%s' could not be parsed %s" % (line, ValueError)) | 197 | data_error(bp_url, |
264 | 198 | "\tComplexity line '%s' could not be parsed %s" % | ||
265 | 199 | (line, ValueError)) | ||
266 | 180 | 200 | ||
267 | 181 | 201 | ||
268 | 182 | def milestone_extract(text, valid_milestones): | 202 | def milestone_extract(text, valid_milestones): |
269 | @@ -188,7 +208,9 @@ | |||
270 | 188 | return word | 208 | return word |
271 | 189 | return None | 209 | return None |
272 | 190 | 210 | ||
274 | 191 | def lp_import_blueprint_workitems(collector, bp, distro_release, people_cache=None, projects=None): | 211 | |
275 | 212 | def lp_import_blueprint_workitems(collector, bp, distro_release, | ||
276 | 213 | people_cache=None, projects=None): | ||
277 | 192 | '''Collect work items from a Launchpad blueprint. | 214 | '''Collect work items from a Launchpad blueprint. |
278 | 193 | 215 | ||
279 | 194 | This includes work items from the whiteboard as well as linked bugs. | 216 | This includes work items from the whiteboard as well as linked bugs. |
280 | @@ -202,17 +224,26 @@ | |||
281 | 202 | in_complexity_block = False | 224 | in_complexity_block = False |
282 | 203 | work_items = [] | 225 | work_items = [] |
283 | 204 | 226 | ||
289 | 205 | model_bp = collector.store.find( | 227 | try: |
290 | 206 | Blueprint, Blueprint.name == bp.name).one() | 228 | model_bp = collector.store.find(Blueprint, |
291 | 207 | assert model_bp is not None, "Asked to process workitems of %s when it is not in the db" % bp.name | 229 | Blueprint.name == bp.name).one() |
292 | 208 | 230 | except sqlite3.IntegrityError: | |
293 | 209 | dbg('lp_import_blueprint_workitems(): processing %s (spec milestone: %s, spec assignee: %s, spec implementation: %s)' % ( | 231 | logger.warn('Duplicated Blueprint found: %s. It will not be ' |
294 | 232 | 'considered.' % bp.name) | ||
295 | 233 | return | ||
296 | 234 | |||
297 | 235 | assert model_bp is not None, \ | ||
298 | 236 | "Asked to process workitems of %s when it is not in the db" % bp.name | ||
299 | 237 | |||
300 | 238 | dbg('lp_import_blueprint_workitems(): processing %s (spec milestone: %s,' \ | ||
301 | 239 | ' spec assignee: %s, spec implementation: %s)' % ( | ||
302 | 210 | bp.name, model_bp.milestone_name, model_bp.assignee_name, | 240 | bp.name, model_bp.milestone_name, model_bp.assignee_name, |
303 | 211 | model_bp.implementation)) | 241 | model_bp.implementation)) |
304 | 212 | 242 | ||
305 | 213 | valid_milestones = collector.valid_milestone_names() | 243 | valid_milestones = collector.valid_milestone_names() |
306 | 214 | global error_collector | 244 | global error_collector |
307 | 215 | parser = WorkitemParser( | 245 | parser = WorkitemParser( |
308 | 246 | <<<<<<< TREE | ||
309 | 216 | model_bp, model_bp.milestone_name, collector.lp, people_cache=people_cache, | 247 | model_bp, model_bp.milestone_name, collector.lp, people_cache=people_cache, |
310 | 217 | error_collector=error_collector) | 248 | error_collector=error_collector) |
311 | 218 | 249 | ||
312 | @@ -228,19 +259,43 @@ | |||
313 | 228 | if workitems_text: | 259 | if workitems_text: |
314 | 229 | for l in workitems_text.splitlines(): | 260 | for l in workitems_text.splitlines(): |
315 | 230 | if not in_workitems_block: | 261 | if not in_workitems_block: |
316 | 262 | ======= | ||
317 | 263 | model_bp, model_bp.milestone_name, collector.lp, | ||
318 | 264 | people_cache=people_cache, error_collector=error_collector) | ||
319 | 265 | |||
320 | 266 | # Get work items from both the whiteboard and the new workitems_text | ||
321 | 267 | # property. Once the migration is completed and nobody's using the | ||
322 | 268 | # whiteboard for work items we can change this to pull work items only | ||
323 | 269 | # from bp.workitems_text. | ||
324 | 270 | workitems_text = bp.whiteboard | ||
325 | 271 | if workitems_text is None: | ||
326 | 272 | workitems_text = '' | ||
327 | 273 | if getattr(bp, 'workitems_text', '') != '': | ||
328 | 274 | workitems_text += "\n" + bp.workitems_text | ||
329 | 275 | if workitems_text: | ||
330 | 276 | for l in workitems_text.splitlines(): | ||
331 | 277 | if not in_workitems_block: | ||
332 | 278 | >>>>>>> MERGE-SOURCE | ||
333 | 231 | m = work_items_re.search(l) | 279 | m = work_items_re.search(l) |
334 | 232 | if m: | 280 | if m: |
335 | 233 | in_workitems_block = True | 281 | in_workitems_block = True |
337 | 234 | dbg('lp_import_blueprint_workitems(): starting work items block at ' + l) | 282 | dbg('lp_import_blueprint_workitems():' |
338 | 283 | ' starting work items block at ' + l) | ||
339 | 235 | milestone = milestone_extract(m.group(1), valid_milestones) | 284 | milestone = milestone_extract(m.group(1), valid_milestones) |
340 | 236 | dbg(' ... setting milestone to ' + str(milestone)) | 285 | dbg(' ... setting milestone to ' + str(milestone)) |
341 | 286 | <<<<<<< TREE | ||
342 | 237 | parser.milestone_name = milestone or parser.blueprint.milestone_name | 287 | parser.milestone_name = milestone or parser.blueprint.milestone_name |
343 | 288 | ======= | ||
344 | 289 | parser.milestone_name = \ | ||
345 | 290 | milestone or parser.blueprint.milestone_name | ||
346 | 291 | >>>>>>> MERGE-SOURCE | ||
347 | 238 | continue | 292 | continue |
348 | 239 | 293 | ||
349 | 240 | if in_workitems_block: | 294 | if in_workitems_block: |
350 | 241 | dbg("\tworkitem (raw): '%s'" % (l.strip())) | 295 | dbg("\tworkitem (raw): '%s'" % (l.strip())) |
351 | 242 | if not l.strip(): | 296 | if not l.strip(): |
353 | 243 | dbg('lp_import_blueprint_workitems(): closing work items block with line: ' + l) | 297 | dbg('lp_import_blueprint_workitems():' |
354 | 298 | ' closing work items block with line: ' + l) | ||
355 | 244 | in_workitems_block = False | 299 | in_workitems_block = False |
356 | 245 | parser.milestone_name = parser.blueprint.milestone_name | 300 | parser.milestone_name = parser.blueprint.milestone_name |
357 | 246 | workitem = parser.parse_blueprint_workitem(l) | 301 | workitem = parser.parse_blueprint_workitem(l) |
358 | @@ -308,7 +363,8 @@ | |||
359 | 308 | member.name, team.name) | 363 | member.name, team.name) |
360 | 309 | if recursive or team.name in cfg.get('recursive_teams', []): | 364 | if recursive or team.name in cfg.get('recursive_teams', []): |
361 | 310 | _import_teams_recurse( | 365 | _import_teams_recurse( |
363 | 311 | collector, cfg, member, top_level_team_names + [member.name], | 366 | collector, cfg, member, |
364 | 367 | top_level_team_names + [member.name], | ||
365 | 312 | people_cache=people_cache, recursive=True) | 368 | people_cache=people_cache, recursive=True) |
366 | 313 | 369 | ||
367 | 314 | 370 | ||
368 | @@ -346,12 +402,14 @@ | |||
369 | 346 | blueprint.status = lp_project.summary or name | 402 | blueprint.status = lp_project.summary or name |
370 | 347 | collector.store_blueprint(blueprint) | 403 | collector.store_blueprint(blueprint) |
371 | 348 | 404 | ||
373 | 349 | for task in lp_project.searchTasks(status=bug_wi_states.keys(), **cfg['work_item_bugs']): | 405 | for task in lp_project.searchTasks(status=bug_wi_states.keys(), |
374 | 406 | **cfg['work_item_bugs']): | ||
375 | 350 | id = task.self_link.split('/')[-1] | 407 | id = task.self_link.split('/')[-1] |
376 | 351 | title = task.title.split('"', 1)[1].rstrip('"') | 408 | title = task.title.split('"', 1)[1].rstrip('"') |
377 | 352 | state = bug_wi_states[task.status] | 409 | state = bug_wi_states[task.status] |
378 | 353 | if state is None: | 410 | if state is None: |
380 | 354 | dbg('lp_import_bug_workitems: ignoring #%s: %s (status: %s)' % (id, title, task.status)) | 411 | dbg('lp_import_bug_workitems: ignoring #%s: %s (status: %s)' % ( |
381 | 412 | id, title, task.status)) | ||
382 | 355 | continue | 413 | continue |
383 | 356 | dbg('lp_import_bug_workitems: #%s: %s (%s)' % (id, title, state)) | 414 | dbg('lp_import_bug_workitems: #%s: %s (%s)' % (id, title, state)) |
384 | 357 | 415 | ||
385 | @@ -384,14 +442,17 @@ | |||
386 | 384 | milestones.extend([ms for ms in project.all_milestones]) | 442 | milestones.extend([ms for ms in project.all_milestones]) |
387 | 385 | 443 | ||
388 | 386 | if 'release' in cfg: | 444 | if 'release' in cfg: |
390 | 387 | lp_project = collector.lp.distributions['ubuntu'].getSeries(name_or_version=cfg['release']) | 445 | lp_project = collector.lp.distributions['ubuntu'].getSeries( |
391 | 446 | name_or_version=cfg['release']) | ||
392 | 388 | projects.append((lp_project, None)) | 447 | projects.append((lp_project, None)) |
393 | 389 | add_milestones(lp_project) | 448 | add_milestones(lp_project) |
394 | 390 | else: | 449 | else: |
396 | 391 | assert 'project' in cfg, 'Configuration needs to specify project or release' | 450 | assert 'project' in cfg, \ |
397 | 451 | 'Configuration needs to specify project or release' | ||
398 | 392 | lp_project = collector.lp.projects[cfg['project']] | 452 | lp_project = collector.lp.projects[cfg['project']] |
399 | 393 | if 'project_series' in cfg: | 453 | if 'project_series' in cfg: |
401 | 394 | lp_project_series = lp_project.getSeries(name=cfg['project_series']) | 454 | lp_project_series = lp_project.getSeries( |
402 | 455 | name=cfg['project_series']) | ||
403 | 395 | add_milestones(lp_project_series) | 456 | add_milestones(lp_project_series) |
404 | 396 | else: | 457 | else: |
405 | 397 | lp_project_series = None | 458 | lp_project_series = None |
406 | @@ -413,6 +474,10 @@ | |||
407 | 413 | if is_dict and extra_projects[extra_project_name] is not None: | 474 | if is_dict and extra_projects[extra_project_name] is not None: |
408 | 414 | extra_project_series = extra_project.getSeries( | 475 | extra_project_series = extra_project.getSeries( |
409 | 415 | name=extra_projects[extra_project_name]) | 476 | name=extra_projects[extra_project_name]) |
410 | 477 | if extra_project_series is None: | ||
411 | 478 | raise AssertionError( | ||
412 | 479 | "%s has no series named %s" | ||
413 | 480 | % (extra_project_name, extra_projects[extra_project_name])) | ||
414 | 416 | add_milestones(extra_project_series) | 481 | add_milestones(extra_project_series) |
415 | 417 | else: | 482 | else: |
416 | 418 | extra_project_series = None | 483 | extra_project_series = None |
417 | @@ -453,10 +518,12 @@ | |||
418 | 453 | 518 | ||
419 | 454 | for project, series in projects: | 519 | for project, series in projects: |
420 | 455 | # XXX: should this be valid_ or all_specifications? | 520 | # XXX: should this be valid_ or all_specifications? |
422 | 456 | project_spec_group_matcher = spec_group_matchers.get(project.name, None) | 521 | project_spec_group_matcher = spec_group_matchers.get(project.name, |
423 | 522 | None) | ||
424 | 457 | project_bps = project.valid_specifications | 523 | project_bps = project.valid_specifications |
425 | 458 | for bp in project_bps: | 524 | for bp in project_bps: |
427 | 459 | if name_pattern is not None and re.search(name_pattern, bp.name) is None: | 525 | if name_pattern is not None and \ |
428 | 526 | re.search(name_pattern, bp.name) is None: | ||
429 | 460 | continue | 527 | continue |
430 | 461 | if project_spec_group_matcher is not None: | 528 | if project_spec_group_matcher is not None: |
431 | 462 | match = re.search(project_spec_group_matcher, bp.name) | 529 | match = re.search(project_spec_group_matcher, bp.name) |
432 | @@ -471,7 +538,8 @@ | |||
433 | 471 | add_blueprint(bp) | 538 | add_blueprint(bp) |
434 | 472 | if series is not None: | 539 | if series is not None: |
435 | 473 | for bp in series.valid_specifications: | 540 | for bp in series.valid_specifications: |
437 | 474 | if name_pattern is not None and re.search(name_pattern, bp.name) is None: | 541 | if name_pattern is not None and \ |
438 | 542 | re.search(name_pattern, bp.name) is None: | ||
439 | 475 | continue | 543 | continue |
440 | 476 | if project_spec_group_matcher is not None: | 544 | if project_spec_group_matcher is not None: |
441 | 477 | match = re.search(project_spec_group_matcher, bp.name) | 545 | match = re.search(project_spec_group_matcher, bp.name) |
442 | @@ -492,7 +560,6 @@ | |||
443 | 492 | deps[possible_dep] = possible_deps[possible_dep] | 560 | deps[possible_dep] = possible_deps[possible_dep] |
444 | 493 | if deps: | 561 | if deps: |
445 | 494 | lp_import_spec_group(collector, spec_group, area, deps) | 562 | lp_import_spec_group(collector, spec_group, area, deps) |
446 | 495 | |||
447 | 496 | lp_import_bug_workitems(lp_project, collector, cfg) | 563 | lp_import_bug_workitems(lp_project, collector, cfg) |
448 | 497 | 564 | ||
449 | 498 | 565 | ||
450 | @@ -540,7 +607,8 @@ | |||
451 | 540 | if in_section: | 607 | if in_section: |
452 | 541 | result.append([name, status, section]) | 608 | result.append([name, status, section]) |
453 | 542 | fields = line.strip().split(u'==') | 609 | fields = line.strip().split(u'==') |
455 | 543 | assert not fields[0] # should be empty | 610 | # should be empty |
456 | 611 | assert not fields[0] | ||
457 | 544 | name = fields[1].strip() | 612 | name = fields[1].strip() |
458 | 545 | section = [] | 613 | section = [] |
459 | 546 | collect = 1 | 614 | collect = 1 |
460 | @@ -550,7 +618,8 @@ | |||
461 | 550 | in_section = True | 618 | in_section = True |
462 | 551 | collect = 0 | 619 | collect = 0 |
463 | 552 | fields = line.strip().split(u'||') | 620 | fields = line.strip().split(u'||') |
465 | 553 | assert not fields[0] # should be empty | 621 | # should be empty |
466 | 622 | assert not fields[0] | ||
467 | 554 | assignee = default_assignee | 623 | assignee = default_assignee |
468 | 555 | istatus = u'todo' | 624 | istatus = u'todo' |
469 | 556 | milestone = None | 625 | milestone = None |
470 | @@ -562,11 +631,12 @@ | |||
471 | 562 | desc = fields[which].strip() | 631 | desc = fields[which].strip() |
472 | 563 | if u'status' in field_off: | 632 | if u'status' in field_off: |
473 | 564 | which = field_off[u'status'] | 633 | which = field_off[u'status'] |
475 | 565 | status_search = [ fields[which] ] | 634 | status_search = [fields[which]] |
476 | 566 | else: | 635 | else: |
477 | 567 | status_search = fields[2:] | 636 | status_search = fields[2:] |
478 | 568 | for f in status_search: | 637 | for f in status_search: |
480 | 569 | if u'DONE' in f or u'POSTPONED' in f or u'TODO' in f or u'INPROGRESS' in f or u'BLOCKED' in f: | 638 | if u'DONE' in f or u'POSTPONED' in f or u'TODO' in f or \ |
481 | 639 | u'INPROGRESS' in f or u'BLOCKED' in f: | ||
482 | 570 | ff = f.split() | 640 | ff = f.split() |
483 | 571 | if len(ff) == 2: | 641 | if len(ff) == 2: |
484 | 572 | assignee = ff[1] | 642 | assignee = ff[1] |
485 | @@ -615,14 +685,17 @@ | |||
486 | 615 | for url, default_assignee in cfg.get('moin_pages', {}).iteritems(): | 685 | for url, default_assignee in cfg.get('moin_pages', {}).iteritems(): |
487 | 616 | url = unicode_or_None(url) | 686 | url = unicode_or_None(url) |
488 | 617 | default_assignee = unicode_or_None(default_assignee) | 687 | default_assignee = unicode_or_None(default_assignee) |
491 | 618 | dbg('moin_import(): processing %s (default assignee: %s)' % (url, default_assignee)) | 688 | dbg('moin_import(): processing %s (default assignee: %s)' % ( |
492 | 619 | for group, status, items in get_moin_workitems_group(url, default_assignee): | 689 | url, default_assignee)) |
493 | 690 | for group, status, items in get_moin_workitems_group(url, | ||
494 | 691 | default_assignee): | ||
495 | 620 | url_clean = url.replace('?action=raw', '') | 692 | url_clean = url.replace('?action=raw', '') |
496 | 621 | name = url_clean.split('://', 1)[1].split('/', 1)[1] | 693 | name = url_clean.split('://', 1)[1].split('/', 1)[1] |
497 | 622 | if group: | 694 | if group: |
498 | 623 | name += u' ' + group | 695 | name += u' ' + group |
499 | 624 | spec_url = u'%s#%s' % (url_clean, escape_url(group)) | 696 | spec_url = u'%s#%s' % (url_clean, escape_url(group)) |
501 | 625 | dbg(' got group %s: name="%s", url="%s"' % (group, name, spec_url)) | 697 | dbg(' got group %s: name="%s", url="%s"' % ( |
502 | 698 | group, name, spec_url)) | ||
503 | 626 | else: | 699 | else: |
504 | 627 | spec_url = url_clean | 700 | spec_url = url_clean |
505 | 628 | dbg(' no group: name="%s", url="%s"' % (name, spec_url)) | 701 | dbg(' no group: name="%s", url="%s"' % (name, spec_url)) |
506 | @@ -661,7 +734,8 @@ | |||
507 | 661 | optparser.add_option('-c', '--config', | 734 | optparser.add_option('-c', '--config', |
508 | 662 | help='Path to configuration file', dest='config', metavar='PATH') | 735 | help='Path to configuration file', dest='config', metavar='PATH') |
509 | 663 | optparser.add_option('-p', '--pattern', metavar='REGEX', | 736 | optparser.add_option('-p', '--pattern', metavar='REGEX', |
511 | 664 | help='Regex pattern for blueprint name (optional, mainly for testing)', dest='pattern') | 737 | help='Regex pattern for blueprint name (optional, mainly for testing)', |
512 | 738 | dest='pattern') | ||
513 | 665 | optparser.add_option('--debug', action='store_true', default=False, | 739 | optparser.add_option('--debug', action='store_true', default=False, |
514 | 666 | help='Enable debugging output in parsing routines') | 740 | help='Enable debugging output in parsing routines') |
515 | 667 | optparser.add_option('--mail', action='store_true', default=False, | 741 | optparser.add_option('--mail', action='store_true', default=False, |
516 | @@ -679,41 +753,54 @@ | |||
517 | 679 | 753 | ||
518 | 680 | return opts, args | 754 | return opts, args |
519 | 681 | 755 | ||
520 | 756 | |||
521 | 682 | def send_error_mails(cfg): | 757 | def send_error_mails(cfg): |
522 | 683 | '''Send data_errors to contacts. | 758 | '''Send data_errors to contacts. |
523 | 684 | 759 | ||
527 | 685 | Data error contacts are defined in the configuration in the "error_contact" | 760 | Data error contacts are defined in the configuration in the |
528 | 686 | map (which assigns a regexp over spec names to a list of email addresses). | 761 | "project_notification_addresses" map (which assigns project names to a list |
529 | 687 | If no match is found, the error goes to stderr. | 762 | of email addresses). If no address list for a project is found, the error |
530 | 763 | goes to stderr. | ||
531 | 688 | ''' | 764 | ''' |
532 | 689 | global error_collector | 765 | global error_collector |
533 | 690 | 766 | ||
534 | 691 | # sort errors into address buckets | 767 | # sort errors into address buckets |
536 | 692 | emails = {} # email address -> contents | 768 | # email address -> contents |
537 | 769 | emails = {} | ||
538 | 693 | 770 | ||
539 | 694 | dbg('mailing %i data errors' % len(error_collector.errors)) | 771 | dbg('mailing %i data errors' % len(error_collector.errors)) |
540 | 695 | for error in error_collector.errors: | 772 | for error in error_collector.errors: |
550 | 696 | for pattern, addresses in cfg['error_contact'].iteritems(): | 773 | project_name = error.get_project_name() |
551 | 697 | if (error.get_blueprint_name() is not None | 774 | if project_name is not None: |
552 | 698 | and re.search(pattern, error.get_blueprint_name())): | 775 | addresses = cfg['project_notification_addresses'][project_name] |
553 | 699 | dbg('spec %s matches error_contact pattern "%s", mailing to %s' % (error.get_blueprint_name(), | 776 | dbg('spec %s is targetted to "%s", mailing to %s' % ( |
554 | 700 | pattern, ', '.join(addresses))) | 777 | error.get_blueprint_name(), project_name, |
555 | 701 | for a in addresses: | 778 | ', '.join(addresses))) |
556 | 702 | emails.setdefault(a, '') | 779 | for a in addresses: |
557 | 703 | emails[a] += error.format_for_display() + '\n' | 780 | emails.setdefault(a, '') |
558 | 704 | break | 781 | emails[a] += error.format_for_display() + '\n' |
559 | 705 | else: | 782 | else: |
561 | 706 | print >> sys.stderr, error.format_for_display(), '(no error_contact pattern)' | 783 | print >> sys.stderr, error.format_for_display(), \ |
562 | 784 | '(no error_contact pattern)' | ||
563 | 707 | 785 | ||
564 | 708 | # send mails | 786 | # send mails |
565 | 709 | for addr, contents in emails.iteritems(): | 787 | for addr, contents in emails.iteritems(): |
566 | 710 | msg = MIMEText(contents.encode('ascii', 'replace')) | 788 | msg = MIMEText(contents.encode('ascii', 'replace')) |
567 | 711 | msg['Subject'] = 'Errors in work item definitions' | 789 | msg['Subject'] = 'Errors in work item definitions' |
569 | 712 | msg['From'] = 'Launchpad work item tracker <work-items-tracker-hackers@lists.launchpad.net>' | 790 | msg['From'] = 'Launchpad work item tracker ' + \ |
570 | 791 | '<work-items-tracker-hackers@lists.launchpad.net>' | ||
571 | 713 | msg['To'] = addr | 792 | msg['To'] = addr |
572 | 714 | s = smtplib.SMTP() | 793 | s = smtplib.SMTP() |
573 | 715 | s.connect() | 794 | s.connect() |
574 | 795 | <<<<<<< TREE | ||
575 | 716 | s.sendmail('devnull@canonical.com', addr, msg.as_string()) | 796 | s.sendmail('devnull@canonical.com', addr, msg.as_string()) |
576 | 797 | ======= | ||
577 | 798 | s.sendmail(os.environ.get( | ||
578 | 799 | 'EMAIL', | ||
579 | 800 | pwd.getpwuid(os.geteuid()).pw_name + '@localhost'), | ||
580 | 801 | addr, | ||
581 | 802 | msg.as_string()) | ||
582 | 803 | >>>>>>> MERGE-SOURCE | ||
583 | 717 | s.quit() | 804 | s.quit() |
584 | 718 | 805 | ||
585 | 719 | 806 | ||
586 | @@ -753,22 +840,29 @@ | |||
587 | 753 | bug_status_map[key] = unicode_or_None(val) | 840 | bug_status_map[key] = unicode_or_None(val) |
588 | 754 | bug_wi_states.update(bug_status_map) | 841 | bug_wi_states.update(bug_status_map) |
589 | 755 | 842 | ||
591 | 756 | lock_path = opts.database + ".collect_lock" | 843 | lock_path = opts.database + ".lock" |
592 | 757 | lock_f = open(lock_path, "wb") | 844 | lock_f = open(lock_path, "wb") |
593 | 758 | if report_tools.lock_file(lock_f) is None: | 845 | if report_tools.lock_file(lock_f) is None: |
594 | 759 | print "Another instance is already running" | 846 | print "Another instance is already running" |
595 | 760 | sys.exit(0) | 847 | sys.exit(0) |
596 | 761 | 848 | ||
597 | 762 | if "beta" in EDGE_SERVICE_ROOT: | 849 | if "beta" in EDGE_SERVICE_ROOT: |
599 | 763 | lp = Launchpad.login_with('ubuntu-work-items', service_root=EDGE_SERVICE_ROOT.replace("edge.", "").replace("beta", "devel")) | 850 | service_root = EDGE_SERVICE_ROOT |
600 | 851 | service_root = service_root.replace("edge.", "") | ||
601 | 852 | service_root = service_root.replace("beta", "devel") | ||
602 | 853 | lp = Launchpad.login_with('ubuntu-work-items', | ||
603 | 854 | service_root=service_root) | ||
604 | 764 | else: | 855 | else: |
606 | 765 | lp = Launchpad.login_with('ubuntu-work-items', service_root="production", version="devel") | 856 | lp = Launchpad.login_with('ubuntu-work-items', |
607 | 857 | service_root="production", version="devel") | ||
608 | 766 | 858 | ||
609 | 767 | store = get_store(opts.database) | 859 | store = get_store(opts.database) |
610 | 768 | collector = CollectorStore(store, lp, error_collector) | 860 | collector = CollectorStore(store, lp, error_collector) |
611 | 769 | 861 | ||
612 | 770 | # reset status for current day | 862 | # reset status for current day |
613 | 771 | collector.clear_todays_workitems() | 863 | collector.clear_todays_workitems() |
614 | 864 | # We can delete all blueprints while keeping work items for previous days | ||
615 | 865 | # because there's no foreign key reference from WorkItem to Blueprint. | ||
616 | 772 | collector.clear_blueprints() | 866 | collector.clear_blueprints() |
617 | 773 | collector.clear_metas() | 867 | collector.clear_metas() |
618 | 774 | collector.clear_complexitys() | 868 | collector.clear_complexitys() |
619 | 775 | 869 | ||
620 | === added file 'collect_jira' | |||
621 | --- collect_jira 1970-01-01 00:00:00 +0000 | |||
622 | +++ collect_jira 2012-10-09 09:20:30 +0000 | |||
623 | @@ -0,0 +1,229 @@ | |||
624 | 1 | #!/usr/bin/python | ||
625 | 2 | # | ||
626 | 3 | # Pull items from cards.linaro.org and put them into a database. | ||
627 | 4 | |||
628 | 5 | import logging | ||
629 | 6 | import optparse | ||
630 | 7 | import os | ||
631 | 8 | import simplejson | ||
632 | 9 | import sys | ||
633 | 10 | import urllib2 | ||
634 | 11 | |||
635 | 12 | import jira | ||
636 | 13 | from lpworkitems.collect_roadmap import ( | ||
637 | 14 | CollectorStore, | ||
638 | 15 | ) | ||
639 | 16 | from lpworkitems.database import get_store | ||
640 | 17 | from lpworkitems.error_collector import ( | ||
641 | 18 | ErrorCollector, | ||
642 | 19 | StderrErrorCollector, | ||
643 | 20 | ) | ||
644 | 21 | from lpworkitems.models_roadmap import ( | ||
645 | 22 | Lane, | ||
646 | 23 | Card, | ||
647 | 24 | ) | ||
648 | 25 | import report_tools | ||
649 | 26 | |||
650 | 27 | |||
651 | 28 | # An ErrorCollector to collect the data errors for later reporting | ||
652 | 29 | error_collector = None | ||
653 | 30 | |||
654 | 31 | |||
655 | 32 | logger = logging.getLogger("linarojira") | ||
656 | 33 | |||
657 | 34 | JIRA_API_URL = 'http://cards.linaro.org/rest/api/2' | ||
658 | 35 | JIRA_PROJECT_KEY = 'CARD' | ||
659 | 36 | JIRA_ISSUE_BY_KEY_URL = 'http://cards.linaro.org/browse/%s' | ||
660 | 37 | |||
661 | 38 | |||
662 | 39 | def dbg(msg): | ||
663 | 40 | '''Print out debugging message if debugging is enabled.''' | ||
664 | 41 | logger.debug(msg) | ||
665 | 42 | |||
666 | 43 | |||
667 | 44 | def get_json_data(url): | ||
668 | 45 | data = None | ||
669 | 46 | try: | ||
670 | 47 | data = simplejson.load(urllib2.urlopen(url)) | ||
671 | 48 | except urllib2.HTTPError, e: | ||
672 | 49 | print "HTTP error for url '%s': %d" % (url, e.code) | ||
673 | 50 | except urllib2.URLError, e: | ||
674 | 51 | print "Network error for url '%s': %s" % (url, e.reason.args[1]) | ||
675 | 52 | except ValueError, e: | ||
676 | 53 | print "Data error for url '%s': %s" % (url, e.args[0]) | ||
677 | 54 | |||
678 | 55 | return data | ||
679 | 56 | |||
680 | 57 | |||
681 | 58 | def jira_import(collector, cfg, opts): | ||
682 | 59 | '''Collect roadmap items from JIRA into DB.''' | ||
683 | 60 | |||
684 | 61 | # import JIRA versions as database Lanes | ||
685 | 62 | result = jira.do_request(opts, 'project/%s/versions' % JIRA_PROJECT_KEY) | ||
686 | 63 | for version in result: | ||
687 | 64 | dbg('Adding lane (name = %s, id = %s)' % | ||
688 | 65 | (version['name'], version['id'])) | ||
689 | 66 | model_lane = Lane(unicode(version['name']), int(version['id'])) | ||
690 | 67 | if model_lane.name == cfg['current_lane']: | ||
691 | 68 | model_lane.is_current = True | ||
692 | 69 | else: | ||
693 | 70 | model_lane.is_current = False | ||
694 | 71 | collector.store_lane(model_lane) | ||
695 | 72 | |||
696 | 73 | # find id of "Sponsor" custom field in JIRA | ||
697 | 74 | result = jira.do_request(opts, 'field') | ||
698 | 75 | sponsor_fields = [field for field in result if field['name'] == 'Sponsor'] | ||
699 | 76 | assert len(sponsor_fields) == 1, 'Not a single Sponsor field' | ||
700 | 77 | sponsor_field_id = sponsor_fields[0]['id'] | ||
701 | 78 | |||
702 | 79 | # import JIRA issues as database Cards | ||
703 | 80 | result = jira.do_request( | ||
704 | 81 | opts, 'search', jql='project = %s' % JIRA_PROJECT_KEY, | ||
705 | 82 | fields=['summary', 'fixVersions', 'status', 'components', | ||
706 | 83 | 'priority', 'description', 'timetracking', sponsor_field_id]) | ||
707 | 84 | for issue in result['issues']: | ||
708 | 85 | fields = issue['fields'] | ||
709 | 86 | name = unicode(fields['summary']) | ||
710 | 87 | card_id = int(issue['id']) | ||
711 | 88 | key = unicode(issue['key']) | ||
712 | 89 | fixVersions = fields['fixVersions'] | ||
713 | 90 | if len(fixVersions) == 0: | ||
714 | 91 | dbg('Skipping card without lane (name = %s, key = %s)' % | ||
715 | 92 | (name, key)) | ||
716 | 93 | continue | ||
717 | 94 | # JIRA allows listing multiple versions in fixVersions | ||
718 | 95 | assert len(fixVersions) == 1 | ||
719 | 96 | lane_id = int(fixVersions[0]['id']) | ||
720 | 97 | |||
721 | 98 | dbg('Adding card (name = %s, id = %s, lane_id = %s, key = %s)' % | ||
722 | 99 | (name, card_id, lane_id, key)) | ||
723 | 100 | model_card = Card(name, card_id, lane_id, key) | ||
724 | 101 | model_card.status = unicode(fields['status']['name']) | ||
725 | 102 | components = fields['components'] | ||
726 | 103 | if len(components) == 0: | ||
727 | 104 | dbg('Skipping card without component (name = %s, key = %s)' % | ||
728 | 105 | (name, key)) | ||
729 | 106 | # JIRA allows listing multiple components | ||
730 | 107 | assert len(components) == 1 | ||
731 | 108 | model_card.team = unicode(components[0]['name']) | ||
732 | 109 | model_card.priority = unicode(fields['priority']['name']) | ||
733 | 110 | size_fields = [] | ||
734 | 111 | timetracking = fields['timetracking'] | ||
735 | 112 | if 'originalEstimate' in timetracking: | ||
736 | 113 | size_fields += [ | ||
737 | 114 | 'original estimate: %s' % timetracking['originalEstimate']] | ||
738 | 115 | if 'remainingEstimate' in timetracking: | ||
739 | 116 | size_fields += [ | ||
740 | 117 | 'remaining estimate: %s' % timetracking['remainingEstimate']] | ||
741 | 118 | model_card.size = unicode(', '.join(size_fields)) | ||
742 | 119 | model_card.sponsor = u'' | ||
743 | 120 | # None if no sponsor is selected | ||
744 | 121 | if fields[sponsor_field_id] is not None: | ||
745 | 122 | sponsors = [s['value'] for s in fields[sponsor_field_id]] | ||
746 | 123 | model_card.sponsor = unicode(', '.join(sorted(sponsors))) | ||
747 | 124 | model_card.url = JIRA_ISSUE_BY_KEY_URL % key | ||
748 | 125 | # XXX need to either download the HTML version or convert this to HTML | ||
749 | 126 | model_card.description = unicode(fields['description']) | ||
750 | 127 | # acceptance criteria is in the description | ||
751 | 128 | model_card.acceptance_criteria = u'' | ||
752 | 129 | collector.store_card(model_card) | ||
753 | 130 | return | ||
754 | 131 | |||
755 | 132 | ######################################################################## | ||
756 | 133 | # | ||
757 | 134 | # Program operations and main | ||
758 | 135 | # | ||
759 | 136 | ######################################################################## | ||
760 | 137 | |||
761 | 138 | |||
762 | 139 | def parse_argv(): | ||
763 | 140 | '''Parse CLI arguments. | ||
764 | 141 | |||
765 | 142 | Return (options, args) tuple. | ||
766 | 143 | ''' | ||
767 | 144 | optparser = optparse.OptionParser() | ||
768 | 145 | optparser.add_option('-d', '--database', | ||
769 | 146 | help='Path to database', dest='database', metavar='PATH') | ||
770 | 147 | optparser.add_option('-c', '--config', | ||
771 | 148 | help='Path to configuration file', dest='config', metavar='PATH') | ||
772 | 149 | optparser.add_option('--debug', action='store_true', default=False, | ||
773 | 150 | help='Enable debugging output in parsing routines') | ||
774 | 151 | optparser.add_option('--mail', action='store_true', default=False, | ||
775 | 152 | help='Send data errors as email (according to "error_config" map in ' | ||
776 | 153 | 'config file) instead of printing to stderr', dest='mail') | ||
777 | 154 | optparser.add_option('--jira-username', default='robot', | ||
778 | 155 | help='JIRA username for authentication', dest='jira_username') | ||
779 | 156 | optparser.add_option('--jira-password', default='cuf4moh2', | ||
780 | 157 | help='JIRA password for authentication', dest='jira_password') | ||
781 | 158 | |||
782 | 159 | (opts, args) = optparser.parse_args() | ||
783 | 160 | |||
784 | 161 | if not opts.database: | ||
785 | 162 | optparser.error('No database given') | ||
786 | 163 | if not opts.config: | ||
787 | 164 | optparser.error('No config given') | ||
788 | 165 | |||
789 | 166 | return opts, args | ||
790 | 167 | |||
791 | 168 | |||
792 | 169 | def setup_logging(debug): | ||
793 | 170 | ch = logging.StreamHandler() | ||
794 | 171 | ch.setLevel(logging.INFO) | ||
795 | 172 | formatter = logging.Formatter("%(message)s") | ||
796 | 173 | ch.setFormatter(formatter) | ||
797 | 174 | logger.setLevel(logging.INFO) | ||
798 | 175 | logger.addHandler(ch) | ||
799 | 176 | if debug: | ||
800 | 177 | ch.setLevel(logging.DEBUG) | ||
801 | 178 | formatter = logging.Formatter( | ||
802 | 179 | "%(asctime)s - %(name)s - %(levelname)s - %(message)s") | ||
803 | 180 | ch.setFormatter(formatter) | ||
804 | 181 | logger.setLevel(logging.DEBUG) | ||
805 | 182 | |||
806 | 183 | |||
807 | 184 | def update_todays_blueprint_daily_count_per_state(collector): | ||
808 | 185 | """Clear today's entries and create them again to reflect the current | ||
809 | 186 | state of blueprints.""" | ||
810 | 187 | collector.clear_todays_blueprint_daily_count_per_state() | ||
811 | 188 | collector.store_roadmap_bp_count_per_state() | ||
812 | 189 | |||
813 | 190 | |||
814 | 191 | def main(): | ||
815 | 192 | report_tools.fix_stdouterr() | ||
816 | 193 | |||
817 | 194 | (opts, args) = parse_argv() | ||
818 | 195 | opts.jira_api_url = JIRA_API_URL | ||
819 | 196 | |||
820 | 197 | setup_logging(opts.debug) | ||
821 | 198 | |||
822 | 199 | global error_collector | ||
823 | 200 | if opts.mail: | ||
824 | 201 | error_collector = ErrorCollector() | ||
825 | 202 | else: | ||
826 | 203 | error_collector = StderrErrorCollector() | ||
827 | 204 | |||
828 | 205 | cfg = report_tools.load_config(opts.config) | ||
829 | 206 | |||
830 | 207 | lock_path = opts.database + ".lock" | ||
831 | 208 | lock_f = open(lock_path, "wb") | ||
832 | 209 | if report_tools.lock_file(lock_f) is None: | ||
833 | 210 | print "Another instance is already running" | ||
834 | 211 | sys.exit(0) | ||
835 | 212 | |||
836 | 213 | store = get_store(opts.database) | ||
837 | 214 | collector = CollectorStore(store, '', error_collector) | ||
838 | 215 | |||
839 | 216 | collector.clear_lanes() | ||
840 | 217 | collector.clear_cards() | ||
841 | 218 | |||
842 | 219 | jira_import(collector, cfg, opts) | ||
843 | 220 | |||
844 | 221 | update_todays_blueprint_daily_count_per_state(collector) | ||
845 | 222 | |||
846 | 223 | store.commit() | ||
847 | 224 | |||
848 | 225 | os.unlink(lock_path) | ||
849 | 226 | |||
850 | 227 | |||
851 | 228 | if __name__ == '__main__': | ||
852 | 229 | main() | ||
853 | 0 | 230 | ||
854 | === added file 'collect_roadmap' | |||
855 | --- collect_roadmap 1970-01-01 00:00:00 +0000 | |||
856 | +++ collect_roadmap 2012-10-09 09:20:30 +0000 | |||
857 | @@ -0,0 +1,301 @@ | |||
858 | 1 | #!/usr/bin/python | ||
859 | 2 | # | ||
860 | 3 | # Pull items from the Linaro roadmap in Kanbantool and put them into a database. | ||
861 | 4 | |||
862 | 5 | import logging | ||
863 | 6 | import optparse | ||
864 | 7 | import os | ||
865 | 8 | import simplejson | ||
866 | 9 | import sys | ||
867 | 10 | import urllib2 | ||
868 | 11 | |||
869 | 12 | from lpworkitems.collect_roadmap import ( | ||
870 | 13 | CollectorStore, | ||
871 | 14 | get_json_item, | ||
872 | 15 | lookup_kanban_priority, | ||
873 | 16 | ) | ||
874 | 17 | from lpworkitems.database import get_store | ||
875 | 18 | from lpworkitems.error_collector import ( | ||
876 | 19 | ErrorCollector, | ||
877 | 20 | StderrErrorCollector, | ||
878 | 21 | ) | ||
879 | 22 | from lpworkitems.models_roadmap import ( | ||
880 | 23 | Lane, | ||
881 | 24 | Card, | ||
882 | 25 | ) | ||
883 | 26 | from utils import unicode_or_None | ||
884 | 27 | import report_tools | ||
885 | 28 | |||
886 | 29 | |||
887 | 30 | # An ErrorCollector to collect the data errors for later reporting | ||
888 | 31 | error_collector = None | ||
889 | 32 | |||
890 | 33 | |||
891 | 34 | logger = logging.getLogger("linaroroadmap") | ||
892 | 35 | |||
893 | 36 | |||
894 | 37 | def dbg(msg): | ||
895 | 38 | '''Print out debugging message if debugging is enabled.''' | ||
896 | 39 | logger.debug(msg) | ||
897 | 40 | |||
898 | 41 | |||
899 | 42 | def get_kanban_url(item_url, api_token): | ||
900 | 43 | base_url = 'https://linaro.kanbantool.com/api/v1' | ||
901 | 44 | return "%s/%s?api_token=%s" % (base_url, item_url, api_token) | ||
902 | 45 | |||
903 | 46 | |||
904 | 47 | def get_json_data(url): | ||
905 | 48 | data = None | ||
906 | 49 | try: | ||
907 | 50 | data = simplejson.load(urllib2.urlopen(url)) | ||
908 | 51 | except urllib2.HTTPError, e: | ||
909 | 52 | print "HTTP error for url '%s': %d" % (url, e.code) | ||
910 | 53 | except urllib2.URLError, e: | ||
911 | 54 | print "Network error for url '%s': %s" % (url, e.reason.args[1]) | ||
912 | 55 | except ValueError, e: | ||
913 | 56 | print "Data error for url '%s': %s" % (url, e.args[0]) | ||
914 | 57 | |||
915 | 58 | return data | ||
916 | 59 | |||
917 | 60 | |||
918 | 61 | def kanban_import_lanes(collector, workflow_stages, cfg): | ||
919 | 62 | nodes = {} | ||
920 | 63 | root_node_id = None | ||
921 | 64 | lanes_to_ignore = ['Legend'] | ||
922 | 65 | |||
923 | 66 | # Iterate over all workflow_stages which may be in any order. | ||
924 | 67 | for workflow_stage in workflow_stages: | ||
925 | 68 | if workflow_stage['name'] in lanes_to_ignore: | ||
926 | 69 | dbg("Ignoring lane %s." % workflow_stage['name']) | ||
927 | 70 | continue | ||
928 | 71 | parent_id = workflow_stage['parent_id'] | ||
929 | 72 | if parent_id is None: | ||
930 | 73 | assert root_node_id is None, 'We have already found the root node.' | ||
931 | 74 | root_node_id = workflow_stage['id'] | ||
932 | 75 | else: | ||
933 | 76 | if parent_id not in nodes: | ||
934 | 77 | nodes[parent_id] = [] | ||
935 | 78 | # Add child workflow_stage | ||
936 | 79 | nodes[parent_id].append(workflow_stage) | ||
937 | 80 | |||
938 | 81 | statuses = [] | ||
939 | 82 | for node in nodes[root_node_id]: | ||
940 | 83 | assert node['parent_id'] == root_node_id | ||
941 | 84 | model_lane = Lane(get_json_item(node, 'name'), | ||
942 | 85 | node['id']) | ||
943 | 86 | if model_lane.name == cfg['current_lane']: | ||
944 | 87 | model_lane.is_current = True | ||
945 | 88 | else: | ||
946 | 89 | model_lane.is_current = False | ||
947 | 90 | collector.store_lane(model_lane) | ||
948 | 91 | node_id = node['id'] | ||
949 | 92 | if node_id in nodes: | ||
950 | 93 | statuses.extend(nodes[node_id]) | ||
951 | 94 | return statuses | ||
952 | 95 | |||
953 | 96 | |||
954 | 97 | def kanban_import_cards(collector, tasks, status_list, card_types, papyrs_token): | ||
955 | 98 | types_to_ignore = ['Summits'] | ||
956 | 99 | for task in tasks: | ||
957 | 100 | dbg("Collecting card '%s'." % (task['task']['name'])) | ||
958 | 101 | status_id = task['task']['workflow_stage_id'] | ||
959 | 102 | assert status_id is not None | ||
960 | 103 | task_status = None | ||
961 | 104 | for status in status_list: | ||
962 | 105 | if status['id'] == status_id: | ||
963 | 106 | task_status = status | ||
964 | 107 | break | ||
965 | 108 | card_type_id = task['task']['card_type_id'] | ||
966 | 109 | card_type_name = None | ||
967 | 110 | for card_type in card_types: | ||
968 | 111 | if card_type['id'] == card_type_id: | ||
969 | 112 | card_type_name = card_type['name'] | ||
970 | 113 | break | ||
971 | 114 | else: | ||
972 | 115 | dbg("Cannot find type for card '%s'." % (task['task']['name'])) | ||
973 | 116 | if card_type_name in types_to_ignore: | ||
974 | 117 | dbg("Ignoring card '%s' since it\'s type is '%s'." % \ | ||
975 | 118 | (task['task']['name'], card_type['name'])) | ||
976 | 119 | else: | ||
977 | 120 | if task_status is not None: | ||
978 | 121 | lane_id = task_status['parent_id'] | ||
979 | 122 | assert lane_id is not None | ||
980 | 123 | else: | ||
981 | 124 | lane_id = status_id | ||
982 | 125 | if not collector.lane_is_collected(lane_id): | ||
983 | 126 | dbg("Ignoring card '%s' since it\'s Lane is ignored." % \ | ||
984 | 127 | (task['task']['name'])) | ||
985 | 128 | continue | ||
986 | 129 | model_card = Card(get_json_item(task['task'], 'name'), | ||
987 | 130 | task['task']['id'], lane_id, | ||
988 | 131 | get_json_item(task['task'], 'external_id')) | ||
989 | 132 | if task_status is not None: | ||
990 | 133 | model_card.status = get_json_item(task_status, 'name') | ||
991 | 134 | model_card.team = unicode_or_None(card_type_name) | ||
992 | 135 | model_card.priority = lookup_kanban_priority( | ||
993 | 136 | task['task']['priority']) | ||
994 | 137 | model_card.size = get_json_item(task['task'], 'size_estimate') | ||
995 | 138 | model_card.sponsor = get_json_item(task['task'], | ||
996 | 139 | 'custom_field_1') | ||
997 | 140 | |||
998 | 141 | external_link = task['task']['custom_field_2'] | ||
999 | 142 | if external_link is not None and external_link is not '': | ||
1000 | 143 | model_card.url = unicode_or_None(external_link) | ||
1001 | 144 | dbg('Getting Papyrs information from %s.' % external_link) | ||
1002 | 145 | papyrs_data = papyrs_import(collector, external_link, papyrs_token) | ||
1003 | 146 | model_card.description = get_json_item(papyrs_data, | ||
1004 | 147 | 'description') | ||
1005 | 148 | model_card.acceptance_criteria = get_json_item( | ||
1006 | 149 | papyrs_data, 'acceptance_criteria') | ||
1007 | 150 | collector.store_card(model_card) | ||
1008 | 151 | |||
1009 | 152 | |||
1010 | 153 | def kanban_import(collector, cfg, board_id, api_token, papyrs_token): | ||
1011 | 154 | '''Collect roadmap items from KanbanTool into DB.''' | ||
1012 | 155 | board_url = get_kanban_url('boards/%s.json' % board_id, api_token) | ||
1013 | 156 | board = get_json_data(board_url) | ||
1014 | 157 | assert board is not None, "Could not access board %s." % board_id | ||
1015 | 158 | card_types = board['board']['card_types'] | ||
1016 | 159 | status_list = kanban_import_lanes(collector, | ||
1017 | 160 | board['board']['workflow_stages'], cfg) | ||
1018 | 161 | |||
1019 | 162 | tasks_url = get_kanban_url('boards/%s/tasks.json' % board_id, api_token) | ||
1020 | 163 | tasks = get_json_data(tasks_url) | ||
1021 | 164 | kanban_import_cards(collector, tasks, status_list, card_types, papyrs_token) | ||
1022 | 165 | |||
1023 | 166 | |||
1024 | 167 | def papyrs_import(collector, requirement_url, papyrs_token): | ||
1025 | 168 | description = None | ||
1026 | 169 | acceptance_criteria = None | ||
1027 | 170 | |||
1028 | 171 | page = get_json_data(requirement_url + '?json&auth_token=%s' % papyrs_token) | ||
1029 | 172 | if page is None: | ||
1030 | 173 | return {'description': None, | ||
1031 | 174 | 'acceptance_criteria': None} | ||
1032 | 175 | |||
1033 | 176 | page_text_items = page[0] | ||
1034 | 177 | page_extra_items = page[1] | ||
1035 | 178 | |||
1036 | 179 | has_found_description = False | ||
1037 | 180 | last_heading = '' | ||
1038 | 181 | for page_item in page_text_items: | ||
1039 | 182 | if page_item['classname'] == 'Heading': | ||
1040 | 183 | last_heading = page_item['text'] | ||
1041 | 184 | if page_item['classname'] == 'Paragraph': | ||
1042 | 185 | if not has_found_description: | ||
1043 | 186 | description = page_item['html'] | ||
1044 | 187 | has_found_description = True | ||
1045 | 188 | elif 'Acceptance Criteria' in last_heading: | ||
1046 | 189 | acceptance_criteria = page_item['html'] | ||
1047 | 190 | |||
1048 | 191 | return {'description': get_first_paragraph(description), | ||
1049 | 192 | 'acceptance_criteria': get_first_paragraph(acceptance_criteria)} | ||
1050 | 193 | |||
1051 | 194 | |||
1052 | 195 | def get_first_paragraph(text): | ||
1053 | 196 | if text is None: | ||
1054 | 197 | return None | ||
1055 | 198 | # This might break, depending on what type of line breaks | ||
1056 | 199 | # whoever authors the Papyrs document uses. | ||
1057 | 200 | first_pararaph, _, _ = text.partition('<br>') | ||
1058 | 201 | return first_pararaph | ||
1059 | 202 | |||
1060 | 203 | |||
1061 | 204 | ######################################################################## | ||
1062 | 205 | # | ||
1063 | 206 | # Program operations and main | ||
1064 | 207 | # | ||
1065 | 208 | ######################################################################## | ||
1066 | 209 | |||
1067 | 210 | def parse_argv(): | ||
1068 | 211 | '''Parse CLI arguments. | ||
1069 | 212 | |||
1070 | 213 | Return (options, args) tuple. | ||
1071 | 214 | ''' | ||
1072 | 215 | optparser = optparse.OptionParser() | ||
1073 | 216 | optparser.add_option('-d', '--database', | ||
1074 | 217 | help='Path to database', dest='database', metavar='PATH') | ||
1075 | 218 | optparser.add_option('-c', '--config', | ||
1076 | 219 | help='Path to configuration file', dest='config', metavar='PATH') | ||
1077 | 220 | optparser.add_option('--debug', action='store_true', default=False, | ||
1078 | 221 | help='Enable debugging output in parsing routines') | ||
1079 | 222 | optparser.add_option('--mail', action='store_true', default=False, | ||
1080 | 223 | help='Send data errors as email (according to "error_config" map in ' | ||
1081 | 224 | 'config file) instead of printing to stderr', dest='mail') | ||
1082 | 225 | optparser.add_option('--board', | ||
1083 | 226 | help='Board id at Kanbantool', dest='board') | ||
1084 | 227 | optparser.add_option('--kanbantoken', | ||
1085 | 228 | help='Kanbantool API token for authentication', dest='kanban_token') | ||
1086 | 229 | optparser.add_option('--papyrstoken', | ||
1087 | 230 | help='Papyrs API token for authentication', dest='papyrs_token') | ||
1088 | 231 | |||
1089 | 232 | (opts, args) = optparser.parse_args() | ||
1090 | 233 | |||
1091 | 234 | if not opts.database: | ||
1092 | 235 | optparser.error('No database given') | ||
1093 | 236 | if not opts.config: | ||
1094 | 237 | optparser.error('No config given') | ||
1095 | 238 | |||
1096 | 239 | return opts, args | ||
1097 | 240 | |||
1098 | 241 | |||
1099 | 242 | def setup_logging(debug): | ||
1100 | 243 | ch = logging.StreamHandler() | ||
1101 | 244 | ch.setLevel(logging.INFO) | ||
1102 | 245 | formatter = logging.Formatter("%(message)s") | ||
1103 | 246 | ch.setFormatter(formatter) | ||
1104 | 247 | logger.setLevel(logging.INFO) | ||
1105 | 248 | logger.addHandler(ch) | ||
1106 | 249 | if debug: | ||
1107 | 250 | ch.setLevel(logging.DEBUG) | ||
1108 | 251 | formatter = logging.Formatter( | ||
1109 | 252 | "%(asctime)s - %(name)s - %(levelname)s - %(message)s") | ||
1110 | 253 | ch.setFormatter(formatter) | ||
1111 | 254 | logger.setLevel(logging.DEBUG) | ||
1112 | 255 | |||
1113 | 256 | |||
1114 | 257 | def update_todays_blueprint_daily_count_per_state(collector): | ||
1115 | 258 | """Clear today's entries and create them again to reflect the current | ||
1116 | 259 | state of blueprints.""" | ||
1117 | 260 | collector.clear_todays_blueprint_daily_count_per_state() | ||
1118 | 261 | collector.store_roadmap_bp_count_per_state() | ||
1119 | 262 | |||
1120 | 263 | |||
1121 | 264 | def main(): | ||
1122 | 265 | report_tools.fix_stdouterr() | ||
1123 | 266 | |||
1124 | 267 | (opts, args) = parse_argv() | ||
1125 | 268 | |||
1126 | 269 | setup_logging(opts.debug) | ||
1127 | 270 | |||
1128 | 271 | global error_collector | ||
1129 | 272 | if opts.mail: | ||
1130 | 273 | error_collector = ErrorCollector() | ||
1131 | 274 | else: | ||
1132 | 275 | error_collector = StderrErrorCollector() | ||
1133 | 276 | |||
1134 | 277 | cfg = report_tools.load_config(opts.config) | ||
1135 | 278 | |||
1136 | 279 | lock_path = opts.database + ".lock" | ||
1137 | 280 | lock_f = open(lock_path, "wb") | ||
1138 | 281 | if report_tools.lock_file(lock_f) is None: | ||
1139 | 282 | print "Another instance is already running" | ||
1140 | 283 | sys.exit(0) | ||
1141 | 284 | |||
1142 | 285 | store = get_store(opts.database) | ||
1143 | 286 | collector = CollectorStore(store, '', error_collector) | ||
1144 | 287 | |||
1145 | 288 | collector.clear_lanes() | ||
1146 | 289 | collector.clear_cards() | ||
1147 | 290 | |||
1148 | 291 | kanban_import(collector, cfg, opts.board, opts.kanban_token, opts.papyrs_token) | ||
1149 | 292 | |||
1150 | 293 | update_todays_blueprint_daily_count_per_state(collector) | ||
1151 | 294 | |||
1152 | 295 | store.commit() | ||
1153 | 296 | |||
1154 | 297 | os.unlink(lock_path) | ||
1155 | 298 | |||
1156 | 299 | |||
1157 | 300 | if __name__ == '__main__': | ||
1158 | 301 | main() | ||
1159 | 0 | 302 | ||
1160 | === modified file 'css/status.css' | |||
1161 | --- css/status.css 2011-05-18 20:51:50 +0000 | |||
1162 | +++ css/status.css 2012-10-09 09:20:30 +0000 | |||
1163 | @@ -46,6 +46,64 @@ | |||
1164 | 46 | font-size: 1.2em; | 46 | font-size: 1.2em; |
1165 | 47 | } | 47 | } |
1166 | 48 | 48 | ||
1167 | 49 | |||
1168 | 50 | .roadmap_progress_text { | ||
1169 | 51 | position: absolute; | ||
1170 | 52 | top:0; left:0; | ||
1171 | 53 | |||
1172 | 54 | padding-top: 5px; | ||
1173 | 55 | |||
1174 | 56 | color: #ffffff; | ||
1175 | 57 | text-align: center; | ||
1176 | 58 | width: 100%; | ||
1177 | 59 | } | ||
1178 | 60 | |||
1179 | 61 | .roadmap_wrap { | ||
1180 | 62 | border: 1px solid black; | ||
1181 | 63 | position: relative; | ||
1182 | 64 | margin-top: 2px; | ||
1183 | 65 | margin-bottom: 3px; | ||
1184 | 66 | margin-left: auto; | ||
1185 | 67 | margin-right: auto; | ||
1186 | 68 | background-color: #bdbdbd; | ||
1187 | 69 | } | ||
1188 | 70 | |||
1189 | 71 | .roadmap_wrap, .roadmap_value { | ||
1190 | 72 | width: 300px; | ||
1191 | 73 | height: 28px; | ||
1192 | 74 | } | ||
1193 | 75 | |||
1194 | 76 | table .roadmap_wrap, table .roadmap_value { | ||
1195 | 77 | border: 0px; | ||
1196 | 78 | width: 155px; | ||
1197 | 79 | height: 1.4em; | ||
1198 | 80 | background-color: #ffffff; | ||
1199 | 81 | } | ||
1200 | 82 | |||
1201 | 83 | .roadmap_value { | ||
1202 | 84 | float: left; | ||
1203 | 85 | } | ||
1204 | 86 | |||
1205 | 87 | .roadmap_value .Completed { | ||
1206 | 88 | background-color: green; | ||
1207 | 89 | height: inherit | ||
1208 | 90 | } | ||
1209 | 91 | |||
1210 | 92 | .roadmap_value .Blocked { | ||
1211 | 93 | background-color: red; | ||
1212 | 94 | height: inherit | ||
1213 | 95 | } | ||
1214 | 96 | |||
1215 | 97 | .roadmap_value .InProgress { | ||
1216 | 98 | background-color: gray; | ||
1217 | 99 | height: inherit | ||
1218 | 100 | } | ||
1219 | 101 | |||
1220 | 102 | .roadmap_value .Planned { | ||
1221 | 103 | background-color: orange; | ||
1222 | 104 | height: inherit | ||
1223 | 105 | } | ||
1224 | 106 | |||
1225 | 49 | .progress_wrap { | 107 | .progress_wrap { |
1226 | 50 | position: relative; | 108 | position: relative; |
1227 | 51 | border: 1px solid black; | 109 | border: 1px solid black; |
1228 | 52 | 110 | ||
1229 | === modified file 'generate-all' | |||
1230 | --- generate-all 2011-09-09 07:14:11 +0000 | |||
1231 | +++ generate-all 2012-10-09 09:20:30 +0000 | |||
1232 | @@ -6,7 +6,7 @@ | |||
1233 | 6 | # Copyright (C) 2010, 2011 Canonical Ltd. | 6 | # Copyright (C) 2010, 2011 Canonical Ltd. |
1234 | 7 | # License: GPL-3 | 7 | # License: GPL-3 |
1235 | 8 | 8 | ||
1237 | 9 | import optparse, os.path, sys | 9 | import optparse, os.path, sys, errno |
1238 | 10 | 10 | ||
1239 | 11 | import report_tools | 11 | import report_tools |
1240 | 12 | 12 | ||
1241 | @@ -47,13 +47,7 @@ | |||
1242 | 47 | burnup_chart_teams = [] | 47 | burnup_chart_teams = [] |
1243 | 48 | primary_team = None | 48 | primary_team = None |
1244 | 49 | 49 | ||
1252 | 50 | lock_path = opts.database + ".generate_lock" | 50 | lock_path = opts.database + ".lock" |
1246 | 51 | lock_f = open(lock_path, "wb") | ||
1247 | 52 | if report_tools.lock_file(lock_f) is None: | ||
1248 | 53 | print "Another instance is already running" | ||
1249 | 54 | sys.exit(0) | ||
1250 | 55 | |||
1251 | 56 | lock_path = opts.database + ".generate_lock" | ||
1253 | 57 | lock_f = open(lock_path, "wb") | 51 | lock_f = open(lock_path, "wb") |
1254 | 58 | if report_tools.lock_file(lock_f) is None: | 52 | if report_tools.lock_file(lock_f) is None: |
1255 | 59 | print "Another instance is already running" | 53 | print "Another instance is already running" |
1256 | @@ -80,14 +74,60 @@ | |||
1257 | 80 | usersubdir = os.path.join(opts.output_dir, 'u') | 74 | usersubdir = os.path.join(opts.output_dir, 'u') |
1258 | 81 | try: | 75 | try: |
1259 | 82 | os.mkdir(usersubdir) | 76 | os.mkdir(usersubdir) |
1262 | 83 | except OSError: | 77 | except OSError as exc: |
1263 | 84 | None | 78 | if exc.errno == errno.EEXIST: |
1264 | 79 | pass | ||
1265 | 80 | else: | ||
1266 | 81 | raise | ||
1267 | 85 | 82 | ||
1268 | 86 | groupssubdir = os.path.join(opts.output_dir, 'group') | 83 | groupssubdir = os.path.join(opts.output_dir, 'group') |
1269 | 87 | try: | 84 | try: |
1270 | 88 | os.mkdir(groupssubdir) | 85 | os.mkdir(groupssubdir) |
1273 | 89 | except OSError: | 86 | except OSError as exc: |
1274 | 90 | None | 87 | if exc.errno == errno.EEXIST: |
1275 | 88 | pass | ||
1276 | 89 | else: | ||
1277 | 90 | raise | ||
1278 | 91 | |||
1279 | 92 | lanessubdir = os.path.join(opts.output_dir, '..', 'lane') | ||
1280 | 93 | try: | ||
1281 | 94 | os.mkdir(lanessubdir) | ||
1282 | 95 | except OSError as exc: | ||
1283 | 96 | if exc.errno == errno.EEXIST: | ||
1284 | 97 | pass | ||
1285 | 98 | else: | ||
1286 | 99 | raise | ||
1287 | 100 | |||
1288 | 101 | cardssubdir = os.path.join(opts.output_dir, '..', 'card') | ||
1289 | 102 | try: | ||
1290 | 103 | os.mkdir(cardssubdir) | ||
1291 | 104 | except OSError as exc: | ||
1292 | 105 | if exc.errno == errno.EEXIST: | ||
1293 | 106 | pass | ||
1294 | 107 | else: | ||
1295 | 108 | raise | ||
1296 | 109 | |||
1297 | 110 | # roadmap lanes | ||
1298 | 111 | for lane in report_tools.lanes(store): | ||
1299 | 112 | basename = os.path.join(lanessubdir, lane.name) | ||
1300 | 113 | report_tools.roadmap_pages(my_path, opts.database, basename, opts.config, | ||
1301 | 114 | lane, root=opts.root) | ||
1302 | 115 | |||
1303 | 116 | # roadmap cards | ||
1304 | 117 | for card in report_tools.cards(store): | ||
1305 | 118 | if card.roadmap_id != '': | ||
1306 | 119 | page_name = card.roadmap_id | ||
1307 | 120 | else: | ||
1308 | 121 | page_name = str(card.card_id) | ||
1309 | 122 | basename = os.path.join(cardssubdir, page_name) | ||
1310 | 123 | report_tools.roadmap_cards(my_path, opts.database, basename, opts.config, | ||
1311 | 124 | card, root=opts.root) | ||
1312 | 125 | |||
1313 | 126 | # roadmap front page | ||
1314 | 127 | basename = os.path.join(lanessubdir, 'index') | ||
1315 | 128 | lane = report_tools.current_lane(store) | ||
1316 | 129 | report_tools.roadmap_pages(my_path, opts.database, basename, opts.config, | ||
1317 | 130 | lane, root=opts.root) | ||
1318 | 91 | 131 | ||
1319 | 92 | for u in users: | 132 | for u in users: |
1320 | 93 | for m in milestones: | 133 | for m in milestones: |
1321 | @@ -156,11 +196,10 @@ | |||
1322 | 156 | basename = os.path.join(opts.output_dir, status) | 196 | basename = os.path.join(opts.output_dir, status) |
1323 | 157 | report_tools.workitem_list(my_path, opts.database, basename, opts.config, status, root=opts.root) | 197 | report_tools.workitem_list(my_path, opts.database, basename, opts.config, status, root=opts.root) |
1324 | 158 | 198 | ||
1326 | 159 | # front page | 199 | # cycle front page |
1327 | 160 | basename = os.path.join(opts.output_dir, 'index') | 200 | basename = os.path.join(opts.output_dir, 'index') |
1328 | 161 | report_tools.status_overview(my_path, opts.database, basename, opts.config, root=opts.root) | 201 | report_tools.status_overview(my_path, opts.database, basename, opts.config, root=opts.root) |
1329 | 162 | 202 | ||
1330 | 163 | |||
1331 | 164 | def copy_files(source_dir): | 203 | def copy_files(source_dir): |
1332 | 165 | for filename in os.listdir(source_dir): | 204 | for filename in os.listdir(source_dir): |
1333 | 166 | dest = open(os.path.join(opts.output_dir, filename), 'w') | 205 | dest = open(os.path.join(opts.output_dir, filename), 'w') |
1334 | 167 | 206 | ||
1335 | === modified file 'html-report' | |||
1336 | --- html-report 2012-06-20 19:54:52 +0000 | |||
1337 | +++ html-report 2012-10-09 09:20:30 +0000 | |||
1338 | @@ -10,6 +10,13 @@ | |||
1339 | 10 | 10 | ||
1340 | 11 | from report_tools import escape_url | 11 | from report_tools import escape_url |
1341 | 12 | import report_tools | 12 | import report_tools |
1342 | 13 | from roadmap_health import ( | ||
1343 | 14 | card_health_checks, | ||
1344 | 15 | ) | ||
1345 | 16 | from lpworkitems.models import ( | ||
1346 | 17 | ROADMAP_STATUSES_MAP, | ||
1347 | 18 | ROADMAP_ORDERED_STATUSES, | ||
1348 | 19 | ) | ||
1349 | 13 | 20 | ||
1350 | 14 | 21 | ||
1351 | 15 | class WorkitemTarget(object): | 22 | class WorkitemTarget(object): |
1352 | @@ -453,6 +460,97 @@ | |||
1353 | 453 | print report_tools.fill_template( | 460 | print report_tools.fill_template( |
1354 | 454 | "workitem_list.html", data, theme=opts.theme) | 461 | "workitem_list.html", data, theme=opts.theme) |
1355 | 455 | 462 | ||
1356 | 463 | def roadmap_page(self, store, opts): | ||
1357 | 464 | if opts.lane is None: | ||
1358 | 465 | print "<h1>Error, no lane specified.</h1>" | ||
1359 | 466 | if not opts.title: | ||
1360 | 467 | title = opts.lane | ||
1361 | 468 | else: | ||
1362 | 469 | title = opts.title | ||
1363 | 470 | |||
1364 | 471 | data = self.template_data(store, opts) | ||
1365 | 472 | lane = report_tools.lane(store, opts.lane) | ||
1366 | 473 | lanes = report_tools.lanes(store) | ||
1367 | 474 | statuses = [] | ||
1368 | 475 | bp_status_totals = {'Completed': 0, 'Total': 0, 'Percentage': 0} | ||
1369 | 476 | for status, cards in report_tools.statuses(store, lane): | ||
1370 | 477 | cards_with_bps = [] | ||
1371 | 478 | for card in cards: | ||
1372 | 479 | report_tools.check_card_health(store, card_health_checks, card) | ||
1373 | 480 | blueprint_status_counts = report_tools.card_bp_status_counts( | ||
1374 | 481 | store, card.roadmap_id) | ||
1375 | 482 | total = sum(blueprint_status_counts.values()) | ||
1376 | 483 | bp_percentages = dict.fromkeys(ROADMAP_ORDERED_STATUSES, 0) | ||
1377 | 484 | bp_status_totals['Completed'] += \ | ||
1378 | 485 | blueprint_status_counts['Completed'] | ||
1379 | 486 | for key in ROADMAP_STATUSES_MAP: | ||
1380 | 487 | bp_status_totals['Total'] += blueprint_status_counts[key] | ||
1381 | 488 | if total > 0: | ||
1382 | 489 | bp_percentages[key] = ( | ||
1383 | 490 | 100.0 * blueprint_status_counts[key] / total) | ||
1384 | 491 | |||
1385 | 492 | cards_with_bps.append({'card': card, | ||
1386 | 493 | 'bp_statuses': blueprint_status_counts, | ||
1387 | 494 | 'bp_percentages': bp_percentages}) | ||
1388 | 495 | statuses.append(dict(name=status, cards=cards_with_bps)) | ||
1389 | 496 | if bp_status_totals['Total'] > 0: | ||
1390 | 497 | bp_status_totals['Percentage'] = (100 * bp_status_totals['Completed'] / | ||
1391 | 498 | bp_status_totals['Total']) | ||
1392 | 499 | |||
1393 | 500 | data.update(dict(statuses=statuses)) | ||
1394 | 501 | data.update(dict(bp_status_totals=bp_status_totals)) | ||
1395 | 502 | data.update(dict(status_order=ROADMAP_ORDERED_STATUSES)) | ||
1396 | 503 | data.update(dict(page_type="roadmap_lane")) | ||
1397 | 504 | data.update(dict(lane_title=title)) | ||
1398 | 505 | data.update(dict(lanes=lanes)) | ||
1399 | 506 | data.update(dict(chart_url=opts.chart_url)) | ||
1400 | 507 | print report_tools.fill_template( | ||
1401 | 508 | "roadmap_lane.html", data, theme=opts.theme) | ||
1402 | 509 | |||
1403 | 510 | |||
1404 | 511 | def roadmap_card(self, store, opts): | ||
1405 | 512 | if opts.card is None: | ||
1406 | 513 | print "<h1>Error, no card specified.</h1>" | ||
1407 | 514 | |||
1408 | 515 | data = self.template_data(store, opts) | ||
1409 | 516 | card = report_tools.card(store, int(opts.card)).one() | ||
1410 | 517 | health_checks = report_tools.check_card_health(store, card_health_checks, card) | ||
1411 | 518 | lane = report_tools.lane(store, None, id=card.lane_id) | ||
1412 | 519 | |||
1413 | 520 | if not opts.title: | ||
1414 | 521 | title = card.name | ||
1415 | 522 | else: | ||
1416 | 523 | title = opts.title | ||
1417 | 524 | |||
1418 | 525 | blueprints = report_tools.card_blueprints_by_status(store, card.roadmap_id) | ||
1419 | 526 | bp_status_totals = {'Completed': 0, 'Total': 0, 'Percentage': 0} | ||
1420 | 527 | bp_status_totals['Total'] = (len(blueprints['Planned']) + | ||
1421 | 528 | len(blueprints['Blocked']) + | ||
1422 | 529 | len(blueprints['In Progress']) + | ||
1423 | 530 | len(blueprints['Completed'])) | ||
1424 | 531 | bp_status_totals['Completed'] = len(blueprints['Completed']) | ||
1425 | 532 | if bp_status_totals['Total'] > 0: | ||
1426 | 533 | bp_status_totals['Percentage'] = (100 * bp_status_totals['Completed'] / | ||
1427 | 534 | bp_status_totals['Total']) | ||
1428 | 535 | |||
1429 | 536 | card_has_blueprints = bp_status_totals['Total'] > 0 | ||
1430 | 537 | |||
1431 | 538 | status_order = ROADMAP_ORDERED_STATUSES[:] | ||
1432 | 539 | status_order.reverse() | ||
1433 | 540 | |||
1434 | 541 | data.update(dict(page_type="roadmap_card")) | ||
1435 | 542 | data.update(dict(card_title=title)) | ||
1436 | 543 | data.update(dict(card=card)) | ||
1437 | 544 | data.update(dict(health_checks=health_checks)) | ||
1438 | 545 | data.update(dict(lane=lane.name)) | ||
1439 | 546 | data.update(dict(status_order=status_order)) | ||
1440 | 547 | data.update(dict(blueprints=blueprints)) | ||
1441 | 548 | data.update(dict(bp_status_totals=bp_status_totals)) | ||
1442 | 549 | data.update(dict(card_has_blueprints=card_has_blueprints)) | ||
1443 | 550 | |||
1444 | 551 | print report_tools.fill_template( | ||
1445 | 552 | "roadmap_card.html", data, theme=opts.theme) | ||
1446 | 553 | |||
1447 | 456 | 554 | ||
1448 | 457 | class WorkitemsOnDate(object): | 555 | class WorkitemsOnDate(object): |
1449 | 458 | 556 | ||
1450 | @@ -531,6 +629,10 @@ | |||
1451 | 531 | help="Include all milestones targetted to this date.") | 629 | help="Include all milestones targetted to this date.") |
1452 | 532 | optparser.add_option('--theme', dest="theme", | 630 | optparser.add_option('--theme', dest="theme", |
1453 | 533 | help="The theme to use.", default="linaro") | 631 | help="The theme to use.", default="linaro") |
1454 | 632 | optparser.add_option('--lane', | ||
1455 | 633 | help='Roadmap lane', dest='lane') | ||
1456 | 634 | optparser.add_option('--card', | ||
1457 | 635 | help='Roadmap card', dest='card') | ||
1458 | 534 | 636 | ||
1459 | 535 | (opts, args) = optparser.parse_args() | 637 | (opts, args) = optparser.parse_args() |
1460 | 536 | if not opts.database: | 638 | if not opts.database: |
1461 | 537 | 639 | ||
1462 | === added file 'jira.py' | |||
1463 | --- jira.py 1970-01-01 00:00:00 +0000 | |||
1464 | +++ jira.py 2012-10-09 09:20:30 +0000 | |||
1465 | @@ -0,0 +1,55 @@ | |||
1466 | 1 | #!/usr/bin/python | ||
1467 | 2 | # -*- coding: UTF-8 -*- | ||
1468 | 3 | |||
1469 | 4 | import base64 | ||
1470 | 5 | import optparse | ||
1471 | 6 | import simplejson | ||
1472 | 7 | import urllib2 | ||
1473 | 8 | |||
1474 | 9 | |||
1475 | 10 | def do_request(opts, relpathname, **kwargs): | ||
1476 | 11 | request = urllib2.Request('%s/%s' % (opts.jira_api_url, relpathname)) | ||
1477 | 12 | if opts.jira_username and opts.jira_password: | ||
1478 | 13 | base64string = base64.encodestring( | ||
1479 | 14 | '%s:%s' % (opts.jira_username, opts.jira_password) | ||
1480 | 15 | ).replace('\n', '') | ||
1481 | 16 | request.add_header('Authorization', 'Basic %s' % base64string) | ||
1482 | 17 | request_data = None | ||
1483 | 18 | if kwargs.keys(): | ||
1484 | 19 | request.add_header('Content-Type', 'application/json') | ||
1485 | 20 | request_data = simplejson.dumps(kwargs) | ||
1486 | 21 | response_data = urllib2.urlopen(request, request_data) | ||
1487 | 22 | return simplejson.load(response_data) | ||
1488 | 23 | |||
1489 | 24 | |||
1490 | 25 | def main(): | ||
1491 | 26 | parser = optparse.OptionParser(usage="%prog") | ||
1492 | 27 | parser.add_option("--jira-api-url", dest="jira_api_url", | ||
1493 | 28 | default="http://cards.linaro.org/rest/api/2") | ||
1494 | 29 | parser.add_option("--jira-username", dest="jira_username", | ||
1495 | 30 | default="robot") | ||
1496 | 31 | parser.add_option("--jira-password", dest="jira_password", | ||
1497 | 32 | default="cuf4moh2") | ||
1498 | 33 | opts, args = parser.parse_args() | ||
1499 | 34 | |||
1500 | 35 | # simple search | ||
1501 | 36 | print do_request(opts, 'search', maxResults=1, jql='project = CARD', | ||
1502 | 37 | fields=['summary', 'status']) | ||
1503 | 38 | |||
1504 | 39 | # information about a project | ||
1505 | 40 | #print do_request(opts, 'project/CARD') | ||
1506 | 41 | |||
1507 | 42 | # on creating issues | ||
1508 | 43 | #print do_request(opts, 'issue/createmeta?projectIds=10000') | ||
1509 | 44 | |||
1510 | 45 | # on statuses | ||
1511 | 46 | #print do_request(opts, 'status') | ||
1512 | 47 | |||
1513 | 48 | # on fields | ||
1514 | 49 | #print do_request(opts, 'field') | ||
1515 | 50 | |||
1516 | 51 | # on a security level | ||
1517 | 52 | #print do_request(opts, 'securitylevel/10000') | ||
1518 | 53 | |||
1519 | 54 | if __name__ == "__main__": | ||
1520 | 55 | main() | ||
1521 | 0 | 56 | ||
1522 | === added file 'kanban-papyrs-to-jira' | |||
1523 | --- kanban-papyrs-to-jira 1970-01-01 00:00:00 +0000 | |||
1524 | +++ kanban-papyrs-to-jira 2012-10-09 09:20:30 +0000 | |||
1525 | @@ -0,0 +1,397 @@ | |||
1526 | 1 | #!/usr/bin/python | ||
1527 | 2 | # -*- coding: UTF-8 -*- | ||
1528 | 3 | # Copyright (C) 2012 Linaro Ltd. | ||
1529 | 4 | # Author: Loïc Minier <loic.minier@linaro.org> | ||
1530 | 5 | # License: GPL-3 | ||
1531 | 6 | |||
1532 | 7 | import jira | ||
1533 | 8 | |||
1534 | 9 | from bs4 import BeautifulSoup | ||
1535 | 10 | import logging | ||
1536 | 11 | import optparse | ||
1537 | 12 | import os | ||
1538 | 13 | import re | ||
1539 | 14 | import simplejson | ||
1540 | 15 | import sys | ||
1541 | 16 | import urllib2 | ||
1542 | 17 | |||
1543 | 18 | logger = logging.getLogger("linaroroadmap") | ||
1544 | 19 | |||
1545 | 20 | def dbg(msg): | ||
1546 | 21 | '''Print out debugging message if debugging is enabled.''' | ||
1547 | 22 | logger.debug(msg) | ||
1548 | 23 | |||
1549 | 24 | class InMemCollector: | ||
1550 | 25 | def __init__(self): | ||
1551 | 26 | self.lanes = [] | ||
1552 | 27 | self.cards = [] | ||
1553 | 28 | |||
1554 | 29 | def store_lane(self, lane): | ||
1555 | 30 | self.lanes.append(lane) | ||
1556 | 31 | |||
1557 | 32 | def store_card(self, card): | ||
1558 | 33 | self.cards.append(card) | ||
1559 | 34 | |||
1560 | 35 | def lane_is_collected(self, lane_id): | ||
1561 | 36 | for l in self.lanes: | ||
1562 | 37 | if l.lane_id == lane_id: | ||
1563 | 38 | return True | ||
1564 | 39 | return False | ||
1565 | 40 | |||
1566 | 41 | def kanban_request(opts, relpathname, method='GET', **kwargs): | ||
1567 | 42 | request = urllib2.Request( | ||
1568 | 43 | '%s/%s.json?_m=%s' % (opts.kanban_api_url, relpathname, method)) | ||
1569 | 44 | if opts.kanban_token: | ||
1570 | 45 | request.add_header('X-KanbanToolToken', opts.kanban_token) | ||
1571 | 46 | request_data = None | ||
1572 | 47 | if kwargs.keys(): | ||
1573 | 48 | request.add_header('Content-Type', 'application/json') | ||
1574 | 49 | request_data = simplejson.dumps(kwargs) | ||
1575 | 50 | print request_data | ||
1576 | 51 | response_data = urllib2.urlopen(request, request_data) | ||
1577 | 52 | return simplejson.load(response_data) | ||
1578 | 53 | |||
1579 | 54 | def get_papyrs_page(papyrs_url, token): | ||
1580 | 55 | url = '%s?json&auth_token=%s' % (papyrs_url, token) | ||
1581 | 56 | return simplejson.load(urllib2.urlopen(url)) | ||
1582 | 57 | |||
1583 | 58 | def get_kanban_boards(opts): | ||
1584 | 59 | return kanban_request(opts, 'boards') | ||
1585 | 60 | |||
1586 | 61 | def get_kanban_board(opts, board_id): | ||
1587 | 62 | return kanban_request(opts, 'boards/%s' % board_id) | ||
1588 | 63 | |||
1589 | 64 | def get_kanban_tasks(opts, board_id): | ||
1590 | 65 | return kanban_request(opts, 'boards/%s/tasks' % board_id) | ||
1591 | 66 | |||
1592 | 67 | def get_kanban_task(opts, board_id, task_id): | ||
1593 | 68 | return kanban_request(opts, 'boards/%s/tasks/%s' % (board_id, task_id)) | ||
1594 | 69 | |||
1595 | 70 | def put_kanban_task(opts, board_id, task_id, **kwargs): | ||
1596 | 71 | return kanban_request(opts, 'boards/%s/tasks/%s' % (board_id, task_id), method='PUT', **kwargs) | ||
1597 | 72 | |||
1598 | 73 | def main(): | ||
1599 | 74 | # TODO: add support for passing a card id or papyrs URL | ||
1600 | 75 | parser = optparse.OptionParser(usage="%prog") | ||
1601 | 76 | parser.add_option("--kanban-api-url", dest="kanban_api_url", | ||
1602 | 77 | default="https://linaro.kanbantool.com/api/v1") | ||
1603 | 78 | parser.add_option("--jira-api-url", dest="jira_api_url", | ||
1604 | 79 | default="http://cards.linaro.org/rest/api/2") | ||
1605 | 80 | # defaults are read-only ~linaro-infrastructure tokens | ||
1606 | 81 | parser.add_option("--kanban-token", dest="kanban_token", | ||
1607 | 82 | default="9F209W7Y84TE") | ||
1608 | 83 | parser.add_option("--papyrs-token", dest="papyrs_token", | ||
1609 | 84 | default="868e9088b53c") | ||
1610 | 85 | parser.add_option("--jira-username", dest="jira_username", | ||
1611 | 86 | default="robot") | ||
1612 | 87 | parser.add_option("--jira-password", dest="jira_password", | ||
1613 | 88 | default="cuf4moh2") | ||
1614 | 89 | parser.add_option("--jira-project", dest="jira_project_name", | ||
1615 | 90 | default="CARD") | ||
1616 | 91 | parser.add_option("--jira-issuetype", dest="jira_issuetype_name", | ||
1617 | 92 | default="Roadmap Card") | ||
1618 | 93 | parser.add_option("--board-id", dest="board_id", default="10721") | ||
1619 | 94 | parser.add_option('--debug', action='store_true', default=True, | ||
1620 | 95 | help='Enable debugging output in parsing routines') | ||
1621 | 96 | parser.add_option('--board', | ||
1622 | 97 | help='Board id at Kanban Tool', dest='board', default='10721') | ||
1623 | 98 | opts, args = parser.parse_args() | ||
1624 | 99 | |||
1625 | 100 | if os.environ.get("DEBUG", None) is not None: | ||
1626 | 101 | opts.debug = True | ||
1627 | 102 | |||
1628 | 103 | if len(args) != 0: | ||
1629 | 104 | parser.error("You can not pass any argument") | ||
1630 | 105 | |||
1631 | 106 | if opts.kanban_token is None: | ||
1632 | 107 | sys.stderr.write("No Kanbantool API token given") | ||
1633 | 108 | if opts.papyrs_token is None: | ||
1634 | 109 | sys.stderr.write("No Papyrs API token given") | ||
1635 | 110 | |||
1636 | 111 | # logging setup | ||
1637 | 112 | logger = logging.getLogger() | ||
1638 | 113 | ch = logging.StreamHandler() | ||
1639 | 114 | formatter = logging.Formatter("%(asctime)s %(message)s") | ||
1640 | 115 | ch.setFormatter(formatter) | ||
1641 | 116 | logger.addHandler(ch) | ||
1642 | 117 | if opts.debug: | ||
1643 | 118 | logger.setLevel(logging.DEBUG) | ||
1644 | 119 | |||
1645 | 120 | boards = get_kanban_boards(opts) | ||
1646 | 121 | # dump | ||
1647 | 122 | for board in boards: | ||
1648 | 123 | board = board['board'] | ||
1649 | 124 | dbg('Found board "%s" with id %s' % (board['name'], board['id'])) | ||
1650 | 125 | dbg('') | ||
1651 | 126 | |||
1652 | 127 | assert 1 == len(filter(lambda b: str(b['board']['id']) == opts.board_id, boards)), \ | ||
1653 | 128 | 'Expected exactly one board with id %s' % opts.board_id | ||
1654 | 129 | |||
1655 | 130 | board = get_kanban_board(opts, opts.board_id) | ||
1656 | 131 | board = board['board'] | ||
1657 | 132 | |||
1658 | 133 | workflow_stages = board['workflow_stages'] | ||
1659 | 134 | # ideally order wouldn't matter but the "position" field of our workflow stages | ||
1660 | 135 | # is bogus (always 1) so we can't use it | ||
1661 | 136 | leaf_workflow_stages = [] | ||
1662 | 137 | for workflow_stage in workflow_stages: | ||
1663 | 138 | childs = filter( | ||
1664 | 139 | lambda ws: ws['parent_id'] == workflow_stage['id'], workflow_stages) | ||
1665 | 140 | if not childs: | ||
1666 | 141 | # build a name list for leaf workflow stages | ||
1667 | 142 | name = [] | ||
1668 | 143 | id = workflow_stage['id'] | ||
1669 | 144 | while True: | ||
1670 | 145 | ws = filter(lambda ws: ws['id'] == id, workflow_stages)[0] | ||
1671 | 146 | if ws['name'] is None: | ||
1672 | 147 | break | ||
1673 | 148 | name = [ws['name']] + name | ||
1674 | 149 | id = ws['parent_id'] | ||
1675 | 150 | leaf_workflow_stages.append((workflow_stage['id'], name)) | ||
1676 | 151 | # dump | ||
1677 | 152 | for id, name in leaf_workflow_stages: | ||
1678 | 153 | pretty_name = "/".join(name) | ||
1679 | 154 | dbg('Found leaf workflow stage %s with id %s' % (pretty_name, id)) | ||
1680 | 155 | dbg('') | ||
1681 | 156 | |||
1682 | 157 | card_types = board['card_types'] | ||
1683 | 158 | # dump | ||
1684 | 159 | for card_type in card_types: | ||
1685 | 160 | dbg('Found card type %s with id %s' % (card_type['name'], card_type['id'])) | ||
1686 | 161 | dbg('') | ||
1687 | 162 | |||
1688 | 163 | def get_leaf_workflow_stage_name(worfklow_stage_id): | ||
1689 | 164 | return [name | ||
1690 | 165 | for id, name | ||
1691 | 166 | in leaf_workflow_stages | ||
1692 | 167 | if id == worfklow_stage_id][0] | ||
1693 | 168 | |||
1694 | 169 | def get_card_type_name(card_type_id): | ||
1695 | 170 | return [card_type['name'] | ||
1696 | 171 | for card_type | ||
1697 | 172 | in card_types | ||
1698 | 173 | if card_type['id'] == card_type_id][0] | ||
1699 | 174 | |||
1700 | 175 | def filter_tasks(task): | ||
1701 | 176 | # ignore tasks in Legend and Deferred workflow stages | ||
1702 | 177 | lwsn = get_leaf_workflow_stage_name(task['workflow_stage_id']) | ||
1703 | 178 | if lwsn in (['Legend'], ['Deferred']): | ||
1704 | 179 | dbg('Ignoring task %s in workflow stage %s' | ||
1705 | 180 | % (task['external_id'], "/".join(lwsn))) | ||
1706 | 181 | return False | ||
1707 | 182 | # ignore tasks with Summit and Unknown card type names | ||
1708 | 183 | card_type_name = get_card_type_name(task['card_type_id']) | ||
1709 | 184 | if card_type_name in ('Summits', 'Unknown'): | ||
1710 | 185 | dbg('Ignoring task %s with card type name %s' | ||
1711 | 186 | % (task['external_id'], card_type_name)) | ||
1712 | 187 | return False | ||
1713 | 188 | return True | ||
1714 | 189 | |||
1715 | 190 | tasks = get_kanban_tasks(opts, opts.board_id) | ||
1716 | 191 | tasks = [t['task'] for t in tasks if filter_tasks(t['task'])] | ||
1717 | 192 | # dump | ||
1718 | 193 | for task in tasks: | ||
1719 | 194 | dbg('Found task %s with id %s, workflow_stage_id %s, priority %s, ' | ||
1720 | 195 | 'card_type_id %s, custom_field_2 %s, and external_id %s' | ||
1721 | 196 | % (task['name'], task['id'], task['workflow_stage_id'], | ||
1722 | 197 | task['priority'], task['card_type_id'], | ||
1723 | 198 | task['custom_field_2'], task['external_id'])) | ||
1724 | 199 | |||
1725 | 200 | CARD_TYPE_NAMES_TO_PREFIXES = { | ||
1726 | 201 | 'LAVA': 'LAVA', | ||
1727 | 202 | 'Android': 'ANDROID', | ||
1728 | 203 | 'Linux & Ubuntu': 'LINUX', | ||
1729 | 204 | 'TCWG': 'TCWG', | ||
1730 | 205 | 'GWG': 'GWG', | ||
1731 | 206 | 'MMWG': 'MMWG', | ||
1732 | 207 | 'KWG': 'KWG', | ||
1733 | 208 | 'PMWG': 'PMWG', | ||
1734 | 209 | 'OCTO': 'OCTO', | ||
1735 | 210 | } | ||
1736 | 211 | |||
1737 | 212 | # check consistency of external_id with external_link and custom_field_2 | ||
1738 | 213 | # (papyrs URL), and of external_id with card_type name | ||
1739 | 214 | for task in tasks: | ||
1740 | 215 | external_id = task['external_id'] | ||
1741 | 216 | papyrs_url = task['custom_field_2'] | ||
1742 | 217 | external_link = task['external_link'] | ||
1743 | 218 | assert papyrs_url == 'https://linaro.papyrs.com/%s' % external_id, \ | ||
1744 | 219 | 'Incorrect papyrs URL %s for task %s' % (papyrs_url, external_id) | ||
1745 | 220 | assert external_link == 'http://status.linaro.org/card/%s' % external_id, \ | ||
1746 | 221 | 'Incorrect external_link %s for task %s' % (external_link, external_id) | ||
1747 | 222 | card_type_name = get_card_type_name(task['card_type_id']) | ||
1748 | 223 | prefix = CARD_TYPE_NAMES_TO_PREFIXES[card_type_name] | ||
1749 | 224 | assert external_id.startswith(prefix), \ | ||
1750 | 225 | 'Incorrect card type prefix %s for task %s' % (prefix, external_id) | ||
1751 | 226 | |||
1752 | 227 | # verify papyrs pages | ||
1753 | 228 | #for task in tasks: | ||
1754 | 229 | for task in []: | ||
1755 | 230 | external_id = task['external_id'] | ||
1756 | 231 | papyrs_url = task['custom_field_2'] | ||
1757 | 232 | dbg('Fetching card %s' % task['name']) | ||
1758 | 233 | papyrs_json = get_papyrs_page(papyrs_url, opts.papyrs_token) | ||
1759 | 234 | |||
1760 | 235 | try: | ||
1761 | 236 | # number of columns | ||
1762 | 237 | ncols = len(papyrs_json) | ||
1763 | 238 | assert ncols == 2, 'Expected exactly two columns but got %s' % len(ncols) | ||
1764 | 239 | |||
1765 | 240 | # first column | ||
1766 | 241 | col0 = papyrs_json[0] | ||
1767 | 242 | p0 = col0[0] | ||
1768 | 243 | classname = p0['classname'] | ||
1769 | 244 | assert classname == 'Heading', \ | ||
1770 | 245 | "First paragraph of first column should be a a heading but is %s" % classname | ||
1771 | 246 | assert p0['text'] == p0['html'], \ | ||
1772 | 247 | "Expected text (%s) and HTML (%s) to be identical for first heading" % (p0['text'], p0['html']) | ||
1773 | 248 | assert p0['text'] == task['name'], \ | ||
1774 | 249 | 'Mismatch between first heading (%s) and task (%s)' % (p0['text'], task['name']) | ||
1775 | 250 | for p in col0[1:-2]: | ||
1776 | 251 | assert p['classname'] in ('Heading', 'Paragraph'), \ | ||
1777 | 252 | 'Got unexpected classname %s' % p['classname'] | ||
1778 | 253 | if p['classname'] == 'Heading': | ||
1779 | 254 | assert p['text'] == p['html'], \ | ||
1780 | 255 | 'Expected heading HTML (%s) to match text (%s)' % (p['html'], p['text']) | ||
1781 | 256 | if p['classname'] == 'Paragraph': | ||
1782 | 257 | soup = BeautifulSoup('<root>%s</root>' % p['html'], 'xml') | ||
1783 | 258 | for tag in soup.root.find_all(True): | ||
1784 | 259 | assert tag.name in ('font', 'b', 'a', 'ul', 'ol', 'li', 'br', 'p', 'span', 'div', 'u'), 'Unexpected tag %s' % tag.name | ||
1785 | 260 | |||
1786 | 261 | # second column | ||
1787 | 262 | pm1 = col0[-1] | ||
1788 | 263 | assert pm1['classname'] == 'Discuss', \ | ||
1789 | 264 | 'Expect last classname to be Discuss but got %s' % pm1['classname'] | ||
1790 | 265 | |||
1791 | 266 | col1 = papyrs_json[1] | ||
1792 | 267 | skip_next_paragraph = False | ||
1793 | 268 | nattachs = 0 | ||
1794 | 269 | for p in col1: | ||
1795 | 270 | if p['classname'] in ('Checklist', 'Twitters', 'Navigation'): | ||
1796 | 271 | pass | ||
1797 | 272 | elif p['classname'] == 'Attachment': | ||
1798 | 273 | nattachs += 1 | ||
1799 | 274 | elif p['classname'] == 'Heading' and p['text'] == 'Attachments': | ||
1800 | 275 | pass | ||
1801 | 276 | elif p['classname'] == 'Heading' and p['text'] == 'Metadata': | ||
1802 | 277 | skip_next_paragraph = True | ||
1803 | 278 | elif p['classname'] == 'Paragraph' and skip_next_paragraph: | ||
1804 | 279 | skip_next_paragraph = False | ||
1805 | 280 | else: | ||
1806 | 281 | assert False, 'Unexpected paragraph %s' % p | ||
1807 | 282 | if nattachs > 0: | ||
1808 | 283 | dbg('Found %s attachment(s) on card %s' % (nattachs, task['name'])) | ||
1809 | 284 | except Exception, e: | ||
1810 | 285 | dbg(e) | ||
1811 | 286 | |||
1812 | 287 | # query jira data | ||
1813 | 288 | jira_project_result = jira.do_request(opts, 'project/%s' % opts.jira_project_name) | ||
1814 | 289 | jira_statuses_result = jira.do_request(opts, 'status') | ||
1815 | 290 | jira_fields_result = jira.do_request(opts, 'field') | ||
1816 | 291 | # not allowed | ||
1817 | 292 | #jira_securitylevels_result = jira.do_request(opts, 'securitylevel') | ||
1818 | 293 | jira_priorities_result = jira.do_request(opts, 'priority') | ||
1819 | 294 | |||
1820 | 295 | def search_jira_id(jira_result, name): | ||
1821 | 296 | return [r['id'] for r in jira_result if r['name'] == name][0] | ||
1822 | 297 | |||
1823 | 298 | # http://cards.linaro.org/rest/api/2/project/CARD has id 10000 | ||
1824 | 299 | #jira_project_id = 10000 | ||
1825 | 300 | jira_project_id = jira_project_result['id'] | ||
1826 | 301 | # issuetype for "Roadmap Card" http://cards.linaro.org/rest/api/2/issuetype/9 | ||
1827 | 302 | #jira_issuetype_id = 9 | ||
1828 | 303 | jira_issuetype_id = search_jira_id(jira_project_result['issueTypes'], opts.jira_issuetype_name) | ||
1829 | 304 | dbg('Found id %s for %s issueType' % (jira_issuetype_id, opts.jira_issuetype_name)) | ||
1830 | 305 | for component in jira_project_result['components']: | ||
1831 | 306 | dbg('Found component %s with id %s' % (component['name'], component['id'])) | ||
1832 | 307 | for version in jira_project_result['versions']: | ||
1833 | 308 | dbg('Found version %s with id %s' % (version['name'], version['id'])) | ||
1834 | 309 | for status in jira_statuses_result: | ||
1835 | 310 | dbg('Found status %s with id %s' % (status['name'], status['id'])) | ||
1836 | 311 | |||
1837 | 312 | TYPE_TO_COMPONENT = { | ||
1838 | 313 | 'LAVA': 'LAVA', | ||
1839 | 314 | 'Android': 'Android', | ||
1840 | 315 | 'Linux & Ubuntu': 'Linux & Ubuntu', | ||
1841 | 316 | 'TCWG': 'Toolchain WG', | ||
1842 | 317 | 'GWG': 'Graphics WG', | ||
1843 | 318 | 'MMWG': 'Multimedia WG', | ||
1844 | 319 | 'KWG': 'Kernel WG', | ||
1845 | 320 | 'PMWG': 'Power Management WG', | ||
1846 | 321 | 'OCTO': 'OCTO', | ||
1847 | 322 | } | ||
1848 | 323 | |||
1849 | 324 | STAGE_TO_STATUS = { | ||
1850 | 325 | 'New/Draft': 'New/Drafting', | ||
1851 | 326 | 'New/Needs Work': 'New/Drafting', | ||
1852 | 327 | 'New/TSC Reviewed': 'New/Reviewed', | ||
1853 | 328 | '2012Q1/Done': 'Approved', | ||
1854 | 329 | '2012Q1/Ready': 'Approved', | ||
1855 | 330 | '2012Q2/Forecast': 'Approved', | ||
1856 | 331 | '2012Q3/Forecast': 'Approved', | ||
1857 | 332 | '2012H2/Forecast': 'Approved', | ||
1858 | 333 | '2013/Forecast': 'Approved', | ||
1859 | 334 | } | ||
1860 | 335 | |||
1861 | 336 | STAGE_TO_VERSION = { | ||
1862 | 337 | 'New/Draft': None, | ||
1863 | 338 | 'New/Needs Work': None, | ||
1864 | 339 | 'New/TSC Reviewed': None, | ||
1865 | 340 | '2012Q1/Done': '2012Q1', | ||
1866 | 341 | '2012Q1/Ready': '2012Q1', | ||
1867 | 342 | '2012Q2/Forecast': '2012Q2', | ||
1868 | 343 | '2012Q3/Forecast': '2012Q3', | ||
1869 | 344 | '2012H2/Forecast': '2012H2', | ||
1870 | 345 | '2013/Forecast': '2013', | ||
1871 | 346 | } | ||
1872 | 347 | |||
1873 | 348 | PRIORITY_MAP = { | ||
1874 | 349 | -1: 'Minor', | ||
1875 | 350 | 0: 'Major', | ||
1876 | 351 | 1: 'Critical', | ||
1877 | 352 | } | ||
1878 | 353 | |||
1879 | 354 | # actual copy | ||
1880 | 355 | for task in tasks: | ||
1881 | 356 | print task['name'] | ||
1882 | 357 | print task['external_id'] | ||
1883 | 358 | external_id = task['external_id'] | ||
1884 | 359 | papyrs_url = task['custom_field_2'] | ||
1885 | 360 | papyrs_json = get_papyrs_page(papyrs_url, opts.papyrs_token) | ||
1886 | 361 | # first column | ||
1887 | 362 | col0 = papyrs_json[0] | ||
1888 | 363 | p0 = col0[0] | ||
1889 | 364 | # assemble HTML of description | ||
1890 | 365 | html = "" | ||
1891 | 366 | for p in col0[1:-1]: | ||
1892 | 367 | if p['classname'] == 'Heading': | ||
1893 | 368 | html += '<h1>%s</h1>\n' % p['text'] | ||
1894 | 369 | if p['classname'] == 'Paragraph': | ||
1895 | 370 | html += '%s\n' % p['html'] | ||
1896 | 371 | html = '{html}\n%s{html}\n' % html | ||
1897 | 372 | |||
1898 | 373 | stage = "/".join(get_leaf_workflow_stage_name(task['workflow_stage_id'])) | ||
1899 | 374 | status = STAGE_TO_STATUS[stage] | ||
1900 | 375 | version = STAGE_TO_VERSION[stage] | ||
1901 | 376 | type_name = get_card_type_name(task['card_type_id']) | ||
1902 | 377 | component = TYPE_TO_COMPONENT[type_name] | ||
1903 | 378 | priority = PRIORITY_MAP[task['priority']] | ||
1904 | 379 | |||
1905 | 380 | fields = {'project': {'id': jira_project_id}, | ||
1906 | 381 | 'summary': task['name'], | ||
1907 | 382 | 'issuetype': {'id': jira_issuetype_id}, | ||
1908 | 383 | 'description': html, | ||
1909 | 384 | 'components': [{'id': search_jira_id(jira_project_result['components'], component)}], | ||
1910 | 385 | search_jira_id(jira_fields_result, 'Alias Card ID'): task['external_id'], | ||
1911 | 386 | # XXX hardcoded default security level; also, can't set security level to Public via API | ||
1912 | 387 | #'security': {'id': search_jira_id(jira_securitylevels_result, 'Public')}, | ||
1913 | 388 | 'priority': {'id': search_jira_id(jira_priorities_result, priority)}, | ||
1914 | 389 | } | ||
1915 | 390 | #'status': search_jira_id(jira_statuses_result, status), | ||
1916 | 391 | if version: | ||
1917 | 392 | fields['fixVersions'] = [{'id': search_jira_id(jira_project_result['versions'], version)}] | ||
1918 | 393 | dbg('Uploading card %s' % fields) | ||
1919 | 394 | print jira.do_request(opts, 'issue', fields=fields) | ||
1920 | 395 | |||
1921 | 396 | if __name__ == "__main__": | ||
1922 | 397 | main() | ||
1923 | 0 | 398 | ||
1924 | === modified file 'lpworkitems/collect.py' | |||
1925 | --- lpworkitems/collect.py 2011-12-06 15:20:43 +0000 | |||
1926 | +++ lpworkitems/collect.py 2012-10-09 09:20:30 +0000 | |||
1927 | @@ -26,6 +26,7 @@ | |||
1928 | 26 | # "interesting") | 26 | # "interesting") |
1929 | 27 | workitem_precedence = [None, u'done', u'postponed', u'blocked', u'todo', u'inprogress'] | 27 | workitem_precedence = [None, u'done', u'postponed', u'blocked', u'todo', u'inprogress'] |
1930 | 28 | 28 | ||
1931 | 29 | |||
1932 | 29 | class PersonCache(object): | 30 | class PersonCache(object): |
1933 | 30 | """A cache of Launchpad accounts.""" | 31 | """A cache of Launchpad accounts.""" |
1934 | 31 | 32 | ||
1935 | @@ -91,31 +92,29 @@ | |||
1936 | 91 | project_name = self.lp.load(milestone.target.self_link).name | 92 | project_name = self.lp.load(milestone.target.self_link).name |
1937 | 92 | existing_milestone = self.store.find( | 93 | existing_milestone = self.store.find( |
1938 | 93 | models.Milestone, | 94 | models.Milestone, |
1940 | 94 | models.Milestone.name==milestone_name).any() | 95 | models.Milestone.name == milestone_name).any() |
1941 | 95 | if existing_milestone is not None: | 96 | if existing_milestone is not None: |
1945 | 96 | # We only store a milestone for the first project that we | 97 | # TODO: We now allow for the same milestone in different projects |
1946 | 97 | # see it in. | 98 | # to have different due dates (within reasonable limits). |
1947 | 98 | # Check that the dates match, otherwise it's very confusing | 99 | # However, the old algorithm which relied on all due dates to match, |
1948 | 100 | # only stores a single milestone with due date for the first project | ||
1949 | 101 | # that we see it in (essentially, random one). This is expected to | ||
1950 | 102 | # be elaborated shortly. Then this block can be removed completely, | ||
1951 | 103 | # until then it is left as a reminder. | ||
1952 | 99 | target_date = None | 104 | target_date = None |
1953 | 100 | if milestone.date_targeted is not None: | 105 | if milestone.date_targeted is not None: |
1955 | 101 | target_date = milestone.date_targeted.strftime("%Y-%m-%d") | 106 | target_date = milestone.date_targeted.strftime("%Y-%m") |
1956 | 102 | existing_target_date = existing_milestone.due_date | 107 | existing_target_date = existing_milestone.due_date |
1957 | 103 | if isinstance(existing_target_date, datetime.date): | 108 | if isinstance(existing_target_date, datetime.date): |
1959 | 104 | existing_target_date = existing_target_date.strftime("%Y-%m-%d") | 109 | existing_target_date = existing_target_date.strftime("%Y-%m") |
1960 | 105 | if (target_date and existing_target_date != target_date): | 110 | if (target_date and existing_target_date != target_date): |
1974 | 106 | error = MilestoneError( | 111 | existing_milestone.due_date = milestone.date_targeted |
1975 | 107 | milestone, | 112 | else: |
1976 | 108 | "Milestone %s (%s) has due_date %s but %s already has " | 113 | db_milestone = models.Milestone() |
1977 | 109 | "the due date as %s" % (milestone.name, project_name, | 114 | db_milestone.name = milestone_name |
1978 | 110 | target_date, existing_milestone.project, | 115 | db_milestone.due_date = milestone.date_targeted |
1979 | 111 | existing_target_date)) | 116 | db_milestone.project = project_name |
1980 | 112 | self.error_collector.store_error(error) | 117 | self.store.add(db_milestone) |
1968 | 113 | return | ||
1969 | 114 | db_milestone = models.Milestone() | ||
1970 | 115 | db_milestone.name = milestone_name | ||
1971 | 116 | db_milestone.due_date = milestone.date_targeted | ||
1972 | 117 | db_milestone.project = project_name | ||
1973 | 118 | self.store.add(db_milestone) | ||
1981 | 119 | 118 | ||
1982 | 120 | def store_lp_milestones(self, milestones): | 119 | def store_lp_milestones(self, milestones): |
1983 | 121 | if self.store.find(models.Milestone).any() is not None: | 120 | if self.store.find(models.Milestone).any() is not None: |
1984 | @@ -270,7 +269,7 @@ | |||
1985 | 270 | if ']' in desc: | 269 | if ']' in desc: |
1986 | 271 | off = desc.index(']') | 270 | off = desc.index(']') |
1987 | 272 | assignee_name = desc[1:off] | 271 | assignee_name = desc[1:off] |
1989 | 273 | desc = desc[off+1:].strip() | 272 | desc = desc[off + 1:].strip() |
1990 | 274 | else: | 273 | else: |
1991 | 275 | self.error_collector.record_blueprint_error( | 274 | self.error_collector.record_blueprint_error( |
1992 | 276 | self.blueprint, | 275 | self.blueprint, |
1993 | @@ -314,6 +313,7 @@ | |||
1994 | 314 | def get_workitem_if_tracked(self, task, projects=None, | 313 | def get_workitem_if_tracked(self, task, projects=None, |
1995 | 315 | distro_release=None): | 314 | distro_release=None): |
1996 | 316 | target = self.lp.load(task.target.self_link) | 315 | target = self.lp.load(task.target.self_link) |
1997 | 316 | |||
1998 | 317 | def get_rtype(obj): | 317 | def get_rtype(obj): |
1999 | 318 | return urllib.splittag(obj.resource_type_link)[1] | 318 | return urllib.splittag(obj.resource_type_link)[1] |
2000 | 319 | rtype = get_rtype(target) | 319 | rtype = get_rtype(target) |
2001 | 320 | 320 | ||
2002 | === added file 'lpworkitems/collect_roadmap.py' | |||
2003 | --- lpworkitems/collect_roadmap.py 1970-01-01 00:00:00 +0000 | |||
2004 | +++ lpworkitems/collect_roadmap.py 2012-10-09 09:20:30 +0000 | |||
2005 | @@ -0,0 +1,71 @@ | |||
2006 | 1 | import datetime | ||
2007 | 2 | |||
2008 | 3 | from lpworkitems import models_roadmap | ||
2009 | 4 | from utils import unicode_or_None | ||
2010 | 5 | |||
2011 | 6 | |||
2012 | 7 | class CollectorStore(object): | ||
2013 | 8 | |||
2014 | 9 | def __init__(self, store, base_url, error_collector): | ||
2015 | 10 | self.store = store | ||
2016 | 11 | self.base_url = base_url | ||
2017 | 12 | self.error_collector = error_collector | ||
2018 | 13 | |||
2019 | 14 | def _clear_all(self, *find_args): | ||
2020 | 15 | self.store.find(*find_args).remove() | ||
2021 | 16 | |||
2022 | 17 | def clear_lanes(self): | ||
2023 | 18 | self._clear_all(models_roadmap.Lane) | ||
2024 | 19 | |||
2025 | 20 | def clear_cards(self): | ||
2026 | 21 | self._clear_all(models_roadmap.Card) | ||
2027 | 22 | |||
2028 | 23 | def store_lane(self, lane): | ||
2029 | 24 | self.store.add(lane) | ||
2030 | 25 | |||
2031 | 26 | def store_card(self, card): | ||
2032 | 27 | self.store.add(card) | ||
2033 | 28 | |||
2034 | 29 | def clear_todays_blueprint_daily_count_per_state(self): | ||
2035 | 30 | self._clear_all( | ||
2036 | 31 | models_roadmap.BlueprintDailyCountPerState, | ||
2037 | 32 | models_roadmap.BlueprintDailyCountPerState.day == datetime.date.today()) | ||
2038 | 33 | |||
2039 | 34 | def store_roadmap_bp_count_per_state(self): | ||
2040 | 35 | query = """ | ||
2041 | 36 | SELECT implementation, lane_id, count(*) | ||
2042 | 37 | FROM specs | ||
2043 | 38 | JOIN meta on spec = specs.name | ||
2044 | 39 | JOIN card on roadmap_id = value | ||
2045 | 40 | WHERE key = 'Roadmap id' | ||
2046 | 41 | GROUP BY implementation, lane_id | ||
2047 | 42 | """ | ||
2048 | 43 | day = datetime.date.today() | ||
2049 | 44 | result = self.store.execute(query) | ||
2050 | 45 | for status, lane_id, count in result: | ||
2051 | 46 | obj = models_roadmap.BlueprintDailyCountPerState() | ||
2052 | 47 | obj.day = day | ||
2053 | 48 | obj.status = status | ||
2054 | 49 | obj.lane_id = lane_id | ||
2055 | 50 | obj.count = count | ||
2056 | 51 | self.store.add(obj) | ||
2057 | 52 | |||
2058 | 53 | def lane_is_collected(self, lane_id): | ||
2059 | 54 | return self.store.find(models_roadmap.Lane, models_roadmap. | ||
2060 | 55 | Lane.lane_id == lane_id).one() is not None | ||
2061 | 56 | |||
2062 | 57 | |||
2063 | 58 | def get_json_item(data, item_name): | ||
2064 | 59 | item = data[item_name] | ||
2065 | 60 | if item is not None: | ||
2066 | 61 | item = item.strip() | ||
2067 | 62 | return unicode_or_None(item) | ||
2068 | 63 | |||
2069 | 64 | |||
2070 | 65 | def lookup_kanban_priority(numeric_priority): | ||
2071 | 66 | priority_lookup = {-1: "low", | ||
2072 | 67 | 0: "normal", | ||
2073 | 68 | 1: "high"} | ||
2074 | 69 | assert numeric_priority in priority_lookup, ( | ||
2075 | 70 | "Priority '%s' is unknown." % numeric_priority) | ||
2076 | 71 | return unicode_or_None(priority_lookup[numeric_priority]) | ||
2077 | 0 | 72 | ||
2078 | === modified file 'lpworkitems/database.py' | |||
2079 | --- lpworkitems/database.py 2011-06-24 19:09:26 +0000 | |||
2080 | +++ lpworkitems/database.py 2012-10-09 09:20:30 +0000 | |||
2081 | @@ -7,13 +7,13 @@ | |||
2082 | 7 | store.execute('''CREATE TABLE version ( | 7 | store.execute('''CREATE TABLE version ( |
2083 | 8 | db_layout_ref INT NOT NULL | 8 | db_layout_ref INT NOT NULL |
2084 | 9 | )''') | 9 | )''') |
2086 | 10 | store.execute('''INSERT INTO version VALUES (10)''') | 10 | store.execute('''INSERT INTO version VALUES (15)''') |
2087 | 11 | 11 | ||
2088 | 12 | store.execute('''CREATE TABLE specs ( | 12 | store.execute('''CREATE TABLE specs ( |
2089 | 13 | name VARCHAR(255) PRIMARY KEY, | 13 | name VARCHAR(255) PRIMARY KEY, |
2090 | 14 | url VARCHAR(1000) NOT NULL, | 14 | url VARCHAR(1000) NOT NULL, |
2091 | 15 | priority CHAR(20), | 15 | priority CHAR(20), |
2093 | 16 | implementation CHAR(30), | 16 | implementation CHAR(30) NOT NULL, |
2094 | 17 | assignee CHAR(50), | 17 | assignee CHAR(50), |
2095 | 18 | team CHAR(50), | 18 | team CHAR(50), |
2096 | 19 | status VARCHAR(5000) NOT NULL, | 19 | status VARCHAR(5000) NOT NULL, |
2097 | @@ -25,6 +25,13 @@ | |||
2098 | 25 | roadmap_notes VARCHAR(5000) | 25 | roadmap_notes VARCHAR(5000) |
2099 | 26 | )''') | 26 | )''') |
2100 | 27 | 27 | ||
2101 | 28 | store.execute('''CREATE TABLE spec_daily_count_per_state ( | ||
2102 | 29 | status VARCHAR(5000) NOT NULL, | ||
2103 | 30 | day DATE NOT NULL, | ||
2104 | 31 | lane_id REFERENCES lane(lane_id), | ||
2105 | 32 | count INT NOT NULL | ||
2106 | 33 | )''') | ||
2107 | 34 | |||
2108 | 28 | store.execute('''CREATE TABLE work_items ( | 35 | store.execute('''CREATE TABLE work_items ( |
2109 | 29 | description VARCHAR(1000) NOT NULL, | 36 | description VARCHAR(1000) NOT NULL, |
2110 | 30 | spec VARCHAR(255) REFERENCES specs(name), | 37 | spec VARCHAR(255) REFERENCES specs(name), |
2111 | @@ -90,6 +97,30 @@ | |||
2112 | 90 | display_name VARCHAR(50) | 97 | display_name VARCHAR(50) |
2113 | 91 | )''') | 98 | )''') |
2114 | 92 | 99 | ||
2115 | 100 | store.execute('''CREATE TABLE lane ( | ||
2116 | 101 | name VARCHAR(200) NOT NULL, | ||
2117 | 102 | lane_id NOT NULL, | ||
2118 | 103 | is_current BOOLEAN, | ||
2119 | 104 | cards REFERENCES card(card_id) | ||
2120 | 105 | )''') | ||
2121 | 106 | |||
2122 | 107 | store.execute('''CREATE TABLE card ( | ||
2123 | 108 | name VARCHAR(200) NOT NULL, | ||
2124 | 109 | card_id NOT NULL, | ||
2125 | 110 | url VARCHAR(200), | ||
2126 | 111 | is_healthy BOOLEAN, | ||
2127 | 112 | status VARCHAR(50), | ||
2128 | 113 | team VARCHAR(50), | ||
2129 | 114 | priority VARCHAR(50), | ||
2130 | 115 | size VARCHAR(50), | ||
2131 | 116 | sponsor VARCHAR(50), | ||
2132 | 117 | contact VARCHAR(50), | ||
2133 | 118 | description BLOB, | ||
2134 | 119 | acceptance_criteria BLOB, | ||
2135 | 120 | roadmap_id VARCHAR(50), | ||
2136 | 121 | lane_id REFERENCES lane(lane_id) | ||
2137 | 122 | )''') | ||
2138 | 123 | |||
2139 | 93 | 124 | ||
2140 | 94 | def upgrade_if_needed(store): | 125 | def upgrade_if_needed(store): |
2141 | 95 | # upgrade DB layout | 126 | # upgrade DB layout |
2142 | @@ -177,7 +208,47 @@ | |||
2143 | 177 | )''') | 208 | )''') |
2144 | 178 | store.execute('UPDATE version SET db_layout_ref = 10') | 209 | store.execute('UPDATE version SET db_layout_ref = 10') |
2145 | 179 | ver = 10 | 210 | ver = 10 |
2146 | 211 | if ver == 10: | ||
2147 | 212 | store.execute('''CREATE TABLE lane ( | ||
2148 | 213 | name VARCHAR(200) NOT NULL, | ||
2149 | 214 | lane_id NOT NULL, | ||
2150 | 215 | cards REFERENCES card(card_id) | ||
2151 | 216 | )''') | ||
2152 | 180 | 217 | ||
2153 | 218 | store.execute('''CREATE TABLE card ( | ||
2154 | 219 | name VARCHAR(200) NOT NULL, | ||
2155 | 220 | card_id NOT NULL, | ||
2156 | 221 | status VARCHAR(50), | ||
2157 | 222 | lane_id REFERENCES lane(lane_id) | ||
2158 | 223 | )''') | ||
2159 | 224 | store.execute('UPDATE version SET db_layout_ref = 11') | ||
2160 | 225 | ver = 11 | ||
2161 | 226 | if ver == 11: | ||
2162 | 227 | store.execute('ALTER TABLE card ADD COLUMN roadmap_id VARCHAR(50)') | ||
2163 | 228 | store.execute('UPDATE version SET db_layout_ref = 12') | ||
2164 | 229 | ver = 12 | ||
2165 | 230 | if ver == 12: | ||
2166 | 231 | store.execute('ALTER TABLE card ADD COLUMN team VARCHAR(50)') | ||
2167 | 232 | store.execute('ALTER TABLE card ADD COLUMN priority VARCHAR(50)') | ||
2168 | 233 | store.execute('ALTER TABLE card ADD COLUMN size VARCHAR(50)') | ||
2169 | 234 | store.execute('ALTER TABLE card ADD COLUMN sponsor VARCHAR(50)') | ||
2170 | 235 | store.execute('ALTER TABLE card ADD COLUMN contact VARCHAR(50)') | ||
2171 | 236 | store.execute('ALTER TABLE card ADD COLUMN description BLOB') | ||
2172 | 237 | store.execute('ALTER TABLE card ADD COLUMN acceptance_criteria BLOB') | ||
2173 | 238 | store.execute('ALTER TABLE lane ADD COLUMN is_current BOOLEAN') | ||
2174 | 239 | store.execute('UPDATE version SET db_layout_ref = 13') | ||
2175 | 240 | if ver == 13: | ||
2176 | 241 | store.execute('ALTER TABLE card ADD COLUMN url VARCHAR(200)') | ||
2177 | 242 | store.execute('ALTER TABLE card ADD COLUMN is_healthy BOOLEAN') | ||
2178 | 243 | store.execute('UPDATE version SET db_layout_ref = 14') | ||
2179 | 244 | if ver == 14: | ||
2180 | 245 | store.execute('''CREATE TABLE spec_daily_count_per_state ( | ||
2181 | 246 | status VARCHAR(5000) NOT NULL, | ||
2182 | 247 | day DATE NOT NULL, | ||
2183 | 248 | lane_id REFERENCES lane(lane_id), | ||
2184 | 249 | count INT NOT NULL | ||
2185 | 250 | )''') | ||
2186 | 251 | store.execute('UPDATE version SET db_layout_ref = 15') | ||
2187 | 181 | 252 | ||
2188 | 182 | def get_store(dbpath): | 253 | def get_store(dbpath): |
2189 | 183 | '''Open/initialize database. | 254 | '''Open/initialize database. |
2190 | @@ -205,5 +276,6 @@ | |||
2191 | 205 | store.execute('''CREATE INDEX work_items_date_idx ON work_items (date)''') | 276 | store.execute('''CREATE INDEX work_items_date_idx ON work_items (date)''') |
2192 | 206 | store.execute('''CREATE INDEX work_items_status_idx ON work_items (status)''') | 277 | store.execute('''CREATE INDEX work_items_status_idx ON work_items (status)''') |
2193 | 207 | 278 | ||
2194 | 279 | |||
2195 | 208 | def create_v6_indexes(store): | 280 | def create_v6_indexes(store): |
2196 | 209 | store.execute('''CREATE INDEX work_items_assignee_milestone_idx on work_items(assignee,milestone)''') | 281 | store.execute('''CREATE INDEX work_items_assignee_milestone_idx on work_items(assignee,milestone)''') |
2197 | 210 | 282 | ||
2198 | === modified file 'lpworkitems/error_collector.py' | |||
2199 | --- lpworkitems/error_collector.py 2011-06-08 19:30:24 +0000 | |||
2200 | +++ lpworkitems/error_collector.py 2012-10-09 09:20:30 +0000 | |||
2201 | @@ -48,6 +48,10 @@ | |||
2202 | 48 | """Get the name of the blueprint, or None if not a blueprint.""" | 48 | """Get the name of the blueprint, or None if not a blueprint.""" |
2203 | 49 | return None | 49 | return None |
2204 | 50 | 50 | ||
2205 | 51 | def get_project_name(self): | ||
2206 | 52 | """Get the name of the project, or None if not a project.""" | ||
2207 | 53 | return None | ||
2208 | 54 | |||
2209 | 51 | def format_for_display(self): | 55 | def format_for_display(self): |
2210 | 52 | """Produce a string representation of the Error. | 56 | """Produce a string representation of the Error. |
2211 | 53 | 57 | ||
2212 | @@ -84,6 +88,9 @@ | |||
2213 | 84 | def get_blueprint_name(self): | 88 | def get_blueprint_name(self): |
2214 | 85 | return self.blueprint.name | 89 | return self.blueprint.name |
2215 | 86 | 90 | ||
2216 | 91 | def get_project_name(self): | ||
2217 | 92 | return self.blueprint.url.split('/')[-3] | ||
2218 | 93 | |||
2219 | 87 | 94 | ||
2220 | 88 | class BlueprintURLError(Error): | 95 | class BlueprintURLError(Error): |
2221 | 89 | """A deprecated class for backwards-compatibility. | 96 | """A deprecated class for backwards-compatibility. |
2222 | @@ -101,6 +108,9 @@ | |||
2223 | 101 | def get_blueprint_name(self): | 108 | def get_blueprint_name(self): |
2224 | 102 | return self.blueprint_url.split('/')[-1] | 109 | return self.blueprint_url.split('/')[-1] |
2225 | 103 | 110 | ||
2226 | 111 | def get_project_name(self): | ||
2227 | 112 | return self.blueprint_url.split('/')[-3] | ||
2228 | 113 | |||
2229 | 104 | 114 | ||
2230 | 105 | class MilestoneError(Error): | 115 | class MilestoneError(Error): |
2231 | 106 | 116 | ||
2232 | 107 | 117 | ||
2233 | === modified file 'lpworkitems/factory.py' | |||
2234 | --- lpworkitems/factory.py 2011-06-14 22:00:21 +0000 | |||
2235 | +++ lpworkitems/factory.py 2012-10-09 09:20:30 +0000 | |||
2236 | @@ -11,6 +11,7 @@ | |||
2237 | 11 | TeamStructure, | 11 | TeamStructure, |
2238 | 12 | Workitem, | 12 | Workitem, |
2239 | 13 | ) | 13 | ) |
2240 | 14 | from lpworkitems.models_roadmap import BlueprintDailyCountPerState, Card | ||
2241 | 14 | 15 | ||
2242 | 15 | 16 | ||
2243 | 16 | class Factory(object): | 17 | class Factory(object): |
2244 | @@ -63,6 +64,8 @@ | |||
2245 | 63 | url = self.getUniqueUnicode(prefix=name+"_url") | 64 | url = self.getUniqueUnicode(prefix=name+"_url") |
2246 | 64 | if status is None: | 65 | if status is None: |
2247 | 65 | status = self.getUniqueUnicode(prefix=name+"_status") | 66 | status = self.getUniqueUnicode(prefix=name+"_status") |
2248 | 67 | if implementation is None: | ||
2249 | 68 | implementation = u'Unknown' | ||
2250 | 66 | blueprint.name = name | 69 | blueprint.name = name |
2251 | 67 | blueprint.url = url | 70 | blueprint.url = url |
2252 | 68 | blueprint.status = status | 71 | blueprint.status = status |
2253 | @@ -109,8 +112,11 @@ | |||
2254 | 109 | self.store.add(workitem) | 112 | self.store.add(workitem) |
2255 | 110 | return workitem | 113 | return workitem |
2256 | 111 | 114 | ||
2258 | 112 | def make_meta(self, store=True): | 115 | def make_meta(self, key=None, value=None, blueprint=None, store=True): |
2259 | 113 | meta = Meta() | 116 | meta = Meta() |
2260 | 117 | meta.key = key | ||
2261 | 118 | meta.value = value | ||
2262 | 119 | meta.blueprint = blueprint | ||
2263 | 114 | if store: | 120 | if store: |
2264 | 115 | self.store.add(meta) | 121 | self.store.add(meta) |
2265 | 116 | return meta | 122 | return meta |
2266 | @@ -155,3 +161,28 @@ | |||
2267 | 155 | if store: | 161 | if store: |
2268 | 156 | self.store.add(person) | 162 | self.store.add(person) |
2269 | 157 | return person | 163 | return person |
2270 | 164 | |||
2271 | 165 | def make_blueprint_daily_count_per_state(self, status=None, count=1, | ||
2272 | 166 | day=None, store=True): | ||
2273 | 167 | if status is None: | ||
2274 | 168 | status = self.getUniqueUnicode() | ||
2275 | 169 | if day is None: | ||
2276 | 170 | day = datetime.date.today() | ||
2277 | 171 | obj = BlueprintDailyCountPerState() | ||
2278 | 172 | obj.day = day | ||
2279 | 173 | obj.status = status | ||
2280 | 174 | obj.count = count | ||
2281 | 175 | obj.lane_id = 1 | ||
2282 | 176 | if store: | ||
2283 | 177 | self.store.add(obj) | ||
2284 | 178 | return obj | ||
2285 | 179 | |||
2286 | 180 | def make_card(self, store=True): | ||
2287 | 181 | name = self.getUniqueUnicode() | ||
2288 | 182 | card_id = self.getUniqueInteger() | ||
2289 | 183 | lane_id = self.getUniqueInteger() | ||
2290 | 184 | roadmap_id = self.getUniqueUnicode() | ||
2291 | 185 | card = Card(name, card_id, lane_id, roadmap_id) | ||
2292 | 186 | if store: | ||
2293 | 187 | self.store.add(card) | ||
2294 | 188 | return card | ||
2295 | 158 | 189 | ||
2296 | === modified file 'lpworkitems/models.py' | |||
2297 | --- lpworkitems/models.py 2011-12-06 15:20:43 +0000 | |||
2298 | +++ lpworkitems/models.py 2012-10-09 09:20:30 +0000 | |||
2299 | @@ -1,15 +1,20 @@ | |||
2300 | 1 | import datetime | 1 | import datetime |
2301 | 2 | import re | 2 | import re |
2312 | 3 | 3 | from utils import unicode_or_None | |
2313 | 4 | from storm.locals import Date, Reference, ReferenceSet, Unicode | 4 | |
2314 | 5 | 5 | from storm.locals import Date, Int, Reference, ReferenceSet, Unicode | |
2315 | 6 | 6 | ||
2316 | 7 | def unicode_or_None(attr): | 7 | ROADMAP_STATUSES_MAP = { |
2317 | 8 | if attr is None: | 8 | u'Completed': [u'Implemented'], |
2318 | 9 | return attr | 9 | u'Blocked': [u'Needs Infrastructure', u'Blocked', u'Deferred'], |
2319 | 10 | if isinstance(attr, unicode): | 10 | u'In Progress': [u'Deployment', u'Needs Code Review', |
2320 | 11 | return attr | 11 | u'Beta Available', u'Good progress', |
2321 | 12 | return attr.decode("utf-8") | 12 | u'Slow progress', u'Started'], |
2322 | 13 | u'Planned': [u'Unknown', u'Not started', u'Informational']} | ||
2323 | 14 | |||
2324 | 15 | ROADMAP_ORDERED_STATUSES = ['Completed', 'In Progress', 'Blocked', 'Planned'] | ||
2325 | 16 | assert set(ROADMAP_ORDERED_STATUSES) == set(ROADMAP_STATUSES_MAP.keys()), ( | ||
2326 | 17 | 'The roadmap statuses are incorrect: %s' % ROADMAP_ORDERED_STATUSES) | ||
2327 | 13 | 18 | ||
2328 | 14 | 19 | ||
2329 | 15 | def fill_blueprint_info_from_launchpad(model_bp, lp_bp): | 20 | def fill_blueprint_info_from_launchpad(model_bp, lp_bp): |
2330 | @@ -55,6 +60,14 @@ | |||
2331 | 55 | project = Unicode() | 60 | project = Unicode() |
2332 | 56 | 61 | ||
2333 | 57 | 62 | ||
2334 | 63 | def get_roadmap_status_for_bp_implementation_status(implementation): | ||
2335 | 64 | for key in ROADMAP_STATUSES_MAP: | ||
2336 | 65 | if implementation in ROADMAP_STATUSES_MAP[key]: | ||
2337 | 66 | return key | ||
2338 | 67 | # XXX: Is None the appropriate return value here? | ||
2339 | 68 | return None | ||
2340 | 69 | |||
2341 | 70 | |||
2342 | 58 | class Blueprint(object): | 71 | class Blueprint(object): |
2343 | 59 | 72 | ||
2344 | 60 | __storm_table__ = "specs" | 73 | __storm_table__ = "specs" |
2345 | @@ -88,6 +101,15 @@ | |||
2346 | 88 | lp_bp.whiteboard, "Roadmap\s+Notes") | 101 | lp_bp.whiteboard, "Roadmap\s+Notes") |
2347 | 89 | return model_bp | 102 | return model_bp |
2348 | 90 | 103 | ||
2349 | 104 | @property | ||
2350 | 105 | def roadmap_status(self): | ||
2351 | 106 | return get_roadmap_status_for_bp_implementation_status( | ||
2352 | 107 | self.implementation) | ||
2353 | 108 | |||
2354 | 109 | |||
2355 | 110 | def current_date(): | ||
2356 | 111 | return datetime.date.today() | ||
2357 | 112 | |||
2358 | 91 | 113 | ||
2359 | 92 | class Person(object): | 114 | class Person(object): |
2360 | 93 | 115 | ||
2361 | @@ -115,10 +137,6 @@ | |||
2362 | 115 | superteam_name = Unicode(name="team") | 137 | superteam_name = Unicode(name="team") |
2363 | 116 | 138 | ||
2364 | 117 | 139 | ||
2365 | 118 | def current_date(): | ||
2366 | 119 | return datetime.date.today() | ||
2367 | 120 | |||
2368 | 121 | |||
2369 | 122 | class Meta(object): | 140 | class Meta(object): |
2370 | 123 | 141 | ||
2371 | 124 | __storm_table__ = "meta" | 142 | __storm_table__ = "meta" |
2372 | 125 | 143 | ||
2373 | === added file 'lpworkitems/models_roadmap.py' | |||
2374 | --- lpworkitems/models_roadmap.py 1970-01-01 00:00:00 +0000 | |||
2375 | +++ lpworkitems/models_roadmap.py 2012-10-09 09:20:30 +0000 | |||
2376 | @@ -0,0 +1,60 @@ | |||
2377 | 1 | import datetime | ||
2378 | 2 | import re | ||
2379 | 3 | from utils import unicode_or_None | ||
2380 | 4 | |||
2381 | 5 | from storm.locals import Date, Reference, ReferenceSet, Unicode, Int, Bool | ||
2382 | 6 | |||
2383 | 7 | from lpworkitems import models | ||
2384 | 8 | |||
2385 | 9 | |||
2386 | 10 | class Card(object): | ||
2387 | 11 | |||
2388 | 12 | __storm_table__ = "card" | ||
2389 | 13 | |||
2390 | 14 | name = Unicode() | ||
2391 | 15 | status = Unicode() | ||
2392 | 16 | card_id = Int(primary=True) | ||
2393 | 17 | lane_id = Int() | ||
2394 | 18 | roadmap_id = Unicode() | ||
2395 | 19 | team = Unicode() | ||
2396 | 20 | priority = Unicode() | ||
2397 | 21 | size = Unicode() | ||
2398 | 22 | sponsor = Unicode() | ||
2399 | 23 | contact = Unicode() | ||
2400 | 24 | description = Unicode() | ||
2401 | 25 | acceptance_criteria = Unicode() | ||
2402 | 26 | url = Unicode() | ||
2403 | 27 | is_healthy = Bool() | ||
2404 | 28 | |||
2405 | 29 | def __init__(self, name, card_id, lane_id, roadmap_id): | ||
2406 | 30 | self.lane_id = lane_id | ||
2407 | 31 | self.card_id = card_id | ||
2408 | 32 | self.name = name | ||
2409 | 33 | self.roadmap_id = roadmap_id | ||
2410 | 34 | |||
2411 | 35 | |||
2412 | 36 | class Lane(object): | ||
2413 | 37 | |||
2414 | 38 | __storm_table__ = "lane" | ||
2415 | 39 | |||
2416 | 40 | name = Unicode() | ||
2417 | 41 | lane_id = Int(primary=True) | ||
2418 | 42 | is_current = Bool() | ||
2419 | 43 | cards = ReferenceSet(lane_id, Card.lane_id) | ||
2420 | 44 | |||
2421 | 45 | def __init__(self, name, lane_id): | ||
2422 | 46 | self.lane_id = lane_id | ||
2423 | 47 | self.name = name | ||
2424 | 48 | |||
2425 | 49 | |||
2426 | 50 | def current_date(): | ||
2427 | 51 | return datetime.date.today() | ||
2428 | 52 | |||
2429 | 53 | |||
2430 | 54 | class BlueprintDailyCountPerState(object): | ||
2431 | 55 | __storm_table__ = 'spec_daily_count_per_state' | ||
2432 | 56 | __storm_primary__ = 'status', 'day' | ||
2433 | 57 | day = Date(default_factory=current_date) | ||
2434 | 58 | status = Unicode() | ||
2435 | 59 | lane_id = Int() | ||
2436 | 60 | count = Int() | ||
2437 | 0 | 61 | ||
2438 | === modified file 'lpworkitems/tests/test_collect.py' | |||
2439 | --- lpworkitems/tests/test_collect.py 2011-06-14 22:00:21 +0000 | |||
2440 | +++ lpworkitems/tests/test_collect.py 2012-10-09 09:20:30 +0000 | |||
2441 | @@ -184,7 +184,6 @@ | |||
2442 | 184 | self.store.find( | 184 | self.store.find( |
2443 | 185 | Milestone, Milestone.name == name).one()) | 185 | Milestone, Milestone.name == name).one()) |
2444 | 186 | 186 | ||
2445 | 187 | |||
2446 | 188 | def test_store_blueprint_stores_blueprint(self): | 187 | def test_store_blueprint_stores_blueprint(self): |
2447 | 189 | blueprint = self.factory.make_blueprint(store=False) | 188 | blueprint = self.factory.make_blueprint(store=False) |
2448 | 190 | ret = self.collector.store_blueprint(blueprint) | 189 | ret = self.collector.store_blueprint(blueprint) |
2449 | 191 | 190 | ||
2450 | === added file 'lpworkitems/tests/test_collect_roadmap.py' | |||
2451 | --- lpworkitems/tests/test_collect_roadmap.py 1970-01-01 00:00:00 +0000 | |||
2452 | +++ lpworkitems/tests/test_collect_roadmap.py 2012-10-09 09:20:30 +0000 | |||
2453 | @@ -0,0 +1,69 @@ | |||
2454 | 1 | import datetime | ||
2455 | 2 | |||
2456 | 3 | from lpworkitems.collect_roadmap import ( | ||
2457 | 4 | CollectorStore, | ||
2458 | 5 | get_json_item, | ||
2459 | 6 | ) | ||
2460 | 7 | from lpworkitems.models_roadmap import BlueprintDailyCountPerState | ||
2461 | 8 | from lpworkitems.error_collector import ( | ||
2462 | 9 | ErrorCollector, | ||
2463 | 10 | ) | ||
2464 | 11 | from lpworkitems.testing import TestCaseWithFakeLaunchpad | ||
2465 | 12 | |||
2466 | 13 | |||
2467 | 14 | class CollectorTests(TestCaseWithFakeLaunchpad): | ||
2468 | 15 | |||
2469 | 16 | def setUp(self): | ||
2470 | 17 | super(CollectorTests, self).setUp() | ||
2471 | 18 | self.error_collector = ErrorCollector() | ||
2472 | 19 | self.collector = CollectorStore( | ||
2473 | 20 | self.store, self.lp, self.error_collector) | ||
2474 | 21 | |||
2475 | 22 | def assertClears(self, cls, fn): | ||
2476 | 23 | self.assertTrue(self.store.find(cls).count() > 0) | ||
2477 | 24 | fn() | ||
2478 | 25 | self.assertEqual(0, self.store.find(cls).count()) | ||
2479 | 26 | |||
2480 | 27 | def test_clear_todays_blueprint_daily_count_per_state(self): | ||
2481 | 28 | self.factory.make_blueprint_daily_count_per_state( | ||
2482 | 29 | day=datetime.date.today()) | ||
2483 | 30 | self.assertClears( | ||
2484 | 31 | BlueprintDailyCountPerState, | ||
2485 | 32 | self.collector.clear_todays_blueprint_daily_count_per_state) | ||
2486 | 33 | |||
2487 | 34 | def test_store_roadmap_bp_count_per_state(self): | ||
2488 | 35 | bp = self.factory.make_blueprint() | ||
2489 | 36 | card = self.factory.make_card() | ||
2490 | 37 | meta = self.factory.make_meta( | ||
2491 | 38 | key=u'Roadmap id', value=card.roadmap_id, blueprint=bp) | ||
2492 | 39 | self.collector.store_roadmap_bp_count_per_state() | ||
2493 | 40 | self.assertEqual( | ||
2494 | 41 | 1, self.store.find(BlueprintDailyCountPerState).count()) | ||
2495 | 42 | entry = self.store.find(BlueprintDailyCountPerState).one() | ||
2496 | 43 | self.assertEqual(1, entry.count) | ||
2497 | 44 | self.assertEqual(card.lane_id, entry.lane_id) | ||
2498 | 45 | self.assertEqual(bp.implementation, entry.status) | ||
2499 | 46 | |||
2500 | 47 | # XXX Add tests for the roadmap classes. | ||
2501 | 48 | |||
2502 | 49 | |||
2503 | 50 | class RoadmapUtilsTests(TestCaseWithFakeLaunchpad): | ||
2504 | 51 | |||
2505 | 52 | def setUp(self): | ||
2506 | 53 | super(RoadmapUtilsTests, self).setUp() | ||
2507 | 54 | self.json_data = {"data": "Text", | ||
2508 | 55 | "whitespace": " Text ", | ||
2509 | 56 | "none": None | ||
2510 | 57 | } | ||
2511 | 58 | |||
2512 | 59 | def test_get_json_data_unicode(self): | ||
2513 | 60 | item = get_json_item(self.json_data, 'data') | ||
2514 | 61 | self.assertEquals(item, u'Text') | ||
2515 | 62 | |||
2516 | 63 | def test_get_json_data_whitespace(self): | ||
2517 | 64 | item = get_json_item(self.json_data, 'whitespace') | ||
2518 | 65 | self.assertEquals(item, u'Text') | ||
2519 | 66 | |||
2520 | 67 | def test_get_json_data_none(self): | ||
2521 | 68 | item = get_json_item(self.json_data, 'none') | ||
2522 | 69 | self.assertEquals(item, None) | ||
2523 | 0 | 70 | ||
2524 | === modified file 'lpworkitems/tests/test_factory.py' | |||
2525 | --- lpworkitems/tests/test_factory.py 2011-06-04 18:48:23 +0000 | |||
2526 | +++ lpworkitems/tests/test_factory.py 2012-10-09 09:20:30 +0000 | |||
2527 | @@ -162,7 +162,7 @@ | |||
2528 | 162 | implementation = u"Implemented" | 162 | implementation = u"Implemented" |
2529 | 163 | self.assert_with_and_without( | 163 | self.assert_with_and_without( |
2530 | 164 | self.factory.make_blueprint, "implementation", implementation, | 164 | self.factory.make_blueprint, "implementation", implementation, |
2532 | 165 | Equals(None)) | 165 | Equals("Unknown")) |
2533 | 166 | 166 | ||
2534 | 167 | def test_uses_assignee_name(self): | 167 | def test_uses_assignee_name(self): |
2535 | 168 | assignee_name = self.factory.getUniqueUnicode( | 168 | assignee_name = self.factory.getUniqueUnicode( |
2536 | 169 | 169 | ||
2537 | === modified file 'lpworkitems/tests/test_models.py' | |||
2538 | --- lpworkitems/tests/test_models.py 2011-12-06 15:20:43 +0000 | |||
2539 | +++ lpworkitems/tests/test_models.py 2012-10-09 09:20:30 +0000 | |||
2540 | @@ -6,8 +6,11 @@ | |||
2541 | 6 | extract_last_path_segment_from_url, | 6 | extract_last_path_segment_from_url, |
2542 | 7 | extract_user_name_from_url, | 7 | extract_user_name_from_url, |
2543 | 8 | get_whiteboard_section, | 8 | get_whiteboard_section, |
2546 | 9 | ) | 9 | ROADMAP_STATUSES_MAP, |
2547 | 10 | from lpworkitems.testing import TestCaseWithFakeLaunchpad | 10 | ) |
2548 | 11 | from lpworkitems.testing import ( | ||
2549 | 12 | TestCaseWithFakeLaunchpad, | ||
2550 | 13 | ) | ||
2551 | 11 | 14 | ||
2552 | 12 | 15 | ||
2553 | 13 | class GetWhiteboardSectionTests(TestCase): | 16 | class GetWhiteboardSectionTests(TestCase): |
2554 | @@ -42,6 +45,18 @@ | |||
2555 | 42 | 45 | ||
2556 | 43 | class BlueprintTests(TestCaseWithFakeLaunchpad): | 46 | class BlueprintTests(TestCaseWithFakeLaunchpad): |
2557 | 44 | 47 | ||
2558 | 48 | def test_roadmap_status(self): | ||
2559 | 49 | roadmap_status = "Completed" | ||
2560 | 50 | bp_implementation = ROADMAP_STATUSES_MAP[roadmap_status][0] | ||
2561 | 51 | bp_status = self.factory.make_blueprint( | ||
2562 | 52 | implementation=bp_implementation) | ||
2563 | 53 | self.assertEqual(roadmap_status, bp_status.roadmap_status) | ||
2564 | 54 | |||
2565 | 55 | def test_roadmap_status_unknown_status(self): | ||
2566 | 56 | blueprint = self.factory.make_blueprint( | ||
2567 | 57 | implementation=u"Not Expected") | ||
2568 | 58 | self.assertEqual(None, blueprint.roadmap_status) | ||
2569 | 59 | |||
2570 | 45 | def test_from_launchpad_sets_name(self): | 60 | def test_from_launchpad_sets_name(self): |
2571 | 46 | name = self.factory.getUniqueUnicode(prefix="lpblueprint") | 61 | name = self.factory.getUniqueUnicode(prefix="lpblueprint") |
2572 | 47 | lp_bp = self.lp.make_blueprint(name=name) | 62 | lp_bp = self.lp.make_blueprint(name=name) |
2573 | 48 | 63 | ||
2574 | === modified file 'report_tools.py' | |||
2575 | --- report_tools.py 2012-07-17 06:00:48 +0000 | |||
2576 | +++ report_tools.py 2012-10-09 09:20:30 +0000 | |||
2577 | @@ -3,12 +3,25 @@ | |||
2578 | 3 | # Tools for generating reports | 3 | # Tools for generating reports |
2579 | 4 | 4 | ||
2580 | 5 | import datetime | 5 | import datetime |
2582 | 6 | import urllib, sys, os.path, re | 6 | <<<<<<< TREE |
2583 | 7 | import urllib, sys, os.path, re | ||
2584 | 8 | ======= | ||
2585 | 9 | import urllib, sys, os.path, re | ||
2586 | 10 | from storm.locals import create_database, Store | ||
2587 | 11 | >>>>>>> MERGE-SOURCE | ||
2588 | 7 | from subprocess import Popen | 12 | from subprocess import Popen |
2589 | 8 | from cgi import escape | 13 | from cgi import escape |
2590 | 9 | from lpworkitems import database | 14 | from lpworkitems import database |
2591 | 10 | import errno | 15 | import errno |
2592 | 11 | import fcntl | 16 | import fcntl |
2593 | 17 | from lpworkitems.models_roadmap import ( | ||
2594 | 18 | Lane, | ||
2595 | 19 | Card, | ||
2596 | 20 | ) | ||
2597 | 21 | from lpworkitems.models import ( | ||
2598 | 22 | Meta, ROADMAP_STATUSES_MAP, | ||
2599 | 23 | get_roadmap_status_for_bp_implementation_status, | ||
2600 | 24 | ) | ||
2601 | 12 | 25 | ||
2602 | 13 | valid_states = [u'todo', u'blocked', u'inprogress', u'done', u'postponed'] | 26 | valid_states = [u'todo', u'blocked', u'inprogress', u'done', u'postponed'] |
2603 | 14 | state_labels = [u'Todo', u'Blocked', u'In Progress', u'Done', u'Postponed'] | 27 | state_labels = [u'Todo', u'Blocked', u'In Progress', u'Done', u'Postponed'] |
2604 | @@ -180,6 +193,52 @@ | |||
2605 | 180 | fh.close() | 193 | fh.close() |
2606 | 181 | 194 | ||
2607 | 182 | 195 | ||
2608 | 196 | def roadmap_pages(my_path, database, basename, config, lane, root=None): | ||
2609 | 197 | cfg = load_config(config) | ||
2610 | 198 | fh = open(basename + '.html', 'w') | ||
2611 | 199 | chart_path, _ = os.path.split(basename) | ||
2612 | 200 | chart_name = os.path.join(chart_path, 'current_quarter.svg') | ||
2613 | 201 | try: | ||
2614 | 202 | args = [os.path.join(my_path, 'html-report'), '-d', database] | ||
2615 | 203 | args += ['--report-type', 'roadmap_page'] | ||
2616 | 204 | args += ['--lane', lane.name] | ||
2617 | 205 | if root: | ||
2618 | 206 | args += ['--root', root] | ||
2619 | 207 | if lane.is_current: | ||
2620 | 208 | args += ['--chart', chart_name] | ||
2621 | 209 | report_args(args, theme=get_theme(cfg)) | ||
2622 | 210 | proc = Popen(args, stdout=fh) | ||
2623 | 211 | print basename + '.html' | ||
2624 | 212 | proc.wait() | ||
2625 | 213 | finally: | ||
2626 | 214 | fh.close() | ||
2627 | 215 | |||
2628 | 216 | if lane.is_current: | ||
2629 | 217 | args = [os.path.join(my_path, 'roadmap-bp-chart'), '-d', database, | ||
2630 | 218 | '-o', chart_name] | ||
2631 | 219 | args += ['--inverted'] | ||
2632 | 220 | proc = Popen(args) | ||
2633 | 221 | print chart_name | ||
2634 | 222 | proc.wait() | ||
2635 | 223 | |||
2636 | 224 | |||
2637 | 225 | def roadmap_cards(my_path, database, basename, config, card, root=None): | ||
2638 | 226 | cfg = load_config(config) | ||
2639 | 227 | fh = open(basename + '.html', 'w') | ||
2640 | 228 | try: | ||
2641 | 229 | args = [os.path.join(my_path, 'html-report'), '-d', database] | ||
2642 | 230 | args += ['--report-type', 'roadmap_card'] | ||
2643 | 231 | args += ['--card', '%s' % card.card_id] | ||
2644 | 232 | if root: | ||
2645 | 233 | args += ['--root', root] | ||
2646 | 234 | report_args(args, theme=get_theme(cfg)) | ||
2647 | 235 | proc = Popen(args, stdout=fh) | ||
2648 | 236 | print basename + '.html' | ||
2649 | 237 | proc.wait() | ||
2650 | 238 | finally: | ||
2651 | 239 | fh.close() | ||
2652 | 240 | |||
2653 | 241 | |||
2654 | 183 | def run_reports(my_path, database, basename, config, milestone=None, team=None, | 242 | def run_reports(my_path, database, basename, config, milestone=None, team=None, |
2655 | 184 | user=None, trend_starts=None, trend_override=None, burnup=False, root=None, date=None): | 243 | user=None, trend_starts=None, trend_override=None, burnup=False, root=None, date=None): |
2656 | 185 | 244 | ||
2657 | @@ -281,6 +340,34 @@ | |||
2658 | 281 | return escape(html, True) | 340 | return escape(html, True) |
2659 | 282 | 341 | ||
2660 | 283 | 342 | ||
2661 | 343 | def blueprints_over_time(store): | ||
2662 | 344 | '''Calculate blueprint development over time for the current lane. | ||
2663 | 345 | |||
2664 | 346 | We do not need to care about teams or groups since this is intended for the | ||
2665 | 347 | roadmap overview. | ||
2666 | 348 | |||
2667 | 349 | Return date -> state -> count mapping. states are | ||
2668 | 350 | {planned,inprogress,completed,blocked}. | ||
2669 | 351 | ''' | ||
2670 | 352 | data = {} | ||
2671 | 353 | result = store.execute(""" | ||
2672 | 354 | SELECT status, day, count | ||
2673 | 355 | FROM spec_daily_count_per_state | ||
2674 | 356 | JOIN lane on lane.lane_id = spec_daily_count_per_state.lane_id | ||
2675 | 357 | WHERE lane.is_current = 1 | ||
2676 | 358 | """) | ||
2677 | 359 | for status, day, count in result: | ||
2678 | 360 | roadmap_status = get_roadmap_status_for_bp_implementation_status( | ||
2679 | 361 | status) | ||
2680 | 362 | assert roadmap_status is not None | ||
2681 | 363 | if day not in data: | ||
2682 | 364 | data[day] = {} | ||
2683 | 365 | if roadmap_status not in data[day]: | ||
2684 | 366 | data[day][roadmap_status] = 0 | ||
2685 | 367 | data[day][roadmap_status] += count | ||
2686 | 368 | return data | ||
2687 | 369 | |||
2688 | 370 | |||
2689 | 284 | def workitems_over_time(store, team=None, group=None, milestone_collection=None): | 371 | def workitems_over_time(store, team=None, group=None, milestone_collection=None): |
2690 | 285 | '''Calculate work item development over time. | 372 | '''Calculate work item development over time. |
2691 | 286 | 373 | ||
2692 | @@ -876,6 +963,80 @@ | |||
2693 | 876 | return rv | 963 | return rv |
2694 | 877 | 964 | ||
2695 | 878 | 965 | ||
2696 | 966 | def lanes(store): | ||
2697 | 967 | return store.find(Lane) | ||
2698 | 968 | |||
2699 | 969 | |||
2700 | 970 | def lane(store, name, id=None): | ||
2701 | 971 | if id is None: | ||
2702 | 972 | return store.find(Lane, Lane.name == unicode(name)).one() | ||
2703 | 973 | else: | ||
2704 | 974 | return store.find(Lane, Lane.lane_id == id).one() | ||
2705 | 975 | |||
2706 | 976 | |||
2707 | 977 | def current_lane(store): | ||
2708 | 978 | return store.find(Lane, Lane.is_current).one() | ||
2709 | 979 | |||
2710 | 980 | |||
2711 | 981 | def lane_cards(store, lane): | ||
2712 | 982 | return lane.cards | ||
2713 | 983 | |||
2714 | 984 | |||
2715 | 985 | def statuses(store, lane): | ||
2716 | 986 | result = [] | ||
2717 | 987 | for status in store.find(Card.status, | ||
2718 | 988 | Card.lane_id == lane.lane_id).config(distinct=True): | ||
2719 | 989 | result.append((status, store.find(Card, | ||
2720 | 990 | Card.lane_id == lane.lane_id, | ||
2721 | 991 | Card.status == status))) | ||
2722 | 992 | return result | ||
2723 | 993 | |||
2724 | 994 | |||
2725 | 995 | def cards(store): | ||
2726 | 996 | return store.find(Card) | ||
2727 | 997 | |||
2728 | 998 | |||
2729 | 999 | def card(store, card_id): | ||
2730 | 1000 | return store.find(Card, Card.card_id == card_id) | ||
2731 | 1001 | |||
2732 | 1002 | |||
2733 | 1003 | def card_blueprints(store, roadmap_id): | ||
2734 | 1004 | metas = store.find(Meta, | ||
2735 | 1005 | Meta.key == u'Roadmap id', | ||
2736 | 1006 | Meta.value == roadmap_id) | ||
2737 | 1007 | return [meta.blueprint for meta in metas] | ||
2738 | 1008 | |||
2739 | 1009 | |||
2740 | 1010 | def card_blueprints_by_status(store, roadmap_id): | ||
2741 | 1011 | blueprints = card_blueprints(store, roadmap_id) | ||
2742 | 1012 | bp_by_status = {} | ||
2743 | 1013 | for key in ROADMAP_STATUSES_MAP: | ||
2744 | 1014 | bp_by_status[key] = [] | ||
2745 | 1015 | for bp in blueprints: | ||
2746 | 1016 | bp_by_status[bp.roadmap_status].append(bp) | ||
2747 | 1017 | return bp_by_status | ||
2748 | 1018 | |||
2749 | 1019 | |||
2750 | 1020 | def card_bp_status_counts(store, roadmap_id): | ||
2751 | 1021 | blueprints = card_blueprints(store, roadmap_id) | ||
2752 | 1022 | total_by_status = dict([(key, 0) for key in ROADMAP_STATUSES_MAP]) | ||
2753 | 1023 | for bp in blueprints: | ||
2754 | 1024 | total_by_status[bp.roadmap_status] += 1 | ||
2755 | 1025 | return total_by_status | ||
2756 | 1026 | |||
2757 | 1027 | |||
2758 | 1028 | def check_card_health(store, card_health_checks, card): | ||
2759 | 1029 | performed_checks = [] | ||
2760 | 1030 | card.is_healthy = True | ||
2761 | 1031 | for check in card_health_checks: | ||
2762 | 1032 | result = check.execute(card, store) | ||
2763 | 1033 | if result == check.NOT_OK: | ||
2764 | 1034 | card.is_healthy = False | ||
2765 | 1035 | performed_checks.append({'name': check.name, | ||
2766 | 1036 | 'result': result}) | ||
2767 | 1037 | return performed_checks | ||
2768 | 1038 | |||
2769 | 1039 | |||
2770 | 879 | def subteams(store, team): | 1040 | def subteams(store, team): |
2771 | 880 | result = store.execute('SELECT name from team_structure where team = ?', (unicode(team),)) | 1041 | result = store.execute('SELECT name from team_structure where team = ?', (unicode(team),)) |
2772 | 881 | return [i[0] for i in result] | 1042 | return [i[0] for i in result] |
2773 | 882 | 1043 | ||
2774 | === added file 'roadmap-bp-chart' | |||
2775 | --- roadmap-bp-chart 1970-01-01 00:00:00 +0000 | |||
2776 | +++ roadmap-bp-chart 2012-10-09 09:20:30 +0000 | |||
2777 | @@ -0,0 +1,249 @@ | |||
2778 | 1 | #!/usr/bin/python | ||
2779 | 2 | # | ||
2780 | 3 | # Create a blueprint tracking chart from a blueprint database. | ||
2781 | 4 | # | ||
2782 | 5 | # Copyright (C) 2010, 2011 Canonical Ltd. | ||
2783 | 6 | # License: GPL-3 | ||
2784 | 7 | |||
2785 | 8 | import optparse, datetime, sys | ||
2786 | 9 | import report_tools | ||
2787 | 10 | |||
2788 | 11 | from pychart import * | ||
2789 | 12 | |||
2790 | 13 | def date_to_ordinal(s): | ||
2791 | 14 | '''Turn yyyy-mm-dd strings to ordinals''' | ||
2792 | 15 | return report_tools.date_to_python(s).toordinal() | ||
2793 | 16 | |||
2794 | 17 | |||
2795 | 18 | def ordinal_to_date(ordinal): | ||
2796 | 19 | '''Turn an ordinal date into a string''' | ||
2797 | 20 | d = datetime.date.fromordinal(int(ordinal)) | ||
2798 | 21 | return d.strftime('%Y-%m-%d') | ||
2799 | 22 | |||
2800 | 23 | def format_date(ordinal): | ||
2801 | 24 | d = datetime.date.fromordinal(int(ordinal)) | ||
2802 | 25 | return '/a60{}' + d.strftime('%b %d, %y') | ||
2803 | 26 | |||
2804 | 27 | def do_chart(data, start_date, end_date, trend_start, title, filename, only_weekdays, inverted): | ||
2805 | 28 | #set up default values | ||
2806 | 29 | format = 'svg' | ||
2807 | 30 | height = 450 | ||
2808 | 31 | width = 1000 | ||
2809 | 32 | legend_x = 700 | ||
2810 | 33 | legend_y = 200 | ||
2811 | 34 | title_x = 300 | ||
2812 | 35 | title_y = 350 | ||
2813 | 36 | |||
2814 | 37 | if inverted: | ||
2815 | 38 | legend_x=200 | ||
2816 | 39 | |||
2817 | 40 | # Tell pychart to use colors | ||
2818 | 41 | theme.use_color = True | ||
2819 | 42 | theme.default_font_size = 12 | ||
2820 | 43 | theme.reinitialize() | ||
2821 | 44 | |||
2822 | 45 | # turn into pychart data model and calculate maximum number of WIs | ||
2823 | 46 | max_items = 1 # start at 1 to avoid zero div | ||
2824 | 47 | lastactive = 0 | ||
2825 | 48 | pcdata = [] | ||
2826 | 49 | |||
2827 | 50 | for date in xrange(date_to_ordinal(start_date), date_to_ordinal(end_date)+1): | ||
2828 | 51 | if (not only_weekdays or datetime.date.fromordinal(date).weekday() < 5): | ||
2829 | 52 | end_date = ordinal_to_date(date) | ||
2830 | 53 | i = data.get(ordinal_to_date(date), {}) | ||
2831 | 54 | count = i.get('Completed', 0) + i.get('Planned', 0) + i.get('Blocked', 0) + i.get('In Progress', 0) | ||
2832 | 55 | if max_items < count: | ||
2833 | 56 | max_items = count | ||
2834 | 57 | pcdata.append((date, i.get('Planned', 0),0, | ||
2835 | 58 | i.get('Blocked', 0),0, | ||
2836 | 59 | i.get('In Progress', 0),0, | ||
2837 | 60 | i.get('Completed',0),0, count)) | ||
2838 | 61 | if count > 0: | ||
2839 | 62 | lastactive = len(pcdata) - 1 | ||
2840 | 63 | |||
2841 | 64 | # add some extra space to look nicer | ||
2842 | 65 | max_items = int(max_items * 1.05) | ||
2843 | 66 | |||
2844 | 67 | x_interval = len(pcdata)/20 | ||
2845 | 68 | if max_items > 500: | ||
2846 | 69 | y_interval = max_items/200*10 | ||
2847 | 70 | elif max_items < 20: | ||
2848 | 71 | y_interval = 1 | ||
2849 | 72 | else: | ||
2850 | 73 | y_interval = max_items/20 | ||
2851 | 74 | |||
2852 | 75 | # create the chart object | ||
2853 | 76 | chart_object.set_defaults(area.T, size=(width, height), | ||
2854 | 77 | y_range=(0, None), x_coord=category_coord.T(pcdata, 0)) | ||
2855 | 78 | |||
2856 | 79 | # tell the chart object it will use a bar chart, and will | ||
2857 | 80 | # use the data list for its model | ||
2858 | 81 | chart_object.set_defaults(bar_plot.T, data=pcdata) | ||
2859 | 82 | |||
2860 | 83 | # create the chart area | ||
2861 | 84 | # tell it to start at coords 0,0 | ||
2862 | 85 | # tell it the labels, and the tics, etc.. | ||
2863 | 86 | # HACK: to prevent 0 div | ||
2864 | 87 | if max_items == 0: | ||
2865 | 88 | max_items = 1 | ||
2866 | 89 | ar = area.T(legend=legend.T(loc=(legend_x,legend_y)), loc=(0,0), | ||
2867 | 90 | x_axis=axis.X(label='Date', tic_interval=x_interval,format=format_date), | ||
2868 | 91 | y_axis=axis.Y(label='Blueprints', tic_interval=y_interval), | ||
2869 | 92 | y_range=(0, max_items)) | ||
2870 | 93 | |||
2871 | 94 | #initialize the bar_plot fill styles | ||
2872 | 95 | bar_plot.fill_styles.reset() | ||
2873 | 96 | |||
2874 | 97 | # create each set of data to plot | ||
2875 | 98 | # note that index zero is the label col | ||
2876 | 99 | # for each column of data, tell it what to use for the legend and | ||
2877 | 100 | # what color to make the bar, no lines, and | ||
2878 | 101 | # what plot to stack on | ||
2879 | 102 | |||
2880 | 103 | tlabel = '' | ||
2881 | 104 | |||
2882 | 105 | if inverted: | ||
2883 | 106 | plot1 = bar_plot.T(label='Completed' + tlabel, hcol=7) | ||
2884 | 107 | plot1.fill_style = fill_style.Plain(bgcolor=color.seagreen) | ||
2885 | 108 | |||
2886 | 109 | plot3 = bar_plot.T(label='In Progress' + tlabel, hcol=5, stack_on = plot1) | ||
2887 | 110 | plot3.fill_style = fill_style.Plain(bgcolor=color.gray65) | ||
2888 | 111 | |||
2889 | 112 | plot5 = bar_plot.T(label='Blocked' + tlabel, hcol=3, stack_on = plot3) | ||
2890 | 113 | plot5.fill_style = fill_style.Plain(bgcolor=color.red1) | ||
2891 | 114 | |||
2892 | 115 | plot7 = bar_plot.T(label='Planned' + tlabel, hcol=1, stack_on = plot5) | ||
2893 | 116 | plot7.fill_style = fill_style.Plain(bgcolor=color.darkorange1) | ||
2894 | 117 | else: | ||
2895 | 118 | plot1 = bar_plot.T(label='Planned' + tlabel, hcol=1) | ||
2896 | 119 | plot1.fill_style = fill_style.Plain(bgcolor=color.darkorange1) | ||
2897 | 120 | |||
2898 | 121 | plot3 = bar_plot.T(label='Blocked' + tlabel, hcol=3, stack_on = plot1) | ||
2899 | 122 | plot3.fill_style = fill_style.Plain(bgcolor=color.red1) | ||
2900 | 123 | |||
2901 | 124 | plot5 = bar_plot.T(label='In Progress' + tlabel, hcol=5, stack_on = plot3) | ||
2902 | 125 | plot5.fill_style = fill_style.Plain(bgcolor=color.gray65) | ||
2903 | 126 | |||
2904 | 127 | plot7 = bar_plot.T(label='Completed' + tlabel, hcol=7, stack_on = plot5) | ||
2905 | 128 | plot7.fill_style = fill_style.Plain(bgcolor=color.seagreen) | ||
2906 | 129 | |||
2907 | 130 | |||
2908 | 131 | plot1.line_style = None | ||
2909 | 132 | plot3.line_style = None | ||
2910 | 133 | plot5.line_style = None | ||
2911 | 134 | plot7.line_style = None | ||
2912 | 135 | |||
2913 | 136 | plot11 = bar_plot.T(label='total', hcol=9) | ||
2914 | 137 | plot11.fill_style = None | ||
2915 | 138 | plot11.line_style = line_style.gray30 | ||
2916 | 139 | |||
2917 | 140 | # create the canvas with the specified filename and file format | ||
2918 | 141 | can = canvas.init(filename,format) | ||
2919 | 142 | |||
2920 | 143 | # add the data to the area and draw it | ||
2921 | 144 | ar.add_plot(plot1, plot3, plot5, plot7) | ||
2922 | 145 | ar.draw() | ||
2923 | 146 | |||
2924 | 147 | # title | ||
2925 | 148 | tb = text_box.T(loc=(title_x, title_y), text=title, line_style=None) | ||
2926 | 149 | tb.fill_style = None | ||
2927 | 150 | tb.draw() | ||
2928 | 151 | |||
2929 | 152 | # | ||
2930 | 153 | # main | ||
2931 | 154 | # | ||
2932 | 155 | |||
2933 | 156 | # argv parsing | ||
2934 | 157 | optparser = optparse.OptionParser() | ||
2935 | 158 | optparser.add_option('-d', '--database', | ||
2936 | 159 | help='Path to database', dest='database', metavar='PATH') | ||
2937 | 160 | optparser.add_option('-t', '--team', | ||
2938 | 161 | help='Restrict report to a particular team', dest='team') | ||
2939 | 162 | optparser.add_option('-m', '--milestone', | ||
2940 | 163 | help='Restrict report to a particular milestone', dest='milestone') | ||
2941 | 164 | optparser.add_option('-o', '--output', | ||
2942 | 165 | help='Output file', dest='output') | ||
2943 | 166 | optparser.add_option('--trend-start', type='int', | ||
2944 | 167 | help='Explicitly set start of trend line', dest='trendstart') | ||
2945 | 168 | optparser.add_option('-u', '--user', | ||
2946 | 169 | help='Run for this user', dest='user') | ||
2947 | 170 | optparser.add_option('--only-weekdays', action='store_true', | ||
2948 | 171 | help='Skip Saturdays and Sundays in the resulting graph', dest='only_weekdays') | ||
2949 | 172 | optparser.add_option('--inverted', action='store_true', | ||
2950 | 173 | help='Generate an inverted burndown chart', dest='inverted') | ||
2951 | 174 | optparser.add_option('-s', '--start-date', | ||
2952 | 175 | help='Explicitly set the start date of the burndown data', dest='start_date') | ||
2953 | 176 | optparser.add_option('-e', '--end-date', | ||
2954 | 177 | help='Explicitly set the end date of the burndown data', dest='end_date') | ||
2955 | 178 | optparser.add_option('--no-foreign', action='store_true', default=False, | ||
2956 | 179 | help='Do not show foreign totals separate', dest='noforeign') | ||
2957 | 180 | optparser.add_option('--group', | ||
2958 | 181 | help='Run for this group', dest='group') | ||
2959 | 182 | optparser.add_option('--date', | ||
2960 | 183 | help='Run for this date', dest='date') | ||
2961 | 184 | |||
2962 | 185 | (opts, args) = optparser.parse_args() | ||
2963 | 186 | if not opts.database: | ||
2964 | 187 | optparser.error('No database given') | ||
2965 | 188 | if not opts.output: | ||
2966 | 189 | optparser.error('No output file given') | ||
2967 | 190 | |||
2968 | 191 | if opts.user and opts.team: | ||
2969 | 192 | optparser.error('team and user options are mutually exclusive') | ||
2970 | 193 | if opts.user and opts.group: | ||
2971 | 194 | optparser.error('user and group options are mutually exclusive') | ||
2972 | 195 | if opts.team and opts.group: | ||
2973 | 196 | optparser.error('team and group options are mutually exclusive') | ||
2974 | 197 | if opts.milestone and opts.date: | ||
2975 | 198 | optparser.error('milestone and date options are mutually exclusive') | ||
2976 | 199 | |||
2977 | 200 | # The typing allows polymorphic behavior | ||
2978 | 201 | if opts.user: | ||
2979 | 202 | opts.team = report_tools.user_string(opts.user) | ||
2980 | 203 | elif opts.team: | ||
2981 | 204 | opts.team = report_tools.team_string(opts.team) | ||
2982 | 205 | |||
2983 | 206 | store = report_tools.get_store(opts.database) | ||
2984 | 207 | |||
2985 | 208 | milestone_collection = None | ||
2986 | 209 | if opts.milestone: | ||
2987 | 210 | milestone_collection = report_tools.get_milestone(store, opts.milestone) | ||
2988 | 211 | elif opts.date: | ||
2989 | 212 | milestone_collection = report_tools.MilestoneGroup( | ||
2990 | 213 | report_tools.date_to_python(opts.date)) | ||
2991 | 214 | |||
2992 | 215 | |||
2993 | 216 | # get date -> state -> count mapping | ||
2994 | 217 | data = report_tools.blueprints_over_time(store) | ||
2995 | 218 | |||
2996 | 219 | if len(data) == 0: | ||
2997 | 220 | print 'WARNING: no blueprints, not generating chart (team: %s, group: %s, due date: %s)' % ( | ||
2998 | 221 | opts.team or 'all', opts.group or 'none', milestone_collection and milestone_collection.display_name or 'none') | ||
2999 | 222 | sys.exit(0) | ||
3000 | 223 | |||
3001 | 224 | # calculate start/end date if no dates are given | ||
3002 | 225 | if opts.start_date is None: | ||
3003 | 226 | start_date = sorted(data.keys())[0] | ||
3004 | 227 | else: | ||
3005 | 228 | start_date=opts.start_date | ||
3006 | 229 | |||
3007 | 230 | if opts.end_date is None: | ||
3008 | 231 | if milestone_collection is not None: | ||
3009 | 232 | end_date = milestone_collection.due_date_str | ||
3010 | 233 | else: | ||
3011 | 234 | end_date=report_tools.milestone_due_date(store) | ||
3012 | 235 | else: | ||
3013 | 236 | end_date=opts.end_date | ||
3014 | 237 | |||
3015 | 238 | if not start_date or not end_date or date_to_ordinal(start_date) > date_to_ordinal(end_date): | ||
3016 | 239 | print 'WARNING: empty date range, not generating chart (team: %s, group: %s, due date: %s)' % ( | ||
3017 | 240 | opts.team or 'all', opts.group or 'none', milestone_collection and milestone_collection.display_name or 'none') | ||
3018 | 241 | sys.exit(0) | ||
3019 | 242 | |||
3020 | 243 | # title | ||
3021 | 244 | title = '/20all quarters' | ||
3022 | 245 | |||
3023 | 246 | if milestone_collection is not None: | ||
3024 | 247 | title += ' (%s)' % milestone_collection.name | ||
3025 | 248 | |||
3026 | 249 | do_chart(data, start_date, end_date, opts.trendstart, title, opts.output, opts.only_weekdays, opts.inverted) | ||
3027 | 0 | 250 | ||
3028 | === added file 'roadmap_health.py' | |||
3029 | --- roadmap_health.py 1970-01-01 00:00:00 +0000 | |||
3030 | +++ roadmap_health.py 2012-10-09 09:20:30 +0000 | |||
3031 | @@ -0,0 +1,102 @@ | |||
3032 | 1 | from report_tools import ( | ||
3033 | 2 | card_blueprints, | ||
3034 | 3 | card_blueprints_by_status, | ||
3035 | 4 | ) | ||
3036 | 5 | |||
3037 | 6 | card_health_checks = [] | ||
3038 | 7 | |||
3039 | 8 | |||
3040 | 9 | def register_health_check(cls): | ||
3041 | 10 | card_health_checks.append(cls) | ||
3042 | 11 | return cls | ||
3043 | 12 | |||
3044 | 13 | |||
3045 | 14 | class HealthCheck(object): | ||
3046 | 15 | NOT_APPLICABLE = 'n/a' | ||
3047 | 16 | OK = 'OK' | ||
3048 | 17 | NOT_OK = 'Not OK' | ||
3049 | 18 | name = 'Base check, not to be used' | ||
3050 | 19 | |||
3051 | 20 | @classmethod | ||
3052 | 21 | def applicable(cls, card, store=None): | ||
3053 | 22 | raise NotImplementedError() | ||
3054 | 23 | |||
3055 | 24 | @classmethod | ||
3056 | 25 | def check(cls, card, store=None): | ||
3057 | 26 | raise NotImplementedError() | ||
3058 | 27 | |||
3059 | 28 | @classmethod | ||
3060 | 29 | def execute(cls, card, store=None): | ||
3061 | 30 | if cls.applicable(card, store): | ||
3062 | 31 | if cls.check(card, store): | ||
3063 | 32 | return cls.OK | ||
3064 | 33 | else: | ||
3065 | 34 | return cls.NOT_OK | ||
3066 | 35 | else: | ||
3067 | 36 | return cls.NOT_APPLICABLE | ||
3068 | 37 | |||
3069 | 38 | |||
3070 | 39 | @register_health_check | ||
3071 | 40 | class DescriptionHealthCheck(HealthCheck): | ||
3072 | 41 | name = 'Has description' | ||
3073 | 42 | |||
3074 | 43 | @classmethod | ||
3075 | 44 | def applicable(cls, card, store=None): | ||
3076 | 45 | return True | ||
3077 | 46 | |||
3078 | 47 | @classmethod | ||
3079 | 48 | def check(cls, card, store=None): | ||
3080 | 49 | return card.description is not None | ||
3081 | 50 | |||
3082 | 51 | |||
3083 | 52 | @register_health_check | ||
3084 | 53 | class CriteriaHealthCheck(HealthCheck): | ||
3085 | 54 | name = 'Has acceptance criteria' | ||
3086 | 55 | |||
3087 | 56 | @classmethod | ||
3088 | 57 | def applicable(cls, card, store=None): | ||
3089 | 58 | return True | ||
3090 | 59 | |||
3091 | 60 | @classmethod | ||
3092 | 61 | def check(cls, card, store=None): | ||
3093 | 62 | return card.acceptance_criteria is not None | ||
3094 | 63 | |||
3095 | 64 | |||
3096 | 65 | @register_health_check | ||
3097 | 66 | class BlueprintsHealthCheck(HealthCheck): | ||
3098 | 67 | name = 'Has blueprints' | ||
3099 | 68 | |||
3100 | 69 | @classmethod | ||
3101 | 70 | def applicable(cls, card, store): | ||
3102 | 71 | return card.status == 'Ready' | ||
3103 | 72 | |||
3104 | 73 | @classmethod | ||
3105 | 74 | def check(cls, card, store): | ||
3106 | 75 | return len(card_blueprints(store, card.roadmap_id)) > 0 | ||
3107 | 76 | |||
3108 | 77 | |||
3109 | 78 | @register_health_check | ||
3110 | 79 | class BlueprintsBlockedHealthCheck(HealthCheck): | ||
3111 | 80 | name = 'Has no Blocked blueprints' | ||
3112 | 81 | |||
3113 | 82 | @classmethod | ||
3114 | 83 | def applicable(cls, card, store): | ||
3115 | 84 | return card.status != 'Ready' | ||
3116 | 85 | |||
3117 | 86 | @classmethod | ||
3118 | 87 | def check(cls, card, store): | ||
3119 | 88 | blueprints = card_blueprints_by_status(store, card.roadmap_id) | ||
3120 | 89 | return len(blueprints['Blocked']) == 0 | ||
3121 | 90 | |||
3122 | 91 | |||
3123 | 92 | @register_health_check | ||
3124 | 93 | class RoadmapIdHealthCheck(HealthCheck): | ||
3125 | 94 | name = 'Has a roadmap id' | ||
3126 | 95 | |||
3127 | 96 | @classmethod | ||
3128 | 97 | def applicable(cls, card, store=None): | ||
3129 | 98 | return True | ||
3130 | 99 | |||
3131 | 100 | @classmethod | ||
3132 | 101 | def check(cls, card, store=None): | ||
3133 | 102 | return card.roadmap_id != '' | ||
3134 | 0 | 103 | ||
3135 | === modified file 'templates/base.html' | |||
3136 | --- templates/base.html 2011-06-02 15:00:45 +0000 | |||
3137 | +++ templates/base.html 2012-10-09 09:20:30 +0000 | |||
3138 | @@ -127,6 +127,19 @@ | |||
3139 | 127 | 127 | ||
3140 | 128 | }); | 128 | }); |
3141 | 129 | </script> | 129 | </script> |
3142 | 130 | <script type="text/javascript"> | ||
3143 | 131 | |||
3144 | 132 | var _gaq = _gaq || []; | ||
3145 | 133 | _gaq.push(['_setAccount', 'UA-16756069-4']); | ||
3146 | 134 | _gaq.push(['_trackPageview']); | ||
3147 | 135 | |||
3148 | 136 | (function() { | ||
3149 | 137 | var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true; | ||
3150 | 138 | ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js'; | ||
3151 | 139 | var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s); | ||
3152 | 140 | })(); | ||
3153 | 141 | |||
3154 | 142 | </script> | ||
3155 | 130 | </head> | 143 | </head> |
3156 | 131 | 144 | ||
3157 | 132 | ${next.body()} | 145 | ${next.body()} |
3158 | 133 | 146 | ||
3159 | === modified file 'templates/body.html' | |||
3160 | --- templates/body.html 2011-05-27 20:07:25 +0000 | |||
3161 | +++ templates/body.html 2012-10-09 09:20:30 +0000 | |||
3162 | @@ -12,7 +12,7 @@ | |||
3163 | 12 | % if page_type == "overview": | 12 | % if page_type == "overview": |
3164 | 13 | active | 13 | active |
3165 | 14 | % endif | 14 | % endif |
3167 | 15 | " title="Overview" id="overview_nav"><a href="${util.url('')}">Overview</a></li> | 15 | " title="Roadmap" id="overview_nav"><a href="${root}../lane/">Roadmap</a></li> |
3168 | 16 | <li class="link | 16 | <li class="link |
3169 | 17 | % if page_type == "about": | 17 | % if page_type == "about": |
3170 | 18 | active | 18 | active |
3171 | @@ -39,6 +39,9 @@ | |||
3172 | 39 | <div id="content_pane"> | 39 | <div id="content_pane"> |
3173 | 40 | <div id="main_content"> | 40 | <div id="main_content"> |
3174 | 41 | ${next.body()} | 41 | ${next.body()} |
3175 | 42 | |||
3176 | 43 | <%namespace name="footer" file="footer.html"/> | ||
3177 | 44 | ${footer.body()} | ||
3178 | 42 | </div> | 45 | </div> |
3179 | 43 | </div> | 46 | </div> |
3180 | 44 | </div> | 47 | </div> |
3181 | 45 | 48 | ||
3182 | === added file 'templates/footer.html' | |||
3183 | === added file 'templates/roadmap_card.html' | |||
3184 | --- templates/roadmap_card.html 1970-01-01 00:00:00 +0000 | |||
3185 | +++ templates/roadmap_card.html 2012-10-09 09:20:30 +0000 | |||
3186 | @@ -0,0 +1,71 @@ | |||
3187 | 1 | <%inherit file="body.html"/> | ||
3188 | 2 | <%namespace name="util" file="util.html"/> | ||
3189 | 3 | <%namespace name="terminology" file="terminology.html"/> | ||
3190 | 4 | |||
3191 | 5 | <%! | ||
3192 | 6 | import report_tools | ||
3193 | 7 | %> | ||
3194 | 8 | |||
3195 | 9 | <%def name="title()"> | ||
3196 | 10 | ${card_title} | ||
3197 | 11 | </%def> | ||
3198 | 12 | |||
3199 | 13 | <h1>${title()}</h1> | ||
3200 | 14 | <div style="float: right"> | ||
3201 | 15 | <h3>Health check</h3> | ||
3202 | 16 | |||
3203 | 17 | <table>${'<tr colspan="2"><td><font color="#FF0000"><b>Needs attention!</b></font></td></tr>' if not card.is_healthy else ''} | ||
3204 | 18 | % for result in health_checks: | ||
3205 | 19 | <tr ${'bgcolor="#FFAAAA"' if result['result'] == 'Not OK' else 'bgcolor="#FFFFFF"'}> | ||
3206 | 20 | <td>${result['name']}</td> | ||
3207 | 21 | <td>${result['result']}</td> | ||
3208 | 22 | </tr> | ||
3209 | 23 | % endfor | ||
3210 | 24 | </table> | ||
3211 | 25 | </div> | ||
3212 | 26 | <h2>${card.status} in <a href="../lane/${lane}.html">${lane}</a></h2> | ||
3213 | 27 | <p> | ||
3214 | 28 | <ul> | ||
3215 | 29 | <li>Card ID: <a href="${card.url}">${card.roadmap_id}</a> | ||
3216 | 30 | <li>Sponsor: ${card.sponsor} | ||
3217 | 31 | <li>Contact: ${card.contact} | ||
3218 | 32 | <li>Priority: ${card.priority} | ||
3219 | 33 | <li>Size: ${card.size} | ||
3220 | 34 | <li>Team: ${card.team} | ||
3221 | 35 | </ul> | ||
3222 | 36 | |||
3223 | 37 | <div style="clear:both; text-align: center">Overall blueprint completion</div> | ||
3224 | 38 | % if card_has_blueprints: | ||
3225 | 39 | ${util.roadmap_progress_bar(bp_status_totals)} | ||
3226 | 40 | % else: | ||
3227 | 41 | <center><i>Progress graph pending linked blueprints.</i></center> | ||
3228 | 42 | % endif | ||
3229 | 43 | |||
3230 | 44 | <h3>Description</h3> ${card.description if card.description is not None else '<i>No description could be found.</i>'} | ||
3231 | 45 | <p><a href="${card.url}">Read the full description</a>. | ||
3232 | 46 | <h3>Acceptance criteria</h3> ${card.acceptance_criteria if card.acceptance_criteria is not None else '<i>No acceptance criteria could be found.</i>'} | ||
3233 | 47 | <p><a href="${card.url}">Read the full acceptance criteria</a>. | ||
3234 | 48 | <p> | ||
3235 | 49 | % if card_has_blueprints: | ||
3236 | 50 | <table> | ||
3237 | 51 | <thead> | ||
3238 | 52 | <tr><th>Title</th> | ||
3239 | 53 | <th>Assignee</th> | ||
3240 | 54 | <th>Priority</th> | ||
3241 | 55 | <th>Status</th> | ||
3242 | 56 | <th>Expected milestone</th> | ||
3243 | 57 | </tr> | ||
3244 | 58 | </thead> | ||
3245 | 59 | % for status in status_order: | ||
3246 | 60 | % for bp in sorted(blueprints[status], key=lambda bp: bp.milestone_name): | ||
3247 | 61 | <tr><td><a href="${bp.url}">${bp.name}</a></td> | ||
3248 | 62 | <td>${bp.assignee_name}</td> | ||
3249 | 63 | <td>${bp.priority}</td> | ||
3250 | 64 | <td>${status} | ||
3251 | 65 | <td>${bp.milestone_name}</td> | ||
3252 | 66 | </tr> | ||
3253 | 67 | % endfor | ||
3254 | 68 | % endfor | ||
3255 | 69 | </table> | ||
3256 | 70 | % endif | ||
3257 | 71 | |||
3258 | 0 | 72 | ||
3259 | === added file 'templates/roadmap_lane.html' | |||
3260 | --- templates/roadmap_lane.html 1970-01-01 00:00:00 +0000 | |||
3261 | +++ templates/roadmap_lane.html 2012-10-09 09:20:30 +0000 | |||
3262 | @@ -0,0 +1,60 @@ | |||
3263 | 1 | <%inherit file="body.html"/> | ||
3264 | 2 | <%namespace name="util" file="util.html"/> | ||
3265 | 3 | <%namespace name="terminology" file="terminology.html"/> | ||
3266 | 4 | |||
3267 | 5 | <%! | ||
3268 | 6 | import report_tools | ||
3269 | 7 | %> | ||
3270 | 8 | |||
3271 | 9 | <%def name="title()"> | ||
3272 | 10 | Progress for ${lane_title} | ||
3273 | 11 | </%def> | ||
3274 | 12 | |||
3275 | 13 | <p style="text-align: right; color: green; font-size: 13pt; float: right"> | ||
3276 | 14 | Lane: <select name="laneselect" onchange="window.location=this.value;"> | ||
3277 | 15 | % for lane in lanes: | ||
3278 | 16 | <option value="${lane.name}.html"${' selected="selected"' if lane.name == lane_title else ''}>${lane.name}${' (current)' if lane.is_current else ''}</option> | ||
3279 | 17 | % endfor | ||
3280 | 18 | </select> | ||
3281 | 19 | <h1>${title()}</h1> | ||
3282 | 20 | ${util.roadmap_progress_bar(bp_status_totals)} | ||
3283 | 21 | <p> | ||
3284 | 22 | <table width="100%"> | ||
3285 | 23 | <thead><tr><th>Card</th><th>Status</th><th>Team</th><th>Priority</th><th>Blueprints</th><th>Health</th></tr></thead> | ||
3286 | 24 | % for status in statuses: | ||
3287 | 25 | % for card_dict in status['cards']: | ||
3288 | 26 | <tr> | ||
3289 | 27 | <td> | ||
3290 | 28 | <a href="../card/${card_dict['card'].roadmap_id if card_dict['card'].roadmap_id != '' else card_dict['card'].card_id}.html">${card_dict['card'].name}</a> | ||
3291 | 29 | </td> | ||
3292 | 30 | <td>${status['name']}</td> | ||
3293 | 31 | <td>${card_dict['card'].team}</td><td align=right>${card_dict['card'].priority}</td> | ||
3294 | 32 | <td> | ||
3295 | 33 | <div class="roadmap_wrap" title=" | ||
3296 | 34 | % for bp_status in status_order: | ||
3297 | 35 | ${bp_status}: ${card_dict['bp_statuses'][bp_status]} | ||
3298 | 36 | % endfor | ||
3299 | 37 | "> | ||
3300 | 38 | % for bp_status in status_order: | ||
3301 | 39 | <div class="roadmap_value" style="width:${card_dict['bp_percentages'][bp_status]}%"> | ||
3302 | 40 | <div class="${bp_status.replace(' ', '')}"> </div> | ||
3303 | 41 | </div> | ||
3304 | 42 | % endfor | ||
3305 | 43 | </div> | ||
3306 | 44 | </td> | ||
3307 | 45 | <td> | ||
3308 | 46 | ${'<font color="#FF0000">Needs attention!</font>' if not card_dict['card'].is_healthy else '' | n} | ||
3309 | 47 | </td> | ||
3310 | 48 | % endfor | ||
3311 | 49 | % endfor | ||
3312 | 50 | </table> | ||
3313 | 51 | |||
3314 | 52 | % if chart_url != 'burndown.svg': | ||
3315 | 53 | <!-- The cli option defaults to burndown.svg! :( --> | ||
3316 | 54 | <div class="overview_graph"> | ||
3317 | 55 | <h3>Blueprint progress</h3><p><a href="current_quarter.svg">(enlarge)</a></p> | ||
3318 | 56 | <object | ||
3319 | 57 | height="500" width="833" | ||
3320 | 58 | data="current_quarter.svg" type="image/svg+xml">Blueprint progress</object> | ||
3321 | 59 | </div> | ||
3322 | 60 | % endif | ||
3323 | 0 | 61 | ||
3324 | === modified file 'templates/util.html' | |||
3325 | --- templates/util.html 2011-06-01 20:55:59 +0000 | |||
3326 | +++ templates/util.html 2012-10-09 09:20:30 +0000 | |||
3327 | @@ -23,6 +23,15 @@ | |||
3328 | 23 | </div> | 23 | </div> |
3329 | 24 | </%def> | 24 | </%def> |
3330 | 25 | 25 | ||
3331 | 26 | <%def name="roadmap_progress_bar(item)"> | ||
3332 | 27 | <div class="roadmap_wrap" title="${item['Completed']} blueprints complete of ${item['Total']}"> | ||
3333 | 28 | <div class="roadmap_value" style="width:${item['Percentage']}%"> | ||
3334 | 29 | <div class="Completed"> </div> | ||
3335 | 30 | </div> | ||
3336 | 31 | <div class="roadmap_progress_text">${item['Percentage']} % complete of ${item['Total']}</div> | ||
3337 | 32 | </div> | ||
3338 | 33 | </%def> | ||
3339 | 34 | |||
3340 | 26 | <%def name="url(end)">${root}${end}</%def> | 35 | <%def name="url(end)">${root}${end}</%def> |
3341 | 27 | 36 | ||
3342 | 28 | <%def name="burndown_chart(chart_url, large=False)"> | 37 | <%def name="burndown_chart(chart_url, large=False)"> |
3343 | 29 | 38 | ||
3344 | === modified file 'tests.py' | |||
3345 | --- tests.py 2011-06-01 03:36:17 +0000 | |||
3346 | +++ tests.py 2012-10-09 09:20:30 +0000 | |||
3347 | @@ -168,6 +168,7 @@ | |||
3348 | 168 | loader = TestLoader() | 168 | loader = TestLoader() |
3349 | 169 | suite = loader.loadTestsFromName(__name__) | 169 | suite = loader.loadTestsFromName(__name__) |
3350 | 170 | suite.addTests(loader.loadTestsFromName("lpworkitems.tests.test_collect")) | 170 | suite.addTests(loader.loadTestsFromName("lpworkitems.tests.test_collect")) |
3351 | 171 | suite.addTests(loader.loadTestsFromName("lpworkitems.tests.test_collect_roadmap")) | ||
3352 | 171 | suite.addTests(loader.loadTestsFromName("lpworkitems.tests.test_error_collector")) | 172 | suite.addTests(loader.loadTestsFromName("lpworkitems.tests.test_error_collector")) |
3353 | 172 | suite.addTests(loader.loadTestsFromName("lpworkitems.tests.test_factory")) | 173 | suite.addTests(loader.loadTestsFromName("lpworkitems.tests.test_factory")) |
3354 | 173 | suite.addTests(loader.loadTestsFromName("lpworkitems.tests.test_fake_launchpad")) | 174 | suite.addTests(loader.loadTestsFromName("lpworkitems.tests.test_fake_launchpad")) |
3355 | 174 | 175 | ||
3356 | === added file 'themes/linaro/templates/footer.html' | |||
3357 | --- themes/linaro/templates/footer.html 1970-01-01 00:00:00 +0000 | |||
3358 | +++ themes/linaro/templates/footer.html 2012-10-09 09:20:30 +0000 | |||
3359 | @@ -0,0 +1,10 @@ | |||
3360 | 1 | <%! | ||
3361 | 2 | import datetime | ||
3362 | 3 | %> | ||
3363 | 4 | <div id="footer"> | ||
3364 | 5 | Last updated: ${datetime.datetime.utcnow().strftime("%a %d %B %Y, %H:%M UTC")} | | ||
3365 | 6 | <a href="https://bugs.launchpad.net/launchpad-work-items-tracker">Bugs</a> | | ||
3366 | 7 | <a href="https://code.launchpad.net/~linaro-infrastructure/launchpad-work-items-tracker/linaro">Code</a> | | ||
3367 | 8 | <a href="https://code.launchpad.net/~linaro-infrastructure/launchpad-work-items-tracker/linaro-config">Config</a> | | ||
3368 | 9 | <a href="/update.log.txt">Update log</a> (<a href="/update.log.txt.1">yesterday</a>) | ||
3369 | 10 | </div> | ||
3370 | 0 | 11 | ||
3371 | === added file 'utils.py' | |||
3372 | --- utils.py 1970-01-01 00:00:00 +0000 | |||
3373 | +++ utils.py 2012-10-09 09:20:30 +0000 | |||
3374 | @@ -0,0 +1,6 @@ | |||
3375 | 1 | def unicode_or_None(attr): | ||
3376 | 2 | if attr is None: | ||
3377 | 3 | return attr | ||
3378 | 4 | if isinstance(attr, unicode): | ||
3379 | 5 | return attr | ||
3380 | 6 | return attr.decode("utf-8") |