Merge lp:~smoser/cloud-init/trunk.reporting into lp:~cloud-init-dev/cloud-init/trunk
- trunk.reporting
- Merge into trunk
Status: | Merged |
---|---|
Merged at revision: | 1134 |
Proposed branch: | lp:~smoser/cloud-init/trunk.reporting |
Merge into: | lp:~cloud-init-dev/cloud-init/trunk |
Diff against target: |
1589 lines (+1040/-137) 13 files modified
bin/cloud-init (+44/-8) cloudinit/cloud.py (+8/-1) cloudinit/registry.py (+37/-0) cloudinit/reporting/__init__.py (+241/-0) cloudinit/reporting/handlers.py (+78/-0) cloudinit/sources/DataSourceMAAS.py (+42/-104) cloudinit/sources/__init__.py (+16/-7) cloudinit/stages.py (+49/-10) cloudinit/url_helper.py (+135/-5) cloudinit/util.py (+2/-1) tests/unittests/test_datasource/test_maas.py (+1/-1) tests/unittests/test_registry.py (+28/-0) tests/unittests/test_reporting.py (+359/-0) |
To merge this branch: | bzr merge lp:~smoser/cloud-init/trunk.reporting |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
cloud-init Commiters | Pending | ||
Review via email: mp+266578@code.launchpad.net |
Commit message
Description of the change
This is a backport of the 2.0 reporting code + some changes.
I'm interested in comments on changes to the 2.0 reporting code, and also in the general usage of 'reporting' from 0.7.
The goal will be to get whatever changes we find into 2.0 and then back into 0.7.x.
- 1137. By Scott Moser
-
add nicer formatting and messages for datasource searching
- 1138. By Scott Moser
-
merge from trunk
Dan Watkins (oddbloke) wrote : | # |
Scott Moser (smoser) wrote : | # |
thanks. addressed and pushing now.
- 1139. By Scott Moser
-
address Daniel's comments in review
- 1140. By Scott Moser
-
move 'mode' out of SearchReportStack
- 1141. By Scott Moser
-
adjust searching so cache hits are logged
- 1142. By Scott Moser
-
init single: hook up reporter
- 1143. By Scott Moser
-
plumb the rest of the reporting through
- 1144. By Scott Moser
-
sync with 2.0 trunk on reporting
- 1145. By Scott Moser
-
fix tests from sync
change ReportStack to ReportEventStack
change default ReportEventStack to be status.SUCCESS instead of None
- 1146. By Scott Moser
-
event name doesn't need mode as it is run through init-local or init-net
- 1147. By Scott Moser
-
fix lack of import in cloud.py
- 1148. By Scott Moser
-
fix all tests (were broken due to copied code calling userdata twice)
- 1149. By Scott Moser
-
sync to 2.0 review @ patchset 4
- 1150. By Scott Moser
-
merge from trunk
- 1151. By Scott Moser
-
fix pep8
- 1152. By Scott Moser
-
sync tests back
- 1153. By Scott Moser
-
sync with cloudinit 2.0 for registry and reporting
- 1154. By Scott Moser
-
tests pass
- 1155. By Scott Moser
-
add the webhook handler
- 1156. By Scott Moser
-
hopefully fix DataSourceMAAS
- 1157. By Scott Moser
-
seems functional in test
- 1158. By Scott Moser
-
improvements on skew
Dan Watkins (oddbloke) wrote : | # |
One of my comments suggests changes to the reporting framework itself, so making those changes in 2.0 and re-backporting probably makes most sense.
- 1159. By Scott Moser
-
fix syntax
- 1160. By Scott Moser
-
catch exception in webhook, adjust logging to use cloud-init logging
- 1161. By Scott Moser
-
undo broken logic that attempted to not re-initialize classes
Preview Diff
1 | === modified file 'bin/cloud-init' |
2 | --- bin/cloud-init 2015-07-31 15:20:36 +0000 |
3 | +++ bin/cloud-init 2015-08-07 15:21:12 +0000 |
4 | @@ -46,6 +46,7 @@ |
5 | from cloudinit import stages |
6 | from cloudinit import templater |
7 | from cloudinit import util |
8 | +from cloudinit import reporting |
9 | from cloudinit import version |
10 | |
11 | from cloudinit.settings import (PER_INSTANCE, PER_ALWAYS, PER_ONCE, |
12 | @@ -136,6 +137,11 @@ |
13 | return failures |
14 | |
15 | |
16 | +def apply_reporting_cfg(cfg): |
17 | + if cfg.get('reporting'): |
18 | + reporting.update_configuration(cfg.get('reporting')) |
19 | + |
20 | + |
21 | def main_init(name, args): |
22 | deps = [sources.DEP_FILESYSTEM, sources.DEP_NETWORK] |
23 | if args.local: |
24 | @@ -171,7 +177,7 @@ |
25 | w_msg = welcome_format(name) |
26 | else: |
27 | w_msg = welcome_format("%s-local" % (name)) |
28 | - init = stages.Init(deps) |
29 | + init = stages.Init(ds_deps=deps, reporter=args.reporter) |
30 | # Stage 1 |
31 | init.read_cfg(extract_fns(args)) |
32 | # Stage 2 |
33 | @@ -190,6 +196,7 @@ |
34 | " longer be active shortly")) |
35 | logging.resetLogging() |
36 | logging.setupLogging(init.cfg) |
37 | + apply_reporting_cfg(init.cfg) |
38 | |
39 | # Any log usage prior to setupLogging above did not have local user log |
40 | # config applied. We send the welcome message now, as stderr/out have |
41 | @@ -282,8 +289,10 @@ |
42 | util.logexc(LOG, "Consuming user data failed!") |
43 | return (init.datasource, ["Consuming user data failed!"]) |
44 | |
45 | + apply_reporting_cfg(init.cfg) |
46 | + |
47 | # Stage 8 - re-read and apply relevant cloud-config to include user-data |
48 | - mods = stages.Modules(init, extract_fns(args)) |
49 | + mods = stages.Modules(init, extract_fns(args), reporter=args.reporter) |
50 | # Stage 9 |
51 | try: |
52 | outfmt_orig = outfmt |
53 | @@ -313,7 +322,7 @@ |
54 | # 5. Run the modules for the given stage name |
55 | # 6. Done! |
56 | w_msg = welcome_format("%s:%s" % (action_name, name)) |
57 | - init = stages.Init(ds_deps=[]) |
58 | + init = stages.Init(ds_deps=[], reporter=args.reporter) |
59 | # Stage 1 |
60 | init.read_cfg(extract_fns(args)) |
61 | # Stage 2 |
62 | @@ -328,7 +337,7 @@ |
63 | if not args.force: |
64 | return [(msg)] |
65 | # Stage 3 |
66 | - mods = stages.Modules(init, extract_fns(args)) |
67 | + mods = stages.Modules(init, extract_fns(args), reporter=args.reporter) |
68 | # Stage 4 |
69 | try: |
70 | LOG.debug("Closing stdin") |
71 | @@ -342,6 +351,7 @@ |
72 | " longer be active shortly")) |
73 | logging.resetLogging() |
74 | logging.setupLogging(mods.cfg) |
75 | + apply_reporting_cfg(init.cfg) |
76 | |
77 | # now that logging is setup and stdout redirected, send welcome |
78 | welcome(name, msg=w_msg) |
79 | @@ -366,7 +376,7 @@ |
80 | # 6. Done! |
81 | mod_name = args.name |
82 | w_msg = welcome_format(name) |
83 | - init = stages.Init(ds_deps=[]) |
84 | + init = stages.Init(ds_deps=[], reporter=args.reporter) |
85 | # Stage 1 |
86 | init.read_cfg(extract_fns(args)) |
87 | # Stage 2 |
88 | @@ -383,7 +393,7 @@ |
89 | if not args.force: |
90 | return 1 |
91 | # Stage 3 |
92 | - mods = stages.Modules(init, extract_fns(args)) |
93 | + mods = stages.Modules(init, extract_fns(args), reporter=args.reporter) |
94 | mod_args = args.module_args |
95 | if mod_args: |
96 | LOG.debug("Using passed in arguments %s", mod_args) |
97 | @@ -404,6 +414,7 @@ |
98 | " longer be active shortly")) |
99 | logging.resetLogging() |
100 | logging.setupLogging(mods.cfg) |
101 | + apply_reporting_cfg(init.cfg) |
102 | |
103 | # now that logging is setup and stdout redirected, send welcome |
104 | welcome(name, msg=w_msg) |
105 | @@ -549,6 +560,8 @@ |
106 | ' found (use at your own risk)'), |
107 | dest='force', |
108 | default=False) |
109 | + |
110 | + parser.set_defaults(reporter=None) |
111 | subparsers = parser.add_subparsers() |
112 | |
113 | # Each action and its sub-options (if any) |
114 | @@ -595,6 +608,9 @@ |
115 | help=("frequency of the module"), |
116 | required=False, |
117 | choices=list(FREQ_SHORT_NAMES.keys())) |
118 | + parser_single.add_argument("--report", action="store_true", |
119 | + help="enable reporting", |
120 | + required=False) |
121 | parser_single.add_argument("module_args", nargs="*", |
122 | metavar='argument', |
123 | help=('any additional arguments to' |
124 | @@ -617,8 +633,28 @@ |
125 | if name in ("modules", "init"): |
126 | functor = status_wrapper |
127 | |
128 | - return util.log_time(logfunc=LOG.debug, msg="cloud-init mode '%s'" % name, |
129 | - get_uptime=True, func=functor, args=(name, args)) |
130 | + report_on = True |
131 | + if name == "init": |
132 | + if args.local: |
133 | + rname, rdesc = ("init-local", "searching for local datasources") |
134 | + else: |
135 | + rname, rdesc = ("init-network", |
136 | + "searching for network datasources") |
137 | + elif name == "modules": |
138 | + rname, rdesc = ("modules-%s" % args.mode, |
139 | + "running modules for %s" % args.mode) |
140 | + elif name == "single": |
141 | + rname, rdesc = ("single/%s" % args.name, |
142 | + "running single module %s" % args.name) |
143 | + report_on = args.report |
144 | + |
145 | + reporting.update_configuration({'print': {'type': 'print'}}) |
146 | + args.reporter = reporting.ReportEventStack( |
147 | + rname, rdesc, reporting_enabled=report_on) |
148 | + with args.reporter: |
149 | + return util.log_time( |
150 | + logfunc=LOG.debug, msg="cloud-init mode '%s'" % name, |
151 | + get_uptime=True, func=functor, args=(name, args)) |
152 | |
153 | |
154 | if __name__ == '__main__': |
155 | |
156 | === modified file 'cloudinit/cloud.py' |
157 | --- cloudinit/cloud.py 2012-08-27 17:56:47 +0000 |
158 | +++ cloudinit/cloud.py 2015-08-07 15:21:12 +0000 |
159 | @@ -24,6 +24,7 @@ |
160 | import os |
161 | |
162 | from cloudinit import log as logging |
163 | +from cloudinit import reporting |
164 | |
165 | LOG = logging.getLogger(__name__) |
166 | |
167 | @@ -40,12 +41,18 @@ |
168 | |
169 | |
170 | class Cloud(object): |
171 | - def __init__(self, datasource, paths, cfg, distro, runners): |
172 | + def __init__(self, datasource, paths, cfg, distro, runners, reporter=None): |
173 | self.datasource = datasource |
174 | self.paths = paths |
175 | self.distro = distro |
176 | self._cfg = cfg |
177 | self._runners = runners |
178 | + if reporter is None: |
179 | + reporter = reporting.ReportEventStack( |
180 | + name="unnamed-cloud-reporter", |
181 | + description="unnamed-cloud-reporter", |
182 | + reporting_enabled=False) |
183 | + self.reporter = reporter |
184 | |
185 | # If a 'user' manipulates logging or logging services |
186 | # it is typically useful to cause the logging to be |
187 | |
188 | === added file 'cloudinit/registry.py' |
189 | --- cloudinit/registry.py 1970-01-01 00:00:00 +0000 |
190 | +++ cloudinit/registry.py 2015-08-07 15:21:12 +0000 |
191 | @@ -0,0 +1,37 @@ |
192 | +# Copyright 2015 Canonical Ltd. |
193 | +# This file is part of cloud-init. See LICENCE file for license information. |
194 | +# |
195 | +# vi: ts=4 expandtab |
196 | +import copy |
197 | + |
198 | + |
199 | +class DictRegistry(object): |
200 | + """A simple registry for a mapping of objects.""" |
201 | + |
202 | + def __init__(self): |
203 | + self.reset() |
204 | + |
205 | + def reset(self): |
206 | + self._items = {} |
207 | + |
208 | + def register_item(self, key, item): |
209 | + """Add item to the registry.""" |
210 | + if key in self._items: |
211 | + raise ValueError( |
212 | + 'Item already registered with key {0}'.format(key)) |
213 | + self._items[key] = item |
214 | + |
215 | + def unregister_item(self, key, force=True): |
216 | + """Remove item from the registry.""" |
217 | + if key in self._items: |
218 | + del self._items[key] |
219 | + elif not force: |
220 | + raise KeyError("%s: key not present to unregister" % key) |
221 | + |
222 | + @property |
223 | + def registered_items(self): |
224 | + """All the items that have been registered. |
225 | + |
226 | + This cannot be used to modify the contents of the registry. |
227 | + """ |
228 | + return copy.copy(self._items) |
229 | |
230 | === added directory 'cloudinit/reporting' |
231 | === added file 'cloudinit/reporting/__init__.py' |
232 | --- cloudinit/reporting/__init__.py 1970-01-01 00:00:00 +0000 |
233 | +++ cloudinit/reporting/__init__.py 2015-08-07 15:21:12 +0000 |
234 | @@ -0,0 +1,241 @@ |
235 | +# Copyright 2015 Canonical Ltd. |
236 | +# This file is part of cloud-init. See LICENCE file for license information. |
237 | +# |
238 | +# vi: ts=4 expandtab |
239 | +""" |
240 | +cloud-init reporting framework |
241 | + |
242 | +The reporting framework is intended to allow all parts of cloud-init to |
243 | +report events in a structured manner. |
244 | +""" |
245 | + |
246 | +from ..registry import DictRegistry |
247 | +from ..reporting.handlers import available_handlers |
248 | + |
249 | + |
250 | +FINISH_EVENT_TYPE = 'finish' |
251 | +START_EVENT_TYPE = 'start' |
252 | + |
253 | +DEFAULT_CONFIG = { |
254 | + 'logging': {'type': 'log'}, |
255 | + 'print': {'type': 'print'}, |
256 | +} |
257 | + |
258 | + |
259 | +class _nameset(set): |
260 | + def __getattr__(self, name): |
261 | + if name in self: |
262 | + return name |
263 | + raise AttributeError("%s not a valid value" % name) |
264 | + |
265 | + |
266 | +status = _nameset(("SUCCESS", "WARN", "FAIL")) |
267 | + |
268 | + |
269 | +class ReportingEvent(object): |
270 | + """Encapsulation of event formatting.""" |
271 | + |
272 | + def __init__(self, event_type, name, description): |
273 | + self.event_type = event_type |
274 | + self.name = name |
275 | + self.description = description |
276 | + |
277 | + def as_string(self): |
278 | + """The event represented as a string.""" |
279 | + return '{0}: {1}: {2}'.format( |
280 | + self.event_type, self.name, self.description) |
281 | + |
282 | + def as_dict(self): |
283 | + """The event represented as a dictionary.""" |
284 | + return {'name': self.name, 'description': self.description, |
285 | + 'event_type': self.event_type} |
286 | + |
287 | + |
288 | +class FinishReportingEvent(ReportingEvent): |
289 | + |
290 | + def __init__(self, name, description, result=status.SUCCESS): |
291 | + super(FinishReportingEvent, self).__init__( |
292 | + FINISH_EVENT_TYPE, name, description) |
293 | + self.result = result |
294 | + if result not in status: |
295 | + raise ValueError("Invalid result: %s" % result) |
296 | + |
297 | + def as_string(self): |
298 | + return '{0}: {1}: {2}: {3}'.format( |
299 | + self.event_type, self.name, self.result, self.description) |
300 | + |
301 | + def as_dict(self): |
302 | + """The event represented as json friendly.""" |
303 | + data = super(FinishReportingEvent, self).as_dict() |
304 | + data['result'] = self.result |
305 | + return data |
306 | + |
307 | + |
308 | +def update_configuration(config): |
309 | + """Update the instanciated_handler_registry. |
310 | + |
311 | + :param config: |
312 | + The dictionary containing changes to apply. If a key is given |
313 | + with a False-ish value, the registered handler matching that name |
314 | + will be unregistered. |
315 | + """ |
316 | + for handler_name, handler_config in config.items(): |
317 | + if not handler_config: |
318 | + instantiated_handler_registry.unregister_item( |
319 | + handler_name, force=True) |
320 | + continue |
321 | + registered = instantiated_handler_registry.registered_items |
322 | + handler_config = handler_config.copy() |
323 | + cls = available_handlers.registered_items[handler_config.pop('type')] |
324 | + instantiated_handler_registry.unregister_item(handler_name) |
325 | + instance = cls(**handler_config) |
326 | + instantiated_handler_registry.register_item(handler_name, instance) |
327 | + |
328 | + |
329 | +def report_event(event): |
330 | + """Report an event to all registered event handlers. |
331 | + |
332 | + This should generally be called via one of the other functions in |
333 | + the reporting module. |
334 | + |
335 | + :param event_type: |
336 | + The type of the event; this should be a constant from the |
337 | + reporting module. |
338 | + """ |
339 | + for _, handler in instantiated_handler_registry.registered_items.items(): |
340 | + handler.publish_event(event) |
341 | + |
342 | + |
343 | +def report_finish_event(event_name, event_description, |
344 | + result=status.SUCCESS): |
345 | + """Report a "finish" event. |
346 | + |
347 | + See :py:func:`.report_event` for parameter details. |
348 | + """ |
349 | + event = FinishReportingEvent(event_name, event_description, result) |
350 | + return report_event(event) |
351 | + |
352 | + |
353 | +def report_start_event(event_name, event_description): |
354 | + """Report a "start" event. |
355 | + |
356 | + :param event_name: |
357 | + The name of the event; this should be a topic which events would |
358 | + share (e.g. it will be the same for start and finish events). |
359 | + |
360 | + :param event_description: |
361 | + A human-readable description of the event that has occurred. |
362 | + """ |
363 | + event = ReportingEvent(START_EVENT_TYPE, event_name, event_description) |
364 | + return report_event(event) |
365 | + |
366 | + |
367 | +class ReportEventStack(object): |
368 | + """Context Manager for using :py:func:`report_event` |
369 | + |
370 | + This enables calling :py:func:`report_start_event` and |
371 | + :py:func:`report_finish_event` through a context manager. |
372 | + |
373 | + :param name: |
374 | + the name of the event |
375 | + |
376 | + :param description: |
377 | + the event's description, passed on to :py:func:`report_start_event` |
378 | + |
379 | + :param message: |
380 | + the description to use for the finish event. defaults to |
381 | + :param:description. |
382 | + |
383 | + :param parent: |
384 | + :type parent: :py:class:ReportEventStack or None |
385 | + The parent of this event. The parent is populated with |
386 | + results of all its children. The name used in reporting |
387 | + is <parent.name>/<name> |
388 | + |
389 | + :param reporting_enabled: |
390 | + Indicates if reporting events should be generated. |
391 | + If not provided, defaults to the parent's value, or True if no parent |
392 | + is provided. |
393 | + |
394 | + :param result_on_exception: |
395 | + The result value to set if an exception is caught. default |
396 | + value is FAIL. |
397 | + """ |
398 | + def __init__(self, name, description, message=None, parent=None, |
399 | + reporting_enabled=None, result_on_exception=status.FAIL): |
400 | + self.parent = parent |
401 | + self.name = name |
402 | + self.description = description |
403 | + self.message = message |
404 | + self.result_on_exception = result_on_exception |
405 | + self.result = status.SUCCESS |
406 | + |
407 | + # use parents reporting value if not provided |
408 | + if reporting_enabled is None: |
409 | + if parent: |
410 | + reporting_enabled = parent.reporting_enabled |
411 | + else: |
412 | + reporting_enabled = True |
413 | + self.reporting_enabled = reporting_enabled |
414 | + |
415 | + if parent: |
416 | + self.fullname = '/'.join((parent.fullname, name,)) |
417 | + else: |
418 | + self.fullname = self.name |
419 | + self.children = {} |
420 | + |
421 | + def __repr__(self): |
422 | + return ("ReportEventStack(%s, %s, reporting_enabled=%s)" % |
423 | + (self.name, self.description, self.reporting_enabled)) |
424 | + |
425 | + def __enter__(self): |
426 | + self.result = status.SUCCESS |
427 | + if self.reporting_enabled: |
428 | + report_start_event(self.fullname, self.description) |
429 | + if self.parent: |
430 | + self.parent.children[self.name] = (None, None) |
431 | + return self |
432 | + |
433 | + def _childrens_finish_info(self): |
434 | + for cand_result in (status.FAIL, status.WARN): |
435 | + for name, (value, msg) in self.children.items(): |
436 | + if value == cand_result: |
437 | + return (value, self.message) |
438 | + return (self.result, self.message) |
439 | + |
440 | + @property |
441 | + def result(self): |
442 | + return self._result |
443 | + |
444 | + @result.setter |
445 | + def result(self, value): |
446 | + if value not in status: |
447 | + raise ValueError("'%s' not a valid result" % value) |
448 | + self._result = value |
449 | + |
450 | + @property |
451 | + def message(self): |
452 | + if self._message is not None: |
453 | + return self._message |
454 | + return self.description |
455 | + |
456 | + @message.setter |
457 | + def message(self, value): |
458 | + self._message = value |
459 | + |
460 | + def _finish_info(self, exc): |
461 | + # return tuple of description, and value |
462 | + if exc: |
463 | + return (self.result_on_exception, self.message) |
464 | + return self._childrens_finish_info() |
465 | + |
466 | + def __exit__(self, exc_type, exc_value, traceback): |
467 | + (result, msg) = self._finish_info(exc_value) |
468 | + if self.parent: |
469 | + self.parent.children[self.name] = (result, msg) |
470 | + if self.reporting_enabled: |
471 | + report_finish_event(self.fullname, msg, result) |
472 | + |
473 | + |
474 | +instantiated_handler_registry = DictRegistry() |
475 | +update_configuration(DEFAULT_CONFIG) |
476 | |
477 | === added file 'cloudinit/reporting/handlers.py' |
478 | --- cloudinit/reporting/handlers.py 1970-01-01 00:00:00 +0000 |
479 | +++ cloudinit/reporting/handlers.py 2015-08-07 15:21:12 +0000 |
480 | @@ -0,0 +1,78 @@ |
481 | +# vi: ts=4 expandtab |
482 | + |
483 | +import abc |
484 | +import oauthlib.oauth1 as oauth1 |
485 | +import six |
486 | + |
487 | +from ..registry import DictRegistry |
488 | +from .. import (url_helper, util) |
489 | +from .. import log as logging |
490 | + |
491 | + |
492 | +LOG = logging.getLogger(__name__) |
493 | + |
494 | + |
495 | +@six.add_metaclass(abc.ABCMeta) |
496 | +class ReportingHandler(object): |
497 | + """Base class for report handlers. |
498 | + |
499 | + Implement :meth:`~publish_event` for controlling what |
500 | + the handler does with an event. |
501 | + """ |
502 | + |
503 | + @abc.abstractmethod |
504 | + def publish_event(self, event): |
505 | + """Publish an event to the ``INFO`` log level.""" |
506 | + |
507 | + |
508 | +class LogHandler(ReportingHandler): |
509 | + """Publishes events to the cloud-init log at the ``INFO`` log level.""" |
510 | + |
511 | + def publish_event(self, event): |
512 | + """Publish an event to the ``INFO`` log level.""" |
513 | + logger = logging.getLogger( |
514 | + '.'.join(['cloudinit', 'reporting', event.event_type, event.name])) |
515 | + logger.info(event.as_string()) |
516 | + |
517 | + |
518 | +class PrintHandler(ReportingHandler): |
519 | + def publish_event(self, event): |
520 | + """Publish an event to the ``INFO`` log level.""" |
521 | + print(event.as_string()) |
522 | + |
523 | + |
524 | +class WebHookHandler(ReportingHandler): |
525 | + def __init__(self, endpoint, consumer_key=None, token_key=None, |
526 | + token_secret=None, consumer_secret=None, timeout=None, |
527 | + retries=None): |
528 | + super(WebHookHandler, self).__init__() |
529 | + |
530 | + if any([consumer_key, token_key, token_secret, consumer_secret]): |
531 | + self.oauth_helper = url_helper.OauthUrlHelper( |
532 | + consumer_key=consumer_key, token_key=token_key, |
533 | + token_secret=token_secret, consumer_secret=consumer_secret) |
534 | + else: |
535 | + self.oauth_helper = None |
536 | + self.endpoint = endpoint |
537 | + self.timeout = timeout |
538 | + self.retries = retries |
539 | + self.ssl_details = util.fetch_ssl_details() |
540 | + |
541 | + def publish_event(self, event): |
542 | + if self.oauth_helper: |
543 | + readurl = self.oauth_helper.readurl |
544 | + else: |
545 | + readurl = url_helper.readurl |
546 | + try: |
547 | + return readurl( |
548 | + self.endpoint, data=event.as_dict(), |
549 | + timeout=self.timeout, |
550 | + retries=self.retries, ssl_details=self.ssl_details) |
551 | + except: |
552 | + LOG.warn("failed posting event: %s" % event.as_string()) |
553 | + |
554 | + |
555 | +available_handlers = DictRegistry() |
556 | +available_handlers.register_item('log', LogHandler) |
557 | +available_handlers.register_item('print', PrintHandler) |
558 | +available_handlers.register_item('webhook', WebHookHandler) |
559 | |
560 | === modified file 'cloudinit/sources/DataSourceMAAS.py' |
561 | --- cloudinit/sources/DataSourceMAAS.py 2015-03-10 21:04:59 +0000 |
562 | +++ cloudinit/sources/DataSourceMAAS.py 2015-08-07 15:21:12 +0000 |
563 | @@ -52,7 +52,20 @@ |
564 | sources.DataSource.__init__(self, sys_cfg, distro, paths) |
565 | self.base_url = None |
566 | self.seed_dir = os.path.join(paths.seed_dir, 'maas') |
567 | - self.oauth_clockskew = None |
568 | + self.oauth_helper = self._get_helper() |
569 | + |
570 | + def _get_helper(self): |
571 | + mcfg = self.ds_cfg |
572 | + # If we are missing token_key, token_secret or consumer_key |
573 | + # then just do non-authed requests |
574 | + for required in ('token_key', 'token_secret', 'consumer_key'): |
575 | + if required not in mcfg: |
576 | + return url_helper.OauthUrlHelper() |
577 | + |
578 | + return url_helper.OauthHelper( |
579 | + consumer_key=mcfg['consumer_key'], token_key=mcfg['token_key'], |
580 | + token_secret=mcfg['token_secret'], |
581 | + consumer_secret=mcfg.get('consumer_secret')) |
582 | |
583 | def __str__(self): |
584 | root = sources.DataSource.__str__(self) |
585 | @@ -84,9 +97,9 @@ |
586 | |
587 | self.base_url = url |
588 | |
589 | - (userdata, metadata) = read_maas_seed_url(self.base_url, |
590 | - self._md_headers, |
591 | - paths=self.paths) |
592 | + (userdata, metadata) = read_maas_seed_url( |
593 | + self.base_url, self.oauth_helper.md_headers, |
594 | + paths=self.paths) |
595 | self.userdata_raw = userdata |
596 | self.metadata = metadata |
597 | return True |
598 | @@ -94,31 +107,8 @@ |
599 | util.logexc(LOG, "Failed fetching metadata from url %s", url) |
600 | return False |
601 | |
602 | - def _md_headers(self, url): |
603 | - mcfg = self.ds_cfg |
604 | - |
605 | - # If we are missing token_key, token_secret or consumer_key |
606 | - # then just do non-authed requests |
607 | - for required in ('token_key', 'token_secret', 'consumer_key'): |
608 | - if required not in mcfg: |
609 | - return {} |
610 | - |
611 | - consumer_secret = mcfg.get('consumer_secret', "") |
612 | - |
613 | - timestamp = None |
614 | - if self.oauth_clockskew: |
615 | - timestamp = int(time.time()) + self.oauth_clockskew |
616 | - |
617 | - return oauth_headers(url=url, |
618 | - consumer_key=mcfg['consumer_key'], |
619 | - token_key=mcfg['token_key'], |
620 | - token_secret=mcfg['token_secret'], |
621 | - consumer_secret=consumer_secret, |
622 | - timestamp=timestamp) |
623 | - |
624 | def wait_for_metadata_service(self, url): |
625 | mcfg = self.ds_cfg |
626 | - |
627 | max_wait = 120 |
628 | try: |
629 | max_wait = int(mcfg.get("max_wait", max_wait)) |
630 | @@ -138,10 +128,8 @@ |
631 | starttime = time.time() |
632 | check_url = "%s/%s/meta-data/instance-id" % (url, MD_VERSION) |
633 | urls = [check_url] |
634 | - url = url_helper.wait_for_url(urls=urls, max_wait=max_wait, |
635 | - timeout=timeout, |
636 | - exception_cb=self._except_cb, |
637 | - headers_cb=self._md_headers) |
638 | + url = self.oauth_helper.wait_for_url( |
639 | + urls=urls, max_wait=max_wait, timeout=timeout) |
640 | |
641 | if url: |
642 | LOG.debug("Using metadata source: '%s'", url) |
643 | @@ -151,26 +139,6 @@ |
644 | |
645 | return bool(url) |
646 | |
647 | - def _except_cb(self, msg, exception): |
648 | - if not (isinstance(exception, url_helper.UrlError) and |
649 | - (exception.code == 403 or exception.code == 401)): |
650 | - return |
651 | - |
652 | - if 'date' not in exception.headers: |
653 | - LOG.warn("Missing header 'date' in %s response", exception.code) |
654 | - return |
655 | - |
656 | - date = exception.headers['date'] |
657 | - try: |
658 | - ret_time = time.mktime(parsedate(date)) |
659 | - except Exception as e: |
660 | - LOG.warn("Failed to convert datetime '%s': %s", date, e) |
661 | - return |
662 | - |
663 | - self.oauth_clockskew = int(ret_time - time.time()) |
664 | - LOG.warn("Setting oauth clockskew to %d", self.oauth_clockskew) |
665 | - return |
666 | - |
667 | |
668 | def read_maas_seed_dir(seed_d): |
669 | """ |
670 | @@ -196,12 +164,12 @@ |
671 | return check_seed_contents(md, seed_d) |
672 | |
673 | |
674 | -def read_maas_seed_url(seed_url, header_cb=None, timeout=None, |
675 | +def read_maas_seed_url(seed_url, read_file_or_url=None, timeout=None, |
676 | version=MD_VERSION, paths=None): |
677 | """ |
678 | Read the maas datasource at seed_url. |
679 | - - header_cb is a method that should return a headers dictionary for |
680 | - a given url |
681 | + read_file_or_url is a method that should provide an interface |
682 | + like util.read_file_or_url |
683 | |
684 | Expected format of seed_url is are the following files: |
685 | * <seed_url>/<version>/meta-data/instance-id |
686 | @@ -222,14 +190,12 @@ |
687 | 'user-data': "%s/%s" % (base_url, 'user-data'), |
688 | } |
689 | |
690 | + if read_file_or_url is None: |
691 | + read_file_or_url = util.read_file_or_url |
692 | + |
693 | md = {} |
694 | for name in file_order: |
695 | url = files.get(name) |
696 | - if not header_cb: |
697 | - def _cb(url): |
698 | - return {} |
699 | - header_cb = _cb |
700 | - |
701 | if name == 'user-data': |
702 | retries = 0 |
703 | else: |
704 | @@ -237,10 +203,8 @@ |
705 | |
706 | try: |
707 | ssl_details = util.fetch_ssl_details(paths) |
708 | - resp = util.read_file_or_url(url, retries=retries, |
709 | - headers_cb=header_cb, |
710 | - timeout=timeout, |
711 | - ssl_details=ssl_details) |
712 | + resp = read_file_or_url(url, retries=retries, |
713 | + timeout=timeout, ssl_details=ssl_details) |
714 | if resp.ok(): |
715 | if name in BINARY_FIELDS: |
716 | md[name] = resp.contents |
717 | @@ -280,24 +244,6 @@ |
718 | return (userdata, md) |
719 | |
720 | |
721 | -def oauth_headers(url, consumer_key, token_key, token_secret, consumer_secret, |
722 | - timestamp=None): |
723 | - if timestamp: |
724 | - timestamp = str(timestamp) |
725 | - else: |
726 | - timestamp = None |
727 | - |
728 | - client = oauth1.Client( |
729 | - consumer_key, |
730 | - client_secret=consumer_secret, |
731 | - resource_owner_key=token_key, |
732 | - resource_owner_secret=token_secret, |
733 | - signature_method=oauth1.SIGNATURE_PLAINTEXT, |
734 | - timestamp=timestamp) |
735 | - uri, signed_headers, body = client.sign(url) |
736 | - return signed_headers |
737 | - |
738 | - |
739 | class MAASSeedDirNone(Exception): |
740 | pass |
741 | |
742 | @@ -361,47 +307,39 @@ |
743 | if key in cfg and creds[key] is None: |
744 | creds[key] = cfg[key] |
745 | |
746 | - def geturl(url, headers_cb): |
747 | - req = Request(url, data=None, headers=headers_cb(url)) |
748 | - return urlopen(req).read() |
749 | + oauth_helper = url_helper.OauthUrlHelper(**creds) |
750 | + |
751 | + def geturl(url): |
752 | + return oauth_helper.readurl(url).contents |
753 | |
754 | def printurl(url, headers_cb): |
755 | - print("== %s ==\n%s\n" % (url, geturl(url, headers_cb))) |
756 | + print("== %s ==\n%s\n" % (url, geturl(url))) |
757 | |
758 | - def crawl(url, headers_cb=None): |
759 | + def crawl(url): |
760 | if url.endswith("/"): |
761 | - for line in geturl(url, headers_cb).splitlines(): |
762 | + for line in geturl(url).splitlines(): |
763 | if line.endswith("/"): |
764 | - crawl("%s%s" % (url, line), headers_cb) |
765 | + crawl("%s%s" % (url, line)) |
766 | else: |
767 | - printurl("%s%s" % (url, line), headers_cb) |
768 | + printurl("%s%s" % (url, line)) |
769 | else: |
770 | - printurl(url, headers_cb) |
771 | - |
772 | - def my_headers(url): |
773 | - headers = {} |
774 | - if creds.get('consumer_key', None) is not None: |
775 | - headers = oauth_headers(url, **creds) |
776 | - return headers |
777 | + printurl(url) |
778 | |
779 | if args.subcmd == "check-seed": |
780 | - if args.url.startswith("http"): |
781 | - (userdata, metadata) = read_maas_seed_url(args.url, |
782 | - header_cb=my_headers, |
783 | - version=args.apiver) |
784 | - else: |
785 | - (userdata, metadata) = read_maas_seed_url(args.url) |
786 | + (userdata, metadata) = read_maas_seed_url( |
787 | + args.url, read_file_or_url=oauth_helper.read_file_or_url, |
788 | + version=args.apiver) |
789 | print("=== userdata ===") |
790 | print(userdata) |
791 | print("=== metadata ===") |
792 | pprint.pprint(metadata) |
793 | |
794 | elif args.subcmd == "get": |
795 | - printurl(args.url, my_headers) |
796 | + printurl(args.url) |
797 | |
798 | elif args.subcmd == "crawl": |
799 | if not args.url.endswith("/"): |
800 | args.url = "%s/" % args.url |
801 | - crawl(args.url, my_headers) |
802 | + crawl(args.url) |
803 | |
804 | main() |
805 | |
806 | === modified file 'cloudinit/sources/__init__.py' |
807 | --- cloudinit/sources/__init__.py 2015-07-22 12:06:34 +0000 |
808 | +++ cloudinit/sources/__init__.py 2015-08-07 15:21:12 +0000 |
809 | @@ -27,6 +27,7 @@ |
810 | |
811 | from cloudinit import importer |
812 | from cloudinit import log as logging |
813 | +from cloudinit import reporting |
814 | from cloudinit import type_utils |
815 | from cloudinit import user_data as ud |
816 | from cloudinit import util |
817 | @@ -246,17 +247,25 @@ |
818 | return keys |
819 | |
820 | |
821 | -def find_source(sys_cfg, distro, paths, ds_deps, cfg_list, pkg_list): |
822 | +def find_source(sys_cfg, distro, paths, ds_deps, cfg_list, pkg_list, reporter): |
823 | ds_list = list_sources(cfg_list, ds_deps, pkg_list) |
824 | ds_names = [type_utils.obj_name(f) for f in ds_list] |
825 | - LOG.debug("Searching for data source in: %s", ds_names) |
826 | + mode = "network" if DEP_NETWORK in ds_deps else "local" |
827 | + LOG.debug("Searching for %s data source in: %s", mode, ds_names) |
828 | |
829 | - for cls in ds_list: |
830 | + for name, cls in zip(ds_names, ds_list): |
831 | + myrep = reporting.ReportEventStack( |
832 | + name="search-%s" % name.replace("DataSource", ""), |
833 | + description="searching for %s data from %s" % (mode, name), |
834 | + message="no %s data found from %s" % (mode, name), |
835 | + parent=reporter) |
836 | try: |
837 | - LOG.debug("Seeing if we can get any data from %s", cls) |
838 | - s = cls(sys_cfg, distro, paths) |
839 | - if s.get_data(): |
840 | - return (s, type_utils.obj_name(cls)) |
841 | + with myrep: |
842 | + LOG.debug("Seeing if we can get any data from %s", cls) |
843 | + s = cls(sys_cfg, distro, paths) |
844 | + if s.get_data(): |
845 | + myrep.message = "found %s data from %s" % (mode, name) |
846 | + return (s, type_utils.obj_name(cls)) |
847 | except Exception: |
848 | util.logexc(LOG, "Getting data from %s failed", cls) |
849 | |
850 | |
851 | === modified file 'cloudinit/stages.py' |
852 | --- cloudinit/stages.py 2015-03-04 17:42:34 +0000 |
853 | +++ cloudinit/stages.py 2015-08-07 15:21:12 +0000 |
854 | @@ -46,6 +46,7 @@ |
855 | from cloudinit import sources |
856 | from cloudinit import type_utils |
857 | from cloudinit import util |
858 | +from cloudinit import reporting |
859 | |
860 | LOG = logging.getLogger(__name__) |
861 | |
862 | @@ -53,7 +54,7 @@ |
863 | |
864 | |
865 | class Init(object): |
866 | - def __init__(self, ds_deps=None): |
867 | + def __init__(self, ds_deps=None, reporter=None): |
868 | if ds_deps is not None: |
869 | self.ds_deps = ds_deps |
870 | else: |
871 | @@ -65,6 +66,12 @@ |
872 | # Changed only when a fetch occurs |
873 | self.datasource = NULL_DATA_SOURCE |
874 | |
875 | + if reporter is None: |
876 | + reporter = reporting.ReportEventStack( |
877 | + name="init-reporter", description="init-desc", |
878 | + reporting_enabled=False) |
879 | + self.reporter = reporter |
880 | + |
881 | def _reset(self, reset_ds=False): |
882 | # Recreated on access |
883 | self._cfg = None |
884 | @@ -234,9 +241,17 @@ |
885 | def _get_data_source(self): |
886 | if self.datasource is not NULL_DATA_SOURCE: |
887 | return self.datasource |
888 | - ds = self._restore_from_cache() |
889 | - if ds: |
890 | - LOG.debug("Restored from cache, datasource: %s", ds) |
891 | + |
892 | + with reporting.ReportEventStack( |
893 | + name="check-cache", |
894 | + description="attempting to read from cache", |
895 | + parent=self.reporter) as myrep: |
896 | + ds = self._restore_from_cache() |
897 | + if ds: |
898 | + LOG.debug("Restored from cache, datasource: %s", ds) |
899 | + myrep.description = "restored from cache" |
900 | + else: |
901 | + myrep.description = "no cache found" |
902 | if not ds: |
903 | (cfg_list, pkg_list) = self._get_datasources() |
904 | # Deep copy so that user-data handlers can not modify |
905 | @@ -246,7 +261,7 @@ |
906 | self.paths, |
907 | copy.deepcopy(self.ds_deps), |
908 | cfg_list, |
909 | - pkg_list) |
910 | + pkg_list, self.reporter) |
911 | LOG.info("Loaded datasource %s - %s", dsname, ds) |
912 | self.datasource = ds |
913 | # Ensure we adjust our path members datasource |
914 | @@ -327,7 +342,8 @@ |
915 | # Form the needed options to cloudify our members |
916 | return cloud.Cloud(self.datasource, |
917 | self.paths, self.cfg, |
918 | - self.distro, helpers.Runners(self.paths)) |
919 | + self.distro, helpers.Runners(self.paths), |
920 | + reporter=self.reporter) |
921 | |
922 | def update(self): |
923 | if not self._write_to_cache(): |
924 | @@ -493,8 +509,14 @@ |
925 | def consume_data(self, frequency=PER_INSTANCE): |
926 | # Consume the userdata first, because we need want to let the part |
927 | # handlers run first (for merging stuff) |
928 | - self._consume_userdata(frequency) |
929 | - self._consume_vendordata(frequency) |
930 | + with reporting.ReportEventStack( |
931 | + "consume-user-data", "reading and applying user-data", |
932 | + parent=self.reporter): |
933 | + self._consume_userdata(frequency) |
934 | + with reporting.ReportEventStack( |
935 | + "consume-vendor-data", "reading and applying vendor-data", |
936 | + parent=self.reporter): |
937 | + self._consume_vendordata(frequency) |
938 | |
939 | # Perform post-consumption adjustments so that |
940 | # modules that run during the init stage reflect |
941 | @@ -567,11 +589,16 @@ |
942 | |
943 | |
944 | class Modules(object): |
945 | - def __init__(self, init, cfg_files=None): |
946 | + def __init__(self, init, cfg_files=None, reporter=None): |
947 | self.init = init |
948 | self.cfg_files = cfg_files |
949 | # Created on first use |
950 | self._cached_cfg = None |
951 | + if reporter is None: |
952 | + reporter = reporting.ReportEventStack( |
953 | + name="module-reporter", description="module-desc", |
954 | + reporting_enabled=False) |
955 | + self.reporter = reporter |
956 | |
957 | @property |
958 | def cfg(self): |
959 | @@ -681,7 +708,19 @@ |
960 | which_ran.append(name) |
961 | # This name will affect the semaphore name created |
962 | run_name = "config-%s" % (name) |
963 | - cc.run(run_name, mod.handle, func_args, freq=freq) |
964 | + |
965 | + desc = "running %s with frequency %s" % (run_name, freq) |
966 | + myrep = reporting.ReportEventStack( |
967 | + name=run_name, description=desc, parent=self.reporter) |
968 | + |
969 | + with myrep: |
970 | + ran, _r = cc.run(run_name, mod.handle, func_args, |
971 | + freq=freq) |
972 | + if ran: |
973 | + myrep.message = "%s ran successfully" % run_name |
974 | + else: |
975 | + myrep.message = "%s previously ran" % run_name |
976 | + |
977 | except Exception as e: |
978 | util.logexc(LOG, "Running module %s (%s) failed", name, mod) |
979 | failures.append((name, e)) |
980 | |
981 | === modified file 'cloudinit/url_helper.py' |
982 | --- cloudinit/url_helper.py 2015-03-02 20:48:42 +0000 |
983 | +++ cloudinit/url_helper.py 2015-08-07 15:21:12 +0000 |
984 | @@ -25,6 +25,10 @@ |
985 | import six |
986 | |
987 | import requests |
988 | +import oauthlib.oauth1 as oauth1 |
989 | +import os, json |
990 | +from email.utils import parsedate |
991 | +from functools import partial |
992 | from requests import exceptions |
993 | |
994 | from six.moves.urllib.parse import ( |
995 | @@ -147,13 +151,14 @@ |
996 | |
997 | |
998 | class UrlError(IOError): |
999 | - def __init__(self, cause, code=None, headers=None): |
1000 | + def __init__(self, cause, code=None, headers=None, url=None): |
1001 | IOError.__init__(self, str(cause)) |
1002 | self.cause = cause |
1003 | self.code = code |
1004 | self.headers = headers |
1005 | if self.headers is None: |
1006 | self.headers = {} |
1007 | + self.url = url |
1008 | |
1009 | |
1010 | def _get_ssl_args(url, ssl_details): |
1011 | @@ -247,9 +252,10 @@ |
1012 | and hasattr(e, 'response') # This appeared in v 0.10.8 |
1013 | and hasattr(e.response, 'status_code')): |
1014 | excps.append(UrlError(e, code=e.response.status_code, |
1015 | - headers=e.response.headers)) |
1016 | + headers=e.response.headers, |
1017 | + url=url)) |
1018 | else: |
1019 | - excps.append(UrlError(e)) |
1020 | + excps.append(UrlError(e, url=url)) |
1021 | if SSL_ENABLED and isinstance(e, exceptions.SSLError): |
1022 | # ssl exceptions are not going to get fixed by waiting a |
1023 | # few seconds |
1024 | @@ -333,11 +339,11 @@ |
1025 | if not response.contents: |
1026 | reason = "empty response [%s]" % (response.code) |
1027 | url_exc = UrlError(ValueError(reason), code=response.code, |
1028 | - headers=response.headers) |
1029 | + headers=response.headers, url=url) |
1030 | elif not response.ok(): |
1031 | reason = "bad status code [%s]" % (response.code) |
1032 | url_exc = UrlError(ValueError(reason), code=response.code, |
1033 | - headers=response.headers) |
1034 | + headers=response.headers, url=url) |
1035 | else: |
1036 | return url |
1037 | except UrlError as e: |
1038 | @@ -368,3 +374,127 @@ |
1039 | time.sleep(sleep_time) |
1040 | |
1041 | return False |
1042 | + |
1043 | + |
1044 | +class OauthUrlHelper(object): |
1045 | + def __init__(self, consumer_key=None, token_key=None, |
1046 | + token_secret=None, consumer_secret=None, |
1047 | + skew_data_file="/run/oauth_skew.json"): |
1048 | + self.consumer_key = consumer_key |
1049 | + self.consumer_secret = consumer_secret or "" |
1050 | + self.token_key = token_key |
1051 | + self.token_secret = token_secret |
1052 | + self.skew_data_file = skew_data_file |
1053 | + self._do_oauth = True |
1054 | + self.skew_change_limit = 5 |
1055 | + required = (self.token_key, self.token_secret, self.consumer_key) |
1056 | + if not any(required): |
1057 | + self._do_oauth = False |
1058 | + elif not all(required): |
1059 | + raise ValueError("all or none of token_key, token_secret, or " |
1060 | + "consumer_key can be set") |
1061 | + |
1062 | + old = self.read_skew_file() |
1063 | + self.skew_data = old or {} |
1064 | + |
1065 | + def read_skew_file(self): |
1066 | + if self.skew_data_file and os.path.isfile(self.skew_data_file): |
1067 | + with open(self.skew_data_file, mode="r") as fp: |
1068 | + return json.load(fp) |
1069 | + return None |
1070 | + |
1071 | + def update_skew_file(self, host, value): |
1072 | + # this is not atomic |
1073 | + if not self.skew_data_file: |
1074 | + return |
1075 | + cur = self.read_skew_file() or {} |
1076 | + cur[host] = value |
1077 | + with open(self.skew_data_file, mode="w") as fp: |
1078 | + fp.write(json.dumps(cur)) |
1079 | + |
1080 | + def exception_cb(self, msg, exception): |
1081 | + if not (isinstance(exception, UrlError) and |
1082 | + (exception.code == 403 or exception.code == 401)): |
1083 | + return |
1084 | + |
1085 | + if 'date' not in exception.headers: |
1086 | + LOG.warn("Missing header 'date' in %s response", exception.code) |
1087 | + return |
1088 | + |
1089 | + date = exception.headers['date'] |
1090 | + try: |
1091 | + remote_time = time.mktime(parsedate(date)) |
1092 | + except Exception as e: |
1093 | + LOG.warn("Failed to convert datetime '%s': %s", date, e) |
1094 | + return |
1095 | + |
1096 | + skew = int(remote_time - time.time()) |
1097 | + host = urlparse(exception.url).netloc |
1098 | + old_skew = self.skew_data.get(host, 0) |
1099 | + if abs(old_skew - skew) > self.skew_change_limit: |
1100 | + self.update_skew_file(host, skew) |
1101 | + LOG.warn("Setting oauth clockskew for %s to %d", host, skew) |
1102 | + self.skew_data[host] = skew |
1103 | + |
1104 | + return |
1105 | + |
1106 | + def headers_cb(self, url): |
1107 | + if not self._do_oauth: |
1108 | + return {} |
1109 | + |
1110 | + timestamp = None |
1111 | + host = urlparse(url).netloc |
1112 | + if self.skew_data and host in self.skew_data: |
1113 | + timestamp = int(time.time()) + self.skew_data[host] |
1114 | + |
1115 | + return oauth_headers( |
1116 | + url=url, consumer_key=self.consumer_key, |
1117 | + token_key=self.token_key, token_secret=self.token_secret, |
1118 | + consumer_secret=self.consumer_secret, timestamp=timestamp) |
1119 | + |
1120 | + def _wrapped(self, wrapped_func, args, kwargs): |
1121 | + kwargs['headers_cb'] = partial( |
1122 | + self._headers_cb, kwargs.get('headers_cb')) |
1123 | + kwargs['exception_cb'] = partial( |
1124 | + self._exception_cb, kwargs.get('exception_cb')) |
1125 | + return wrapped_func(*args, **kwargs) |
1126 | + |
1127 | + def wait_for_url(self, *args, **kwargs): |
1128 | + return self._wrapped(wait_for_url, args, kwargs) |
1129 | + |
1130 | + def readurl(self, *args, **kwargs): |
1131 | + return self._wrapped(readurl, args, kwargs) |
1132 | + |
1133 | + def _exception_cb(self, extra_exception_cb, msg, exception): |
1134 | + ret = None |
1135 | + try: |
1136 | + if extra_exception_cb: |
1137 | + ret = extra_exception_cb(msg, exception) |
1138 | + finally: |
1139 | + self.exception_cb(msg, exception) |
1140 | + return ret |
1141 | + |
1142 | + def _headers_cb(self, extra_headers_cb, url): |
1143 | + headers = {} |
1144 | + if extra_headers_cb: |
1145 | + headers = extra_headers_cb(url) |
1146 | + headers.update(self.headers_cb(url)) |
1147 | + return headers |
1148 | + |
1149 | + |
1150 | +def oauth_headers(url, consumer_key, token_key, token_secret, consumer_secret, |
1151 | + timestamp=None): |
1152 | + if timestamp: |
1153 | + timestamp = str(timestamp) |
1154 | + else: |
1155 | + timestamp = None |
1156 | + |
1157 | + client = oauth1.Client( |
1158 | + consumer_key, |
1159 | + client_secret=consumer_secret, |
1160 | + resource_owner_key=token_key, |
1161 | + resource_owner_secret=token_secret, |
1162 | + signature_method=oauth1.SIGNATURE_PLAINTEXT, |
1163 | + timestamp=timestamp) |
1164 | + uri, signed_headers, body = client.sign(url) |
1165 | + return signed_headers |
1166 | |
1167 | === modified file 'cloudinit/util.py' |
1168 | --- cloudinit/util.py 2015-07-28 00:42:56 +0000 |
1169 | +++ cloudinit/util.py 2015-08-07 15:21:12 +0000 |
1170 | @@ -782,7 +782,8 @@ |
1171 | code = e.errno |
1172 | if e.errno == errno.ENOENT: |
1173 | code = url_helper.NOT_FOUND |
1174 | - raise url_helper.UrlError(cause=e, code=code, headers=None) |
1175 | + raise url_helper.UrlError(cause=e, code=code, headers=None, |
1176 | + url=url) |
1177 | return url_helper.FileResponse(file_path, contents=contents) |
1178 | else: |
1179 | return url_helper.readurl(url, |
1180 | |
1181 | === modified file 'tests/unittests/test_datasource/test_maas.py' |
1182 | --- tests/unittests/test_datasource/test_maas.py 2015-02-26 00:40:33 +0000 |
1183 | +++ tests/unittests/test_datasource/test_maas.py 2015-08-07 15:21:12 +0000 |
1184 | @@ -141,7 +141,7 @@ |
1185 | with mock.patch.object(url_helper, 'readurl', |
1186 | side_effect=side_effect()) as mockobj: |
1187 | userdata, metadata = DataSourceMAAS.read_maas_seed_url( |
1188 | - my_seed, header_cb=my_headers_cb, version=my_ver) |
1189 | + my_seed, version=my_ver) |
1190 | |
1191 | self.assertEqual(b"foodata", userdata) |
1192 | self.assertEqual(metadata['instance-id'], |
1193 | |
1194 | === added file 'tests/unittests/test_registry.py' |
1195 | --- tests/unittests/test_registry.py 1970-01-01 00:00:00 +0000 |
1196 | +++ tests/unittests/test_registry.py 2015-08-07 15:21:12 +0000 |
1197 | @@ -0,0 +1,28 @@ |
1198 | +from cloudinit.registry import DictRegistry |
1199 | + |
1200 | +from .helpers import (mock, TestCase) |
1201 | + |
1202 | + |
1203 | +class TestDictRegistry(TestCase): |
1204 | + |
1205 | + def test_added_item_included_in_output(self): |
1206 | + registry = DictRegistry() |
1207 | + item_key, item_to_register = 'test_key', mock.Mock() |
1208 | + registry.register_item(item_key, item_to_register) |
1209 | + self.assertEqual({item_key: item_to_register}, |
1210 | + registry.registered_items) |
1211 | + |
1212 | + def test_registry_starts_out_empty(self): |
1213 | + self.assertEqual({}, DictRegistry().registered_items) |
1214 | + |
1215 | + def test_modifying_registered_items_isnt_exposed_to_other_callers(self): |
1216 | + registry = DictRegistry() |
1217 | + registry.registered_items['test_item'] = mock.Mock() |
1218 | + self.assertEqual({}, registry.registered_items) |
1219 | + |
1220 | + def test_keys_cannot_be_replaced(self): |
1221 | + registry = DictRegistry() |
1222 | + item_key = 'test_key' |
1223 | + registry.register_item(item_key, mock.Mock()) |
1224 | + self.assertRaises(ValueError, |
1225 | + registry.register_item, item_key, mock.Mock()) |
1226 | |
1227 | === added file 'tests/unittests/test_reporting.py' |
1228 | --- tests/unittests/test_reporting.py 1970-01-01 00:00:00 +0000 |
1229 | +++ tests/unittests/test_reporting.py 2015-08-07 15:21:12 +0000 |
1230 | @@ -0,0 +1,359 @@ |
1231 | +# Copyright 2015 Canonical Ltd. |
1232 | +# This file is part of cloud-init. See LICENCE file for license information. |
1233 | +# |
1234 | +# vi: ts=4 expandtab |
1235 | + |
1236 | +from cloudinit import reporting |
1237 | +from cloudinit.reporting import handlers |
1238 | + |
1239 | +from .helpers import (mock, TestCase) |
1240 | + |
1241 | + |
1242 | +def _fake_registry(): |
1243 | + return mock.Mock(registered_items={'a': mock.MagicMock(), |
1244 | + 'b': mock.MagicMock()}) |
1245 | + |
1246 | + |
1247 | +class TestReportStartEvent(TestCase): |
1248 | + |
1249 | + @mock.patch('cloudinit.reporting.instantiated_handler_registry', |
1250 | + new_callable=_fake_registry) |
1251 | + def test_report_start_event_passes_something_with_as_string_to_handlers( |
1252 | + self, instantiated_handler_registry): |
1253 | + event_name, event_description = 'my_test_event', 'my description' |
1254 | + reporting.report_start_event(event_name, event_description) |
1255 | + expected_string_representation = ': '.join( |
1256 | + ['start', event_name, event_description]) |
1257 | + for _, handler in ( |
1258 | + instantiated_handler_registry.registered_items.items()): |
1259 | + self.assertEqual(1, handler.publish_event.call_count) |
1260 | + event = handler.publish_event.call_args[0][0] |
1261 | + self.assertEqual(expected_string_representation, event.as_string()) |
1262 | + |
1263 | + |
1264 | +class TestReportFinishEvent(TestCase): |
1265 | + |
1266 | + def _report_finish_event(self, result=reporting.status.SUCCESS): |
1267 | + event_name, event_description = 'my_test_event', 'my description' |
1268 | + reporting.report_finish_event( |
1269 | + event_name, event_description, result=result) |
1270 | + return event_name, event_description |
1271 | + |
1272 | + def assertHandlersPassedObjectWithAsString( |
1273 | + self, handlers, expected_as_string): |
1274 | + for _, handler in handlers.items(): |
1275 | + self.assertEqual(1, handler.publish_event.call_count) |
1276 | + event = handler.publish_event.call_args[0][0] |
1277 | + self.assertEqual(expected_as_string, event.as_string()) |
1278 | + |
1279 | + @mock.patch('cloudinit.reporting.instantiated_handler_registry', |
1280 | + new_callable=_fake_registry) |
1281 | + def test_report_finish_event_passes_something_with_as_string_to_handlers( |
1282 | + self, instantiated_handler_registry): |
1283 | + event_name, event_description = self._report_finish_event() |
1284 | + expected_string_representation = ': '.join( |
1285 | + ['finish', event_name, reporting.status.SUCCESS, |
1286 | + event_description]) |
1287 | + self.assertHandlersPassedObjectWithAsString( |
1288 | + instantiated_handler_registry.registered_items, |
1289 | + expected_string_representation) |
1290 | + |
1291 | + @mock.patch('cloudinit.reporting.instantiated_handler_registry', |
1292 | + new_callable=_fake_registry) |
1293 | + def test_reporting_successful_finish_has_sensible_string_repr( |
1294 | + self, instantiated_handler_registry): |
1295 | + event_name, event_description = self._report_finish_event( |
1296 | + result=reporting.status.SUCCESS) |
1297 | + expected_string_representation = ': '.join( |
1298 | + ['finish', event_name, reporting.status.SUCCESS, |
1299 | + event_description]) |
1300 | + self.assertHandlersPassedObjectWithAsString( |
1301 | + instantiated_handler_registry.registered_items, |
1302 | + expected_string_representation) |
1303 | + |
1304 | + @mock.patch('cloudinit.reporting.instantiated_handler_registry', |
1305 | + new_callable=_fake_registry) |
1306 | + def test_reporting_unsuccessful_finish_has_sensible_string_repr( |
1307 | + self, instantiated_handler_registry): |
1308 | + event_name, event_description = self._report_finish_event( |
1309 | + result=reporting.status.FAIL) |
1310 | + expected_string_representation = ': '.join( |
1311 | + ['finish', event_name, reporting.status.FAIL, event_description]) |
1312 | + self.assertHandlersPassedObjectWithAsString( |
1313 | + instantiated_handler_registry.registered_items, |
1314 | + expected_string_representation) |
1315 | + |
1316 | + def test_invalid_result_raises_value_error(self): |
1317 | + self.assertRaises(ValueError, self._report_finish_event, ("BOGUS",)) |
1318 | + |
1319 | + |
1320 | +class TestReportingEvent(TestCase): |
1321 | + |
1322 | + def test_as_string(self): |
1323 | + event_type, name, description = 'test_type', 'test_name', 'test_desc' |
1324 | + event = reporting.ReportingEvent(event_type, name, description) |
1325 | + expected_string_representation = ': '.join( |
1326 | + [event_type, name, description]) |
1327 | + self.assertEqual(expected_string_representation, event.as_string()) |
1328 | + |
1329 | + def test_as_dict(self): |
1330 | + event_type, name, desc = 'test_type', 'test_name', 'test_desc' |
1331 | + event = reporting.ReportingEvent(event_type, name, desc) |
1332 | + self.assertEqual( |
1333 | + {'event_type': event_type, 'name': name, 'description': desc}, |
1334 | + event.as_dict()) |
1335 | + |
1336 | + |
1337 | +class TestFinishReportingEvent(TestCase): |
1338 | + def test_as_has_result(self): |
1339 | + result = reporting.status.SUCCESS |
1340 | + name, desc = 'test_name', 'test_desc' |
1341 | + event = reporting.FinishReportingEvent(name, desc, result) |
1342 | + ret = event.as_dict() |
1343 | + self.assertTrue('result' in ret) |
1344 | + self.assertEqual(ret['result'], result) |
1345 | + |
1346 | + |
1347 | +class TestBaseReportingHandler(TestCase): |
1348 | + |
1349 | + def test_base_reporting_handler_is_abstract(self): |
1350 | + regexp = r".*abstract.*publish_event.*" |
1351 | + self.assertRaisesRegexp(TypeError, regexp, handlers.ReportingHandler) |
1352 | + |
1353 | + |
1354 | +class TestLogHandler(TestCase): |
1355 | + |
1356 | + @mock.patch.object(reporting.handlers.logging, 'getLogger') |
1357 | + def test_appropriate_logger_used(self, getLogger): |
1358 | + event_type, event_name = 'test_type', 'test_name' |
1359 | + event = reporting.ReportingEvent(event_type, event_name, 'description') |
1360 | + reporting.handlers.LogHandler().publish_event(event) |
1361 | + self.assertEqual( |
1362 | + [mock.call( |
1363 | + 'cloudinit.reporting.{0}.{1}'.format(event_type, event_name))], |
1364 | + getLogger.call_args_list) |
1365 | + |
1366 | + @mock.patch.object(reporting.handlers.logging, 'getLogger') |
1367 | + def test_single_log_message_at_info_published(self, getLogger): |
1368 | + event = reporting.ReportingEvent('type', 'name', 'description') |
1369 | + reporting.handlers.LogHandler().publish_event(event) |
1370 | + self.assertEqual(1, getLogger.return_value.info.call_count) |
1371 | + |
1372 | + @mock.patch.object(reporting.handlers.logging, 'getLogger') |
1373 | + def test_log_message_uses_event_as_string(self, getLogger): |
1374 | + event = reporting.ReportingEvent('type', 'name', 'description') |
1375 | + reporting.handlers.LogHandler().publish_event(event) |
1376 | + self.assertIn(event.as_string(), |
1377 | + getLogger.return_value.info.call_args[0][0]) |
1378 | + |
1379 | + |
1380 | +class TestDefaultRegisteredHandler(TestCase): |
1381 | + |
1382 | + def test_log_handler_registered_by_default(self): |
1383 | + registered_items = ( |
1384 | + reporting.instantiated_handler_registry.registered_items) |
1385 | + for _, item in registered_items.items(): |
1386 | + if isinstance(item, reporting.handlers.LogHandler): |
1387 | + break |
1388 | + else: |
1389 | + self.fail('No reporting LogHandler registered by default.') |
1390 | + |
1391 | + |
1392 | +class TestReportingConfiguration(TestCase): |
1393 | + |
1394 | + @mock.patch.object(reporting, 'instantiated_handler_registry') |
1395 | + def test_empty_configuration_doesnt_add_handlers( |
1396 | + self, instantiated_handler_registry): |
1397 | + reporting.update_configuration({}) |
1398 | + self.assertEqual( |
1399 | + 0, instantiated_handler_registry.register_item.call_count) |
1400 | + |
1401 | + @mock.patch.object( |
1402 | + reporting, 'instantiated_handler_registry', reporting.DictRegistry()) |
1403 | + @mock.patch.object(reporting, 'available_handlers') |
1404 | + def test_looks_up_handler_by_type_and_adds_it(self, available_handlers): |
1405 | + handler_type_name = 'test_handler' |
1406 | + handler_cls = mock.Mock() |
1407 | + available_handlers.registered_items = {handler_type_name: handler_cls} |
1408 | + handler_name = 'my_test_handler' |
1409 | + reporting.update_configuration( |
1410 | + {handler_name: {'type': handler_type_name}}) |
1411 | + self.assertEqual( |
1412 | + {handler_name: handler_cls.return_value}, |
1413 | + reporting.instantiated_handler_registry.registered_items) |
1414 | + |
1415 | + @mock.patch.object( |
1416 | + reporting, 'instantiated_handler_registry', reporting.DictRegistry()) |
1417 | + @mock.patch.object(reporting, 'available_handlers') |
1418 | + def test_uses_non_type_parts_of_config_dict_as_kwargs( |
1419 | + self, available_handlers): |
1420 | + handler_type_name = 'test_handler' |
1421 | + handler_cls = mock.Mock() |
1422 | + available_handlers.registered_items = {handler_type_name: handler_cls} |
1423 | + extra_kwargs = {'foo': 'bar', 'bar': 'baz'} |
1424 | + handler_config = extra_kwargs.copy() |
1425 | + handler_config.update({'type': handler_type_name}) |
1426 | + handler_name = 'my_test_handler' |
1427 | + reporting.update_configuration({handler_name: handler_config}) |
1428 | + self.assertEqual( |
1429 | + handler_cls.return_value, |
1430 | + reporting.instantiated_handler_registry.registered_items[ |
1431 | + handler_name]) |
1432 | + self.assertEqual([mock.call(**extra_kwargs)], |
1433 | + handler_cls.call_args_list) |
1434 | + |
1435 | + @mock.patch.object( |
1436 | + reporting, 'instantiated_handler_registry', reporting.DictRegistry()) |
1437 | + @mock.patch.object(reporting, 'available_handlers') |
1438 | + def test_handler_config_not_modified(self, available_handlers): |
1439 | + handler_type_name = 'test_handler' |
1440 | + handler_cls = mock.Mock() |
1441 | + available_handlers.registered_items = {handler_type_name: handler_cls} |
1442 | + handler_config = {'type': handler_type_name, 'foo': 'bar'} |
1443 | + expected_handler_config = handler_config.copy() |
1444 | + reporting.update_configuration({'my_test_handler': handler_config}) |
1445 | + self.assertEqual(expected_handler_config, handler_config) |
1446 | + |
1447 | + @mock.patch.object( |
1448 | + reporting, 'instantiated_handler_registry', reporting.DictRegistry()) |
1449 | + @mock.patch.object(reporting, 'available_handlers') |
1450 | + def test_handlers_removed_if_falseish_specified(self, available_handlers): |
1451 | + handler_type_name = 'test_handler' |
1452 | + handler_cls = mock.Mock() |
1453 | + available_handlers.registered_items = {handler_type_name: handler_cls} |
1454 | + handler_name = 'my_test_handler' |
1455 | + reporting.update_configuration( |
1456 | + {handler_name: {'type': handler_type_name}}) |
1457 | + self.assertEqual( |
1458 | + 1, len(reporting.instantiated_handler_registry.registered_items)) |
1459 | + reporting.update_configuration({handler_name: None}) |
1460 | + self.assertEqual( |
1461 | + 0, len(reporting.instantiated_handler_registry.registered_items)) |
1462 | + |
1463 | + |
1464 | +class TestReportingEventStack(TestCase): |
1465 | + @mock.patch('cloudinit.reporting.report_finish_event') |
1466 | + @mock.patch('cloudinit.reporting.report_start_event') |
1467 | + def test_start_and_finish_success(self, report_start, report_finish): |
1468 | + with reporting.ReportEventStack(name="myname", description="mydesc"): |
1469 | + pass |
1470 | + self.assertEqual( |
1471 | + [mock.call('myname', 'mydesc')], report_start.call_args_list) |
1472 | + self.assertEqual( |
1473 | + [mock.call('myname', 'mydesc', reporting.status.SUCCESS)], |
1474 | + report_finish.call_args_list) |
1475 | + |
1476 | + @mock.patch('cloudinit.reporting.report_finish_event') |
1477 | + @mock.patch('cloudinit.reporting.report_start_event') |
1478 | + def test_finish_exception_defaults_fail(self, report_start, report_finish): |
1479 | + name = "myname" |
1480 | + desc = "mydesc" |
1481 | + try: |
1482 | + with reporting.ReportEventStack(name, description=desc): |
1483 | + raise ValueError("This didnt work") |
1484 | + except ValueError: |
1485 | + pass |
1486 | + self.assertEqual([mock.call(name, desc)], report_start.call_args_list) |
1487 | + self.assertEqual( |
1488 | + [mock.call(name, desc, reporting.status.FAIL)], |
1489 | + report_finish.call_args_list) |
1490 | + |
1491 | + @mock.patch('cloudinit.reporting.report_finish_event') |
1492 | + @mock.patch('cloudinit.reporting.report_start_event') |
1493 | + def test_result_on_exception_used(self, report_start, report_finish): |
1494 | + name = "myname" |
1495 | + desc = "mydesc" |
1496 | + try: |
1497 | + with reporting.ReportEventStack( |
1498 | + name, desc, result_on_exception=reporting.status.WARN): |
1499 | + raise ValueError("This didnt work") |
1500 | + except ValueError: |
1501 | + pass |
1502 | + self.assertEqual([mock.call(name, desc)], report_start.call_args_list) |
1503 | + self.assertEqual( |
1504 | + [mock.call(name, desc, reporting.status.WARN)], |
1505 | + report_finish.call_args_list) |
1506 | + |
1507 | + @mock.patch('cloudinit.reporting.report_start_event') |
1508 | + def test_child_fullname_respects_parent(self, report_start): |
1509 | + parent_name = "topname" |
1510 | + c1_name = "c1name" |
1511 | + c2_name = "c2name" |
1512 | + c2_expected_fullname = '/'.join([parent_name, c1_name, c2_name]) |
1513 | + c1_expected_fullname = '/'.join([parent_name, c1_name]) |
1514 | + |
1515 | + parent = reporting.ReportEventStack(parent_name, "topdesc") |
1516 | + c1 = reporting.ReportEventStack(c1_name, "c1desc", parent=parent) |
1517 | + c2 = reporting.ReportEventStack(c2_name, "c2desc", parent=c1) |
1518 | + with c1: |
1519 | + report_start.assert_called_with(c1_expected_fullname, "c1desc") |
1520 | + with c2: |
1521 | + report_start.assert_called_with(c2_expected_fullname, "c2desc") |
1522 | + |
1523 | + @mock.patch('cloudinit.reporting.report_finish_event') |
1524 | + @mock.patch('cloudinit.reporting.report_start_event') |
1525 | + def test_child_result_bubbles_up(self, report_start, report_finish): |
1526 | + parent = reporting.ReportEventStack("topname", "topdesc") |
1527 | + child = reporting.ReportEventStack("c_name", "c_desc", parent=parent) |
1528 | + with parent: |
1529 | + with child: |
1530 | + child.result = reporting.status.WARN |
1531 | + |
1532 | + report_finish.assert_called_with( |
1533 | + "topname", "topdesc", reporting.status.WARN) |
1534 | + |
1535 | + @mock.patch('cloudinit.reporting.report_finish_event') |
1536 | + def test_message_used_in_finish(self, report_finish): |
1537 | + with reporting.ReportEventStack("myname", "mydesc", |
1538 | + message="mymessage"): |
1539 | + pass |
1540 | + self.assertEqual( |
1541 | + [mock.call("myname", "mymessage", reporting.status.SUCCESS)], |
1542 | + report_finish.call_args_list) |
1543 | + |
1544 | + @mock.patch('cloudinit.reporting.report_finish_event') |
1545 | + def test_message_updatable(self, report_finish): |
1546 | + with reporting.ReportEventStack("myname", "mydesc") as c: |
1547 | + c.message = "all good" |
1548 | + self.assertEqual( |
1549 | + [mock.call("myname", "all good", reporting.status.SUCCESS)], |
1550 | + report_finish.call_args_list) |
1551 | + |
1552 | + @mock.patch('cloudinit.reporting.report_finish_event') |
1553 | + @mock.patch('cloudinit.reporting.report_start_event') |
1554 | + def test_reporting_disabled_does_not_report_events( |
1555 | + self, report_start, report_finish): |
1556 | + with reporting.ReportEventStack("a", "b", reporting_enabled=False): |
1557 | + pass |
1558 | + self.assertEqual(report_start.call_count, 0) |
1559 | + self.assertEqual(report_finish.call_count, 0) |
1560 | + |
1561 | + @mock.patch('cloudinit.reporting.report_finish_event') |
1562 | + @mock.patch('cloudinit.reporting.report_start_event') |
1563 | + def test_reporting_child_default_to_parent( |
1564 | + self, report_start, report_finish): |
1565 | + parent = reporting.ReportEventStack( |
1566 | + "pname", "pdesc", reporting_enabled=False) |
1567 | + child = reporting.ReportEventStack("cname", "cdesc", parent=parent) |
1568 | + with parent: |
1569 | + with child: |
1570 | + pass |
1571 | + pass |
1572 | + self.assertEqual(report_start.call_count, 0) |
1573 | + self.assertEqual(report_finish.call_count, 0) |
1574 | + |
1575 | + def test_reporting_event_has_sane_repr(self): |
1576 | + myrep = reporting.ReportEventStack("fooname", "foodesc", |
1577 | + reporting_enabled=True).__repr__() |
1578 | + self.assertIn("fooname", myrep) |
1579 | + self.assertIn("foodesc", myrep) |
1580 | + self.assertIn("True", myrep) |
1581 | + |
1582 | + def test_set_invalid_result_raises_value_error(self): |
1583 | + f = reporting.ReportEventStack("myname", "mydesc") |
1584 | + self.assertRaises(ValueError, setattr, f, "result", "BOGUS") |
1585 | + |
1586 | + |
1587 | +class TestStatusAccess(TestCase): |
1588 | + def test_invalid_status_access_raises_attribute_error(self): |
1589 | + self.assertRaises(AttributeError, getattr, reporting.status, "BOGUS") |
Documentation on the context manager, and tests as well, please. :)