Merge lp:~smoser/cloud-init/trunk.reporting into lp:~cloud-init-dev/cloud-init/trunk

Proposed by Scott Moser
Status: Merged
Merged at revision: 1134
Proposed branch: lp:~smoser/cloud-init/trunk.reporting
Merge into: lp:~cloud-init-dev/cloud-init/trunk
Diff against target: 1589 lines (+1040/-137)
13 files modified
bin/cloud-init (+44/-8)
cloudinit/cloud.py (+8/-1)
cloudinit/registry.py (+37/-0)
cloudinit/reporting/__init__.py (+241/-0)
cloudinit/reporting/handlers.py (+78/-0)
cloudinit/sources/DataSourceMAAS.py (+42/-104)
cloudinit/sources/__init__.py (+16/-7)
cloudinit/stages.py (+49/-10)
cloudinit/url_helper.py (+135/-5)
cloudinit/util.py (+2/-1)
tests/unittests/test_datasource/test_maas.py (+1/-1)
tests/unittests/test_registry.py (+28/-0)
tests/unittests/test_reporting.py (+359/-0)
To merge this branch: bzr merge lp:~smoser/cloud-init/trunk.reporting
Reviewer Review Type Date Requested Status
cloud-init Commiters Pending
Review via email: mp+266578@code.launchpad.net

Description of the change

This is a backport of the 2.0 reporting code + some changes.

I'm interested in comments on changes to the 2.0 reporting code, and also in the general usage of 'reporting' from 0.7.

The goal is to get whatever changes we find into 2.0 and then back into 0.7.x.
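
For reference, a minimal sketch of how the 'reporting' configuration gets applied with this branch. The handler names and the webhook endpoint below are illustrative only, but the keys mirror what apply_reporting_cfg() passes through to reporting.update_configuration(), and 'log', 'print' and 'webhook' are the handler types registered in cloudinit/reporting/handlers.py:

    from cloudinit import reporting

    # Equivalent to a 'reporting:' section in cloud config being picked up by
    # apply_reporting_cfg(). Handler names ('logging', 'posthook') are arbitrary;
    # 'type' selects the handler class and the remaining keys are passed to its
    # constructor. The endpoint URL is made up for this example.
    reporting.update_configuration({
        'logging': {'type': 'log'},        # keep events going to the cloud-init log
        'posthook': {'type': 'webhook',
                     'endpoint': 'http://example.invalid/events',
                     'timeout': 5,
                     'retries': 2},
        'print': None,                     # a False-ish value unregisters a handler
    })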

1137. By Scott Moser

add nicer formatting and messages for datasource searching

1138. By Scott Moser

merge from trunk

Revision history for this message
Dan Watkins (oddbloke) wrote :

Documentation on the context manager, and tests as well, please. :)
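
(For context, a short sketch of the context manager in question, using only names from the diff below; the event name, description and messages are made up for the example.)

    from cloudinit import reporting

    # Entering the stack emits a 'start' event; leaving it emits a 'finish'
    # event carrying the final result and message.
    with reporting.ReportEventStack(
            name="example-step",
            description="doing some example work") as rep:
        found = False      # stand-in for whatever work the step really does
        if found:
            rep.message = "found what we were looking for"
        else:
            rep.result = reporting.status.WARN
            rep.message = "nothing found, continuing anyway"
    # An unhandled exception inside the block would instead report
    # result_on_exception (status.FAIL by default).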

Revision history for this message
Scott Moser (smoser) wrote :

Thanks. Addressed and pushing now.

1139. By Scott Moser

address Daniel's comments in review

1140. By Scott Moser

move 'mode' out of SearchReportStack

1141. By Scott Moser

adjust searching so cache hits are logged

1142. By Scott Moser

init single: hook up reporter

1143. By Scott Moser

plumb the rest of the reporting through

1144. By Scott Moser

sync with 2.0 trunk on reporting

1145. By Scott Moser

fix tests from sync
change ReportStack to ReportEventStack
change default ReportEventStack to be status.SUCCESS instead of None

1146. By Scott Moser

event name doesn't need mode as it is run through init-local or init-net

1147. By Scott Moser

fix lack of import in cloud.py

1148. By Scott Moser

fix all tests (were broken due to copied code calling userdata twice)

1149. By Scott Moser

sync to 2.0 review @ patchset 4

1150. By Scott Moser

merge from trunk

1151. By Scott Moser

fix pep8

1152. By Scott Moser

sync tests back

1153. By Scott Moser

sync with cloudinit 2.0 for registry and reporting

1154. By Scott Moser

tests pass

1155. By Scott Moser

add the webhook handler

1156. By Scott Moser

hopefully fix DataSourceMAAS

1157. By Scott Moser

seems functional in test

1158. By Scott Moser

improvements on skew

Revision history for this message
Dan Watkins (oddbloke) wrote :

One of my comments suggests changes to the reporting framework itself, so making those changes in 2.0 and re-backporting probably makes most sense.

1159. By Scott Moser

fix syntax

1160. By Scott Moser

catch exception in webhook, adjust logging to use cloud-init logging

1161. By Scott Moser

undo broken logic that attempted to not re-initialize classes

Preview Diff

=== modified file 'bin/cloud-init'
--- bin/cloud-init 2015-07-31 15:20:36 +0000
+++ bin/cloud-init 2015-08-07 15:21:12 +0000
@@ -46,6 +46,7 @@
46from cloudinit import stages46from cloudinit import stages
47from cloudinit import templater47from cloudinit import templater
48from cloudinit import util48from cloudinit import util
49from cloudinit import reporting
49from cloudinit import version50from cloudinit import version
5051
51from cloudinit.settings import (PER_INSTANCE, PER_ALWAYS, PER_ONCE,52from cloudinit.settings import (PER_INSTANCE, PER_ALWAYS, PER_ONCE,
@@ -136,6 +137,11 @@
136 return failures137 return failures
137138
138139
140def apply_reporting_cfg(cfg):
141 if cfg.get('reporting'):
142 reporting.update_configuration(cfg.get('reporting'))
143
144
139def main_init(name, args):145def main_init(name, args):
140 deps = [sources.DEP_FILESYSTEM, sources.DEP_NETWORK]146 deps = [sources.DEP_FILESYSTEM, sources.DEP_NETWORK]
141 if args.local:147 if args.local:
@@ -171,7 +177,7 @@
171 w_msg = welcome_format(name)177 w_msg = welcome_format(name)
172 else:178 else:
173 w_msg = welcome_format("%s-local" % (name))179 w_msg = welcome_format("%s-local" % (name))
174 init = stages.Init(deps)180 init = stages.Init(ds_deps=deps, reporter=args.reporter)
175 # Stage 1181 # Stage 1
176 init.read_cfg(extract_fns(args))182 init.read_cfg(extract_fns(args))
177 # Stage 2183 # Stage 2
@@ -190,6 +196,7 @@
190 " longer be active shortly"))196 " longer be active shortly"))
191 logging.resetLogging()197 logging.resetLogging()
192 logging.setupLogging(init.cfg)198 logging.setupLogging(init.cfg)
199 apply_reporting_cfg(init.cfg)
193200
194 # Any log usage prior to setupLogging above did not have local user log201 # Any log usage prior to setupLogging above did not have local user log
195 # config applied. We send the welcome message now, as stderr/out have202 # config applied. We send the welcome message now, as stderr/out have
@@ -282,8 +289,10 @@
282 util.logexc(LOG, "Consuming user data failed!")289 util.logexc(LOG, "Consuming user data failed!")
283 return (init.datasource, ["Consuming user data failed!"])290 return (init.datasource, ["Consuming user data failed!"])
284291
292 apply_reporting_cfg(init.cfg)
293
285 # Stage 8 - re-read and apply relevant cloud-config to include user-data294 # Stage 8 - re-read and apply relevant cloud-config to include user-data
286 mods = stages.Modules(init, extract_fns(args))295 mods = stages.Modules(init, extract_fns(args), reporter=args.reporter)
287 # Stage 9296 # Stage 9
288 try:297 try:
289 outfmt_orig = outfmt298 outfmt_orig = outfmt
@@ -313,7 +322,7 @@
313 # 5. Run the modules for the given stage name322 # 5. Run the modules for the given stage name
314 # 6. Done!323 # 6. Done!
315 w_msg = welcome_format("%s:%s" % (action_name, name))324 w_msg = welcome_format("%s:%s" % (action_name, name))
316 init = stages.Init(ds_deps=[])325 init = stages.Init(ds_deps=[], reporter=args.reporter)
317 # Stage 1326 # Stage 1
318 init.read_cfg(extract_fns(args))327 init.read_cfg(extract_fns(args))
319 # Stage 2328 # Stage 2
@@ -328,7 +337,7 @@
328 if not args.force:337 if not args.force:
329 return [(msg)]338 return [(msg)]
330 # Stage 3339 # Stage 3
331 mods = stages.Modules(init, extract_fns(args))340 mods = stages.Modules(init, extract_fns(args), reporter=args.reporter)
332 # Stage 4341 # Stage 4
333 try:342 try:
334 LOG.debug("Closing stdin")343 LOG.debug("Closing stdin")
@@ -342,6 +351,7 @@
342 " longer be active shortly"))351 " longer be active shortly"))
343 logging.resetLogging()352 logging.resetLogging()
344 logging.setupLogging(mods.cfg)353 logging.setupLogging(mods.cfg)
354 apply_reporting_cfg(init.cfg)
345355
346 # now that logging is setup and stdout redirected, send welcome356 # now that logging is setup and stdout redirected, send welcome
347 welcome(name, msg=w_msg)357 welcome(name, msg=w_msg)
@@ -366,7 +376,7 @@
366 # 6. Done!376 # 6. Done!
367 mod_name = args.name377 mod_name = args.name
368 w_msg = welcome_format(name)378 w_msg = welcome_format(name)
369 init = stages.Init(ds_deps=[])379 init = stages.Init(ds_deps=[], reporter=args.reporter)
370 # Stage 1380 # Stage 1
371 init.read_cfg(extract_fns(args))381 init.read_cfg(extract_fns(args))
372 # Stage 2382 # Stage 2
@@ -383,7 +393,7 @@
383 if not args.force:393 if not args.force:
384 return 1394 return 1
385 # Stage 3395 # Stage 3
386 mods = stages.Modules(init, extract_fns(args))396 mods = stages.Modules(init, extract_fns(args), reporter=args.reporter)
387 mod_args = args.module_args397 mod_args = args.module_args
388 if mod_args:398 if mod_args:
389 LOG.debug("Using passed in arguments %s", mod_args)399 LOG.debug("Using passed in arguments %s", mod_args)
@@ -404,6 +414,7 @@
404 " longer be active shortly"))414 " longer be active shortly"))
405 logging.resetLogging()415 logging.resetLogging()
406 logging.setupLogging(mods.cfg)416 logging.setupLogging(mods.cfg)
417 apply_reporting_cfg(init.cfg)
407418
408 # now that logging is setup and stdout redirected, send welcome419 # now that logging is setup and stdout redirected, send welcome
409 welcome(name, msg=w_msg)420 welcome(name, msg=w_msg)
@@ -549,6 +560,8 @@
549 ' found (use at your own risk)'),560 ' found (use at your own risk)'),
550 dest='force',561 dest='force',
551 default=False)562 default=False)
563
564 parser.set_defaults(reporter=None)
552 subparsers = parser.add_subparsers()565 subparsers = parser.add_subparsers()
553566
554 # Each action and its sub-options (if any)567 # Each action and its sub-options (if any)
@@ -595,6 +608,9 @@
595 help=("frequency of the module"),608 help=("frequency of the module"),
596 required=False,609 required=False,
597 choices=list(FREQ_SHORT_NAMES.keys()))610 choices=list(FREQ_SHORT_NAMES.keys()))
611 parser_single.add_argument("--report", action="store_true",
612 help="enable reporting",
613 required=False)
598 parser_single.add_argument("module_args", nargs="*",614 parser_single.add_argument("module_args", nargs="*",
599 metavar='argument',615 metavar='argument',
600 help=('any additional arguments to'616 help=('any additional arguments to'
@@ -617,8 +633,28 @@
617 if name in ("modules", "init"):633 if name in ("modules", "init"):
618 functor = status_wrapper634 functor = status_wrapper
619635
620 return util.log_time(logfunc=LOG.debug, msg="cloud-init mode '%s'" % name,636 report_on = True
621 get_uptime=True, func=functor, args=(name, args))637 if name == "init":
638 if args.local:
639 rname, rdesc = ("init-local", "searching for local datasources")
640 else:
641 rname, rdesc = ("init-network",
642 "searching for network datasources")
643 elif name == "modules":
644 rname, rdesc = ("modules-%s" % args.mode,
645 "running modules for %s" % args.mode)
646 elif name == "single":
647 rname, rdesc = ("single/%s" % args.name,
648 "running single module %s" % args.name)
649 report_on = args.report
650
651 reporting.update_configuration({'print': {'type': 'print'}})
652 args.reporter = reporting.ReportEventStack(
653 rname, rdesc, reporting_enabled=report_on)
654 with args.reporter:
655 return util.log_time(
656 logfunc=LOG.debug, msg="cloud-init mode '%s'" % name,
657 get_uptime=True, func=functor, args=(name, args))
622658
623659
624if __name__ == '__main__':660if __name__ == '__main__':
625661
=== modified file 'cloudinit/cloud.py'
--- cloudinit/cloud.py 2012-08-27 17:56:47 +0000
+++ cloudinit/cloud.py 2015-08-07 15:21:12 +0000
@@ -24,6 +24,7 @@
24import os24import os
2525
26from cloudinit import log as logging26from cloudinit import log as logging
27from cloudinit import reporting
2728
28LOG = logging.getLogger(__name__)29LOG = logging.getLogger(__name__)
2930
@@ -40,12 +41,18 @@
4041
4142
42class Cloud(object):43class Cloud(object):
43 def __init__(self, datasource, paths, cfg, distro, runners):44 def __init__(self, datasource, paths, cfg, distro, runners, reporter=None):
44 self.datasource = datasource45 self.datasource = datasource
45 self.paths = paths46 self.paths = paths
46 self.distro = distro47 self.distro = distro
47 self._cfg = cfg48 self._cfg = cfg
48 self._runners = runners49 self._runners = runners
50 if reporter is None:
51 reporter = reporting.ReportEventStack(
52 name="unnamed-cloud-reporter",
53 description="unnamed-cloud-reporter",
54 reporting_enabled=False)
55 self.reporter = reporter
4956
50 # If a 'user' manipulates logging or logging services57 # If a 'user' manipulates logging or logging services
51 # it is typically useful to cause the logging to be58 # it is typically useful to cause the logging to be
5259
=== added file 'cloudinit/registry.py'
--- cloudinit/registry.py 1970-01-01 00:00:00 +0000
+++ cloudinit/registry.py 2015-08-07 15:21:12 +0000
@@ -0,0 +1,37 @@
1# Copyright 2015 Canonical Ltd.
2# This file is part of cloud-init. See LICENCE file for license information.
3#
4# vi: ts=4 expandtab
5import copy
6
7
8class DictRegistry(object):
9 """A simple registry for a mapping of objects."""
10
11 def __init__(self):
12 self.reset()
13
14 def reset(self):
15 self._items = {}
16
17 def register_item(self, key, item):
18 """Add item to the registry."""
19 if key in self._items:
20 raise ValueError(
21 'Item already registered with key {0}'.format(key))
22 self._items[key] = item
23
24 def unregister_item(self, key, force=True):
25 """Remove item from the registry."""
26 if key in self._items:
27 del self._items[key]
28 elif not force:
29 raise KeyError("%s: key not present to unregister" % key)
30
31 @property
32 def registered_items(self):
33 """All the items that have been registered.
34
35 This cannot be used to modify the contents of the registry.
36 """
37 return copy.copy(self._items)
038
=== added directory 'cloudinit/reporting'
=== added file 'cloudinit/reporting/__init__.py'
--- cloudinit/reporting/__init__.py 1970-01-01 00:00:00 +0000
+++ cloudinit/reporting/__init__.py 2015-08-07 15:21:12 +0000
@@ -0,0 +1,241 @@
1# Copyright 2015 Canonical Ltd.
2# This file is part of cloud-init. See LICENCE file for license information.
3#
4# vi: ts=4 expandtab
5"""
6cloud-init reporting framework
7
8The reporting framework is intended to allow all parts of cloud-init to
9report events in a structured manner.
10"""
11
12from ..registry import DictRegistry
13from ..reporting.handlers import available_handlers
14
15
16FINISH_EVENT_TYPE = 'finish'
17START_EVENT_TYPE = 'start'
18
19DEFAULT_CONFIG = {
20 'logging': {'type': 'log'},
21 'print': {'type': 'print'},
22}
23
24
25class _nameset(set):
26 def __getattr__(self, name):
27 if name in self:
28 return name
29 raise AttributeError("%s not a valid value" % name)
30
31
32status = _nameset(("SUCCESS", "WARN", "FAIL"))
33
34
35class ReportingEvent(object):
36 """Encapsulation of event formatting."""
37
38 def __init__(self, event_type, name, description):
39 self.event_type = event_type
40 self.name = name
41 self.description = description
42
43 def as_string(self):
44 """The event represented as a string."""
45 return '{0}: {1}: {2}'.format(
46 self.event_type, self.name, self.description)
47
48 def as_dict(self):
49 """The event represented as a dictionary."""
50 return {'name': self.name, 'description': self.description,
51 'event_type': self.event_type}
52
53
54class FinishReportingEvent(ReportingEvent):
55
56 def __init__(self, name, description, result=status.SUCCESS):
57 super(FinishReportingEvent, self).__init__(
58 FINISH_EVENT_TYPE, name, description)
59 self.result = result
60 if result not in status:
61 raise ValueError("Invalid result: %s" % result)
62
63 def as_string(self):
64 return '{0}: {1}: {2}: {3}'.format(
65 self.event_type, self.name, self.result, self.description)
66
67 def as_dict(self):
68 """The event represented as json friendly."""
69 data = super(FinishReportingEvent, self).as_dict()
70 data['result'] = self.result
71 return data
72
73
74def update_configuration(config):
75 """Update the instanciated_handler_registry.
76
77 :param config:
78 The dictionary containing changes to apply. If a key is given
79 with a False-ish value, the registered handler matching that name
80 will be unregistered.
81 """
82 for handler_name, handler_config in config.items():
83 if not handler_config:
84 instantiated_handler_registry.unregister_item(
85 handler_name, force=True)
86 continue
87 registered = instantiated_handler_registry.registered_items
88 handler_config = handler_config.copy()
89 cls = available_handlers.registered_items[handler_config.pop('type')]
90 instantiated_handler_registry.unregister_item(handler_name)
91 instance = cls(**handler_config)
92 instantiated_handler_registry.register_item(handler_name, instance)
93
94
95def report_event(event):
96 """Report an event to all registered event handlers.
97
98 This should generally be called via one of the other functions in
99 the reporting module.
100
101 :param event_type:
102 The type of the event; this should be a constant from the
103 reporting module.
104 """
105 for _, handler in instantiated_handler_registry.registered_items.items():
106 handler.publish_event(event)
107
108
109def report_finish_event(event_name, event_description,
110 result=status.SUCCESS):
111 """Report a "finish" event.
112
113 See :py:func:`.report_event` for parameter details.
114 """
115 event = FinishReportingEvent(event_name, event_description, result)
116 return report_event(event)
117
118
119def report_start_event(event_name, event_description):
120 """Report a "start" event.
121
122 :param event_name:
123 The name of the event; this should be a topic which events would
124 share (e.g. it will be the same for start and finish events).
125
126 :param event_description:
127 A human-readable description of the event that has occurred.
128 """
129 event = ReportingEvent(START_EVENT_TYPE, event_name, event_description)
130 return report_event(event)
131
132
133class ReportEventStack(object):
134 """Context Manager for using :py:func:`report_event`
135
136 This enables calling :py:func:`report_start_event` and
137 :py:func:`report_finish_event` through a context manager.
138
139 :param name:
140 the name of the event
141
142 :param description:
143 the event's description, passed on to :py:func:`report_start_event`
144
145 :param message:
146 the description to use for the finish event. defaults to
147 :param:description.
148
149 :param parent:
150 :type parent: :py:class:ReportEventStack or None
151 The parent of this event. The parent is populated with
152 results of all its children. The name used in reporting
153 is <parent.name>/<name>
154
155 :param reporting_enabled:
156 Indicates if reporting events should be generated.
157 If not provided, defaults to the parent's value, or True if no parent
158 is provided.
159
160 :param result_on_exception:
161 The result value to set if an exception is caught. default
162 value is FAIL.
163 """
164 def __init__(self, name, description, message=None, parent=None,
165 reporting_enabled=None, result_on_exception=status.FAIL):
166 self.parent = parent
167 self.name = name
168 self.description = description
169 self.message = message
170 self.result_on_exception = result_on_exception
171 self.result = status.SUCCESS
172
173 # use parents reporting value if not provided
174 if reporting_enabled is None:
175 if parent:
176 reporting_enabled = parent.reporting_enabled
177 else:
178 reporting_enabled = True
179 self.reporting_enabled = reporting_enabled
180
181 if parent:
182 self.fullname = '/'.join((parent.fullname, name,))
183 else:
184 self.fullname = self.name
185 self.children = {}
186
187 def __repr__(self):
188 return ("ReportEventStack(%s, %s, reporting_enabled=%s)" %
189 (self.name, self.description, self.reporting_enabled))
190
191 def __enter__(self):
192 self.result = status.SUCCESS
193 if self.reporting_enabled:
194 report_start_event(self.fullname, self.description)
195 if self.parent:
196 self.parent.children[self.name] = (None, None)
197 return self
198
199 def _childrens_finish_info(self):
200 for cand_result in (status.FAIL, status.WARN):
201 for name, (value, msg) in self.children.items():
202 if value == cand_result:
203 return (value, self.message)
204 return (self.result, self.message)
205
206 @property
207 def result(self):
208 return self._result
209
210 @result.setter
211 def result(self, value):
212 if value not in status:
213 raise ValueError("'%s' not a valid result" % value)
214 self._result = value
215
216 @property
217 def message(self):
218 if self._message is not None:
219 return self._message
220 return self.description
221
222 @message.setter
223 def message(self, value):
224 self._message = value
225
226 def _finish_info(self, exc):
227 # return tuple of description, and value
228 if exc:
229 return (self.result_on_exception, self.message)
230 return self._childrens_finish_info()
231
232 def __exit__(self, exc_type, exc_value, traceback):
233 (result, msg) = self._finish_info(exc_value)
234 if self.parent:
235 self.parent.children[self.name] = (result, msg)
236 if self.reporting_enabled:
237 report_finish_event(self.fullname, msg, result)
238
239
240instantiated_handler_registry = DictRegistry()
241update_configuration(DEFAULT_CONFIG)
0242
=== added file 'cloudinit/reporting/handlers.py'
--- cloudinit/reporting/handlers.py 1970-01-01 00:00:00 +0000
+++ cloudinit/reporting/handlers.py 2015-08-07 15:21:12 +0000
@@ -0,0 +1,78 @@
1# vi: ts=4 expandtab
2
3import abc
4import oauthlib.oauth1 as oauth1
5import six
6
7from ..registry import DictRegistry
8from .. import (url_helper, util)
9from .. import log as logging
10
11
12LOG = logging.getLogger(__name__)
13
14
15@six.add_metaclass(abc.ABCMeta)
16class ReportingHandler(object):
17 """Base class for report handlers.
18
19 Implement :meth:`~publish_event` for controlling what
20 the handler does with an event.
21 """
22
23 @abc.abstractmethod
24 def publish_event(self, event):
25 """Publish an event to the ``INFO`` log level."""
26
27
28class LogHandler(ReportingHandler):
29 """Publishes events to the cloud-init log at the ``INFO`` log level."""
30
31 def publish_event(self, event):
32 """Publish an event to the ``INFO`` log level."""
33 logger = logging.getLogger(
34 '.'.join(['cloudinit', 'reporting', event.event_type, event.name]))
35 logger.info(event.as_string())
36
37
38class PrintHandler(ReportingHandler):
39 def publish_event(self, event):
40 """Publish an event to the ``INFO`` log level."""
41 print(event.as_string())
42
43
44class WebHookHandler(ReportingHandler):
45 def __init__(self, endpoint, consumer_key=None, token_key=None,
46 token_secret=None, consumer_secret=None, timeout=None,
47 retries=None):
48 super(WebHookHandler, self).__init__()
49
50 if any([consumer_key, token_key, token_secret, consumer_secret]):
51 self.oauth_helper = url_helper.OauthUrlHelper(
52 consumer_key=consumer_key, token_key=token_key,
53 token_secret=token_secret, consumer_secret=consumer_secret)
54 else:
55 self.oauth_helper = None
56 self.endpoint = endpoint
57 self.timeout = timeout
58 self.retries = retries
59 self.ssl_details = util.fetch_ssl_details()
60
61 def publish_event(self, event):
62 if self.oauth_helper:
63 readurl = self.oauth_helper.readurl
64 else:
65 readurl = url_helper.readurl
66 try:
67 return readurl(
68 self.endpoint, data=event.as_dict(),
69 timeout=self.timeout,
70 retries=self.retries, ssl_details=self.ssl_details)
71 except:
72 LOG.warn("failed posting event: %s" % event.as_string())
73
74
75available_handlers = DictRegistry()
76available_handlers.register_item('log', LogHandler)
77available_handlers.register_item('print', PrintHandler)
78available_handlers.register_item('webhook', WebHookHandler)
079
=== modified file 'cloudinit/sources/DataSourceMAAS.py'
--- cloudinit/sources/DataSourceMAAS.py 2015-03-10 21:04:59 +0000
+++ cloudinit/sources/DataSourceMAAS.py 2015-08-07 15:21:12 +0000
@@ -52,7 +52,20 @@
52 sources.DataSource.__init__(self, sys_cfg, distro, paths)52 sources.DataSource.__init__(self, sys_cfg, distro, paths)
53 self.base_url = None53 self.base_url = None
54 self.seed_dir = os.path.join(paths.seed_dir, 'maas')54 self.seed_dir = os.path.join(paths.seed_dir, 'maas')
55 self.oauth_clockskew = None55 self.oauth_helper = self._get_helper()
56
57 def _get_helper(self):
58 mcfg = self.ds_cfg
59 # If we are missing token_key, token_secret or consumer_key
60 # then just do non-authed requests
61 for required in ('token_key', 'token_secret', 'consumer_key'):
62 if required not in mcfg:
63 return url_helper.OauthUrlHelper()
64
65 return url_helper.OauthHelper(
66 consumer_key=mcfg['consumer_key'], token_key=mcfg['token_key'],
67 token_secret=mcfg['token_secret'],
68 consumer_secret=mcfg.get('consumer_secret'))
5669
57 def __str__(self):70 def __str__(self):
58 root = sources.DataSource.__str__(self)71 root = sources.DataSource.__str__(self)
@@ -84,9 +97,9 @@
8497
85 self.base_url = url98 self.base_url = url
8699
87 (userdata, metadata) = read_maas_seed_url(self.base_url,100 (userdata, metadata) = read_maas_seed_url(
88 self._md_headers,101 self.base_url, self.oauth_helper.md_headers,
89 paths=self.paths)102 paths=self.paths)
90 self.userdata_raw = userdata103 self.userdata_raw = userdata
91 self.metadata = metadata104 self.metadata = metadata
92 return True105 return True
@@ -94,31 +107,8 @@
94 util.logexc(LOG, "Failed fetching metadata from url %s", url)107 util.logexc(LOG, "Failed fetching metadata from url %s", url)
95 return False108 return False
96109
97 def _md_headers(self, url):
98 mcfg = self.ds_cfg
99
100 # If we are missing token_key, token_secret or consumer_key
101 # then just do non-authed requests
102 for required in ('token_key', 'token_secret', 'consumer_key'):
103 if required not in mcfg:
104 return {}
105
106 consumer_secret = mcfg.get('consumer_secret', "")
107
108 timestamp = None
109 if self.oauth_clockskew:
110 timestamp = int(time.time()) + self.oauth_clockskew
111
112 return oauth_headers(url=url,
113 consumer_key=mcfg['consumer_key'],
114 token_key=mcfg['token_key'],
115 token_secret=mcfg['token_secret'],
116 consumer_secret=consumer_secret,
117 timestamp=timestamp)
118
119 def wait_for_metadata_service(self, url):110 def wait_for_metadata_service(self, url):
120 mcfg = self.ds_cfg111 mcfg = self.ds_cfg
121
122 max_wait = 120112 max_wait = 120
123 try:113 try:
124 max_wait = int(mcfg.get("max_wait", max_wait))114 max_wait = int(mcfg.get("max_wait", max_wait))
@@ -138,10 +128,8 @@
138 starttime = time.time()128 starttime = time.time()
139 check_url = "%s/%s/meta-data/instance-id" % (url, MD_VERSION)129 check_url = "%s/%s/meta-data/instance-id" % (url, MD_VERSION)
140 urls = [check_url]130 urls = [check_url]
141 url = url_helper.wait_for_url(urls=urls, max_wait=max_wait,131 url = self.oauth_helper.wait_for_url(
142 timeout=timeout,132 urls=urls, max_wait=max_wait, timeout=timeout)
143 exception_cb=self._except_cb,
144 headers_cb=self._md_headers)
145133
146 if url:134 if url:
147 LOG.debug("Using metadata source: '%s'", url)135 LOG.debug("Using metadata source: '%s'", url)
@@ -151,26 +139,6 @@
151139
152 return bool(url)140 return bool(url)
153141
154 def _except_cb(self, msg, exception):
155 if not (isinstance(exception, url_helper.UrlError) and
156 (exception.code == 403 or exception.code == 401)):
157 return
158
159 if 'date' not in exception.headers:
160 LOG.warn("Missing header 'date' in %s response", exception.code)
161 return
162
163 date = exception.headers['date']
164 try:
165 ret_time = time.mktime(parsedate(date))
166 except Exception as e:
167 LOG.warn("Failed to convert datetime '%s': %s", date, e)
168 return
169
170 self.oauth_clockskew = int(ret_time - time.time())
171 LOG.warn("Setting oauth clockskew to %d", self.oauth_clockskew)
172 return
173
174142
175def read_maas_seed_dir(seed_d):143def read_maas_seed_dir(seed_d):
176 """144 """
@@ -196,12 +164,12 @@
196 return check_seed_contents(md, seed_d)164 return check_seed_contents(md, seed_d)
197165
198166
199def read_maas_seed_url(seed_url, header_cb=None, timeout=None,167def read_maas_seed_url(seed_url, read_file_or_url=None, timeout=None,
200 version=MD_VERSION, paths=None):168 version=MD_VERSION, paths=None):
201 """169 """
202 Read the maas datasource at seed_url.170 Read the maas datasource at seed_url.
203 - header_cb is a method that should return a headers dictionary for171 read_file_or_url is a method that should provide an interface
204 a given url172 like util.read_file_or_url
205173
206 Expected format of seed_url is are the following files:174 Expected format of seed_url is are the following files:
207 * <seed_url>/<version>/meta-data/instance-id175 * <seed_url>/<version>/meta-data/instance-id
@@ -222,14 +190,12 @@
222 'user-data': "%s/%s" % (base_url, 'user-data'),190 'user-data': "%s/%s" % (base_url, 'user-data'),
223 }191 }
224192
193 if read_file_or_url is None:
194 read_file_or_url = util.read_file_or_url
195
225 md = {}196 md = {}
226 for name in file_order:197 for name in file_order:
227 url = files.get(name)198 url = files.get(name)
228 if not header_cb:
229 def _cb(url):
230 return {}
231 header_cb = _cb
232
233 if name == 'user-data':199 if name == 'user-data':
234 retries = 0200 retries = 0
235 else:201 else:
@@ -237,10 +203,8 @@
237203
238 try:204 try:
239 ssl_details = util.fetch_ssl_details(paths)205 ssl_details = util.fetch_ssl_details(paths)
240 resp = util.read_file_or_url(url, retries=retries,206 resp = read_file_or_url(url, retries=retries,
241 headers_cb=header_cb,207 timeout=timeout, ssl_details=ssl_details)
242 timeout=timeout,
243 ssl_details=ssl_details)
244 if resp.ok():208 if resp.ok():
245 if name in BINARY_FIELDS:209 if name in BINARY_FIELDS:
246 md[name] = resp.contents210 md[name] = resp.contents
@@ -280,24 +244,6 @@
280 return (userdata, md)244 return (userdata, md)
281245
282246
283def oauth_headers(url, consumer_key, token_key, token_secret, consumer_secret,
284 timestamp=None):
285 if timestamp:
286 timestamp = str(timestamp)
287 else:
288 timestamp = None
289
290 client = oauth1.Client(
291 consumer_key,
292 client_secret=consumer_secret,
293 resource_owner_key=token_key,
294 resource_owner_secret=token_secret,
295 signature_method=oauth1.SIGNATURE_PLAINTEXT,
296 timestamp=timestamp)
297 uri, signed_headers, body = client.sign(url)
298 return signed_headers
299
300
301class MAASSeedDirNone(Exception):247class MAASSeedDirNone(Exception):
302 pass248 pass
303249
@@ -361,47 +307,39 @@
361 if key in cfg and creds[key] is None:307 if key in cfg and creds[key] is None:
362 creds[key] = cfg[key]308 creds[key] = cfg[key]
363309
364 def geturl(url, headers_cb):310 oauth_helper = url_helper.OauthUrlHelper(**creds)
365 req = Request(url, data=None, headers=headers_cb(url))311
366 return urlopen(req).read()312 def geturl(url):
313 return oauth_helper.readurl(url).contents
367314
368 def printurl(url, headers_cb):315 def printurl(url, headers_cb):
369 print("== %s ==\n%s\n" % (url, geturl(url, headers_cb)))316 print("== %s ==\n%s\n" % (url, geturl(url)))
370317
371 def crawl(url, headers_cb=None):318 def crawl(url):
372 if url.endswith("/"):319 if url.endswith("/"):
373 for line in geturl(url, headers_cb).splitlines():320 for line in geturl(url).splitlines():
374 if line.endswith("/"):321 if line.endswith("/"):
375 crawl("%s%s" % (url, line), headers_cb)322 crawl("%s%s" % (url, line))
376 else:323 else:
377 printurl("%s%s" % (url, line), headers_cb)324 printurl("%s%s" % (url, line))
378 else:325 else:
379 printurl(url, headers_cb)326 printurl(url)
380
381 def my_headers(url):
382 headers = {}
383 if creds.get('consumer_key', None) is not None:
384 headers = oauth_headers(url, **creds)
385 return headers
386327
387 if args.subcmd == "check-seed":328 if args.subcmd == "check-seed":
388 if args.url.startswith("http"):329 (userdata, metadata) = read_maas_seed_url(
389 (userdata, metadata) = read_maas_seed_url(args.url,330 args.url, read_file_or_url=oauth_helper.read_file_or_url,
390 header_cb=my_headers,331 version=args.apiver)
391 version=args.apiver)
392 else:
393 (userdata, metadata) = read_maas_seed_url(args.url)
394 print("=== userdata ===")332 print("=== userdata ===")
395 print(userdata)333 print(userdata)
396 print("=== metadata ===")334 print("=== metadata ===")
397 pprint.pprint(metadata)335 pprint.pprint(metadata)
398336
399 elif args.subcmd == "get":337 elif args.subcmd == "get":
400 printurl(args.url, my_headers)338 printurl(args.url)
401339
402 elif args.subcmd == "crawl":340 elif args.subcmd == "crawl":
403 if not args.url.endswith("/"):341 if not args.url.endswith("/"):
404 args.url = "%s/" % args.url342 args.url = "%s/" % args.url
405 crawl(args.url, my_headers)343 crawl(args.url)
406344
407 main()345 main()
408346
=== modified file 'cloudinit/sources/__init__.py'
--- cloudinit/sources/__init__.py 2015-07-22 12:06:34 +0000
+++ cloudinit/sources/__init__.py 2015-08-07 15:21:12 +0000
@@ -27,6 +27,7 @@
2727
28from cloudinit import importer28from cloudinit import importer
29from cloudinit import log as logging29from cloudinit import log as logging
30from cloudinit import reporting
30from cloudinit import type_utils31from cloudinit import type_utils
31from cloudinit import user_data as ud32from cloudinit import user_data as ud
32from cloudinit import util33from cloudinit import util
@@ -246,17 +247,25 @@
246 return keys247 return keys
247248
248249
249def find_source(sys_cfg, distro, paths, ds_deps, cfg_list, pkg_list):250def find_source(sys_cfg, distro, paths, ds_deps, cfg_list, pkg_list, reporter):
250 ds_list = list_sources(cfg_list, ds_deps, pkg_list)251 ds_list = list_sources(cfg_list, ds_deps, pkg_list)
251 ds_names = [type_utils.obj_name(f) for f in ds_list]252 ds_names = [type_utils.obj_name(f) for f in ds_list]
252 LOG.debug("Searching for data source in: %s", ds_names)253 mode = "network" if DEP_NETWORK in ds_deps else "local"
254 LOG.debug("Searching for %s data source in: %s", mode, ds_names)
253255
254 for cls in ds_list:256 for name, cls in zip(ds_names, ds_list):
257 myrep = reporting.ReportEventStack(
258 name="search-%s" % name.replace("DataSource", ""),
259 description="searching for %s data from %s" % (mode, name),
260 message="no %s data found from %s" % (mode, name),
261 parent=reporter)
255 try:262 try:
256 LOG.debug("Seeing if we can get any data from %s", cls)263 with myrep:
257 s = cls(sys_cfg, distro, paths)264 LOG.debug("Seeing if we can get any data from %s", cls)
258 if s.get_data():265 s = cls(sys_cfg, distro, paths)
259 return (s, type_utils.obj_name(cls))266 if s.get_data():
267 myrep.message = "found %s data from %s" % (mode, name)
268 return (s, type_utils.obj_name(cls))
260 except Exception:269 except Exception:
261 util.logexc(LOG, "Getting data from %s failed", cls)270 util.logexc(LOG, "Getting data from %s failed", cls)
262271
263272
=== modified file 'cloudinit/stages.py'
--- cloudinit/stages.py 2015-03-04 17:42:34 +0000
+++ cloudinit/stages.py 2015-08-07 15:21:12 +0000
@@ -46,6 +46,7 @@
46from cloudinit import sources46from cloudinit import sources
47from cloudinit import type_utils47from cloudinit import type_utils
48from cloudinit import util48from cloudinit import util
49from cloudinit import reporting
4950
50LOG = logging.getLogger(__name__)51LOG = logging.getLogger(__name__)
5152
@@ -53,7 +54,7 @@
5354
5455
55class Init(object):56class Init(object):
56 def __init__(self, ds_deps=None):57 def __init__(self, ds_deps=None, reporter=None):
57 if ds_deps is not None:58 if ds_deps is not None:
58 self.ds_deps = ds_deps59 self.ds_deps = ds_deps
59 else:60 else:
@@ -65,6 +66,12 @@
65 # Changed only when a fetch occurs66 # Changed only when a fetch occurs
66 self.datasource = NULL_DATA_SOURCE67 self.datasource = NULL_DATA_SOURCE
6768
69 if reporter is None:
70 reporter = reporting.ReportEventStack(
71 name="init-reporter", description="init-desc",
72 reporting_enabled=False)
73 self.reporter = reporter
74
68 def _reset(self, reset_ds=False):75 def _reset(self, reset_ds=False):
69 # Recreated on access76 # Recreated on access
70 self._cfg = None77 self._cfg = None
@@ -234,9 +241,17 @@
234 def _get_data_source(self):241 def _get_data_source(self):
235 if self.datasource is not NULL_DATA_SOURCE:242 if self.datasource is not NULL_DATA_SOURCE:
236 return self.datasource243 return self.datasource
237 ds = self._restore_from_cache()244
238 if ds:245 with reporting.ReportEventStack(
239 LOG.debug("Restored from cache, datasource: %s", ds)246 name="check-cache",
247 description="attempting to read from cache",
248 parent=self.reporter) as myrep:
249 ds = self._restore_from_cache()
250 if ds:
251 LOG.debug("Restored from cache, datasource: %s", ds)
252 myrep.description = "restored from cache"
253 else:
254 myrep.description = "no cache found"
240 if not ds:255 if not ds:
241 (cfg_list, pkg_list) = self._get_datasources()256 (cfg_list, pkg_list) = self._get_datasources()
242 # Deep copy so that user-data handlers can not modify257 # Deep copy so that user-data handlers can not modify
@@ -246,7 +261,7 @@
246 self.paths,261 self.paths,
247 copy.deepcopy(self.ds_deps),262 copy.deepcopy(self.ds_deps),
248 cfg_list,263 cfg_list,
249 pkg_list)264 pkg_list, self.reporter)
250 LOG.info("Loaded datasource %s - %s", dsname, ds)265 LOG.info("Loaded datasource %s - %s", dsname, ds)
251 self.datasource = ds266 self.datasource = ds
252 # Ensure we adjust our path members datasource267 # Ensure we adjust our path members datasource
@@ -327,7 +342,8 @@
327 # Form the needed options to cloudify our members342 # Form the needed options to cloudify our members
328 return cloud.Cloud(self.datasource,343 return cloud.Cloud(self.datasource,
329 self.paths, self.cfg,344 self.paths, self.cfg,
330 self.distro, helpers.Runners(self.paths))345 self.distro, helpers.Runners(self.paths),
346 reporter=self.reporter)
331347
332 def update(self):348 def update(self):
333 if not self._write_to_cache():349 if not self._write_to_cache():
@@ -493,8 +509,14 @@
493 def consume_data(self, frequency=PER_INSTANCE):509 def consume_data(self, frequency=PER_INSTANCE):
494 # Consume the userdata first, because we need want to let the part510 # Consume the userdata first, because we need want to let the part
495 # handlers run first (for merging stuff)511 # handlers run first (for merging stuff)
496 self._consume_userdata(frequency)512 with reporting.ReportEventStack(
497 self._consume_vendordata(frequency)513 "consume-user-data", "reading and applying user-data",
514 parent=self.reporter):
515 self._consume_userdata(frequency)
516 with reporting.ReportEventStack(
517 "consume-vendor-data", "reading and applying vendor-data",
518 parent=self.reporter):
519 self._consume_vendordata(frequency)
498520
499 # Perform post-consumption adjustments so that521 # Perform post-consumption adjustments so that
500 # modules that run during the init stage reflect522 # modules that run during the init stage reflect
@@ -567,11 +589,16 @@
567589
568590
569class Modules(object):591class Modules(object):
570 def __init__(self, init, cfg_files=None):592 def __init__(self, init, cfg_files=None, reporter=None):
571 self.init = init593 self.init = init
572 self.cfg_files = cfg_files594 self.cfg_files = cfg_files
573 # Created on first use595 # Created on first use
574 self._cached_cfg = None596 self._cached_cfg = None
597 if reporter is None:
598 reporter = reporting.ReportEventStack(
599 name="module-reporter", description="module-desc",
600 reporting_enabled=False)
601 self.reporter = reporter
575602
576 @property603 @property
577 def cfg(self):604 def cfg(self):
@@ -681,7 +708,19 @@
681 which_ran.append(name)708 which_ran.append(name)
682 # This name will affect the semaphore name created709 # This name will affect the semaphore name created
683 run_name = "config-%s" % (name)710 run_name = "config-%s" % (name)
684 cc.run(run_name, mod.handle, func_args, freq=freq)711
712 desc = "running %s with frequency %s" % (run_name, freq)
713 myrep = reporting.ReportEventStack(
714 name=run_name, description=desc, parent=self.reporter)
715
716 with myrep:
717 ran, _r = cc.run(run_name, mod.handle, func_args,
718 freq=freq)
719 if ran:
720 myrep.message = "%s ran successfully" % run_name
721 else:
722 myrep.message = "%s previously ran" % run_name
723
685 except Exception as e:724 except Exception as e:
686 util.logexc(LOG, "Running module %s (%s) failed", name, mod)725 util.logexc(LOG, "Running module %s (%s) failed", name, mod)
687 failures.append((name, e))726 failures.append((name, e))
688727
=== modified file 'cloudinit/url_helper.py'
--- cloudinit/url_helper.py 2015-03-02 20:48:42 +0000
+++ cloudinit/url_helper.py 2015-08-07 15:21:12 +0000
@@ -25,6 +25,10 @@
25import six25import six
2626
27import requests27import requests
28import oauthlib.oauth1 as oauth1
29import os
30import json
31from functools import partial
28from requests import exceptions32from requests import exceptions
2933
30from six.moves.urllib.parse import (34from six.moves.urllib.parse import (
@@ -147,13 +151,14 @@
147151
148152
149class UrlError(IOError):153class UrlError(IOError):
150 def __init__(self, cause, code=None, headers=None):154 def __init__(self, cause, code=None, headers=None, url=None):
151 IOError.__init__(self, str(cause))155 IOError.__init__(self, str(cause))
152 self.cause = cause156 self.cause = cause
153 self.code = code157 self.code = code
154 self.headers = headers158 self.headers = headers
155 if self.headers is None:159 if self.headers is None:
156 self.headers = {}160 self.headers = {}
161 self.url = url
157162
158163
159def _get_ssl_args(url, ssl_details):164def _get_ssl_args(url, ssl_details):
@@ -247,9 +252,10 @@
247 and hasattr(e, 'response') # This appeared in v 0.10.8252 and hasattr(e, 'response') # This appeared in v 0.10.8
248 and hasattr(e.response, 'status_code')):253 and hasattr(e.response, 'status_code')):
249 excps.append(UrlError(e, code=e.response.status_code,254 excps.append(UrlError(e, code=e.response.status_code,
250 headers=e.response.headers))255 headers=e.response.headers,
256 url=url))
251 else:257 else:
252 excps.append(UrlError(e))258 excps.append(UrlError(e, url=url))
253 if SSL_ENABLED and isinstance(e, exceptions.SSLError):259 if SSL_ENABLED and isinstance(e, exceptions.SSLError):
254 # ssl exceptions are not going to get fixed by waiting a260 # ssl exceptions are not going to get fixed by waiting a
255 # few seconds261 # few seconds
@@ -333,11 +339,11 @@
333 if not response.contents:339 if not response.contents:
334 reason = "empty response [%s]" % (response.code)340 reason = "empty response [%s]" % (response.code)
335 url_exc = UrlError(ValueError(reason), code=response.code,341 url_exc = UrlError(ValueError(reason), code=response.code,
336 headers=response.headers)342 headers=response.headers, url=url)
337 elif not response.ok():343 elif not response.ok():
338 reason = "bad status code [%s]" % (response.code)344 reason = "bad status code [%s]" % (response.code)
339 url_exc = UrlError(ValueError(reason), code=response.code,345 url_exc = UrlError(ValueError(reason), code=response.code,
340 headers=response.headers)346 headers=response.headers, url=url)
341 else:347 else:
342 return url348 return url
343 except UrlError as e:349 except UrlError as e:
@@ -368,3 +374,127 @@
368 time.sleep(sleep_time)374 time.sleep(sleep_time)
369375
370 return False376 return False
377
378
379class OauthUrlHelper(object):
380 def __init__(self, consumer_key=None, token_key=None,
381 token_secret=None, consumer_secret=None,
382 skew_data_file="/run/oauth_skew.json"):
383 self.consumer_key = consumer_key
384 self.consumer_secret = consumer_secret or ""
385 self.token_key = token_key
386 self.token_secret = token_secret
387 self.skew_data_file = skew_data_file
388 self._do_oauth = True
389 self.skew_change_limit = 5
390 required = (self.token_key, self.token_secret, self.consumer_key)
391 if not any(required):
392 self._do_oauth = False
393 elif not all(required):
394 raise ValueError("all or none of token_key, token_secret, or "
395 "consumer_key can be set")
396
397 old = self.read_skew_file()
398 self.skew_data = old or {}
399
400 def read_skew_file(self):
401 if self.skew_data_file and os.path.isfile(self.skew_data_file):
402 with open(self.skew_data_file, mode="r") as fp:
403 return json.load(fp.read())
404 return None
405
406 def update_skew_file(self, host, value):
407 # this is not atomic
408 if not self.skew_data_file:
409 return
410 cur = self.read_skew_file()
411 cur[host] = value
412 with open(self.skew_data_file, mode="w") as fp:
413 fp.write(json.dumps(cur))
414
415 def exception_cb(self, msg, exception):
416 if not (isinstance(exception, UrlError) and
417 (exception.code == 403 or exception.code == 401)):
418 return
419
420 if 'date' not in exception.headers:
421 LOG.warn("Missing header 'date' in %s response", exception.code)
422 return
423
424 date = exception.headers['date']
425 try:
426 remote_time = time.mktime(parsedate(date))
427 except Exception as e:
428 LOG.warn("Failed to convert datetime '%s': %s", date, e)
429 return
430
431 skew = int(remote_time - time.time())
432 host = urlparse(exception.url).netloc
433 old_skew = self.skew_data.get(host, 0)
434 if abs(old_skew - skew) > self.skew_change_limit:
435 self.update_skew_file(host, skew)
436 LOG.warn("Setting oauth clockskew for %s to %d", host, skew)
437 skew_data[host] = skew
438
439 return
440
441 def headers_cb(self, url):
442 if not self._do_oauth:
443 return {}
444
445 timestamp = None
446 host = urlparse(url).netloc
447 if self.skew_data and host in self.skew_data:
448 timestamp = int(time.time()) + self.skew_data[host]
449
450 return oauth_headers(
451 url=url, consumer_key=self.consumer_key,
452 token_key=self.token_key, token_secret=self.token_secret,
453 consumer_secret=self.consumer_secret, timestamp=timestamp)
454
455 def _wrapped(self, wrapped_func, args, kwargs):
456 kwargs['headers_cb'] = partial(
457 self._headers_cb, kwargs.get('headers_cb'))
458 kwargs['exception_cb'] = partial(
459 self._exception_cb, kwargs.get('exception_cb'))
460 return wrapped_func(*args, **kwargs)
461
462 def wait_for_url(self, *args, **kwargs):
463 return self._wrapped(wait_for_url, args, kwargs)
464
465 def readurl(self, *args, **kwargs):
466 return self._wrapped(readurl, args, kwargs)
467
468 def _exception_cb(self, extra_exception_cb, msg, exception):
469 ret = None
470 try:
471 if extra_exception_cb:
472 ret = extra_exception_cb(msg, exception)
473 finally:
474 self.exception_cb(msg, exception)
475 return ret
476
477 def _headers_cb(self, extra_headers_cb, url):
478 headers = {}
479 if extra_headers_cb:
480 headers = extra_headers_cb(url)
481 headers.update(self.headers_cb(url))
482 return headers
483
484
485def oauth_headers(url, consumer_key, token_key, token_secret, consumer_secret,
486 timestamp=None):
487 if timestamp:
488 timestamp = str(timestamp)
489 else:
490 timestamp = None
491
492 client = oauth1.Client(
493 consumer_key,
494 client_secret=consumer_secret,
495 resource_owner_key=token_key,
496 resource_owner_secret=token_secret,
497 signature_method=oauth1.SIGNATURE_PLAINTEXT,
498 timestamp=timestamp)
499 uri, signed_headers, body = client.sign(url)
500 return signed_headers
371501
=== modified file 'cloudinit/util.py'
--- cloudinit/util.py 2015-07-28 00:42:56 +0000
+++ cloudinit/util.py 2015-08-07 15:21:12 +0000
@@ -782,7 +782,8 @@
782 code = e.errno782 code = e.errno
783 if e.errno == errno.ENOENT:783 if e.errno == errno.ENOENT:
784 code = url_helper.NOT_FOUND784 code = url_helper.NOT_FOUND
785 raise url_helper.UrlError(cause=e, code=code, headers=None)785 raise url_helper.UrlError(cause=e, code=code, headers=None,
786 url=url)
786 return url_helper.FileResponse(file_path, contents=contents)787 return url_helper.FileResponse(file_path, contents=contents)
787 else:788 else:
788 return url_helper.readurl(url,789 return url_helper.readurl(url,
789790
=== modified file 'tests/unittests/test_datasource/test_maas.py'
--- tests/unittests/test_datasource/test_maas.py 2015-02-26 00:40:33 +0000
+++ tests/unittests/test_datasource/test_maas.py 2015-08-07 15:21:12 +0000
@@ -141,7 +141,7 @@
141 with mock.patch.object(url_helper, 'readurl',141 with mock.patch.object(url_helper, 'readurl',
142 side_effect=side_effect()) as mockobj:142 side_effect=side_effect()) as mockobj:
143 userdata, metadata = DataSourceMAAS.read_maas_seed_url(143 userdata, metadata = DataSourceMAAS.read_maas_seed_url(
144 my_seed, header_cb=my_headers_cb, version=my_ver)144 my_seed, version=my_ver)
145145
146 self.assertEqual(b"foodata", userdata)146 self.assertEqual(b"foodata", userdata)
147 self.assertEqual(metadata['instance-id'],147 self.assertEqual(metadata['instance-id'],
148148
=== added file 'tests/unittests/test_registry.py'
--- tests/unittests/test_registry.py 1970-01-01 00:00:00 +0000
+++ tests/unittests/test_registry.py 2015-08-07 15:21:12 +0000
@@ -0,0 +1,28 @@
1from cloudinit.registry import DictRegistry
2
3from .helpers import (mock, TestCase)
4
5
6class TestDictRegistry(TestCase):
7
8 def test_added_item_included_in_output(self):
9 registry = DictRegistry()
10 item_key, item_to_register = 'test_key', mock.Mock()
11 registry.register_item(item_key, item_to_register)
12 self.assertEqual({item_key: item_to_register},
13 registry.registered_items)
14
15 def test_registry_starts_out_empty(self):
16 self.assertEqual({}, DictRegistry().registered_items)
17
18 def test_modifying_registered_items_isnt_exposed_to_other_callers(self):
19 registry = DictRegistry()
20 registry.registered_items['test_item'] = mock.Mock()
21 self.assertEqual({}, registry.registered_items)
22
23 def test_keys_cannot_be_replaced(self):
24 registry = DictRegistry()
25 item_key = 'test_key'
26 registry.register_item(item_key, mock.Mock())
27 self.assertRaises(ValueError,
28 registry.register_item, item_key, mock.Mock())
029
=== added file 'tests/unittests/test_reporting.py'
--- tests/unittests/test_reporting.py 1970-01-01 00:00:00 +0000
+++ tests/unittests/test_reporting.py 2015-08-07 15:21:12 +0000
@@ -0,0 +1,359 @@
1# Copyright 2015 Canonical Ltd.
2# This file is part of cloud-init. See LICENCE file for license information.
3#
4# vi: ts=4 expandtab
5
6from cloudinit import reporting
7from cloudinit.reporting import handlers
8
9from .helpers import (mock, TestCase)
10
11
12def _fake_registry():
13 return mock.Mock(registered_items={'a': mock.MagicMock(),
14 'b': mock.MagicMock()})
15
16
17class TestReportStartEvent(TestCase):
18
19 @mock.patch('cloudinit.reporting.instantiated_handler_registry',
20 new_callable=_fake_registry)
21 def test_report_start_event_passes_something_with_as_string_to_handlers(
22 self, instantiated_handler_registry):
23 event_name, event_description = 'my_test_event', 'my description'
24 reporting.report_start_event(event_name, event_description)
25 expected_string_representation = ': '.join(
26 ['start', event_name, event_description])
27 for _, handler in (
28 instantiated_handler_registry.registered_items.items()):
29 self.assertEqual(1, handler.publish_event.call_count)
30 event = handler.publish_event.call_args[0][0]
31 self.assertEqual(expected_string_representation, event.as_string())
32
33
34class TestReportFinishEvent(TestCase):
35
36 def _report_finish_event(self, result=reporting.status.SUCCESS):
37 event_name, event_description = 'my_test_event', 'my description'
38 reporting.report_finish_event(
39 event_name, event_description, result=result)
40 return event_name, event_description
41
42 def assertHandlersPassedObjectWithAsString(
43 self, handlers, expected_as_string):
44 for _, handler in handlers.items():
45 self.assertEqual(1, handler.publish_event.call_count)
46 event = handler.publish_event.call_args[0][0]
47 self.assertEqual(expected_as_string, event.as_string())
48
49 @mock.patch('cloudinit.reporting.instantiated_handler_registry',
50 new_callable=_fake_registry)
51 def test_report_finish_event_passes_something_with_as_string_to_handlers(
52 self, instantiated_handler_registry):
53 event_name, event_description = self._report_finish_event()
54 expected_string_representation = ': '.join(
55 ['finish', event_name, reporting.status.SUCCESS,
56 event_description])
57 self.assertHandlersPassedObjectWithAsString(
58 instantiated_handler_registry.registered_items,
59 expected_string_representation)
60
61 @mock.patch('cloudinit.reporting.instantiated_handler_registry',
62 new_callable=_fake_registry)
63 def test_reporting_successful_finish_has_sensible_string_repr(
64 self, instantiated_handler_registry):
65 event_name, event_description = self._report_finish_event(
66 result=reporting.status.SUCCESS)
67 expected_string_representation = ': '.join(
68 ['finish', event_name, reporting.status.SUCCESS,
69 event_description])
70 self.assertHandlersPassedObjectWithAsString(
71 instantiated_handler_registry.registered_items,
72 expected_string_representation)
73
74 @mock.patch('cloudinit.reporting.instantiated_handler_registry',
75 new_callable=_fake_registry)
76 def test_reporting_unsuccessful_finish_has_sensible_string_repr(
77 self, instantiated_handler_registry):
78 event_name, event_description = self._report_finish_event(
79 result=reporting.status.FAIL)
80 expected_string_representation = ': '.join(
81 ['finish', event_name, reporting.status.FAIL, event_description])
82 self.assertHandlersPassedObjectWithAsString(
83 instantiated_handler_registry.registered_items,
84 expected_string_representation)
85
86 def test_invalid_result_raises_attribute_error(self):
87 self.assertRaises(ValueError, self._report_finish_event, ("BOGUS",))
88
89
90class TestReportingEvent(TestCase):
91
92 def test_as_string(self):
93 event_type, name, description = 'test_type', 'test_name', 'test_desc'
94 event = reporting.ReportingEvent(event_type, name, description)
95 expected_string_representation = ': '.join(
96 [event_type, name, description])
97 self.assertEqual(expected_string_representation, event.as_string())
98
99 def test_as_dict(self):
100 event_type, name, desc = 'test_type', 'test_name', 'test_desc'
101 event = reporting.ReportingEvent(event_type, name, desc)
102 self.assertEqual(
103 {'event_type': event_type, 'name': name, 'description': desc},
104 event.as_dict())
105
106
107class TestFinishReportingEvent(TestCase):
108 def test_as_has_result(self):
109 result = reporting.status.SUCCESS
110 name, desc = 'test_name', 'test_desc'
111 event = reporting.FinishReportingEvent(name, desc, result)
112 ret = event.as_dict()
113 self.assertTrue('result' in ret)
114 self.assertEqual(ret['result'], result)
115
116
117class TestBaseReportingHandler(TestCase):
118
119 def test_base_reporting_handler_is_abstract(self):
120 regexp = r".*abstract.*publish_event.*"
121 self.assertRaisesRegexp(TypeError, regexp, handlers.ReportingHandler)
122
123
124class TestLogHandler(TestCase):
125
126 @mock.patch.object(reporting.handlers.logging, 'getLogger')
127 def test_appropriate_logger_used(self, getLogger):
128 event_type, event_name = 'test_type', 'test_name'
129 event = reporting.ReportingEvent(event_type, event_name, 'description')
130 reporting.handlers.LogHandler().publish_event(event)
131 self.assertEqual(
132 [mock.call(
133 'cloudinit.reporting.{0}.{1}'.format(event_type, event_name))],
134 getLogger.call_args_list)
135
136 @mock.patch.object(reporting.handlers.logging, 'getLogger')
137 def test_single_log_message_at_info_published(self, getLogger):
138 event = reporting.ReportingEvent('type', 'name', 'description')
139 reporting.handlers.LogHandler().publish_event(event)
140 self.assertEqual(1, getLogger.return_value.info.call_count)
141
142 @mock.patch.object(reporting.handlers.logging, 'getLogger')
143 def test_log_message_uses_event_as_string(self, getLogger):
144 event = reporting.ReportingEvent('type', 'name', 'description')
145 reporting.handlers.LogHandler().publish_event(event)
146 self.assertIn(event.as_string(),
147 getLogger.return_value.info.call_args[0][0])
148
149
150class TestDefaultRegisteredHandler(TestCase):
151
152 def test_log_handler_registered_by_default(self):
153 registered_items = (
154 reporting.instantiated_handler_registry.registered_items)
155 for _, item in registered_items.items():
156 if isinstance(item, reporting.handlers.LogHandler):
157 break
158 else:
159 self.fail('No reporting LogHandler registered by default.')
160
161
162class TestReportingConfiguration(TestCase):
163
164 @mock.patch.object(reporting, 'instantiated_handler_registry')
165 def test_empty_configuration_doesnt_add_handlers(
166 self, instantiated_handler_registry):
167 reporting.update_configuration({})
168 self.assertEqual(
169 0, instantiated_handler_registry.register_item.call_count)
170
171 @mock.patch.object(
172 reporting, 'instantiated_handler_registry', reporting.DictRegistry())
173 @mock.patch.object(reporting, 'available_handlers')
174 def test_looks_up_handler_by_type_and_adds_it(self, available_handlers):
175 handler_type_name = 'test_handler'
176 handler_cls = mock.Mock()
177 available_handlers.registered_items = {handler_type_name: handler_cls}
178 handler_name = 'my_test_handler'
179 reporting.update_configuration(
180 {handler_name: {'type': handler_type_name}})
181 self.assertEqual(
182 {handler_name: handler_cls.return_value},
183 reporting.instantiated_handler_registry.registered_items)
184
185 @mock.patch.object(
186 reporting, 'instantiated_handler_registry', reporting.DictRegistry())
187 @mock.patch.object(reporting, 'available_handlers')
188 def test_uses_non_type_parts_of_config_dict_as_kwargs(
189 self, available_handlers):
190 handler_type_name = 'test_handler'
191 handler_cls = mock.Mock()
192 available_handlers.registered_items = {handler_type_name: handler_cls}
193 extra_kwargs = {'foo': 'bar', 'bar': 'baz'}
194 handler_config = extra_kwargs.copy()
195 handler_config.update({'type': handler_type_name})
196 handler_name = 'my_test_handler'
197 reporting.update_configuration({handler_name: handler_config})
198 self.assertEqual(
199 handler_cls.return_value,
200 reporting.instantiated_handler_registry.registered_items[
201 handler_name])
202 self.assertEqual([mock.call(**extra_kwargs)],
203 handler_cls.call_args_list)
204
205 @mock.patch.object(
206 reporting, 'instantiated_handler_registry', reporting.DictRegistry())
207 @mock.patch.object(reporting, 'available_handlers')
208 def test_handler_config_not_modified(self, available_handlers):
209 handler_type_name = 'test_handler'
210 handler_cls = mock.Mock()
211 available_handlers.registered_items = {handler_type_name: handler_cls}
212 handler_config = {'type': handler_type_name, 'foo': 'bar'}
213 expected_handler_config = handler_config.copy()
214 reporting.update_configuration({'my_test_handler': handler_config})
215 self.assertEqual(expected_handler_config, handler_config)
216
217 @mock.patch.object(
218 reporting, 'instantiated_handler_registry', reporting.DictRegistry())
219 @mock.patch.object(reporting, 'available_handlers')
220 def test_handlers_removed_if_falseish_specified(self, available_handlers):
221 handler_type_name = 'test_handler'
222 handler_cls = mock.Mock()
223 available_handlers.registered_items = {handler_type_name: handler_cls}
224 handler_name = 'my_test_handler'
225 reporting.update_configuration(
226 {handler_name: {'type': handler_type_name}})
227 self.assertEqual(
228 1, len(reporting.instantiated_handler_registry.registered_items))
229 reporting.update_configuration({handler_name: None})
230 self.assertEqual(
231 0, len(reporting.instantiated_handler_registry.registered_items))
232
233
234class TestReportingEventStack(TestCase):
235 @mock.patch('cloudinit.reporting.report_finish_event')
236 @mock.patch('cloudinit.reporting.report_start_event')
237 def test_start_and_finish_success(self, report_start, report_finish):
238 with reporting.ReportEventStack(name="myname", description="mydesc"):
239 pass
240 self.assertEqual(
241 [mock.call('myname', 'mydesc')], report_start.call_args_list)
242 self.assertEqual(
243 [mock.call('myname', 'mydesc', reporting.status.SUCCESS)],
244 report_finish.call_args_list)
245
246 @mock.patch('cloudinit.reporting.report_finish_event')
247 @mock.patch('cloudinit.reporting.report_start_event')
248 def test_finish_exception_defaults_fail(self, report_start, report_finish):
249 name = "myname"
250 desc = "mydesc"
251 try:
252 with reporting.ReportEventStack(name, description=desc):
253 raise ValueError("This didnt work")
254 except ValueError:
255 pass
256 self.assertEqual([mock.call(name, desc)], report_start.call_args_list)
257 self.assertEqual(
258 [mock.call(name, desc, reporting.status.FAIL)],
259 report_finish.call_args_list)
260
261 @mock.patch('cloudinit.reporting.report_finish_event')
262 @mock.patch('cloudinit.reporting.report_start_event')
263 def test_result_on_exception_used(self, report_start, report_finish):
264 name = "myname"
265 desc = "mydesc"
266 try:
267 with reporting.ReportEventStack(
268 name, desc, result_on_exception=reporting.status.WARN):
269 raise ValueError("This didnt work")
270 except ValueError:
271 pass
272 self.assertEqual([mock.call(name, desc)], report_start.call_args_list)
273 self.assertEqual(
274 [mock.call(name, desc, reporting.status.WARN)],
275 report_finish.call_args_list)
276
277 @mock.patch('cloudinit.reporting.report_start_event')
278 def test_child_fullname_respects_parent(self, report_start):
279 parent_name = "topname"
280 c1_name = "c1name"
281 c2_name = "c2name"
282 c2_expected_fullname = '/'.join([parent_name, c1_name, c2_name])
283 c1_expected_fullname = '/'.join([parent_name, c1_name])
284
285 parent = reporting.ReportEventStack(parent_name, "topdesc")
286 c1 = reporting.ReportEventStack(c1_name, "c1desc", parent=parent)
287 c2 = reporting.ReportEventStack(c2_name, "c2desc", parent=c1)
288 with c1:
289 report_start.assert_called_with(c1_expected_fullname, "c1desc")
290 with c2:
291 report_start.assert_called_with(c2_expected_fullname, "c2desc")
292
293 @mock.patch('cloudinit.reporting.report_finish_event')
294 @mock.patch('cloudinit.reporting.report_start_event')
295 def test_child_result_bubbles_up(self, report_start, report_finish):
296 parent = reporting.ReportEventStack("topname", "topdesc")
297 child = reporting.ReportEventStack("c_name", "c_desc", parent=parent)
298 with parent:
299 with child:
300 child.result = reporting.status.WARN
301
302 report_finish.assert_called_with(
303 "topname", "topdesc", reporting.status.WARN)
304
305 @mock.patch('cloudinit.reporting.report_finish_event')
306 def test_message_used_in_finish(self, report_finish):
307 with reporting.ReportEventStack("myname", "mydesc",
308 message="mymessage"):
309 pass
310 self.assertEqual(
311 [mock.call("myname", "mymessage", reporting.status.SUCCESS)],
312 report_finish.call_args_list)
313
314 @mock.patch('cloudinit.reporting.report_finish_event')
315 def test_message_updatable(self, report_finish):
316 with reporting.ReportEventStack("myname", "mydesc") as c:
317 c.message = "all good"
318 self.assertEqual(
319 [mock.call("myname", "all good", reporting.status.SUCCESS)],
320 report_finish.call_args_list)
321
322 @mock.patch('cloudinit.reporting.report_start_event')
323 @mock.patch('cloudinit.reporting.report_finish_event')
324 def test_reporting_disabled_does_not_report_events(
325 self, report_start, report_finish):
326 with reporting.ReportEventStack("a", "b", reporting_enabled=False):
327 pass
328 self.assertEqual(report_start.call_count, 0)
329 self.assertEqual(report_finish.call_count, 0)
330
331 @mock.patch('cloudinit.reporting.report_start_event')
332 @mock.patch('cloudinit.reporting.report_finish_event')
333 def test_reporting_child_default_to_parent(
334 self, report_start, report_finish):
335 parent = reporting.ReportEventStack(
336 "pname", "pdesc", reporting_enabled=False)
337 child = reporting.ReportEventStack("cname", "cdesc", parent=parent)
338 with parent:
339 with child:
340 pass
341 pass
342 self.assertEqual(report_start.call_count, 0)
343 self.assertEqual(report_finish.call_count, 0)
344
345 def test_reporting_event_has_sane_repr(self):
346 myrep = reporting.ReportEventStack("fooname", "foodesc",
347 reporting_enabled=True).__repr__()
348 self.assertIn("fooname", myrep)
349 self.assertIn("foodesc", myrep)
350 self.assertIn("True", myrep)
351
352 def test_set_invalid_result_raises_value_error(self):
353 f = reporting.ReportEventStack("myname", "mydesc")
354 self.assertRaises(ValueError, setattr, f, "result", "BOGUS")
355
356
357class TestStatusAccess(TestCase):
358 def test_invalid_status_access_raises_value_error(self):
359 self.assertRaises(AttributeError, getattr, reporting.status, "BOGUS")