Merge lp:~corey.bryant/charms/precise/mongodb/source_key_options into lp:charms/mongodb
- Precise Pangolin (12.04)
- source_key_options
- Merge into trunk
Proposed by
Corey Bryant
Status: | Merged |
---|---|
Merged at revision: | 47 |
Proposed branch: | lp:~corey.bryant/charms/precise/mongodb/source_key_options |
Merge into: | lp:charms/mongodb |
Diff against target: |
1271 lines (+1158/-23) 10 files modified
Makefile (+4/-1) charm-helpers-sync.yaml (+5/-0) config.yaml (+15/-0) hooks/charmhelpers/core/hookenv.py (+401/-0) hooks/charmhelpers/core/host.py (+297/-0) hooks/charmhelpers/fetch/__init__.py (+308/-0) hooks/charmhelpers/fetch/archiveurl.py (+63/-0) hooks/charmhelpers/fetch/bzrurl.py (+49/-0) hooks/hooks.py (+13/-21) tests/test_write_log_rotate_config.py (+3/-1) |
To merge this branch: | bzr merge lp:~corey.bryant/charms/precise/mongodb/source_key_options |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Juan L. Negron (community) | Approve | ||
Review via email: mp+215514@code.launchpad.net |
Commit message
Description of the change
To post a comment you must log in.
Revision history for this message
Juan L. Negron (negronjl) wrote : | # |
Revision history for this message
Juan L. Negron (negronjl) wrote : | # |
Approved... merging.
-Juan
review:
Approve
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'Makefile' |
2 | --- Makefile 2013-12-11 13:17:03 +0000 |
3 | +++ Makefile 2014-04-11 22:06:38 +0000 |
4 | @@ -14,5 +14,8 @@ |
5 | # along with this program. If not, see <http://www.gnu.org/licenses/>. |
6 | |
7 | |
8 | -unittest: |
9 | +unittest: |
10 | tests/10-unit.test |
11 | + |
12 | +sync: |
13 | + @charm-helper-sync -c charm-helpers-sync.yaml |
14 | |
15 | === added file 'charm-helpers-sync.yaml' |
16 | --- charm-helpers-sync.yaml 1970-01-01 00:00:00 +0000 |
17 | +++ charm-helpers-sync.yaml 2014-04-11 22:06:38 +0000 |
18 | @@ -0,0 +1,5 @@ |
19 | +branch: lp:charm-helpers |
20 | +destination: hooks/charmhelpers |
21 | +include: |
22 | + - core |
23 | + - fetch |
24 | |
25 | === modified file 'config.yaml' |
26 | --- config.yaml 2014-02-19 19:18:51 +0000 |
27 | +++ config.yaml 2014-04-11 22:06:38 +0000 |
28 | @@ -196,3 +196,18 @@ |
29 | description: > |
30 | Block device for attached volumes as seen by the VM, will be "scanned" |
31 | for an unused device when "volume-map" is valid for the unit. |
32 | + source: |
33 | + type: string |
34 | + default: None |
35 | + description: > |
36 | + Optional configuration to support use of additional sources such as: |
37 | + - ppa:myteam/ppa |
38 | + - cloud:precise-proposed/icehouse |
39 | + - http://my.archive.com/ubuntu main |
40 | + The last option should be used in conjunction with the key configuration |
41 | + option. |
42 | + key: |
43 | + type: string |
44 | + description: > |
45 | + Key ID to import to the apt keyring to support use with arbitrary source |
46 | + configuration from outside of Launchpad archives or PPA's. |
47 | |
48 | === added directory 'hooks/charmhelpers' |
49 | === added file 'hooks/charmhelpers/__init__.py' |
50 | === added directory 'hooks/charmhelpers/core' |
51 | === added file 'hooks/charmhelpers/core/__init__.py' |
52 | === added file 'hooks/charmhelpers/core/hookenv.py' |
53 | --- hooks/charmhelpers/core/hookenv.py 1970-01-01 00:00:00 +0000 |
54 | +++ hooks/charmhelpers/core/hookenv.py 2014-04-11 22:06:38 +0000 |
55 | @@ -0,0 +1,401 @@ |
56 | +"Interactions with the Juju environment" |
57 | +# Copyright 2013 Canonical Ltd. |
58 | +# |
59 | +# Authors: |
60 | +# Charm Helpers Developers <juju@lists.ubuntu.com> |
61 | + |
62 | +import os |
63 | +import json |
64 | +import yaml |
65 | +import subprocess |
66 | +import sys |
67 | +import UserDict |
68 | +from subprocess import CalledProcessError |
69 | + |
70 | +CRITICAL = "CRITICAL" |
71 | +ERROR = "ERROR" |
72 | +WARNING = "WARNING" |
73 | +INFO = "INFO" |
74 | +DEBUG = "DEBUG" |
75 | +MARKER = object() |
76 | + |
77 | +cache = {} |
78 | + |
79 | + |
80 | +def cached(func): |
81 | + """Cache return values for multiple executions of func + args |
82 | + |
83 | + For example: |
84 | + |
85 | + @cached |
86 | + def unit_get(attribute): |
87 | + pass |
88 | + |
89 | + unit_get('test') |
90 | + |
91 | + will cache the result of unit_get + 'test' for future calls. |
92 | + """ |
93 | + def wrapper(*args, **kwargs): |
94 | + global cache |
95 | + key = str((func, args, kwargs)) |
96 | + try: |
97 | + return cache[key] |
98 | + except KeyError: |
99 | + res = func(*args, **kwargs) |
100 | + cache[key] = res |
101 | + return res |
102 | + return wrapper |
103 | + |
104 | + |
105 | +def flush(key): |
106 | + """Flushes any entries from function cache where the |
107 | + key is found in the function+args """ |
108 | + flush_list = [] |
109 | + for item in cache: |
110 | + if key in item: |
111 | + flush_list.append(item) |
112 | + for item in flush_list: |
113 | + del cache[item] |
114 | + |
115 | + |
116 | +def log(message, level=None): |
117 | + """Write a message to the juju log""" |
118 | + command = ['juju-log'] |
119 | + if level: |
120 | + command += ['-l', level] |
121 | + command += [message] |
122 | + subprocess.call(command) |
123 | + |
124 | + |
125 | +class Serializable(UserDict.IterableUserDict): |
126 | + """Wrapper, an object that can be serialized to yaml or json""" |
127 | + |
128 | + def __init__(self, obj): |
129 | + # wrap the object |
130 | + UserDict.IterableUserDict.__init__(self) |
131 | + self.data = obj |
132 | + |
133 | + def __getattr__(self, attr): |
134 | + # See if this object has attribute. |
135 | + if attr in ("json", "yaml", "data"): |
136 | + return self.__dict__[attr] |
137 | + # Check for attribute in wrapped object. |
138 | + got = getattr(self.data, attr, MARKER) |
139 | + if got is not MARKER: |
140 | + return got |
141 | + # Proxy to the wrapped object via dict interface. |
142 | + try: |
143 | + return self.data[attr] |
144 | + except KeyError: |
145 | + raise AttributeError(attr) |
146 | + |
147 | + def __getstate__(self): |
148 | + # Pickle as a standard dictionary. |
149 | + return self.data |
150 | + |
151 | + def __setstate__(self, state): |
152 | + # Unpickle into our wrapper. |
153 | + self.data = state |
154 | + |
155 | + def json(self): |
156 | + """Serialize the object to json""" |
157 | + return json.dumps(self.data) |
158 | + |
159 | + def yaml(self): |
160 | + """Serialize the object to yaml""" |
161 | + return yaml.dump(self.data) |
162 | + |
163 | + |
164 | +def execution_environment(): |
165 | + """A convenient bundling of the current execution context""" |
166 | + context = {} |
167 | + context['conf'] = config() |
168 | + if relation_id(): |
169 | + context['reltype'] = relation_type() |
170 | + context['relid'] = relation_id() |
171 | + context['rel'] = relation_get() |
172 | + context['unit'] = local_unit() |
173 | + context['rels'] = relations() |
174 | + context['env'] = os.environ |
175 | + return context |
176 | + |
177 | + |
178 | +def in_relation_hook(): |
179 | + """Determine whether we're running in a relation hook""" |
180 | + return 'JUJU_RELATION' in os.environ |
181 | + |
182 | + |
183 | +def relation_type(): |
184 | + """The scope for the current relation hook""" |
185 | + return os.environ.get('JUJU_RELATION', None) |
186 | + |
187 | + |
188 | +def relation_id(): |
189 | + """The relation ID for the current relation hook""" |
190 | + return os.environ.get('JUJU_RELATION_ID', None) |
191 | + |
192 | + |
193 | +def local_unit(): |
194 | + """Local unit ID""" |
195 | + return os.environ['JUJU_UNIT_NAME'] |
196 | + |
197 | + |
198 | +def remote_unit(): |
199 | + """The remote unit for the current relation hook""" |
200 | + return os.environ['JUJU_REMOTE_UNIT'] |
201 | + |
202 | + |
203 | +def service_name(): |
204 | + """The name service group this unit belongs to""" |
205 | + return local_unit().split('/')[0] |
206 | + |
207 | + |
208 | +def hook_name(): |
209 | + """The name of the currently executing hook""" |
210 | + return os.path.basename(sys.argv[0]) |
211 | + |
212 | + |
213 | +@cached |
214 | +def config(scope=None): |
215 | + """Juju charm configuration""" |
216 | + config_cmd_line = ['config-get'] |
217 | + if scope is not None: |
218 | + config_cmd_line.append(scope) |
219 | + config_cmd_line.append('--format=json') |
220 | + try: |
221 | + return json.loads(subprocess.check_output(config_cmd_line)) |
222 | + except ValueError: |
223 | + return None |
224 | + |
225 | + |
226 | +@cached |
227 | +def relation_get(attribute=None, unit=None, rid=None): |
228 | + """Get relation information""" |
229 | + _args = ['relation-get', '--format=json'] |
230 | + if rid: |
231 | + _args.append('-r') |
232 | + _args.append(rid) |
233 | + _args.append(attribute or '-') |
234 | + if unit: |
235 | + _args.append(unit) |
236 | + try: |
237 | + return json.loads(subprocess.check_output(_args)) |
238 | + except ValueError: |
239 | + return None |
240 | + except CalledProcessError, e: |
241 | + if e.returncode == 2: |
242 | + return None |
243 | + raise |
244 | + |
245 | + |
246 | +def relation_set(relation_id=None, relation_settings={}, **kwargs): |
247 | + """Set relation information for the current unit""" |
248 | + relation_cmd_line = ['relation-set'] |
249 | + if relation_id is not None: |
250 | + relation_cmd_line.extend(('-r', relation_id)) |
251 | + for k, v in (relation_settings.items() + kwargs.items()): |
252 | + if v is None: |
253 | + relation_cmd_line.append('{}='.format(k)) |
254 | + else: |
255 | + relation_cmd_line.append('{}={}'.format(k, v)) |
256 | + subprocess.check_call(relation_cmd_line) |
257 | + # Flush cache of any relation-gets for local unit |
258 | + flush(local_unit()) |
259 | + |
260 | + |
261 | +@cached |
262 | +def relation_ids(reltype=None): |
263 | + """A list of relation_ids""" |
264 | + reltype = reltype or relation_type() |
265 | + relid_cmd_line = ['relation-ids', '--format=json'] |
266 | + if reltype is not None: |
267 | + relid_cmd_line.append(reltype) |
268 | + return json.loads(subprocess.check_output(relid_cmd_line)) or [] |
269 | + return [] |
270 | + |
271 | + |
272 | +@cached |
273 | +def related_units(relid=None): |
274 | + """A list of related units""" |
275 | + relid = relid or relation_id() |
276 | + units_cmd_line = ['relation-list', '--format=json'] |
277 | + if relid is not None: |
278 | + units_cmd_line.extend(('-r', relid)) |
279 | + return json.loads(subprocess.check_output(units_cmd_line)) or [] |
280 | + |
281 | + |
282 | +@cached |
283 | +def relation_for_unit(unit=None, rid=None): |
284 | + """Get the json representation of a unit's relation""" |
285 | + unit = unit or remote_unit() |
286 | + relation = relation_get(unit=unit, rid=rid) |
287 | + for key in relation: |
288 | + if key.endswith('-list'): |
289 | + relation[key] = relation[key].split() |
290 | + relation['__unit__'] = unit |
291 | + return relation |
292 | + |
293 | + |
294 | +@cached |
295 | +def relations_for_id(relid=None): |
296 | + """Get relations of a specific relation ID""" |
297 | + relation_data = [] |
298 | + relid = relid or relation_ids() |
299 | + for unit in related_units(relid): |
300 | + unit_data = relation_for_unit(unit, relid) |
301 | + unit_data['__relid__'] = relid |
302 | + relation_data.append(unit_data) |
303 | + return relation_data |
304 | + |
305 | + |
306 | +@cached |
307 | +def relations_of_type(reltype=None): |
308 | + """Get relations of a specific type""" |
309 | + relation_data = [] |
310 | + reltype = reltype or relation_type() |
311 | + for relid in relation_ids(reltype): |
312 | + for relation in relations_for_id(relid): |
313 | + relation['__relid__'] = relid |
314 | + relation_data.append(relation) |
315 | + return relation_data |
316 | + |
317 | + |
318 | +@cached |
319 | +def relation_types(): |
320 | + """Get a list of relation types supported by this charm""" |
321 | + charmdir = os.environ.get('CHARM_DIR', '') |
322 | + mdf = open(os.path.join(charmdir, 'metadata.yaml')) |
323 | + md = yaml.safe_load(mdf) |
324 | + rel_types = [] |
325 | + for key in ('provides', 'requires', 'peers'): |
326 | + section = md.get(key) |
327 | + if section: |
328 | + rel_types.extend(section.keys()) |
329 | + mdf.close() |
330 | + return rel_types |
331 | + |
332 | + |
333 | +@cached |
334 | +def relations(): |
335 | + """Get a nested dictionary of relation data for all related units""" |
336 | + rels = {} |
337 | + for reltype in relation_types(): |
338 | + relids = {} |
339 | + for relid in relation_ids(reltype): |
340 | + units = {local_unit(): relation_get(unit=local_unit(), rid=relid)} |
341 | + for unit in related_units(relid): |
342 | + reldata = relation_get(unit=unit, rid=relid) |
343 | + units[unit] = reldata |
344 | + relids[relid] = units |
345 | + rels[reltype] = relids |
346 | + return rels |
347 | + |
348 | + |
349 | +@cached |
350 | +def is_relation_made(relation, keys='private-address'): |
351 | + ''' |
352 | + Determine whether a relation is established by checking for |
353 | + presence of key(s). If a list of keys is provided, they |
354 | + must all be present for the relation to be identified as made |
355 | + ''' |
356 | + if isinstance(keys, str): |
357 | + keys = [keys] |
358 | + for r_id in relation_ids(relation): |
359 | + for unit in related_units(r_id): |
360 | + context = {} |
361 | + for k in keys: |
362 | + context[k] = relation_get(k, rid=r_id, |
363 | + unit=unit) |
364 | + if None not in context.values(): |
365 | + return True |
366 | + return False |
367 | + |
368 | + |
369 | +def open_port(port, protocol="TCP"): |
370 | + """Open a service network port""" |
371 | + _args = ['open-port'] |
372 | + _args.append('{}/{}'.format(port, protocol)) |
373 | + subprocess.check_call(_args) |
374 | + |
375 | + |
376 | +def close_port(port, protocol="TCP"): |
377 | + """Close a service network port""" |
378 | + _args = ['close-port'] |
379 | + _args.append('{}/{}'.format(port, protocol)) |
380 | + subprocess.check_call(_args) |
381 | + |
382 | + |
383 | +@cached |
384 | +def unit_get(attribute): |
385 | + """Get the unit ID for the remote unit""" |
386 | + _args = ['unit-get', '--format=json', attribute] |
387 | + try: |
388 | + return json.loads(subprocess.check_output(_args)) |
389 | + except ValueError: |
390 | + return None |
391 | + |
392 | + |
393 | +def unit_private_ip(): |
394 | + """Get this unit's private IP address""" |
395 | + return unit_get('private-address') |
396 | + |
397 | + |
398 | +class UnregisteredHookError(Exception): |
399 | + """Raised when an undefined hook is called""" |
400 | + pass |
401 | + |
402 | + |
403 | +class Hooks(object): |
404 | + """A convenient handler for hook functions. |
405 | + |
406 | + Example: |
407 | + hooks = Hooks() |
408 | + |
409 | + # register a hook, taking its name from the function name |
410 | + @hooks.hook() |
411 | + def install(): |
412 | + ... |
413 | + |
414 | + # register a hook, providing a custom hook name |
415 | + @hooks.hook("config-changed") |
416 | + def config_changed(): |
417 | + ... |
418 | + |
419 | + if __name__ == "__main__": |
420 | + # execute a hook based on the name the program is called by |
421 | + hooks.execute(sys.argv) |
422 | + """ |
423 | + |
424 | + def __init__(self): |
425 | + super(Hooks, self).__init__() |
426 | + self._hooks = {} |
427 | + |
428 | + def register(self, name, function): |
429 | + """Register a hook""" |
430 | + self._hooks[name] = function |
431 | + |
432 | + def execute(self, args): |
433 | + """Execute a registered hook based on args[0]""" |
434 | + hook_name = os.path.basename(args[0]) |
435 | + if hook_name in self._hooks: |
436 | + self._hooks[hook_name]() |
437 | + else: |
438 | + raise UnregisteredHookError(hook_name) |
439 | + |
440 | + def hook(self, *hook_names): |
441 | + """Decorator, registering them as hooks""" |
442 | + def wrapper(decorated): |
443 | + for hook_name in hook_names: |
444 | + self.register(hook_name, decorated) |
445 | + else: |
446 | + self.register(decorated.__name__, decorated) |
447 | + if '_' in decorated.__name__: |
448 | + self.register( |
449 | + decorated.__name__.replace('_', '-'), decorated) |
450 | + return decorated |
451 | + return wrapper |
452 | + |
453 | + |
454 | +def charm_dir(): |
455 | + """Return the root directory of the current charm""" |
456 | + return os.environ.get('CHARM_DIR') |
457 | |
458 | === added file 'hooks/charmhelpers/core/host.py' |
459 | --- hooks/charmhelpers/core/host.py 1970-01-01 00:00:00 +0000 |
460 | +++ hooks/charmhelpers/core/host.py 2014-04-11 22:06:38 +0000 |
461 | @@ -0,0 +1,297 @@ |
462 | +"""Tools for working with the host system""" |
463 | +# Copyright 2012 Canonical Ltd. |
464 | +# |
465 | +# Authors: |
466 | +# Nick Moffitt <nick.moffitt@canonical.com> |
467 | +# Matthew Wedgwood <matthew.wedgwood@canonical.com> |
468 | + |
469 | +import os |
470 | +import pwd |
471 | +import grp |
472 | +import random |
473 | +import string |
474 | +import subprocess |
475 | +import hashlib |
476 | + |
477 | +from collections import OrderedDict |
478 | + |
479 | +from hookenv import log |
480 | + |
481 | + |
482 | +def service_start(service_name): |
483 | + """Start a system service""" |
484 | + return service('start', service_name) |
485 | + |
486 | + |
487 | +def service_stop(service_name): |
488 | + """Stop a system service""" |
489 | + return service('stop', service_name) |
490 | + |
491 | + |
492 | +def service_restart(service_name): |
493 | + """Restart a system service""" |
494 | + return service('restart', service_name) |
495 | + |
496 | + |
497 | +def service_reload(service_name, restart_on_failure=False): |
498 | + """Reload a system service, optionally falling back to restart if reload fails""" |
499 | + service_result = service('reload', service_name) |
500 | + if not service_result and restart_on_failure: |
501 | + service_result = service('restart', service_name) |
502 | + return service_result |
503 | + |
504 | + |
505 | +def service(action, service_name): |
506 | + """Control a system service""" |
507 | + cmd = ['service', service_name, action] |
508 | + return subprocess.call(cmd) == 0 |
509 | + |
510 | + |
511 | +def service_running(service): |
512 | + """Determine whether a system service is running""" |
513 | + try: |
514 | + output = subprocess.check_output(['service', service, 'status']) |
515 | + except subprocess.CalledProcessError: |
516 | + return False |
517 | + else: |
518 | + if ("start/running" in output or "is running" in output): |
519 | + return True |
520 | + else: |
521 | + return False |
522 | + |
523 | + |
524 | +def adduser(username, password=None, shell='/bin/bash', system_user=False): |
525 | + """Add a user to the system""" |
526 | + try: |
527 | + user_info = pwd.getpwnam(username) |
528 | + log('user {0} already exists!'.format(username)) |
529 | + except KeyError: |
530 | + log('creating user {0}'.format(username)) |
531 | + cmd = ['useradd'] |
532 | + if system_user or password is None: |
533 | + cmd.append('--system') |
534 | + else: |
535 | + cmd.extend([ |
536 | + '--create-home', |
537 | + '--shell', shell, |
538 | + '--password', password, |
539 | + ]) |
540 | + cmd.append(username) |
541 | + subprocess.check_call(cmd) |
542 | + user_info = pwd.getpwnam(username) |
543 | + return user_info |
544 | + |
545 | + |
546 | +def add_user_to_group(username, group): |
547 | + """Add a user to a group""" |
548 | + cmd = [ |
549 | + 'gpasswd', '-a', |
550 | + username, |
551 | + group |
552 | + ] |
553 | + log("Adding user {} to group {}".format(username, group)) |
554 | + subprocess.check_call(cmd) |
555 | + |
556 | + |
557 | +def rsync(from_path, to_path, flags='-r', options=None): |
558 | + """Replicate the contents of a path""" |
559 | + options = options or ['--delete', '--executability'] |
560 | + cmd = ['/usr/bin/rsync', flags] |
561 | + cmd.extend(options) |
562 | + cmd.append(from_path) |
563 | + cmd.append(to_path) |
564 | + log(" ".join(cmd)) |
565 | + return subprocess.check_output(cmd).strip() |
566 | + |
567 | + |
568 | +def symlink(source, destination): |
569 | + """Create a symbolic link""" |
570 | + log("Symlinking {} as {}".format(source, destination)) |
571 | + cmd = [ |
572 | + 'ln', |
573 | + '-sf', |
574 | + source, |
575 | + destination, |
576 | + ] |
577 | + subprocess.check_call(cmd) |
578 | + |
579 | + |
580 | +def mkdir(path, owner='root', group='root', perms=0555, force=False): |
581 | + """Create a directory""" |
582 | + log("Making dir {} {}:{} {:o}".format(path, owner, group, |
583 | + perms)) |
584 | + uid = pwd.getpwnam(owner).pw_uid |
585 | + gid = grp.getgrnam(group).gr_gid |
586 | + realpath = os.path.abspath(path) |
587 | + if os.path.exists(realpath): |
588 | + if force and not os.path.isdir(realpath): |
589 | + log("Removing non-directory file {} prior to mkdir()".format(path)) |
590 | + os.unlink(realpath) |
591 | + else: |
592 | + os.makedirs(realpath, perms) |
593 | + os.chown(realpath, uid, gid) |
594 | + |
595 | + |
596 | +def write_file(path, content, owner='root', group='root', perms=0444): |
597 | + """Create or overwrite a file with the contents of a string""" |
598 | + log("Writing file {} {}:{} {:o}".format(path, owner, group, perms)) |
599 | + uid = pwd.getpwnam(owner).pw_uid |
600 | + gid = grp.getgrnam(group).gr_gid |
601 | + with open(path, 'w') as target: |
602 | + os.fchown(target.fileno(), uid, gid) |
603 | + os.fchmod(target.fileno(), perms) |
604 | + target.write(content) |
605 | + |
606 | + |
607 | +def mount(device, mountpoint, options=None, persist=False): |
608 | + """Mount a filesystem at a particular mountpoint""" |
609 | + cmd_args = ['mount'] |
610 | + if options is not None: |
611 | + cmd_args.extend(['-o', options]) |
612 | + cmd_args.extend([device, mountpoint]) |
613 | + try: |
614 | + subprocess.check_output(cmd_args) |
615 | + except subprocess.CalledProcessError, e: |
616 | + log('Error mounting {} at {}\n{}'.format(device, mountpoint, e.output)) |
617 | + return False |
618 | + if persist: |
619 | + # TODO: update fstab |
620 | + pass |
621 | + return True |
622 | + |
623 | + |
624 | +def umount(mountpoint, persist=False): |
625 | + """Unmount a filesystem""" |
626 | + cmd_args = ['umount', mountpoint] |
627 | + try: |
628 | + subprocess.check_output(cmd_args) |
629 | + except subprocess.CalledProcessError, e: |
630 | + log('Error unmounting {}\n{}'.format(mountpoint, e.output)) |
631 | + return False |
632 | + if persist: |
633 | + # TODO: update fstab |
634 | + pass |
635 | + return True |
636 | + |
637 | + |
638 | +def mounts(): |
639 | + """Get a list of all mounted volumes as [[mountpoint,device],[...]]""" |
640 | + with open('/proc/mounts') as f: |
641 | + # [['/mount/point','/dev/path'],[...]] |
642 | + system_mounts = [m[1::-1] for m in [l.strip().split() |
643 | + for l in f.readlines()]] |
644 | + return system_mounts |
645 | + |
646 | + |
647 | +def file_hash(path): |
648 | + """Generate a md5 hash of the contents of 'path' or None if not found """ |
649 | + if os.path.exists(path): |
650 | + h = hashlib.md5() |
651 | + with open(path, 'r') as source: |
652 | + h.update(source.read()) # IGNORE:E1101 - it does have update |
653 | + return h.hexdigest() |
654 | + else: |
655 | + return None |
656 | + |
657 | + |
658 | +def restart_on_change(restart_map, stopstart=False): |
659 | + """Restart services based on configuration files changing |
660 | + |
661 | + This function is used a decorator, for example |
662 | + |
663 | + @restart_on_change({ |
664 | + '/etc/ceph/ceph.conf': [ 'cinder-api', 'cinder-volume' ] |
665 | + }) |
666 | + def ceph_client_changed(): |
667 | + ... |
668 | + |
669 | + In this example, the cinder-api and cinder-volume services |
670 | + would be restarted if /etc/ceph/ceph.conf is changed by the |
671 | + ceph_client_changed function. |
672 | + """ |
673 | + def wrap(f): |
674 | + def wrapped_f(*args): |
675 | + checksums = {} |
676 | + for path in restart_map: |
677 | + checksums[path] = file_hash(path) |
678 | + f(*args) |
679 | + restarts = [] |
680 | + for path in restart_map: |
681 | + if checksums[path] != file_hash(path): |
682 | + restarts += restart_map[path] |
683 | + services_list = list(OrderedDict.fromkeys(restarts)) |
684 | + if not stopstart: |
685 | + for service_name in services_list: |
686 | + service('restart', service_name) |
687 | + else: |
688 | + for action in ['stop', 'start']: |
689 | + for service_name in services_list: |
690 | + service(action, service_name) |
691 | + return wrapped_f |
692 | + return wrap |
693 | + |
694 | + |
695 | +def lsb_release(): |
696 | + """Return /etc/lsb-release in a dict""" |
697 | + d = {} |
698 | + with open('/etc/lsb-release', 'r') as lsb: |
699 | + for l in lsb: |
700 | + k, v = l.split('=') |
701 | + d[k.strip()] = v.strip() |
702 | + return d |
703 | + |
704 | + |
705 | +def pwgen(length=None): |
706 | + """Generate a random password.""" |
707 | + if length is None: |
708 | + length = random.choice(range(35, 45)) |
709 | + alphanumeric_chars = [ |
710 | + l for l in (string.letters + string.digits) |
711 | + if l not in 'l0QD1vAEIOUaeiou'] |
712 | + random_chars = [ |
713 | + random.choice(alphanumeric_chars) for _ in range(length)] |
714 | + return(''.join(random_chars)) |
715 | + |
716 | + |
717 | +def list_nics(nic_type): |
718 | + '''Return a list of nics of given type(s)''' |
719 | + if isinstance(nic_type, basestring): |
720 | + int_types = [nic_type] |
721 | + else: |
722 | + int_types = nic_type |
723 | + interfaces = [] |
724 | + for int_type in int_types: |
725 | + cmd = ['ip', 'addr', 'show', 'label', int_type + '*'] |
726 | + ip_output = subprocess.check_output(cmd).split('\n') |
727 | + ip_output = (line for line in ip_output if line) |
728 | + for line in ip_output: |
729 | + if line.split()[1].startswith(int_type): |
730 | + interfaces.append(line.split()[1].replace(":", "")) |
731 | + return interfaces |
732 | + |
733 | + |
734 | +def set_nic_mtu(nic, mtu): |
735 | + '''Set MTU on a network interface''' |
736 | + cmd = ['ip', 'link', 'set', nic, 'mtu', mtu] |
737 | + subprocess.check_call(cmd) |
738 | + |
739 | + |
740 | +def get_nic_mtu(nic): |
741 | + cmd = ['ip', 'addr', 'show', nic] |
742 | + ip_output = subprocess.check_output(cmd).split('\n') |
743 | + mtu = "" |
744 | + for line in ip_output: |
745 | + words = line.split() |
746 | + if 'mtu' in words: |
747 | + mtu = words[words.index("mtu") + 1] |
748 | + return mtu |
749 | + |
750 | + |
751 | +def get_nic_hwaddr(nic): |
752 | + cmd = ['ip', '-o', '-0', 'addr', 'show', nic] |
753 | + ip_output = subprocess.check_output(cmd) |
754 | + hwaddr = "" |
755 | + words = ip_output.split() |
756 | + if 'link/ether' in words: |
757 | + hwaddr = words[words.index('link/ether') + 1] |
758 | + return hwaddr |
759 | |
760 | === added directory 'hooks/charmhelpers/fetch' |
761 | === added file 'hooks/charmhelpers/fetch/__init__.py' |
762 | --- hooks/charmhelpers/fetch/__init__.py 1970-01-01 00:00:00 +0000 |
763 | +++ hooks/charmhelpers/fetch/__init__.py 2014-04-11 22:06:38 +0000 |
764 | @@ -0,0 +1,308 @@ |
765 | +import importlib |
766 | +from yaml import safe_load |
767 | +from charmhelpers.core.host import ( |
768 | + lsb_release |
769 | +) |
770 | +from urlparse import ( |
771 | + urlparse, |
772 | + urlunparse, |
773 | +) |
774 | +import subprocess |
775 | +from charmhelpers.core.hookenv import ( |
776 | + config, |
777 | + log, |
778 | +) |
779 | +import apt_pkg |
780 | +import os |
781 | + |
782 | +CLOUD_ARCHIVE = """# Ubuntu Cloud Archive |
783 | +deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main |
784 | +""" |
785 | +PROPOSED_POCKET = """# Proposed |
786 | +deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted |
787 | +""" |
788 | +CLOUD_ARCHIVE_POCKETS = { |
789 | + # Folsom |
790 | + 'folsom': 'precise-updates/folsom', |
791 | + 'precise-folsom': 'precise-updates/folsom', |
792 | + 'precise-folsom/updates': 'precise-updates/folsom', |
793 | + 'precise-updates/folsom': 'precise-updates/folsom', |
794 | + 'folsom/proposed': 'precise-proposed/folsom', |
795 | + 'precise-folsom/proposed': 'precise-proposed/folsom', |
796 | + 'precise-proposed/folsom': 'precise-proposed/folsom', |
797 | + # Grizzly |
798 | + 'grizzly': 'precise-updates/grizzly', |
799 | + 'precise-grizzly': 'precise-updates/grizzly', |
800 | + 'precise-grizzly/updates': 'precise-updates/grizzly', |
801 | + 'precise-updates/grizzly': 'precise-updates/grizzly', |
802 | + 'grizzly/proposed': 'precise-proposed/grizzly', |
803 | + 'precise-grizzly/proposed': 'precise-proposed/grizzly', |
804 | + 'precise-proposed/grizzly': 'precise-proposed/grizzly', |
805 | + # Havana |
806 | + 'havana': 'precise-updates/havana', |
807 | + 'precise-havana': 'precise-updates/havana', |
808 | + 'precise-havana/updates': 'precise-updates/havana', |
809 | + 'precise-updates/havana': 'precise-updates/havana', |
810 | + 'havana/proposed': 'precise-proposed/havana', |
811 | + 'precise-havana/proposed': 'precise-proposed/havana', |
812 | + 'precise-proposed/havana': 'precise-proposed/havana', |
813 | + # Icehouse |
814 | + 'icehouse': 'precise-updates/icehouse', |
815 | + 'precise-icehouse': 'precise-updates/icehouse', |
816 | + 'precise-icehouse/updates': 'precise-updates/icehouse', |
817 | + 'precise-updates/icehouse': 'precise-updates/icehouse', |
818 | + 'icehouse/proposed': 'precise-proposed/icehouse', |
819 | + 'precise-icehouse/proposed': 'precise-proposed/icehouse', |
820 | + 'precise-proposed/icehouse': 'precise-proposed/icehouse', |
821 | +} |
822 | + |
823 | + |
824 | +def filter_installed_packages(packages): |
825 | + """Returns a list of packages that require installation""" |
826 | + apt_pkg.init() |
827 | + cache = apt_pkg.Cache() |
828 | + _pkgs = [] |
829 | + for package in packages: |
830 | + try: |
831 | + p = cache[package] |
832 | + p.current_ver or _pkgs.append(package) |
833 | + except KeyError: |
834 | + log('Package {} has no installation candidate.'.format(package), |
835 | + level='WARNING') |
836 | + _pkgs.append(package) |
837 | + return _pkgs |
838 | + |
839 | + |
840 | +def apt_install(packages, options=None, fatal=False): |
841 | + """Install one or more packages""" |
842 | + if options is None: |
843 | + options = ['--option=Dpkg::Options::=--force-confold'] |
844 | + |
845 | + cmd = ['apt-get', '--assume-yes'] |
846 | + cmd.extend(options) |
847 | + cmd.append('install') |
848 | + if isinstance(packages, basestring): |
849 | + cmd.append(packages) |
850 | + else: |
851 | + cmd.extend(packages) |
852 | + log("Installing {} with options: {}".format(packages, |
853 | + options)) |
854 | + env = os.environ.copy() |
855 | + if 'DEBIAN_FRONTEND' not in env: |
856 | + env['DEBIAN_FRONTEND'] = 'noninteractive' |
857 | + |
858 | + if fatal: |
859 | + subprocess.check_call(cmd, env=env) |
860 | + else: |
861 | + subprocess.call(cmd, env=env) |
862 | + |
863 | + |
864 | +def apt_upgrade(options=None, fatal=False, dist=False): |
865 | + """Upgrade all packages""" |
866 | + if options is None: |
867 | + options = ['--option=Dpkg::Options::=--force-confold'] |
868 | + |
869 | + cmd = ['apt-get', '--assume-yes'] |
870 | + cmd.extend(options) |
871 | + if dist: |
872 | + cmd.append('dist-upgrade') |
873 | + else: |
874 | + cmd.append('upgrade') |
875 | + log("Upgrading with options: {}".format(options)) |
876 | + |
877 | + env = os.environ.copy() |
878 | + if 'DEBIAN_FRONTEND' not in env: |
879 | + env['DEBIAN_FRONTEND'] = 'noninteractive' |
880 | + |
881 | + if fatal: |
882 | + subprocess.check_call(cmd, env=env) |
883 | + else: |
884 | + subprocess.call(cmd, env=env) |
885 | + |
886 | + |
887 | +def apt_update(fatal=False): |
888 | + """Update local apt cache""" |
889 | + cmd = ['apt-get', 'update'] |
890 | + if fatal: |
891 | + subprocess.check_call(cmd) |
892 | + else: |
893 | + subprocess.call(cmd) |
894 | + |
895 | + |
896 | +def apt_purge(packages, fatal=False): |
897 | + """Purge one or more packages""" |
898 | + cmd = ['apt-get', '--assume-yes', 'purge'] |
899 | + if isinstance(packages, basestring): |
900 | + cmd.append(packages) |
901 | + else: |
902 | + cmd.extend(packages) |
903 | + log("Purging {}".format(packages)) |
904 | + if fatal: |
905 | + subprocess.check_call(cmd) |
906 | + else: |
907 | + subprocess.call(cmd) |
908 | + |
909 | + |
910 | +def apt_hold(packages, fatal=False): |
911 | + """Hold one or more packages""" |
912 | + cmd = ['apt-mark', 'hold'] |
913 | + if isinstance(packages, basestring): |
914 | + cmd.append(packages) |
915 | + else: |
916 | + cmd.extend(packages) |
917 | + log("Holding {}".format(packages)) |
918 | + if fatal: |
919 | + subprocess.check_call(cmd) |
920 | + else: |
921 | + subprocess.call(cmd) |
922 | + |
923 | + |
924 | +def add_source(source, key=None): |
925 | + if source is None: |
926 | + log('Source is not present. Skipping') |
927 | + return |
928 | + |
929 | + if (source.startswith('ppa:') or |
930 | + source.startswith('http') or |
931 | + source.startswith('deb ') or |
932 | + source.startswith('cloud-archive:')): |
933 | + subprocess.check_call(['add-apt-repository', '--yes', source]) |
934 | + elif source.startswith('cloud:'): |
935 | + apt_install(filter_installed_packages(['ubuntu-cloud-keyring']), |
936 | + fatal=True) |
937 | + pocket = source.split(':')[-1] |
938 | + if pocket not in CLOUD_ARCHIVE_POCKETS: |
939 | + raise SourceConfigError( |
940 | + 'Unsupported cloud: source option %s' % |
941 | + pocket) |
942 | + actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket] |
943 | + with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt: |
944 | + apt.write(CLOUD_ARCHIVE.format(actual_pocket)) |
945 | + elif source == 'proposed': |
946 | + release = lsb_release()['DISTRIB_CODENAME'] |
947 | + with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt: |
948 | + apt.write(PROPOSED_POCKET.format(release)) |
949 | + if key: |
950 | + subprocess.check_call(['apt-key', 'adv', '--keyserver', |
951 | + 'keyserver.ubuntu.com', '--recv', |
952 | + key]) |
953 | + |
954 | + |
955 | +class SourceConfigError(Exception): |
956 | + pass |
957 | + |
958 | + |
959 | +def configure_sources(update=False, |
960 | + sources_var='install_sources', |
961 | + keys_var='install_keys'): |
962 | + """ |
963 | + Configure multiple sources from charm configuration |
964 | + |
965 | + Example config: |
966 | + install_sources: |
967 | + - "ppa:foo" |
968 | + - "http://example.com/repo precise main" |
969 | + install_keys: |
970 | + - null |
971 | + - "a1b2c3d4" |
972 | + |
973 | + Note that 'null' (a.k.a. None) should not be quoted. |
974 | + """ |
975 | + sources = safe_load(config(sources_var)) |
976 | + keys = config(keys_var) |
977 | + if keys is not None: |
978 | + keys = safe_load(keys) |
979 | + if isinstance(sources, basestring) and ( |
980 | + keys is None or isinstance(keys, basestring)): |
981 | + add_source(sources, keys) |
982 | + else: |
983 | + if not len(sources) == len(keys): |
984 | + msg = 'Install sources and keys lists are different lengths' |
985 | + raise SourceConfigError(msg) |
986 | + for src_num in range(len(sources)): |
987 | + add_source(sources[src_num], keys[src_num]) |
988 | + if update: |
989 | + apt_update(fatal=True) |
990 | + |
991 | +# The order of this list is very important. Handlers should be listed in from |
992 | +# least- to most-specific URL matching. |
993 | +FETCH_HANDLERS = ( |
994 | + 'charmhelpers.fetch.archiveurl.ArchiveUrlFetchHandler', |
995 | + 'charmhelpers.fetch.bzrurl.BzrUrlFetchHandler', |
996 | +) |
997 | + |
998 | + |
999 | +class UnhandledSource(Exception): |
1000 | + pass |
1001 | + |
1002 | + |
1003 | +def install_remote(source): |
1004 | + """ |
1005 | + Install a file tree from a remote source |
1006 | + |
1007 | + The specified source should be a url of the form: |
1008 | + scheme://[host]/path[#[option=value][&...]] |
1009 | + |
1010 | + Schemes supported are based on this module's submodules
1011 | + Options supported are submodule-specific""" |
1012 | + # We ONLY check for True here because can_handle may return a string |
1013 | + # explaining why it can't handle a given source. |
1014 | + handlers = [h for h in plugins() if h.can_handle(source) is True] |
1015 | + installed_to = None |
1016 | + for handler in handlers: |
1017 | + try: |
1018 | + installed_to = handler.install(source) |
1019 | + except UnhandledSource: |
1020 | + pass |
1021 | + if not installed_to: |
1022 | + raise UnhandledSource("No handler found for source {}".format(source)) |
1023 | + return installed_to |
1024 | + |
1025 | + |
1026 | +def install_from_config(config_var_name): |
1027 | + charm_config = config() |
1028 | + source = charm_config[config_var_name] |
1029 | + return install_remote(source) |
1030 | + |
1031 | + |
1032 | +class BaseFetchHandler(object): |
1033 | + |
1034 | + """Base class for FetchHandler implementations in fetch plugins""" |
1035 | + |
1036 | + def can_handle(self, source): |
1037 | + """Returns True if the source can be handled. Otherwise returns |
1038 | + a string explaining why it cannot""" |
1039 | + return "Wrong source type" |
1040 | + |
1041 | + def install(self, source): |
1042 | + """Try to download and unpack the source. Return the path to the |
1043 | + unpacked files or raise UnhandledSource.""" |
1044 | + raise UnhandledSource("Wrong source type {}".format(source)) |
1045 | + |
1046 | + def parse_url(self, url): |
1047 | + return urlparse(url) |
1048 | + |
1049 | + def base_url(self, url): |
1050 | + """Return url without querystring or fragment""" |
1051 | + parts = list(self.parse_url(url)) |
1052 | + parts[4:] = ['' for i in parts[4:]] |
1053 | + return urlunparse(parts) |
1054 | + |
1055 | + |
1056 | +def plugins(fetch_handlers=None): |
1057 | + if not fetch_handlers: |
1058 | + fetch_handlers = FETCH_HANDLERS |
1059 | + plugin_list = [] |
1060 | + for handler_name in fetch_handlers: |
1061 | + package, classname = handler_name.rsplit('.', 1) |
1062 | + try: |
1063 | + handler_class = getattr( |
1064 | + importlib.import_module(package), |
1065 | + classname) |
1066 | + plugin_list.append(handler_class()) |
1067 | + except (ImportError, AttributeError): |
1068 | + # Skip missing plugins so that they can be omitted from
1069 | + # installation if desired |
1070 | + log("FetchHandler {} not found, skipping plugin".format( |
1071 | + handler_name)) |
1072 | + return plugin_list |
1073 | |
1074 | === added file 'hooks/charmhelpers/fetch/archiveurl.py' |
1075 | --- hooks/charmhelpers/fetch/archiveurl.py 1970-01-01 00:00:00 +0000 |
1076 | +++ hooks/charmhelpers/fetch/archiveurl.py 2014-04-11 22:06:38 +0000 |
1077 | @@ -0,0 +1,63 @@ |
1078 | +import os |
1079 | +import urllib2 |
1080 | +import urlparse |
1081 | + |
1082 | +from charmhelpers.fetch import ( |
1083 | + BaseFetchHandler, |
1084 | + UnhandledSource |
1085 | +) |
1086 | +from charmhelpers.payload.archive import ( |
1087 | + get_archive_handler, |
1088 | + extract, |
1089 | +) |
1090 | +from charmhelpers.core.host import mkdir |
1091 | + |
1092 | + |
1093 | +class ArchiveUrlFetchHandler(BaseFetchHandler): |
1094 | + """Handler for archives via generic URLs""" |
1095 | + def can_handle(self, source): |
1096 | + url_parts = self.parse_url(source) |
1097 | + if url_parts.scheme not in ('http', 'https', 'ftp', 'file'): |
1098 | + return "Wrong source type" |
1099 | + if get_archive_handler(self.base_url(source)): |
1100 | + return True |
1101 | + return False |
1102 | + |
1103 | + def download(self, source, dest): |
1104 | + # propagate all exceptions
1105 | + # URLError, OSError, etc |
1106 | + proto, netloc, path, params, query, fragment = urlparse.urlparse(source) |
1107 | + if proto in ('http', 'https'): |
1108 | + auth, barehost = urllib2.splituser(netloc) |
1109 | + if auth is not None: |
1110 | + source = urlparse.urlunparse((proto, barehost, path, params, query, fragment)) |
1111 | + username, password = urllib2.splitpasswd(auth) |
1112 | + passman = urllib2.HTTPPasswordMgrWithDefaultRealm() |
1113 | + # Realm is set to None in add_password to force the username and password |
1114 | + # to be used whatever the realm |
1115 | + passman.add_password(None, source, username, password) |
1116 | + authhandler = urllib2.HTTPBasicAuthHandler(passman) |
1117 | + opener = urllib2.build_opener(authhandler) |
1118 | + urllib2.install_opener(opener) |
1119 | + response = urllib2.urlopen(source) |
1120 | + try: |
1121 | + with open(dest, 'w') as dest_file: |
1122 | + dest_file.write(response.read()) |
1123 | + except Exception as e: |
1124 | + if os.path.isfile(dest): |
1125 | + os.unlink(dest) |
1126 | + raise e |
1127 | + |
1128 | + def install(self, source): |
1129 | + url_parts = self.parse_url(source) |
1130 | + dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched') |
1131 | + if not os.path.exists(dest_dir): |
1132 | + mkdir(dest_dir, perms=0755) |
1133 | + dld_file = os.path.join(dest_dir, os.path.basename(url_parts.path)) |
1134 | + try: |
1135 | + self.download(source, dld_file) |
1136 | + except urllib2.URLError as e: |
1137 | + raise UnhandledSource(e.reason) |
1138 | + except OSError as e: |
1139 | + raise UnhandledSource(e.strerror) |
1140 | + return extract(dld_file) |
1141 | |
1142 | === added file 'hooks/charmhelpers/fetch/bzrurl.py' |
1143 | --- hooks/charmhelpers/fetch/bzrurl.py 1970-01-01 00:00:00 +0000 |
1144 | +++ hooks/charmhelpers/fetch/bzrurl.py 2014-04-11 22:06:38 +0000 |
1145 | @@ -0,0 +1,49 @@ |
1146 | +import os |
1147 | +from charmhelpers.fetch import ( |
1148 | + BaseFetchHandler, |
1149 | + UnhandledSource |
1150 | +) |
1151 | +from charmhelpers.core.host import mkdir |
1152 | + |
1153 | +try: |
1154 | + from bzrlib.branch import Branch |
1155 | +except ImportError: |
1156 | + from charmhelpers.fetch import apt_install |
1157 | + apt_install("python-bzrlib") |
1158 | + from bzrlib.branch import Branch |
1159 | + |
1160 | + |
1161 | +class BzrUrlFetchHandler(BaseFetchHandler): |
1162 | + """Handler for bazaar branches via generic and lp URLs""" |
1163 | + def can_handle(self, source): |
1164 | + url_parts = self.parse_url(source) |
1165 | + if url_parts.scheme not in ('bzr+ssh', 'lp'): |
1166 | + return False |
1167 | + else: |
1168 | + return True |
1169 | + |
1170 | + def branch(self, source, dest): |
1171 | + url_parts = self.parse_url(source) |
1172 | + # If we use lp:branchname scheme we need to load plugins |
1173 | + if not self.can_handle(source): |
1174 | + raise UnhandledSource("Cannot handle {}".format(source)) |
1175 | + if url_parts.scheme == "lp": |
1176 | + from bzrlib.plugin import load_plugins |
1177 | + load_plugins() |
1178 | + try: |
1179 | + remote_branch = Branch.open(source) |
1180 | + remote_branch.bzrdir.sprout(dest).open_branch() |
1181 | + except Exception as e: |
1182 | + raise e |
1183 | + |
1184 | + def install(self, source): |
1185 | + url_parts = self.parse_url(source) |
1186 | + branch_name = url_parts.path.strip("/").split("/")[-1] |
1187 | + dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", branch_name) |
1188 | + if not os.path.exists(dest_dir): |
1189 | + mkdir(dest_dir, perms=0755) |
1190 | + try: |
1191 | + self.branch(source, dest_dir) |
1192 | + except OSError as e: |
1193 | + raise UnhandledSource(e.strerror) |
1194 | + return dest_dir |
1195 | |
1196 | === modified file 'hooks/hooks.py' |
1197 | --- hooks/hooks.py 2014-03-06 07:55:30 +0000 |
1198 | +++ hooks/hooks.py 2014-04-11 22:06:38 +0000 |
1199 | @@ -24,6 +24,15 @@ |
1200 | from textwrap import dedent |
1201 | from yaml.constructor import ConstructorError |
1202 | |
1203 | +from charmhelpers.fetch import ( |
1204 | + add_source, |
1205 | + apt_update, |
1206 | + apt_install |
1207 | +) |
1208 | +from charmhelpers.core.hookenv import ( |
1209 | + config |
1210 | +) |
1211 | + |
1212 | |
1213 | ############################################################################### |
1214 | # Global variables |
1215 | @@ -204,22 +213,6 @@ |
1216 | |
1217 | |
1218 | #------------------------------------------------------------------------------ |
1219 | -# apt_get_install( package ): Installs a package |
1220 | -#------------------------------------------------------------------------------ |
1221 | -def apt_get_install(packages=None): |
1222 | - juju_log("apt_get_install: %s" % packages) |
1223 | - if packages is None: |
1224 | - return(False) |
1225 | - if isinstance(packages, str): |
1226 | - packages = [packages] |
1227 | - cmd_line = ['apt-get', '-y', 'install', '-qq'] |
1228 | - cmd_line.extend(packages) |
1229 | - retVal = subprocess.call(cmd_line) == 0 |
1230 | - juju_log("apt_get_install %s returns: %s" % (packages, retVal)) |
1231 | - return(retVal) |
1232 | - |
1233 | - |
1234 | -#------------------------------------------------------------------------------ |
1235 | # open_port: Convenience function to open a port in juju to |
1236 | # expose a service |
1237 | #------------------------------------------------------------------------------ |
1238 | @@ -910,11 +903,10 @@ |
1239 | ############################################################################### |
1240 | def install_hook(): |
1241 | juju_log("Installing mongodb") |
1242 | - if not apt_get_install(['mongodb', 'python-yaml']): |
1243 | - juju_log("Installation of mongodb failed.") |
1244 | - return(False) |
1245 | - else: |
1246 | - return(True) |
1247 | + add_source(config('source'), config('key')) |
1248 | + apt_update(fatal=True) |
1249 | + apt_install(packages=['mongodb', 'python-yaml'], fatal=True) |
1250 | + return True |
1251 | |
1252 | |
1253 | def config_changed(): |
1254 | |
1255 | === removed symlink 'tests/hooks.py' |
1256 | === target was u'../hooks/hooks.py' |
1257 | === modified file 'tests/test_write_log_rotate_config.py' |
1258 | --- tests/test_write_log_rotate_config.py 2013-12-11 15:53:29 +0000 |
1259 | +++ tests/test_write_log_rotate_config.py 2014-04-11 22:06:38 +0000 |
1260 | @@ -1,8 +1,10 @@ |
1261 | -import hooks |
1262 | import mock |
1263 | import os |
1264 | import unittest |
1265 | import tempfile |
1266 | +import sys |
1267 | +sys.path.append('hooks') |
1268 | +import hooks |
1269 | |
1270 | |
1271 | class TestWriteLogrotateConfigFile(unittest.TestCase): |
Reviewing now.
-Juan