Merge lp:~tribaal/charms/trusty/ntpmaster/resync-charm-helpers into lp:charms/trusty/ntpmaster

Proposed by Chris Glass
Status: Merged
Merged at revision: 11
Proposed branch: lp:~tribaal/charms/trusty/ntpmaster/resync-charm-helpers
Merge into: lp:charms/trusty/ntpmaster
Diff against target: 2155 lines (+1453/-133)
12 files modified
.bzrignore (+1/-0)
Makefile (+8/-2)
hooks/charmhelpers/core/fstab.py (+116/-0)
hooks/charmhelpers/core/hookenv.py (+215/-29)
hooks/charmhelpers/core/host.py (+167/-23)
hooks/charmhelpers/core/services/__init__.py (+2/-0)
hooks/charmhelpers/core/services/base.py (+313/-0)
hooks/charmhelpers/core/services/helpers.py (+239/-0)
hooks/charmhelpers/core/templating.py (+51/-0)
hooks/charmhelpers/fetch/__init__.py (+274/-73)
hooks/charmhelpers/fetch/archiveurl.py (+64/-4)
hooks/charmhelpers/fetch/bzrurl.py (+3/-2)
To merge this branch: bzr merge lp:~tribaal/charms/trusty/ntpmaster/resync-charm-helpers
Reviewer: David Britton (community)
Status: Approve
Review via email: mp+236071@code.launchpad.net

Description of the change

This branch resyncs charm-helpers to pull in a fix for the apt cache not being built in memory, which was causing locking race conditions (see https://bugs.launchpad.net/charms/+source/ceph/+bug/1346489 for a charm with the same problem and fix).
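
For context, the relevant change in the resynced charm-helpers is the new apt_cache() helper in hooks/charmhelpers/fetch/__init__.py (see the diff below), which builds the package cache in memory instead of on disk. A minimal sketch of how hook code uses it after this resync; the 'ntp' package name is only an illustration, not something this branch installs:

from charmhelpers.fetch import apt_cache, apt_install, filter_installed_packages

# apt_cache() now blanks Dir::Cache::pkgcache and Dir::Cache::srcpkgcache
# before building, so the cache lives in memory and concurrent hooks no
# longer race for the on-disk cache files.
cache = apt_cache()

# filter_installed_packages() and apt_install() go through the same helper;
# 'ntp' below is only an illustrative package name.
missing = filter_installed_packages(['ntp'])
if missing:
    apt_install(missing, fatal=True)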

Revision history for this message
David Britton (dpb) wrote :

Looks great Chris, committed.

review: Approve

Preview Diff

=== added file '.bzrignore'
--- .bzrignore 1970-01-01 00:00:00 +0000
+++ .bzrignore 2014-09-26 08:01:25 +0000
@@ -0,0 +1,1 @@
1bin/
02
=== modified file 'Makefile'
--- Makefile 2014-08-11 08:11:00 +0000
+++ Makefile 2014-09-26 08:01:25 +0000
@@ -1,11 +1,17 @@
1#!/usr/bin/make1#!/usr/bin/make
2PYTHON := /usr/bin/env python
23
3lint:4lint:
4 @flake8 --exclude hooks/charmhelpers hooks5 @flake8 --exclude hooks/charmhelpers hooks
5 @charm proof6 @charm proof
67
7sync:8bin/charm_helpers_sync.py:
8 @charm-helper-sync -c charm-helpers-sync.yaml9 @mkdir -p bin
10 @bzr cat lp:charm-helpers/tools/charm_helpers_sync/charm_helpers_sync.py \
11 > bin/charm_helpers_sync.py
12
13sync: bin/charm_helpers_sync.py
14 $(PYTHON) bin/charm_helpers_sync.py -c charm-helpers-sync.yaml
915
10publish: lint16publish: lint
11 bzr push lp:charms/ntpmaster17 bzr push lp:charms/ntpmaster
1218
=== added file 'hooks/charmhelpers/core/fstab.py'
--- hooks/charmhelpers/core/fstab.py 1970-01-01 00:00:00 +0000
+++ hooks/charmhelpers/core/fstab.py 2014-09-26 08:01:25 +0000
@@ -0,0 +1,116 @@
1#!/usr/bin/env python
2# -*- coding: utf-8 -*-
3
4__author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>'
5
6import os
7
8
9class Fstab(file):
10 """This class extends file in order to implement a file reader/writer
11 for file `/etc/fstab`
12 """
13
14 class Entry(object):
15 """Entry class represents a non-comment line on the `/etc/fstab` file
16 """
17 def __init__(self, device, mountpoint, filesystem,
18 options, d=0, p=0):
19 self.device = device
20 self.mountpoint = mountpoint
21 self.filesystem = filesystem
22
23 if not options:
24 options = "defaults"
25
26 self.options = options
27 self.d = d
28 self.p = p
29
30 def __eq__(self, o):
31 return str(self) == str(o)
32
33 def __str__(self):
34 return "{} {} {} {} {} {}".format(self.device,
35 self.mountpoint,
36 self.filesystem,
37 self.options,
38 self.d,
39 self.p)
40
41 DEFAULT_PATH = os.path.join(os.path.sep, 'etc', 'fstab')
42
43 def __init__(self, path=None):
44 if path:
45 self._path = path
46 else:
47 self._path = self.DEFAULT_PATH
48 file.__init__(self, self._path, 'r+')
49
50 def _hydrate_entry(self, line):
51 # NOTE: use split with no arguments to split on any
52 # whitespace including tabs
53 return Fstab.Entry(*filter(
54 lambda x: x not in ('', None),
55 line.strip("\n").split()))
56
57 @property
58 def entries(self):
59 self.seek(0)
60 for line in self.readlines():
61 try:
62 if not line.startswith("#"):
63 yield self._hydrate_entry(line)
64 except ValueError:
65 pass
66
67 def get_entry_by_attr(self, attr, value):
68 for entry in self.entries:
69 e_attr = getattr(entry, attr)
70 if e_attr == value:
71 return entry
72 return None
73
74 def add_entry(self, entry):
75 if self.get_entry_by_attr('device', entry.device):
76 return False
77
78 self.write(str(entry) + '\n')
79 self.truncate()
80 return entry
81
82 def remove_entry(self, entry):
83 self.seek(0)
84
85 lines = self.readlines()
86
87 found = False
88 for index, line in enumerate(lines):
89 if not line.startswith("#"):
90 if self._hydrate_entry(line) == entry:
91 found = True
92 break
93
94 if not found:
95 return False
96
97 lines.remove(line)
98
99 self.seek(0)
100 self.write(''.join(lines))
101 self.truncate()
102 return True
103
104 @classmethod
105 def remove_by_mountpoint(cls, mountpoint, path=None):
106 fstab = cls(path=path)
107 entry = fstab.get_entry_by_attr('mountpoint', mountpoint)
108 if entry:
109 return fstab.remove_entry(entry)
110 return False
111
112 @classmethod
113 def add(cls, device, mountpoint, filesystem, options=None, path=None):
114 return cls(path=path).add_entry(Fstab.Entry(device,
115 mountpoint, filesystem,
116 options=options))
0117
=== modified file 'hooks/charmhelpers/core/hookenv.py'
--- hooks/charmhelpers/core/hookenv.py 2013-08-29 18:39:36 +0000
+++ hooks/charmhelpers/core/hookenv.py 2014-09-26 08:01:25 +0000
@@ -8,7 +8,9 @@
8import json8import json
9import yaml9import yaml
10import subprocess10import subprocess
11import sys
11import UserDict12import UserDict
13from subprocess import CalledProcessError
1214
13CRITICAL = "CRITICAL"15CRITICAL = "CRITICAL"
14ERROR = "ERROR"16ERROR = "ERROR"
@@ -21,9 +23,9 @@
2123
2224
23def cached(func):25def cached(func):
24 ''' Cache return values for multiple executions of func + args26 """Cache return values for multiple executions of func + args
2527
26 For example:28 For example::
2729
28 @cached30 @cached
29 def unit_get(attribute):31 def unit_get(attribute):
@@ -32,7 +34,7 @@
32 unit_get('test')34 unit_get('test')
3335
34 will cache the result of unit_get + 'test' for future calls.36 will cache the result of unit_get + 'test' for future calls.
35 '''37 """
36 def wrapper(*args, **kwargs):38 def wrapper(*args, **kwargs):
37 global cache39 global cache
38 key = str((func, args, kwargs))40 key = str((func, args, kwargs))
@@ -46,8 +48,8 @@
4648
4749
48def flush(key):50def flush(key):
49 ''' Flushes any entries from function cache where the51 """Flushes any entries from function cache where the
50 key is found in the function+args '''52 key is found in the function+args """
51 flush_list = []53 flush_list = []
52 for item in cache:54 for item in cache:
53 if key in item:55 if key in item:
@@ -57,7 +59,7 @@
5759
5860
59def log(message, level=None):61def log(message, level=None):
60 "Write a message to the juju log"62 """Write a message to the juju log"""
61 command = ['juju-log']63 command = ['juju-log']
62 if level:64 if level:
63 command += ['-l', level]65 command += ['-l', level]
@@ -66,7 +68,7 @@
6668
6769
68class Serializable(UserDict.IterableUserDict):70class Serializable(UserDict.IterableUserDict):
69 "Wrapper, an object that can be serialized to yaml or json"71 """Wrapper, an object that can be serialized to yaml or json"""
7072
71 def __init__(self, obj):73 def __init__(self, obj):
72 # wrap the object74 # wrap the object
@@ -96,11 +98,11 @@
96 self.data = state98 self.data = state
9799
98 def json(self):100 def json(self):
99 "Serialize the object to json"101 """Serialize the object to json"""
100 return json.dumps(self.data)102 return json.dumps(self.data)
101103
102 def yaml(self):104 def yaml(self):
103 "Serialize the object to yaml"105 """Serialize the object to yaml"""
104 return yaml.dump(self.data)106 return yaml.dump(self.data)
105107
106108
@@ -119,50 +121,174 @@
119121
120122
121def in_relation_hook():123def in_relation_hook():
122 "Determine whether we're running in a relation hook"124 """Determine whether we're running in a relation hook"""
123 return 'JUJU_RELATION' in os.environ125 return 'JUJU_RELATION' in os.environ
124126
125127
126def relation_type():128def relation_type():
127 "The scope for the current relation hook"129 """The scope for the current relation hook"""
128 return os.environ.get('JUJU_RELATION', None)130 return os.environ.get('JUJU_RELATION', None)
129131
130132
131def relation_id():133def relation_id():
132 "The relation ID for the current relation hook"134 """The relation ID for the current relation hook"""
133 return os.environ.get('JUJU_RELATION_ID', None)135 return os.environ.get('JUJU_RELATION_ID', None)
134136
135137
136def local_unit():138def local_unit():
137 "Local unit ID"139 """Local unit ID"""
138 return os.environ['JUJU_UNIT_NAME']140 return os.environ['JUJU_UNIT_NAME']
139141
140142
141def remote_unit():143def remote_unit():
142 "The remote unit for the current relation hook"144 """The remote unit for the current relation hook"""
143 return os.environ['JUJU_REMOTE_UNIT']145 return os.environ['JUJU_REMOTE_UNIT']
144146
145147
146def service_name():148def service_name():
147 "The name service group this unit belongs to"149 """The name service group this unit belongs to"""
148 return local_unit().split('/')[0]150 return local_unit().split('/')[0]
149151
150152
153def hook_name():
154 """The name of the currently executing hook"""
155 return os.path.basename(sys.argv[0])
156
157
158class Config(dict):
159 """A dictionary representation of the charm's config.yaml, with some
160 extra features:
161
162 - See which values in the dictionary have changed since the previous hook.
163 - For values that have changed, see what the previous value was.
164 - Store arbitrary data for use in a later hook.
165
166 NOTE: Do not instantiate this object directly - instead call
167 ``hookenv.config()``, which will return an instance of :class:`Config`.
168
169 Example usage::
170
171 >>> # inside a hook
172 >>> from charmhelpers.core import hookenv
173 >>> config = hookenv.config()
174 >>> config['foo']
175 'bar'
176 >>> # store a new key/value for later use
177 >>> config['mykey'] = 'myval'
178
179
180 >>> # user runs `juju set mycharm foo=baz`
181 >>> # now we're inside subsequent config-changed hook
182 >>> config = hookenv.config()
183 >>> config['foo']
184 'baz'
185 >>> # test to see if this val has changed since last hook
186 >>> config.changed('foo')
187 True
188 >>> # what was the previous value?
189 >>> config.previous('foo')
190 'bar'
191 >>> # keys/values that we add are preserved across hooks
192 >>> config['mykey']
193 'myval'
194
195 """
196 CONFIG_FILE_NAME = '.juju-persistent-config'
197
198 def __init__(self, *args, **kw):
199 super(Config, self).__init__(*args, **kw)
200 self.implicit_save = True
201 self._prev_dict = None
202 self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME)
203 if os.path.exists(self.path):
204 self.load_previous()
205
206 def __getitem__(self, key):
207 """For regular dict lookups, check the current juju config first,
208 then the previous (saved) copy. This ensures that user-saved values
209 will be returned by a dict lookup.
210
211 """
212 try:
213 return dict.__getitem__(self, key)
214 except KeyError:
215 return (self._prev_dict or {})[key]
216
217 def load_previous(self, path=None):
218 """Load previous copy of config from disk.
219
220 In normal usage you don't need to call this method directly - it
221 is called automatically at object initialization.
222
223 :param path:
224
225 File path from which to load the previous config. If `None`,
226 config is loaded from the default location. If `path` is
227 specified, subsequent `save()` calls will write to the same
228 path.
229
230 """
231 self.path = path or self.path
232 with open(self.path) as f:
233 self._prev_dict = json.load(f)
234
235 def changed(self, key):
236 """Return True if the current value for this key is different from
237 the previous value.
238
239 """
240 if self._prev_dict is None:
241 return True
242 return self.previous(key) != self.get(key)
243
244 def previous(self, key):
245 """Return previous value for this key, or None if there
246 is no previous value.
247
248 """
249 if self._prev_dict:
250 return self._prev_dict.get(key)
251 return None
252
253 def save(self):
254 """Save this config to disk.
255
256 If the charm is using the :mod:`Services Framework <services.base>`
257 or :meth:'@hook <Hooks.hook>' decorator, this
258 is called automatically at the end of successful hook execution.
259 Otherwise, it should be called directly by user code.
260
261 To disable automatic saves, set ``implicit_save=False`` on this
262 instance.
263
264 """
265 if self._prev_dict:
266 for k, v in self._prev_dict.iteritems():
267 if k not in self:
268 self[k] = v
269 with open(self.path, 'w') as f:
270 json.dump(self, f)
271
272
151@cached273@cached
152def config(scope=None):274def config(scope=None):
153 "Juju charm configuration"275 """Juju charm configuration"""
154 config_cmd_line = ['config-get']276 config_cmd_line = ['config-get']
155 if scope is not None:277 if scope is not None:
156 config_cmd_line.append(scope)278 config_cmd_line.append(scope)
157 config_cmd_line.append('--format=json')279 config_cmd_line.append('--format=json')
158 try:280 try:
159 return json.loads(subprocess.check_output(config_cmd_line))281 config_data = json.loads(subprocess.check_output(config_cmd_line))
282 if scope is not None:
283 return config_data
284 return Config(config_data)
160 except ValueError:285 except ValueError:
161 return None286 return None
162287
163288
164@cached289@cached
165def relation_get(attribute=None, unit=None, rid=None):290def relation_get(attribute=None, unit=None, rid=None):
291 """Get relation information"""
166 _args = ['relation-get', '--format=json']292 _args = ['relation-get', '--format=json']
167 if rid:293 if rid:
168 _args.append('-r')294 _args.append('-r')
@@ -174,9 +300,15 @@
174 return json.loads(subprocess.check_output(_args))300 return json.loads(subprocess.check_output(_args))
175 except ValueError:301 except ValueError:
176 return None302 return None
177303 except CalledProcessError, e:
178304 if e.returncode == 2:
179def relation_set(relation_id=None, relation_settings={}, **kwargs):305 return None
306 raise
307
308
309def relation_set(relation_id=None, relation_settings=None, **kwargs):
310 """Set relation information for the current unit"""
311 relation_settings = relation_settings if relation_settings else {}
180 relation_cmd_line = ['relation-set']312 relation_cmd_line = ['relation-set']
181 if relation_id is not None:313 if relation_id is not None:
182 relation_cmd_line.extend(('-r', relation_id))314 relation_cmd_line.extend(('-r', relation_id))
@@ -192,7 +324,7 @@
192324
193@cached325@cached
194def relation_ids(reltype=None):326def relation_ids(reltype=None):
195 "A list of relation_ids"327 """A list of relation_ids"""
196 reltype = reltype or relation_type()328 reltype = reltype or relation_type()
197 relid_cmd_line = ['relation-ids', '--format=json']329 relid_cmd_line = ['relation-ids', '--format=json']
198 if reltype is not None:330 if reltype is not None:
@@ -203,7 +335,7 @@
203335
204@cached336@cached
205def related_units(relid=None):337def related_units(relid=None):
206 "A list of related units"338 """A list of related units"""
207 relid = relid or relation_id()339 relid = relid or relation_id()
208 units_cmd_line = ['relation-list', '--format=json']340 units_cmd_line = ['relation-list', '--format=json']
209 if relid is not None:341 if relid is not None:
@@ -213,7 +345,7 @@
213345
214@cached346@cached
215def relation_for_unit(unit=None, rid=None):347def relation_for_unit(unit=None, rid=None):
216 "Get the json represenation of a unit's relation"348 """Get the json represenation of a unit's relation"""
217 unit = unit or remote_unit()349 unit = unit or remote_unit()
218 relation = relation_get(unit=unit, rid=rid)350 relation = relation_get(unit=unit, rid=rid)
219 for key in relation:351 for key in relation:
@@ -225,7 +357,7 @@
225357
226@cached358@cached
227def relations_for_id(relid=None):359def relations_for_id(relid=None):
228 "Get relations of a specific relation ID"360 """Get relations of a specific relation ID"""
229 relation_data = []361 relation_data = []
230 relid = relid or relation_ids()362 relid = relid or relation_ids()
231 for unit in related_units(relid):363 for unit in related_units(relid):
@@ -237,7 +369,7 @@
237369
238@cached370@cached
239def relations_of_type(reltype=None):371def relations_of_type(reltype=None):
240 "Get relations of a specific type"372 """Get relations of a specific type"""
241 relation_data = []373 relation_data = []
242 reltype = reltype or relation_type()374 reltype = reltype or relation_type()
243 for relid in relation_ids(reltype):375 for relid in relation_ids(reltype):
@@ -249,7 +381,7 @@
249381
250@cached382@cached
251def relation_types():383def relation_types():
252 "Get a list of relation types supported by this charm"384 """Get a list of relation types supported by this charm"""
253 charmdir = os.environ.get('CHARM_DIR', '')385 charmdir = os.environ.get('CHARM_DIR', '')
254 mdf = open(os.path.join(charmdir, 'metadata.yaml'))386 mdf = open(os.path.join(charmdir, 'metadata.yaml'))
255 md = yaml.safe_load(mdf)387 md = yaml.safe_load(mdf)
@@ -264,6 +396,7 @@
264396
265@cached397@cached
266def relations():398def relations():
399 """Get a nested dictionary of relation data for all related units"""
267 rels = {}400 rels = {}
268 for reltype in relation_types():401 for reltype in relation_types():
269 relids = {}402 relids = {}
@@ -277,15 +410,35 @@
277 return rels410 return rels
278411
279412
413@cached
414def is_relation_made(relation, keys='private-address'):
415 '''
416 Determine whether a relation is established by checking for
417 presence of key(s). If a list of keys is provided, they
418 must all be present for the relation to be identified as made
419 '''
420 if isinstance(keys, str):
421 keys = [keys]
422 for r_id in relation_ids(relation):
423 for unit in related_units(r_id):
424 context = {}
425 for k in keys:
426 context[k] = relation_get(k, rid=r_id,
427 unit=unit)
428 if None not in context.values():
429 return True
430 return False
431
432
280def open_port(port, protocol="TCP"):433def open_port(port, protocol="TCP"):
281 "Open a service network port"434 """Open a service network port"""
282 _args = ['open-port']435 _args = ['open-port']
283 _args.append('{}/{}'.format(port, protocol))436 _args.append('{}/{}'.format(port, protocol))
284 subprocess.check_call(_args)437 subprocess.check_call(_args)
285438
286439
287def close_port(port, protocol="TCP"):440def close_port(port, protocol="TCP"):
288 "Close a service network port"441 """Close a service network port"""
289 _args = ['close-port']442 _args = ['close-port']
290 _args.append('{}/{}'.format(port, protocol))443 _args.append('{}/{}'.format(port, protocol))
291 subprocess.check_call(_args)444 subprocess.check_call(_args)
@@ -293,6 +446,7 @@
293446
294@cached447@cached
295def unit_get(attribute):448def unit_get(attribute):
449 """Get the unit ID for the remote unit"""
296 _args = ['unit-get', '--format=json', attribute]450 _args = ['unit-get', '--format=json', attribute]
297 try:451 try:
298 return json.loads(subprocess.check_output(_args))452 return json.loads(subprocess.check_output(_args))
@@ -301,29 +455,60 @@
301455
302456
303def unit_private_ip():457def unit_private_ip():
458 """Get this unit's private IP address"""
304 return unit_get('private-address')459 return unit_get('private-address')
305460
306461
307class UnregisteredHookError(Exception):462class UnregisteredHookError(Exception):
463 """Raised when an undefined hook is called"""
308 pass464 pass
309465
310466
311class Hooks(object):467class Hooks(object):
312 def __init__(self):468 """A convenient handler for hook functions.
469
470 Example::
471
472 hooks = Hooks()
473
474 # register a hook, taking its name from the function name
475 @hooks.hook()
476 def install():
477 pass # your code here
478
479 # register a hook, providing a custom hook name
480 @hooks.hook("config-changed")
481 def config_changed():
482 pass # your code here
483
484 if __name__ == "__main__":
485 # execute a hook based on the name the program is called by
486 hooks.execute(sys.argv)
487 """
488
489 def __init__(self, config_save=True):
313 super(Hooks, self).__init__()490 super(Hooks, self).__init__()
314 self._hooks = {}491 self._hooks = {}
492 self._config_save = config_save
315493
316 def register(self, name, function):494 def register(self, name, function):
495 """Register a hook"""
317 self._hooks[name] = function496 self._hooks[name] = function
318497
319 def execute(self, args):498 def execute(self, args):
499 """Execute a registered hook based on args[0]"""
320 hook_name = os.path.basename(args[0])500 hook_name = os.path.basename(args[0])
321 if hook_name in self._hooks:501 if hook_name in self._hooks:
322 self._hooks[hook_name]()502 self._hooks[hook_name]()
503 if self._config_save:
504 cfg = config()
505 if cfg.implicit_save:
506 cfg.save()
323 else:507 else:
324 raise UnregisteredHookError(hook_name)508 raise UnregisteredHookError(hook_name)
325509
326 def hook(self, *hook_names):510 def hook(self, *hook_names):
511 """Decorator, registering them as hooks"""
327 def wrapper(decorated):512 def wrapper(decorated):
328 for hook_name in hook_names:513 for hook_name in hook_names:
329 self.register(hook_name, decorated)514 self.register(hook_name, decorated)
@@ -337,4 +522,5 @@
337522
338523
339def charm_dir():524def charm_dir():
525 """Return the root directory of the current charm"""
340 return os.environ.get('CHARM_DIR')526 return os.environ.get('CHARM_DIR')
341527
=== modified file 'hooks/charmhelpers/core/host.py'
--- hooks/charmhelpers/core/host.py 2013-08-29 18:39:36 +0000
+++ hooks/charmhelpers/core/host.py 2014-09-26 08:01:25 +0000
@@ -12,25 +12,33 @@
12import string12import string
13import subprocess13import subprocess
14import hashlib14import hashlib
15import shutil
16from contextlib import contextmanager
1517
16from collections import OrderedDict18from collections import OrderedDict
1719
18from hookenv import log20from hookenv import log
21from fstab import Fstab
1922
2023
21def service_start(service_name):24def service_start(service_name):
25 """Start a system service"""
22 return service('start', service_name)26 return service('start', service_name)
2327
2428
25def service_stop(service_name):29def service_stop(service_name):
30 """Stop a system service"""
26 return service('stop', service_name)31 return service('stop', service_name)
2732
2833
29def service_restart(service_name):34def service_restart(service_name):
35 """Restart a system service"""
30 return service('restart', service_name)36 return service('restart', service_name)
3137
3238
33def service_reload(service_name, restart_on_failure=False):39def service_reload(service_name, restart_on_failure=False):
40 """Reload a system service, optionally falling back to restart if
41 reload fails"""
34 service_result = service('reload', service_name)42 service_result = service('reload', service_name)
35 if not service_result and restart_on_failure:43 if not service_result and restart_on_failure:
36 service_result = service('restart', service_name)44 service_result = service('restart', service_name)
@@ -38,13 +46,15 @@
3846
3947
40def service(action, service_name):48def service(action, service_name):
49 """Control a system service"""
41 cmd = ['service', service_name, action]50 cmd = ['service', service_name, action]
42 return subprocess.call(cmd) == 051 return subprocess.call(cmd) == 0
4352
4453
45def service_running(service):54def service_running(service):
55 """Determine whether a system service is running"""
46 try:56 try:
47 output = subprocess.check_output(['service', service, 'status'])57 output = subprocess.check_output(['service', service, 'status'], stderr=subprocess.STDOUT)
48 except subprocess.CalledProcessError:58 except subprocess.CalledProcessError:
49 return False59 return False
50 else:60 else:
@@ -54,8 +64,18 @@
54 return False64 return False
5565
5666
67def service_available(service_name):
68 """Determine whether a system service is available"""
69 try:
70 subprocess.check_output(['service', service_name, 'status'], stderr=subprocess.STDOUT)
71 except subprocess.CalledProcessError as e:
72 return 'unrecognized service' not in e.output
73 else:
74 return True
75
76
57def adduser(username, password=None, shell='/bin/bash', system_user=False):77def adduser(username, password=None, shell='/bin/bash', system_user=False):
58 """Add a user"""78 """Add a user to the system"""
59 try:79 try:
60 user_info = pwd.getpwnam(username)80 user_info = pwd.getpwnam(username)
61 log('user {0} already exists!'.format(username))81 log('user {0} already exists!'.format(username))
@@ -137,8 +157,20 @@
137 target.write(content)157 target.write(content)
138158
139159
140def mount(device, mountpoint, options=None, persist=False):160def fstab_remove(mp):
141 '''Mount a filesystem'''161 """Remove the given mountpoint entry from /etc/fstab
162 """
163 return Fstab.remove_by_mountpoint(mp)
164
165
166def fstab_add(dev, mp, fs, options=None):
167 """Adds the given device entry to the /etc/fstab file
168 """
169 return Fstab.add(dev, mp, fs, options=options)
170
171
172def mount(device, mountpoint, options=None, persist=False, filesystem="ext3"):
173 """Mount a filesystem at a particular mountpoint"""
142 cmd_args = ['mount']174 cmd_args = ['mount']
143 if options is not None:175 if options is not None:
144 cmd_args.extend(['-o', options])176 cmd_args.extend(['-o', options])
@@ -148,28 +180,28 @@
148 except subprocess.CalledProcessError, e:180 except subprocess.CalledProcessError, e:
149 log('Error mounting {} at {}\n{}'.format(device, mountpoint, e.output))181 log('Error mounting {} at {}\n{}'.format(device, mountpoint, e.output))
150 return False182 return False
183
151 if persist:184 if persist:
152 # TODO: update fstab185 return fstab_add(device, mountpoint, filesystem, options=options)
153 pass
154 return True186 return True
155187
156188
157def umount(mountpoint, persist=False):189def umount(mountpoint, persist=False):
158 '''Unmount a filesystem'''190 """Unmount a filesystem"""
159 cmd_args = ['umount', mountpoint]191 cmd_args = ['umount', mountpoint]
160 try:192 try:
161 subprocess.check_output(cmd_args)193 subprocess.check_output(cmd_args)
162 except subprocess.CalledProcessError, e:194 except subprocess.CalledProcessError, e:
163 log('Error unmounting {}\n{}'.format(mountpoint, e.output))195 log('Error unmounting {}\n{}'.format(mountpoint, e.output))
164 return False196 return False
197
165 if persist:198 if persist:
166 # TODO: update fstab199 return fstab_remove(mountpoint)
167 pass
168 return True200 return True
169201
170202
171def mounts():203def mounts():
172 '''List of all mounted volumes as [[mountpoint,device],[...]]'''204 """Get a list of all mounted volumes as [[mountpoint,device],[...]]"""
173 with open('/proc/mounts') as f:205 with open('/proc/mounts') as f:
174 # [['/mount/point','/dev/path'],[...]]206 # [['/mount/point','/dev/path'],[...]]
175 system_mounts = [m[1::-1] for m in [l.strip().split()207 system_mounts = [m[1::-1] for m in [l.strip().split()
@@ -177,10 +209,15 @@
177 return system_mounts209 return system_mounts
178210
179211
180def file_hash(path):212def file_hash(path, hash_type='md5'):
181 ''' Generate a md5 hash of the contents of 'path' or None if not found '''213 """
214 Generate a hash checksum of the contents of 'path' or None if not found.
215
216 :param str hash_type: Any hash alrgorithm supported by :mod:`hashlib`,
217 such as md5, sha1, sha256, sha512, etc.
218 """
182 if os.path.exists(path):219 if os.path.exists(path):
183 h = hashlib.md5()220 h = getattr(hashlib, hash_type)()
184 with open(path, 'r') as source:221 with open(path, 'r') as source:
185 h.update(source.read()) # IGNORE:E1101 - it does have update222 h.update(source.read()) # IGNORE:E1101 - it does have update
186 return h.hexdigest()223 return h.hexdigest()
@@ -188,21 +225,41 @@
188 return None225 return None
189226
190227
191def restart_on_change(restart_map):228def check_hash(path, checksum, hash_type='md5'):
192 ''' Restart services based on configuration files changing229 """
193230 Validate a file using a cryptographic checksum.
194 This function is used a decorator, for example231
232 :param str checksum: Value of the checksum used to validate the file.
233 :param str hash_type: Hash algorithm used to generate `checksum`.
234 Can be any hash alrgorithm supported by :mod:`hashlib`,
235 such as md5, sha1, sha256, sha512, etc.
236 :raises ChecksumError: If the file fails the checksum
237
238 """
239 actual_checksum = file_hash(path, hash_type)
240 if checksum != actual_checksum:
241 raise ChecksumError("'%s' != '%s'" % (checksum, actual_checksum))
242
243
244class ChecksumError(ValueError):
245 pass
246
247
248def restart_on_change(restart_map, stopstart=False):
249 """Restart services based on configuration files changing
250
251 This function is used a decorator, for example::
195252
196 @restart_on_change({253 @restart_on_change({
197 '/etc/ceph/ceph.conf': [ 'cinder-api', 'cinder-volume' ]254 '/etc/ceph/ceph.conf': [ 'cinder-api', 'cinder-volume' ]
198 })255 })
199 def ceph_client_changed():256 def ceph_client_changed():
200 ...257 pass # your code here
201258
202 In this example, the cinder-api and cinder-volume services259 In this example, the cinder-api and cinder-volume services
203 would be restarted if /etc/ceph/ceph.conf is changed by the260 would be restarted if /etc/ceph/ceph.conf is changed by the
204 ceph_client_changed function.261 ceph_client_changed function.
205 '''262 """
206 def wrap(f):263 def wrap(f):
207 def wrapped_f(*args):264 def wrapped_f(*args):
208 checksums = {}265 checksums = {}
@@ -213,14 +270,20 @@
213 for path in restart_map:270 for path in restart_map:
214 if checksums[path] != file_hash(path):271 if checksums[path] != file_hash(path):
215 restarts += restart_map[path]272 restarts += restart_map[path]
216 for service_name in list(OrderedDict.fromkeys(restarts)):273 services_list = list(OrderedDict.fromkeys(restarts))
217 service('restart', service_name)274 if not stopstart:
275 for service_name in services_list:
276 service('restart', service_name)
277 else:
278 for action in ['stop', 'start']:
279 for service_name in services_list:
280 service(action, service_name)
218 return wrapped_f281 return wrapped_f
219 return wrap282 return wrap
220283
221284
222def lsb_release():285def lsb_release():
223 '''Return /etc/lsb-release in a dict'''286 """Return /etc/lsb-release in a dict"""
224 d = {}287 d = {}
225 with open('/etc/lsb-release', 'r') as lsb:288 with open('/etc/lsb-release', 'r') as lsb:
226 for l in lsb:289 for l in lsb:
@@ -230,7 +293,7 @@
230293
231294
232def pwgen(length=None):295def pwgen(length=None):
233 '''Generate a random pasword.'''296 """Generate a random pasword."""
234 if length is None:297 if length is None:
235 length = random.choice(range(35, 45))298 length = random.choice(range(35, 45))
236 alphanumeric_chars = [299 alphanumeric_chars = [
@@ -239,3 +302,84 @@
239 random_chars = [302 random_chars = [
240 random.choice(alphanumeric_chars) for _ in range(length)]303 random.choice(alphanumeric_chars) for _ in range(length)]
241 return(''.join(random_chars))304 return(''.join(random_chars))
305
306
307def list_nics(nic_type):
308 '''Return a list of nics of given type(s)'''
309 if isinstance(nic_type, basestring):
310 int_types = [nic_type]
311 else:
312 int_types = nic_type
313 interfaces = []
314 for int_type in int_types:
315 cmd = ['ip', 'addr', 'show', 'label', int_type + '*']
316 ip_output = subprocess.check_output(cmd).split('\n')
317 ip_output = (line for line in ip_output if line)
318 for line in ip_output:
319 if line.split()[1].startswith(int_type):
320 interfaces.append(line.split()[1].replace(":", ""))
321 return interfaces
322
323
324def set_nic_mtu(nic, mtu):
325 '''Set MTU on a network interface'''
326 cmd = ['ip', 'link', 'set', nic, 'mtu', mtu]
327 subprocess.check_call(cmd)
328
329
330def get_nic_mtu(nic):
331 cmd = ['ip', 'addr', 'show', nic]
332 ip_output = subprocess.check_output(cmd).split('\n')
333 mtu = ""
334 for line in ip_output:
335 words = line.split()
336 if 'mtu' in words:
337 mtu = words[words.index("mtu") + 1]
338 return mtu
339
340
341def get_nic_hwaddr(nic):
342 cmd = ['ip', '-o', '-0', 'addr', 'show', nic]
343 ip_output = subprocess.check_output(cmd)
344 hwaddr = ""
345 words = ip_output.split()
346 if 'link/ether' in words:
347 hwaddr = words[words.index('link/ether') + 1]
348 return hwaddr
349
350
351def cmp_pkgrevno(package, revno, pkgcache=None):
352 '''Compare supplied revno with the revno of the installed package
353
354 * 1 => Installed revno is greater than supplied arg
355 * 0 => Installed revno is the same as supplied arg
356 * -1 => Installed revno is less than supplied arg
357
358 '''
359 import apt_pkg
360 from charmhelpers.fetch import apt_cache
361 if not pkgcache:
362 pkgcache = apt_cache()
363 pkg = pkgcache[package]
364 return apt_pkg.version_compare(pkg.current_ver.ver_str, revno)
365
366
367@contextmanager
368def chdir(d):
369 cur = os.getcwd()
370 try:
371 yield os.chdir(d)
372 finally:
373 os.chdir(cur)
374
375
376def chownr(path, owner, group):
377 uid = pwd.getpwnam(owner).pw_uid
378 gid = grp.getgrnam(group).gr_gid
379
380 for root, dirs, files in os.walk(path):
381 for name in dirs + files:
382 full = os.path.join(root, name)
383 broken_symlink = os.path.lexists(full) and not os.path.exists(full)
384 if not broken_symlink:
385 os.chown(full, uid, gid)
242386
=== added directory 'hooks/charmhelpers/core/services'
=== added file 'hooks/charmhelpers/core/services/__init__.py'
--- hooks/charmhelpers/core/services/__init__.py 1970-01-01 00:00:00 +0000
+++ hooks/charmhelpers/core/services/__init__.py 2014-09-26 08:01:25 +0000
@@ -0,0 +1,2 @@
1from .base import *
2from .helpers import *
03
=== added file 'hooks/charmhelpers/core/services/base.py'
--- hooks/charmhelpers/core/services/base.py 1970-01-01 00:00:00 +0000
+++ hooks/charmhelpers/core/services/base.py 2014-09-26 08:01:25 +0000
@@ -0,0 +1,313 @@
1import os
2import re
3import json
4from collections import Iterable
5
6from charmhelpers.core import host
7from charmhelpers.core import hookenv
8
9
10__all__ = ['ServiceManager', 'ManagerCallback',
11 'PortManagerCallback', 'open_ports', 'close_ports', 'manage_ports',
12 'service_restart', 'service_stop']
13
14
15class ServiceManager(object):
16 def __init__(self, services=None):
17 """
18 Register a list of services, given their definitions.
19
20 Service definitions are dicts in the following formats (all keys except
21 'service' are optional)::
22
23 {
24 "service": <service name>,
25 "required_data": <list of required data contexts>,
26 "provided_data": <list of provided data contexts>,
27 "data_ready": <one or more callbacks>,
28 "data_lost": <one or more callbacks>,
29 "start": <one or more callbacks>,
30 "stop": <one or more callbacks>,
31 "ports": <list of ports to manage>,
32 }
33
34 The 'required_data' list should contain dicts of required data (or
35 dependency managers that act like dicts and know how to collect the data).
36 Only when all items in the 'required_data' list are populated are the list
37 of 'data_ready' and 'start' callbacks executed. See `is_ready()` for more
38 information.
39
40 The 'provided_data' list should contain relation data providers, most likely
41 a subclass of :class:`charmhelpers.core.services.helpers.RelationContext`,
42 that will indicate a set of data to set on a given relation.
43
44 The 'data_ready' value should be either a single callback, or a list of
45 callbacks, to be called when all items in 'required_data' pass `is_ready()`.
46 Each callback will be called with the service name as the only parameter.
47 After all of the 'data_ready' callbacks are called, the 'start' callbacks
48 are fired.
49
50 The 'data_lost' value should be either a single callback, or a list of
51 callbacks, to be called when a 'required_data' item no longer passes
52 `is_ready()`. Each callback will be called with the service name as the
53 only parameter. After all of the 'data_lost' callbacks are called,
54 the 'stop' callbacks are fired.
55
56 The 'start' value should be either a single callback, or a list of
57 callbacks, to be called when starting the service, after the 'data_ready'
58 callbacks are complete. Each callback will be called with the service
59 name as the only parameter. This defaults to
60 `[host.service_start, services.open_ports]`.
61
62 The 'stop' value should be either a single callback, or a list of
63 callbacks, to be called when stopping the service. If the service is
64 being stopped because it no longer has all of its 'required_data', this
65 will be called after all of the 'data_lost' callbacks are complete.
66 Each callback will be called with the service name as the only parameter.
67 This defaults to `[services.close_ports, host.service_stop]`.
68
69 The 'ports' value should be a list of ports to manage. The default
70 'start' handler will open the ports after the service is started,
71 and the default 'stop' handler will close the ports prior to stopping
72 the service.
73
74
75 Examples:
76
77 The following registers an Upstart service called bingod that depends on
78 a mongodb relation and which runs a custom `db_migrate` function prior to
79 restarting the service, and a Runit service called spadesd::
80
81 manager = services.ServiceManager([
82 {
83 'service': 'bingod',
84 'ports': [80, 443],
85 'required_data': [MongoRelation(), config(), {'my': 'data'}],
86 'data_ready': [
87 services.template(source='bingod.conf'),
88 services.template(source='bingod.ini',
89 target='/etc/bingod.ini',
90 owner='bingo', perms=0400),
91 ],
92 },
93 {
94 'service': 'spadesd',
95 'data_ready': services.template(source='spadesd_run.j2',
96 target='/etc/sv/spadesd/run',
97 perms=0555),
98 'start': runit_start,
99 'stop': runit_stop,
100 },
101 ])
102 manager.manage()
103 """
104 self._ready_file = os.path.join(hookenv.charm_dir(), 'READY-SERVICES.json')
105 self._ready = None
106 self.services = {}
107 for service in services or []:
108 service_name = service['service']
109 self.services[service_name] = service
110
111 def manage(self):
112 """
113 Handle the current hook by doing The Right Thing with the registered services.
114 """
115 hook_name = hookenv.hook_name()
116 if hook_name == 'stop':
117 self.stop_services()
118 else:
119 self.provide_data()
120 self.reconfigure_services()
121 cfg = hookenv.config()
122 if cfg.implicit_save:
123 cfg.save()
124
125 def provide_data(self):
126 """
127 Set the relation data for each provider in the ``provided_data`` list.
128
129 A provider must have a `name` attribute, which indicates which relation
130 to set data on, and a `provide_data()` method, which returns a dict of
131 data to set.
132 """
133 hook_name = hookenv.hook_name()
134 for service in self.services.values():
135 for provider in service.get('provided_data', []):
136 if re.match(r'{}-relation-(joined|changed)'.format(provider.name), hook_name):
137 data = provider.provide_data()
138 _ready = provider._is_ready(data) if hasattr(provider, '_is_ready') else data
139 if _ready:
140 hookenv.relation_set(None, data)
141
142 def reconfigure_services(self, *service_names):
143 """
144 Update all files for one or more registered services, and,
145 if ready, optionally restart them.
146
147 If no service names are given, reconfigures all registered services.
148 """
149 for service_name in service_names or self.services.keys():
150 if self.is_ready(service_name):
151 self.fire_event('data_ready', service_name)
152 self.fire_event('start', service_name, default=[
153 service_restart,
154 manage_ports])
155 self.save_ready(service_name)
156 else:
157 if self.was_ready(service_name):
158 self.fire_event('data_lost', service_name)
159 self.fire_event('stop', service_name, default=[
160 manage_ports,
161 service_stop])
162 self.save_lost(service_name)
163
164 def stop_services(self, *service_names):
165 """
166 Stop one or more registered services, by name.
167
168 If no service names are given, stops all registered services.
169 """
170 for service_name in service_names or self.services.keys():
171 self.fire_event('stop', service_name, default=[
172 manage_ports,
173 service_stop])
174
175 def get_service(self, service_name):
176 """
177 Given the name of a registered service, return its service definition.
178 """
179 service = self.services.get(service_name)
180 if not service:
181 raise KeyError('Service not registered: %s' % service_name)
182 return service
183
184 def fire_event(self, event_name, service_name, default=None):
185 """
186 Fire a data_ready, data_lost, start, or stop event on a given service.
187 """
188 service = self.get_service(service_name)
189 callbacks = service.get(event_name, default)
190 if not callbacks:
191 return
192 if not isinstance(callbacks, Iterable):
193 callbacks = [callbacks]
194 for callback in callbacks:
195 if isinstance(callback, ManagerCallback):
196 callback(self, service_name, event_name)
197 else:
198 callback(service_name)
199
200 def is_ready(self, service_name):
201 """
202 Determine if a registered service is ready, by checking its 'required_data'.
203
204 A 'required_data' item can be any mapping type, and is considered ready
205 if `bool(item)` evaluates as True.
206 """
207 service = self.get_service(service_name)
208 reqs = service.get('required_data', [])
209 return all(bool(req) for req in reqs)
210
211 def _load_ready_file(self):
212 if self._ready is not None:
213 return
214 if os.path.exists(self._ready_file):
215 with open(self._ready_file) as fp:
216 self._ready = set(json.load(fp))
217 else:
218 self._ready = set()
219
220 def _save_ready_file(self):
221 if self._ready is None:
222 return
223 with open(self._ready_file, 'w') as fp:
224 json.dump(list(self._ready), fp)
225
226 def save_ready(self, service_name):
227 """
228 Save an indicator that the given service is now data_ready.
229 """
230 self._load_ready_file()
231 self._ready.add(service_name)
232 self._save_ready_file()
233
234 def save_lost(self, service_name):
235 """
236 Save an indicator that the given service is no longer data_ready.
237 """
238 self._load_ready_file()
239 self._ready.discard(service_name)
240 self._save_ready_file()
241
242 def was_ready(self, service_name):
243 """
244 Determine if the given service was previously data_ready.
245 """
246 self._load_ready_file()
247 return service_name in self._ready
248
249
250class ManagerCallback(object):
251 """
252 Special case of a callback that takes the `ServiceManager` instance
253 in addition to the service name.
254
255 Subclasses should implement `__call__` which should accept three parameters:
256
257 * `manager` The `ServiceManager` instance
258 * `service_name` The name of the service it's being triggered for
259 * `event_name` The name of the event that this callback is handling
260 """
261 def __call__(self, manager, service_name, event_name):
262 raise NotImplementedError()
263
264
265class PortManagerCallback(ManagerCallback):
266 """
267 Callback class that will open or close ports, for use as either
268 a start or stop action.
269 """
270 def __call__(self, manager, service_name, event_name):
271 service = manager.get_service(service_name)
272 new_ports = service.get('ports', [])
273 port_file = os.path.join(hookenv.charm_dir(), '.{}.ports'.format(service_name))
274 if os.path.exists(port_file):
275 with open(port_file) as fp:
276 old_ports = fp.read().split(',')
277 for old_port in old_ports:
278 if bool(old_port):
279 old_port = int(old_port)
280 if old_port not in new_ports:
281 hookenv.close_port(old_port)
282 with open(port_file, 'w') as fp:
283 fp.write(','.join(str(port) for port in new_ports))
284 for port in new_ports:
285 if event_name == 'start':
286 hookenv.open_port(port)
287 elif event_name == 'stop':
288 hookenv.close_port(port)
289
290
291def service_stop(service_name):
292 """
293 Wrapper around host.service_stop to prevent spurious "unknown service"
294 messages in the logs.
295 """
296 if host.service_running(service_name):
297 host.service_stop(service_name)
298
299
300def service_restart(service_name):
301 """
302 Wrapper around host.service_restart to prevent spurious "unknown service"
303 messages in the logs.
304 """
305 if host.service_available(service_name):
306 if host.service_running(service_name):
307 host.service_restart(service_name)
308 else:
309 host.service_start(service_name)
310
311
312# Convenience aliases
313open_ports = close_ports = manage_ports = PortManagerCallback()
0314
=== added file 'hooks/charmhelpers/core/services/helpers.py'
--- hooks/charmhelpers/core/services/helpers.py 1970-01-01 00:00:00 +0000
+++ hooks/charmhelpers/core/services/helpers.py 2014-09-26 08:01:25 +0000
@@ -0,0 +1,239 @@
1import os
2import yaml
3from charmhelpers.core import hookenv
4from charmhelpers.core import templating
5
6from charmhelpers.core.services.base import ManagerCallback
7
8
9__all__ = ['RelationContext', 'TemplateCallback',
10 'render_template', 'template']
11
12
13class RelationContext(dict):
14 """
15 Base class for a context generator that gets relation data from juju.
16
17 Subclasses must provide the attributes `name`, which is the name of the
18 interface of interest, `interface`, which is the type of the interface of
19 interest, and `required_keys`, which is the set of keys required for the
20 relation to be considered complete. The data for all interfaces matching
21 the `name` attribute that are complete will used to populate the dictionary
22 values (see `get_data`, below).
23
24 The generated context will be namespaced under the relation :attr:`name`,
25 to prevent potential naming conflicts.
26
27 :param str name: Override the relation :attr:`name`, since it can vary from charm to charm
28 :param list additional_required_keys: Extend the list of :attr:`required_keys`
29 """
30 name = None
31 interface = None
32 required_keys = []
33
34 def __init__(self, name=None, additional_required_keys=None):
35 if name is not None:
36 self.name = name
37 if additional_required_keys is not None:
38 self.required_keys.extend(additional_required_keys)
39 self.get_data()
40
41 def __bool__(self):
42 """
43 Returns True if all of the required_keys are available.
44 """
45 return self.is_ready()
46
47 __nonzero__ = __bool__
48
49 def __repr__(self):
50 return super(RelationContext, self).__repr__()
51
52 def is_ready(self):
53 """
54 Returns True if all of the `required_keys` are available from any units.
55 """
56 ready = len(self.get(self.name, [])) > 0
57 if not ready:
58 hookenv.log('Incomplete relation: {}'.format(self.__class__.__name__), hookenv.DEBUG)
59 return ready
60
61 def _is_ready(self, unit_data):
62 """
63 Helper method that tests a set of relation data and returns True if
64 all of the `required_keys` are present.
65 """
66 return set(unit_data.keys()).issuperset(set(self.required_keys))
67
68 def get_data(self):
69 """
70 Retrieve the relation data for each unit involved in a relation and,
71 if complete, store it in a list under `self[self.name]`. This
72 is automatically called when the RelationContext is instantiated.
73
74 The units are sorted lexographically first by the service ID, then by
75 the unit ID. Thus, if an interface has two other services, 'db:1'
76 and 'db:2', with 'db:1' having two units, 'wordpress/0' and 'wordpress/1',
77 and 'db:2' having one unit, 'mediawiki/0', all of which have a complete
78 set of data, the relation data for the units will be stored in the
79 order: 'wordpress/0', 'wordpress/1', 'mediawiki/0'.
80
81 If you only care about a single unit on the relation, you can just
82 access it as `{{ interface[0]['key'] }}`. However, if you can at all
83 support multiple units on a relation, you should iterate over the list,
84 like::
85
86 {% for unit in interface -%}
87 {{ unit['key'] }}{% if not loop.last %},{% endif %}
88 {%- endfor %}
89
90 Note that since all sets of relation data from all related services and
91 units are in a single list, if you need to know which service or unit a
92 set of data came from, you'll need to extend this class to preserve
93 that information.
94 """
95 if not hookenv.relation_ids(self.name):
96 return
97
98 ns = self.setdefault(self.name, [])
99 for rid in sorted(hookenv.relation_ids(self.name)):
100 for unit in sorted(hookenv.related_units(rid)):
101 reldata = hookenv.relation_get(rid=rid, unit=unit)
102 if self._is_ready(reldata):
103 ns.append(reldata)
104
105 def provide_data(self):
106 """
107 Return data to be relation_set for this interface.
108 """
109 return {}
110
111
112class MysqlRelation(RelationContext):
113 """
114 Relation context for the `mysql` interface.
115
116 :param str name: Override the relation :attr:`name`, since it can vary from charm to charm
117 :param list additional_required_keys: Extend the list of :attr:`required_keys`
118 """
119 name = 'db'
120 interface = 'mysql'
121 required_keys = ['host', 'user', 'password', 'database']
122
123
124class HttpRelation(RelationContext):
125 """
126 Relation context for the `http` interface.
127
128 :param str name: Override the relation :attr:`name`, since it can vary from charm to charm
129 :param list additional_required_keys: Extend the list of :attr:`required_keys`
130 """
131 name = 'website'
132 interface = 'http'
133 required_keys = ['host', 'port']
134
135 def provide_data(self):
136 return {
137 'host': hookenv.unit_get('private-address'),
138 'port': 80,
139 }
140
141
142class RequiredConfig(dict):
143 """
144 Data context that loads config options with one or more mandatory options.
145
146 Once the required options have been changed from their default values, all
147 config options will be available, namespaced under `config` to prevent
148 potential naming conflicts (for example, between a config option and a
149 relation property).
150
151 :param list *args: List of options that must be changed from their default values.
152 """
153
154 def __init__(self, *args):
155 self.required_options = args
156 self['config'] = hookenv.config()
157 with open(os.path.join(hookenv.charm_dir(), 'config.yaml')) as fp:
158 self.config = yaml.load(fp).get('options', {})
159
160 def __bool__(self):
161 for option in self.required_options:
162 if option not in self['config']:
163 return False
164 current_value = self['config'][option]
165 default_value = self.config[option].get('default')
166 if current_value == default_value:
167 return False
168 if current_value in (None, '') and default_value in (None, ''):
169 return False
170 return True
171
172 def __nonzero__(self):
173 return self.__bool__()
174
175
176class StoredContext(dict):
177 """
178 A data context that always returns the data that it was first created with.
179
180 This is useful to do a one-time generation of things like passwords, that
181 will thereafter use the same value that was originally generated, instead
182 of generating a new value each time it is run.
183 """
184 def __init__(self, file_name, config_data):
185 """
186 If the file exists, populate `self` with the data from the file.
187 Otherwise, populate with the given data and persist it to the file.
188 """
189 if os.path.exists(file_name):
190 self.update(self.read_context(file_name))
191 else:
192 self.store_context(file_name, config_data)
193 self.update(config_data)
194
195 def store_context(self, file_name, config_data):
196 if not os.path.isabs(file_name):
197 file_name = os.path.join(hookenv.charm_dir(), file_name)
198 with open(file_name, 'w') as file_stream:
199 os.fchmod(file_stream.fileno(), 0600)
200 yaml.dump(config_data, file_stream)
201
202 def read_context(self, file_name):
203 if not os.path.isabs(file_name):
204 file_name = os.path.join(hookenv.charm_dir(), file_name)
205 with open(file_name, 'r') as file_stream:
206 data = yaml.load(file_stream)
207 if not data:
208 raise OSError("%s is empty" % file_name)
209 return data
210
211
212class TemplateCallback(ManagerCallback):
213 """
214 Callback class that will render a Jinja2 template, for use as a ready action.
215
216 :param str source: The template source file, relative to `$CHARM_DIR/templates`
217 :param str target: The target to write the rendered template to
218 :param str owner: The owner of the rendered file
219 :param str group: The group of the rendered file
220 :param int perms: The permissions of the rendered file
221 """
222 def __init__(self, source, target, owner='root', group='root', perms=0444):
223 self.source = source
224 self.target = target
225 self.owner = owner
226 self.group = group
227 self.perms = perms
228
229 def __call__(self, manager, service_name, event_name):
230 service = manager.get_service(service_name)
231 context = {}
232 for ctx in service.get('required_data', []):
233 context.update(ctx)
234 templating.render(self.source, self.target, context,
235 self.owner, self.group, self.perms)
236
237
238# Convenience aliases for templates
239render_template = template = TemplateCallback
0240
=== added file 'hooks/charmhelpers/core/templating.py'
--- hooks/charmhelpers/core/templating.py 1970-01-01 00:00:00 +0000
+++ hooks/charmhelpers/core/templating.py 2014-09-26 08:01:25 +0000
@@ -0,0 +1,51 @@
1import os
2
3from charmhelpers.core import host
4from charmhelpers.core import hookenv
5
6
7def render(source, target, context, owner='root', group='root', perms=0444, templates_dir=None):
8 """
9 Render a template.
10
11 The `source` path, if not absolute, is relative to the `templates_dir`.
12
13 The `target` path should be absolute.
14
15 The context should be a dict containing the values to be replaced in the
16 template.
17
18 The `owner`, `group`, and `perms` options will be passed to `write_file`.
19
20 If omitted, `templates_dir` defaults to the `templates` folder in the charm.
21
22 Note: Using this requires python-jinja2; if it is not installed, calling
23 this will attempt to use charmhelpers.fetch.apt_install to install it.
24 """
25 try:
26 from jinja2 import FileSystemLoader, Environment, exceptions
27 except ImportError:
28 try:
29 from charmhelpers.fetch import apt_install
30 except ImportError:
31 hookenv.log('Could not import jinja2, and could not import '
32 'charmhelpers.fetch to install it',
33 level=hookenv.ERROR)
34 raise
35 apt_install('python-jinja2', fatal=True)
36 from jinja2 import FileSystemLoader, Environment, exceptions
37
38 if templates_dir is None:
39 templates_dir = os.path.join(hookenv.charm_dir(), 'templates')
40 loader = Environment(loader=FileSystemLoader(templates_dir))
41 try:
42 source = source
43 template = loader.get_template(source)
44 except exceptions.TemplateNotFound as e:
45 hookenv.log('Could not load template %s from %s.' %
46 (source, templates_dir),
47 level=hookenv.ERROR)
48 raise e
49 content = template.render(context)
50 host.mkdir(os.path.dirname(target))
51 host.write_file(target, content, owner, group, perms)
052
=== modified file 'hooks/charmhelpers/fetch/__init__.py'
--- hooks/charmhelpers/fetch/__init__.py 2013-08-29 18:41:54 +0000
+++ hooks/charmhelpers/fetch/__init__.py 2014-09-26 08:01:25 +0000
@@ -1,4 +1,6 @@
1import importlib1import importlib
2from tempfile import NamedTemporaryFile
3import time
2from yaml import safe_load4from yaml import safe_load
3from charmhelpers.core.host import (5from charmhelpers.core.host import (
4 lsb_release6 lsb_release
@@ -12,7 +14,8 @@
12 config,14 config,
13 log,15 log,
14)16)
15import apt_pkg17import os
18
1619
17CLOUD_ARCHIVE = """# Ubuntu Cloud Archive20CLOUD_ARCHIVE = """# Ubuntu Cloud Archive
18deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main21deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main
@@ -20,12 +23,101 @@
20PROPOSED_POCKET = """# Proposed23PROPOSED_POCKET = """# Proposed
21deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted24deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted
22"""25"""
26CLOUD_ARCHIVE_POCKETS = {
27 # Folsom
28 'folsom': 'precise-updates/folsom',
29 'precise-folsom': 'precise-updates/folsom',
30 'precise-folsom/updates': 'precise-updates/folsom',
31 'precise-updates/folsom': 'precise-updates/folsom',
32 'folsom/proposed': 'precise-proposed/folsom',
33 'precise-folsom/proposed': 'precise-proposed/folsom',
34 'precise-proposed/folsom': 'precise-proposed/folsom',
35 # Grizzly
36 'grizzly': 'precise-updates/grizzly',
37 'precise-grizzly': 'precise-updates/grizzly',
38 'precise-grizzly/updates': 'precise-updates/grizzly',
39 'precise-updates/grizzly': 'precise-updates/grizzly',
40 'grizzly/proposed': 'precise-proposed/grizzly',
41 'precise-grizzly/proposed': 'precise-proposed/grizzly',
42 'precise-proposed/grizzly': 'precise-proposed/grizzly',
43 # Havana
44 'havana': 'precise-updates/havana',
45 'precise-havana': 'precise-updates/havana',
46 'precise-havana/updates': 'precise-updates/havana',
47 'precise-updates/havana': 'precise-updates/havana',
48 'havana/proposed': 'precise-proposed/havana',
49 'precise-havana/proposed': 'precise-proposed/havana',
50 'precise-proposed/havana': 'precise-proposed/havana',
51 # Icehouse
52 'icehouse': 'precise-updates/icehouse',
53 'precise-icehouse': 'precise-updates/icehouse',
54 'precise-icehouse/updates': 'precise-updates/icehouse',
55 'precise-updates/icehouse': 'precise-updates/icehouse',
56 'icehouse/proposed': 'precise-proposed/icehouse',
57 'precise-icehouse/proposed': 'precise-proposed/icehouse',
58 'precise-proposed/icehouse': 'precise-proposed/icehouse',
59 # Juno
60 'juno': 'trusty-updates/juno',
61 'trusty-juno': 'trusty-updates/juno',
62 'trusty-juno/updates': 'trusty-updates/juno',
63 'trusty-updates/juno': 'trusty-updates/juno',
64 'juno/proposed': 'trusty-proposed/juno',
66 'trusty-juno/proposed': 'trusty-proposed/juno',
67 'trusty-proposed/juno': 'trusty-proposed/juno',
68}
69
 70# The order of this list is very important. Handlers should be listed
 71# from least- to most-specific URL matching.
72FETCH_HANDLERS = (
73 'charmhelpers.fetch.archiveurl.ArchiveUrlFetchHandler',
74 'charmhelpers.fetch.bzrurl.BzrUrlFetchHandler',
75)
76
77APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT.
78APT_NO_LOCK_RETRY_DELAY = 10 # Wait 10 seconds between apt lock checks.
79APT_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times.
80
81
82class SourceConfigError(Exception):
83 pass
84
85
86class UnhandledSource(Exception):
87 pass
88
89
90class AptLockError(Exception):
91 pass
92
93
94class BaseFetchHandler(object):
95
96 """Base class for FetchHandler implementations in fetch plugins"""
97
98 def can_handle(self, source):
99 """Returns True if the source can be handled. Otherwise returns
100 a string explaining why it cannot"""
101 return "Wrong source type"
102
103 def install(self, source):
104 """Try to download and unpack the source. Return the path to the
105 unpacked files or raise UnhandledSource."""
106 raise UnhandledSource("Wrong source type {}".format(source))
107
108 def parse_url(self, url):
109 return urlparse(url)
110
111 def base_url(self, url):
112 """Return url without querystring or fragment"""
113 parts = list(self.parse_url(url))
114 parts[4:] = ['' for i in parts[4:]]
115 return urlunparse(parts)
23116
24117
25def filter_installed_packages(packages):118def filter_installed_packages(packages):
26 """Returns a list of packages that require installation"""119 """Returns a list of packages that require installation"""
27 apt_pkg.init()120 cache = apt_cache()
28 cache = apt_pkg.Cache()
29 _pkgs = []121 _pkgs = []
30 for package in packages:122 for package in packages:
31 try:123 try:
@@ -38,10 +130,22 @@
38 return _pkgs130 return _pkgs
39131
40132
133def apt_cache(in_memory=True):
134 """Build and return an apt cache"""
135 import apt_pkg
136 apt_pkg.init()
137 if in_memory:
138 apt_pkg.config.set("Dir::Cache::pkgcache", "")
139 apt_pkg.config.set("Dir::Cache::srcpkgcache", "")
140 return apt_pkg.Cache()
141
142
41def apt_install(packages, options=None, fatal=False):143def apt_install(packages, options=None, fatal=False):
42 """Install one or more packages"""144 """Install one or more packages"""
43 options = options or []145 if options is None:
44 cmd = ['apt-get', '-y']146 options = ['--option=Dpkg::Options::=--force-confold']
147
148 cmd = ['apt-get', '--assume-yes']
45 cmd.extend(options)149 cmd.extend(options)
46 cmd.append('install')150 cmd.append('install')
47 if isinstance(packages, basestring):151 if isinstance(packages, basestring):
@@ -50,29 +154,50 @@
50 cmd.extend(packages)154 cmd.extend(packages)
51 log("Installing {} with options: {}".format(packages,155 log("Installing {} with options: {}".format(packages,
52 options))156 options))
53 if fatal:157 _run_apt_command(cmd, fatal)
54 subprocess.check_call(cmd)158
159
160def apt_upgrade(options=None, fatal=False, dist=False):
161 """Upgrade all packages"""
162 if options is None:
163 options = ['--option=Dpkg::Options::=--force-confold']
164
165 cmd = ['apt-get', '--assume-yes']
166 cmd.extend(options)
167 if dist:
168 cmd.append('dist-upgrade')
55 else:169 else:
56 subprocess.call(cmd)170 cmd.append('upgrade')
171 log("Upgrading with options: {}".format(options))
172 _run_apt_command(cmd, fatal)
57173
58174
59def apt_update(fatal=False):175def apt_update(fatal=False):
60 """Update local apt cache"""176 """Update local apt cache"""
61 cmd = ['apt-get', 'update']177 cmd = ['apt-get', 'update']
62 if fatal:178 _run_apt_command(cmd, fatal)
63 subprocess.check_call(cmd)
64 else:
65 subprocess.call(cmd)
66179
67180
68def apt_purge(packages, fatal=False):181def apt_purge(packages, fatal=False):
69 """Purge one or more packages"""182 """Purge one or more packages"""
70 cmd = ['apt-get', '-y', 'purge']183 cmd = ['apt-get', '--assume-yes', 'purge']
71 if isinstance(packages, basestring):184 if isinstance(packages, basestring):
72 cmd.append(packages)185 cmd.append(packages)
73 else:186 else:
74 cmd.extend(packages)187 cmd.extend(packages)
75 log("Purging {}".format(packages))188 log("Purging {}".format(packages))
189 _run_apt_command(cmd, fatal)
190
191
192def apt_hold(packages, fatal=False):
193 """Hold one or more packages"""
194 cmd = ['apt-mark', 'hold']
195 if isinstance(packages, basestring):
196 cmd.append(packages)
197 else:
198 cmd.extend(packages)
199 log("Holding {}".format(packages))
200
76 if fatal:201 if fatal:
77 subprocess.check_call(cmd)202 subprocess.check_call(cmd)
78 else:203 else:
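
The new apt_cache(in_memory=True) helper above clears Dir::Cache::pkgcache and
Dir::Cache::srcpkgcache before building the cache, so python-apt keeps it in
memory rather than writing the on-disk cache files, and
filter_installed_packages() now goes through it. A rough sketch of how these
helpers combine in a hook; the package names are illustrative only:

    from charmhelpers.fetch import (
        apt_install,
        apt_update,
        filter_installed_packages,
    )

    # Checking package state uses the in-memory apt cache, so it does not
    # touch /var/cache/apt/*.bin.
    missing = filter_installed_packages(['ntp', 'python-jinja2'])
    if missing:
        apt_update(fatal=True)
        # fatal=True routes through _run_apt_command(), which retries while
        # the apt/dpkg lock is held elsewhere (see the hunk further down).
        apt_install(missing, fatal=True)
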
@@ -80,84 +205,142 @@
80205
81206
82def add_source(source, key=None):207def add_source(source, key=None):
83 if ((source.startswith('ppa:') or208 """Add a package source to this system.
84 source.startswith('http:'))):209
210 @param source: a URL or sources.list entry, as supported by
211 add-apt-repository(1). Examples::
212
213 ppa:charmers/example
214 deb https://stub:key@private.example.com/ubuntu trusty main
215
216 In addition:
217 'proposed:' may be used to enable the standard 'proposed'
218 pocket for the release.
219 'cloud:' may be used to activate official cloud archive pockets,
220 such as 'cloud:icehouse'
221
222 @param key: A key to be added to the system's APT keyring and used
223 to verify the signatures on packages. Ideally, this should be an
224 ASCII format GPG public key including the block headers. A GPG key
225 id may also be used, but be aware that only insecure protocols are
 226 available to retrieve the actual public key from a public keyserver,
 227 placing your Juju environment at risk. PPA and cloud archive keys
 228 are securely added automatically, so should not be provided.
229 """
230 if source is None:
231 log('Source is not present. Skipping')
232 return
233
234 if (source.startswith('ppa:') or
235 source.startswith('http') or
236 source.startswith('deb ') or
237 source.startswith('cloud-archive:')):
85 subprocess.check_call(['add-apt-repository', '--yes', source])238 subprocess.check_call(['add-apt-repository', '--yes', source])
86 elif source.startswith('cloud:'):239 elif source.startswith('cloud:'):
87 apt_install(filter_installed_packages(['ubuntu-cloud-keyring']),240 apt_install(filter_installed_packages(['ubuntu-cloud-keyring']),
88 fatal=True)241 fatal=True)
89 pocket = source.split(':')[-1]242 pocket = source.split(':')[-1]
243 if pocket not in CLOUD_ARCHIVE_POCKETS:
244 raise SourceConfigError(
245 'Unsupported cloud: source option %s' %
246 pocket)
247 actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket]
90 with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt:248 with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt:
91 apt.write(CLOUD_ARCHIVE.format(pocket))249 apt.write(CLOUD_ARCHIVE.format(actual_pocket))
92 elif source == 'proposed':250 elif source == 'proposed':
93 release = lsb_release()['DISTRIB_CODENAME']251 release = lsb_release()['DISTRIB_CODENAME']
94 with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt:252 with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt:
95 apt.write(PROPOSED_POCKET.format(release))253 apt.write(PROPOSED_POCKET.format(release))
254 else:
255 raise SourceConfigError("Unknown source: {!r}".format(source))
256
96 if key:257 if key:
97 subprocess.check_call(['apt-key', 'import', key])258 if '-----BEGIN PGP PUBLIC KEY BLOCK-----' in key:
98259 with NamedTemporaryFile() as key_file:
99260 key_file.write(key)
100class SourceConfigError(Exception):261 key_file.flush()
101 pass262 key_file.seek(0)
263 subprocess.check_call(['apt-key', 'add', '-'], stdin=key_file)
264 else:
265 # Note that hkp: is in no way a secure protocol. Using a
266 # GPG key id is pointless from a security POV unless you
267 # absolutely trust your network and DNS.
268 subprocess.check_call(['apt-key', 'adv', '--keyserver',
269 'hkp://keyserver.ubuntu.com:80', '--recv',
270 key])
102271
103272
104def configure_sources(update=False,273def configure_sources(update=False,
105 sources_var='install_sources',274 sources_var='install_sources',
106 keys_var='install_keys'):275 keys_var='install_keys'):
107 """276 """
108 Configure multiple sources from charm configuration277 Configure multiple sources from charm configuration.
278
279 The lists are encoded as yaml fragments in the configuration.
 280 The fragment needs to be included as a string. Sources and their
281 corresponding keys are of the types supported by add_source().
109282
110 Example config:283 Example config:
111 install_sources:284 install_sources: |
112 - "ppa:foo"285 - "ppa:foo"
113 - "http://example.com/repo precise main"286 - "http://example.com/repo precise main"
114 install_keys:287 install_keys: |
115 - null288 - null
116 - "a1b2c3d4"289 - "a1b2c3d4"
117290
118 Note that 'null' (a.k.a. None) should not be quoted.291 Note that 'null' (a.k.a. None) should not be quoted.
119 """292 """
120 sources = safe_load(config(sources_var))293 sources = safe_load((config(sources_var) or '').strip()) or []
121 keys = safe_load(config(keys_var))294 keys = safe_load((config(keys_var) or '').strip()) or None
122 if isinstance(sources, basestring) and isinstance(keys, basestring):295
123 add_source(sources, keys)296 if isinstance(sources, basestring):
297 sources = [sources]
298
299 if keys is None:
300 for source in sources:
301 add_source(source, None)
124 else:302 else:
125 if not len(sources) == len(keys):303 if isinstance(keys, basestring):
126 msg = 'Install sources and keys lists are different lengths'304 keys = [keys]
127 raise SourceConfigError(msg)305
128 for src_num in range(len(sources)):306 if len(sources) != len(keys):
129 add_source(sources[src_num], keys[src_num])307 raise SourceConfigError(
308 'Install sources and keys lists are different lengths')
309 for source, key in zip(sources, keys):
310 add_source(source, key)
130 if update:311 if update:
131 apt_update(fatal=True)312 apt_update(fatal=True)
132313
133# The order of this list is very important. Handlers should be listed in from314
134# least- to most-specific URL matching.315def install_remote(source, *args, **kwargs):
135FETCH_HANDLERS = (
136 'charmhelpers.fetch.archiveurl.ArchiveUrlFetchHandler',
137 'charmhelpers.fetch.bzrurl.BzrUrlFetchHandler',
138)
139
140
141class UnhandledSource(Exception):
142 pass
143
144
145def install_remote(source):
146 """316 """
147 Install a file tree from a remote source317 Install a file tree from a remote source
148318
149 The specified source should be a url of the form:319 The specified source should be a url of the form:
150 scheme://[host]/path[#[option=value][&...]]320 scheme://[host]/path[#[option=value][&...]]
151321
 152 Schemes supported are based on this modules submodules322 Schemes supported are based on this module's submodules.
153 Options supported are submodule-specific"""323 Options supported are submodule-specific.
324 Additional arguments are passed through to the submodule.
325
326 For example::
327
328 dest = install_remote('http://example.com/archive.tgz',
329 checksum='deadbeef',
330 hash_type='sha1')
331
332 This will download `archive.tgz`, validate it using SHA1 and, if
333 the file is ok, extract it and return the directory in which it
334 was extracted. If the checksum fails, it will raise
335 :class:`charmhelpers.core.host.ChecksumError`.
336 """
154 # We ONLY check for True here because can_handle may return a string337 # We ONLY check for True here because can_handle may return a string
155 # explaining why it can't handle a given source.338 # explaining why it can't handle a given source.
156 handlers = [h for h in plugins() if h.can_handle(source) is True]339 handlers = [h for h in plugins() if h.can_handle(source) is True]
157 installed_to = None340 installed_to = None
158 for handler in handlers:341 for handler in handlers:
159 try:342 try:
160 installed_to = handler.install(source)343 installed_to = handler.install(source, *args, **kwargs)
161 except UnhandledSource:344 except UnhandledSource:
162 pass345 pass
163 if not installed_to:346 if not installed_to:
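
configure_sources(), reworked in the hunk above, now accepts a single source as
a bare string and tolerates missing keys. A hedged sketch of how a charm would
drive it; the PPA and cloud pocket below are placeholders, not values this
charm ships:

    from charmhelpers.fetch import add_source, configure_sources

    # Equivalent to charm config along these (placeholder) lines:
    #   install_sources: |
    #     - "ppa:example/tools"
    #     - "cloud:icehouse"
    #   install_keys: |
    #     - null
    #     - null
    # The YAML fragments are parsed, each source is paired with its key and
    # passed to add_source(), then apt-get update runs because update=True.
    configure_sources(update=True)

    # add_source() can also be called directly; 'cloud:' and 'proposed'
    # entries expand to the pocket definitions listed earlier in this file.
    add_source('cloud:icehouse')
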
@@ -171,28 +354,6 @@
171 return install_remote(source)354 return install_remote(source)
172355
173356
174class BaseFetchHandler(object):
175 """Base class for FetchHandler implementations in fetch plugins"""
176 def can_handle(self, source):
177 """Returns True if the source can be handled. Otherwise returns
178 a string explaining why it cannot"""
179 return "Wrong source type"
180
181 def install(self, source):
182 """Try to download and unpack the source. Return the path to the
183 unpacked files or raise UnhandledSource."""
184 raise UnhandledSource("Wrong source type {}".format(source))
185
186 def parse_url(self, url):
187 return urlparse(url)
188
189 def base_url(self, url):
190 """Return url without querystring or fragment"""
191 parts = list(self.parse_url(url))
192 parts[4:] = ['' for i in parts[4:]]
193 return urlunparse(parts)
194
195
196def plugins(fetch_handlers=None):357def plugins(fetch_handlers=None):
197 if not fetch_handlers:358 if not fetch_handlers:
198 fetch_handlers = FETCH_HANDLERS359 fetch_handlers = FETCH_HANDLERS
@@ -200,10 +361,50 @@
200 for handler_name in fetch_handlers:361 for handler_name in fetch_handlers:
201 package, classname = handler_name.rsplit('.', 1)362 package, classname = handler_name.rsplit('.', 1)
202 try:363 try:
203 handler_class = getattr(importlib.import_module(package), classname)364 handler_class = getattr(
365 importlib.import_module(package),
366 classname)
204 plugin_list.append(handler_class())367 plugin_list.append(handler_class())
205 except (ImportError, AttributeError):368 except (ImportError, AttributeError):
 206 # Skip missing plugins so that they can be omitted from369 # Skip missing plugins so that they can be omitted from
207 # installation if desired370 # installation if desired
208 log("FetchHandler {} not found, skipping plugin".format(handler_name))371 log("FetchHandler {} not found, skipping plugin".format(
372 handler_name))
209 return plugin_list373 return plugin_list
374
375
376def _run_apt_command(cmd, fatal=False):
377 """
 378 Run an APT command, checking the exit status and retrying if the fatal
 379 flag is set to True.
 380
 381 :param cmd: list: The apt command to run.
 382 :param fatal: bool: Whether the command's exit status should be checked
 383 and the command retried while the apt/dpkg lock is unavailable.
384 """
385 env = os.environ.copy()
386
387 if 'DEBIAN_FRONTEND' not in env:
388 env['DEBIAN_FRONTEND'] = 'noninteractive'
389
390 if fatal:
391 retry_count = 0
392 result = None
393
394 # If the command is considered "fatal", we need to retry if the apt
395 # lock was not acquired.
396
397 while result is None or result == APT_NO_LOCK:
398 try:
399 result = subprocess.check_call(cmd, env=env)
 400 except subprocess.CalledProcessError as e:
401 retry_count = retry_count + 1
402 if retry_count > APT_NO_LOCK_RETRY_COUNT:
403 raise
404 result = e.returncode
405 log("Couldn't acquire DPKG lock. Will retry in {} seconds."
406 "".format(APT_NO_LOCK_RETRY_DELAY))
407 time.sleep(APT_NO_LOCK_RETRY_DELAY)
408
409 else:
410 subprocess.call(cmd, env=env)
210411
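
The retry loop in _run_apt_command() only engages when fatal=True; non-fatal
calls keep the old single subprocess.call(). From a hook's point of view, with
the module defaults above (APT_NO_LOCK=100, 10-second delay, 30 retries), the
difference looks like this; the package name is illustrative:

    from charmhelpers.fetch import apt_install

    # fatal=True: the exit status is checked, and an exit status of 100
    # (APT_NO_LOCK) is retried every 10 seconds, up to 30 times, before the
    # CalledProcessError is finally re-raised.
    apt_install(['ntp'], fatal=True)

    # fatal=False (the default): run once via subprocess.call(); failures
    # are not raised to the caller.
    apt_install(['ntp'])
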
=== modified file 'hooks/charmhelpers/fetch/archiveurl.py'
--- hooks/charmhelpers/fetch/archiveurl.py 2013-08-29 18:41:54 +0000
+++ hooks/charmhelpers/fetch/archiveurl.py 2014-09-26 08:01:25 +0000
@@ -1,5 +1,9 @@
1import os1import os
2import urllib22import urllib2
3from urllib import urlretrieve
4import urlparse
5import hashlib
6
3from charmhelpers.fetch import (7from charmhelpers.fetch import (
4 BaseFetchHandler,8 BaseFetchHandler,
5 UnhandledSource9 UnhandledSource
@@ -8,11 +12,19 @@
8 get_archive_handler,12 get_archive_handler,
9 extract,13 extract,
10)14)
11from charmhelpers.core.host import mkdir15from charmhelpers.core.host import mkdir, check_hash
1216
1317
14class ArchiveUrlFetchHandler(BaseFetchHandler):18class ArchiveUrlFetchHandler(BaseFetchHandler):
15 """Handler for archives via generic URLs"""19 """
20 Handler to download archive files from arbitrary URLs.
21
22 Can fetch from http, https, ftp, and file URLs.
23
24 Can install either tarballs (.tar, .tgz, .tbz2, etc) or zip files.
25
26 Installs the contents of the archive in $CHARM_DIR/fetched/.
27 """
16 def can_handle(self, source):28 def can_handle(self, source):
17 url_parts = self.parse_url(source)29 url_parts = self.parse_url(source)
18 if url_parts.scheme not in ('http', 'https', 'ftp', 'file'):30 if url_parts.scheme not in ('http', 'https', 'ftp', 'file'):
@@ -22,8 +34,27 @@
22 return False34 return False
2335
24 def download(self, source, dest):36 def download(self, source, dest):
37 """
38 Download an archive file.
39
40 :param str source: URL pointing to an archive file.
41 :param str dest: Local path location to download archive file to.
42 """
 25 # propagate all exceptions43 # propagate all exceptions
26 # URLError, OSError, etc44 # URLError, OSError, etc
45 proto, netloc, path, params, query, fragment = urlparse.urlparse(source)
46 if proto in ('http', 'https'):
47 auth, barehost = urllib2.splituser(netloc)
48 if auth is not None:
49 source = urlparse.urlunparse((proto, barehost, path, params, query, fragment))
50 username, password = urllib2.splitpasswd(auth)
51 passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
52 # Realm is set to None in add_password to force the username and password
 53 # to be used regardless of the realm
54 passman.add_password(None, source, username, password)
55 authhandler = urllib2.HTTPBasicAuthHandler(passman)
56 opener = urllib2.build_opener(authhandler)
57 urllib2.install_opener(opener)
27 response = urllib2.urlopen(source)58 response = urllib2.urlopen(source)
28 try:59 try:
29 with open(dest, 'w') as dest_file:60 with open(dest, 'w') as dest_file:
@@ -33,7 +64,30 @@
33 os.unlink(dest)64 os.unlink(dest)
34 raise e65 raise e
3566
36 def install(self, source):67 # Mandatory file validation via Sha1 or MD5 hashing.
68 def download_and_validate(self, url, hashsum, validate="sha1"):
69 tempfile, headers = urlretrieve(url)
70 check_hash(tempfile, hashsum, validate)
71 return tempfile
72
73 def install(self, source, dest=None, checksum=None, hash_type='sha1'):
74 """
75 Download and install an archive file, with optional checksum validation.
76
77 The checksum can also be given on the `source` URL's fragment.
78 For example::
79
80 handler.install('http://example.com/file.tgz#sha1=deadbeef')
81
82 :param str source: URL pointing to an archive file.
83 :param str dest: Local destination path to install to. If not given,
84 installs to `$CHARM_DIR/archives/archive_file_name`.
85 :param str checksum: If given, validate the archive file after download.
86 :param str hash_type: Algorithm used to generate `checksum`.
 87 Can be any hash algorithm supported by :mod:`hashlib`,
88 such as md5, sha1, sha256, sha512, etc.
89
90 """
37 url_parts = self.parse_url(source)91 url_parts = self.parse_url(source)
38 dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched')92 dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched')
39 if not os.path.exists(dest_dir):93 if not os.path.exists(dest_dir):
@@ -45,4 +99,10 @@
45 raise UnhandledSource(e.reason)99 raise UnhandledSource(e.reason)
46 except OSError as e:100 except OSError as e:
47 raise UnhandledSource(e.strerror)101 raise UnhandledSource(e.strerror)
48 return extract(dld_file)102 options = urlparse.parse_qs(url_parts.fragment)
103 for key, value in options.items():
104 if key in hashlib.algorithms:
105 check_hash(dld_file, value, key)
106 if checksum:
107 check_hash(dld_file, checksum, hash_type)
108 return extract(dld_file, dest)
49109
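
The new checksum support in ArchiveUrlFetchHandler can be exercised through
install_remote() either with explicit keyword arguments or with the digest
carried in the URL fragment, as the docstrings above describe. A small sketch;
the URL and digest are placeholders:

    from charmhelpers.fetch import install_remote

    # Explicit checksum: raises charmhelpers.core.host.ChecksumError if the
    # downloaded file does not match.
    path = install_remote('http://example.com/payload.tgz',
                          checksum='da39a3ee5e6b4b0d3255bfef95601890afd80709',
                          hash_type='sha1')

    # Same thing with the digest in the fragment; any algorithm known to
    # hashlib can be used as the key.
    path = install_remote(
        'http://example.com/payload.tgz'
        '#sha1=da39a3ee5e6b4b0d3255bfef95601890afd80709')
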
=== modified file 'hooks/charmhelpers/fetch/bzrurl.py'
--- hooks/charmhelpers/fetch/bzrurl.py 2013-08-29 18:41:54 +0000
+++ hooks/charmhelpers/fetch/bzrurl.py 2014-09-26 08:01:25 +0000
@@ -12,6 +12,7 @@
12 apt_install("python-bzrlib")12 apt_install("python-bzrlib")
13 from bzrlib.branch import Branch13 from bzrlib.branch import Branch
1414
15
15class BzrUrlFetchHandler(BaseFetchHandler):16class BzrUrlFetchHandler(BaseFetchHandler):
16 """Handler for bazaar branches via generic and lp URLs"""17 """Handler for bazaar branches via generic and lp URLs"""
17 def can_handle(self, source):18 def can_handle(self, source):
@@ -38,7 +39,8 @@
38 def install(self, source):39 def install(self, source):
39 url_parts = self.parse_url(source)40 url_parts = self.parse_url(source)
40 branch_name = url_parts.path.strip("/").split("/")[-1]41 branch_name = url_parts.path.strip("/").split("/")[-1]
41 dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", branch_name)42 dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched",
43 branch_name)
42 if not os.path.exists(dest_dir):44 if not os.path.exists(dest_dir):
43 mkdir(dest_dir, perms=0755)45 mkdir(dest_dir, perms=0755)
44 try:46 try:
@@ -46,4 +48,3 @@
46 except OSError as e:48 except OSError as e:
47 raise UnhandledSource(e.strerror)49 raise UnhandledSource(e.strerror)
48 return dest_dir50 return dest_dir
49
