Merge lp:~tribaal/charms/trusty/ntpmaster/resync-charm-helpers into lp:charms/trusty/ntpmaster
- Trusty Tahr (14.04)
- resync-charm-helpers
- Merge into trunk
Proposed by
Chris Glass
Status: | Merged |
---|---|
Merged at revision: | 11 |
Proposed branch: | lp:~tribaal/charms/trusty/ntpmaster/resync-charm-helpers |
Merge into: | lp:charms/trusty/ntpmaster |
Diff against target: |
2155 lines (+1453/-133) 12 files modified
.bzrignore (+1/-0) Makefile (+8/-2) hooks/charmhelpers/core/fstab.py (+116/-0) hooks/charmhelpers/core/hookenv.py (+215/-29) hooks/charmhelpers/core/host.py (+167/-23) hooks/charmhelpers/core/services/__init__.py (+2/-0) hooks/charmhelpers/core/services/base.py (+313/-0) hooks/charmhelpers/core/services/helpers.py (+239/-0) hooks/charmhelpers/core/templating.py (+51/-0) hooks/charmhelpers/fetch/__init__.py (+274/-73) hooks/charmhelpers/fetch/archiveurl.py (+64/-4) hooks/charmhelpers/fetch/bzrurl.py (+3/-2) |
To merge this branch: | bzr merge lp:~tribaal/charms/trusty/ntpmaster/resync-charm-helpers |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
David Britton (community) | Approve | ||
Review via email: mp+236071@code.launchpad.net |
Commit message
Description of the change
This branch resyncs charm-helpers to pull in a fix where the apt cache was not built in-memory and was causing some locking race conditions (see the related charm-helpers bug report — the URL was truncated in the original page).
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === added file '.bzrignore' | |||
2 | --- .bzrignore 1970-01-01 00:00:00 +0000 | |||
3 | +++ .bzrignore 2014-09-26 08:01:25 +0000 | |||
4 | @@ -0,0 +1,1 @@ | |||
5 | 1 | bin/ | ||
6 | 0 | 2 | ||
7 | === modified file 'Makefile' | |||
8 | --- Makefile 2014-08-11 08:11:00 +0000 | |||
9 | +++ Makefile 2014-09-26 08:01:25 +0000 | |||
10 | @@ -1,11 +1,17 @@ | |||
11 | 1 | #!/usr/bin/make | 1 | #!/usr/bin/make |
12 | 2 | PYTHON := /usr/bin/env python | ||
13 | 2 | 3 | ||
14 | 3 | lint: | 4 | lint: |
15 | 4 | @flake8 --exclude hooks/charmhelpers hooks | 5 | @flake8 --exclude hooks/charmhelpers hooks |
16 | 5 | @charm proof | 6 | @charm proof |
17 | 6 | 7 | ||
20 | 7 | sync: | 8 | bin/charm_helpers_sync.py: |
21 | 8 | @charm-helper-sync -c charm-helpers-sync.yaml | 9 | @mkdir -p bin |
22 | 10 | @bzr cat lp:charm-helpers/tools/charm_helpers_sync/charm_helpers_sync.py \ | ||
23 | 11 | > bin/charm_helpers_sync.py | ||
24 | 12 | |||
25 | 13 | sync: bin/charm_helpers_sync.py | ||
26 | 14 | $(PYTHON) bin/charm_helpers_sync.py -c charm-helpers-sync.yaml | ||
27 | 9 | 15 | ||
28 | 10 | publish: lint | 16 | publish: lint |
29 | 11 | bzr push lp:charms/ntpmaster | 17 | bzr push lp:charms/ntpmaster |
30 | 12 | 18 | ||
31 | === added file 'hooks/charmhelpers/core/fstab.py' | |||
32 | --- hooks/charmhelpers/core/fstab.py 1970-01-01 00:00:00 +0000 | |||
33 | +++ hooks/charmhelpers/core/fstab.py 2014-09-26 08:01:25 +0000 | |||
34 | @@ -0,0 +1,116 @@ | |||
35 | 1 | #!/usr/bin/env python | ||
36 | 2 | # -*- coding: utf-8 -*- | ||
37 | 3 | |||
38 | 4 | __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>' | ||
39 | 5 | |||
40 | 6 | import os | ||
41 | 7 | |||
42 | 8 | |||
43 | 9 | class Fstab(file): | ||
44 | 10 | """This class extends file in order to implement a file reader/writer | ||
45 | 11 | for file `/etc/fstab` | ||
46 | 12 | """ | ||
47 | 13 | |||
48 | 14 | class Entry(object): | ||
49 | 15 | """Entry class represents a non-comment line on the `/etc/fstab` file | ||
50 | 16 | """ | ||
51 | 17 | def __init__(self, device, mountpoint, filesystem, | ||
52 | 18 | options, d=0, p=0): | ||
53 | 19 | self.device = device | ||
54 | 20 | self.mountpoint = mountpoint | ||
55 | 21 | self.filesystem = filesystem | ||
56 | 22 | |||
57 | 23 | if not options: | ||
58 | 24 | options = "defaults" | ||
59 | 25 | |||
60 | 26 | self.options = options | ||
61 | 27 | self.d = d | ||
62 | 28 | self.p = p | ||
63 | 29 | |||
64 | 30 | def __eq__(self, o): | ||
65 | 31 | return str(self) == str(o) | ||
66 | 32 | |||
67 | 33 | def __str__(self): | ||
68 | 34 | return "{} {} {} {} {} {}".format(self.device, | ||
69 | 35 | self.mountpoint, | ||
70 | 36 | self.filesystem, | ||
71 | 37 | self.options, | ||
72 | 38 | self.d, | ||
73 | 39 | self.p) | ||
74 | 40 | |||
75 | 41 | DEFAULT_PATH = os.path.join(os.path.sep, 'etc', 'fstab') | ||
76 | 42 | |||
77 | 43 | def __init__(self, path=None): | ||
78 | 44 | if path: | ||
79 | 45 | self._path = path | ||
80 | 46 | else: | ||
81 | 47 | self._path = self.DEFAULT_PATH | ||
82 | 48 | file.__init__(self, self._path, 'r+') | ||
83 | 49 | |||
84 | 50 | def _hydrate_entry(self, line): | ||
85 | 51 | # NOTE: use split with no arguments to split on any | ||
86 | 52 | # whitespace including tabs | ||
87 | 53 | return Fstab.Entry(*filter( | ||
88 | 54 | lambda x: x not in ('', None), | ||
89 | 55 | line.strip("\n").split())) | ||
90 | 56 | |||
91 | 57 | @property | ||
92 | 58 | def entries(self): | ||
93 | 59 | self.seek(0) | ||
94 | 60 | for line in self.readlines(): | ||
95 | 61 | try: | ||
96 | 62 | if not line.startswith("#"): | ||
97 | 63 | yield self._hydrate_entry(line) | ||
98 | 64 | except ValueError: | ||
99 | 65 | pass | ||
100 | 66 | |||
101 | 67 | def get_entry_by_attr(self, attr, value): | ||
102 | 68 | for entry in self.entries: | ||
103 | 69 | e_attr = getattr(entry, attr) | ||
104 | 70 | if e_attr == value: | ||
105 | 71 | return entry | ||
106 | 72 | return None | ||
107 | 73 | |||
108 | 74 | def add_entry(self, entry): | ||
109 | 75 | if self.get_entry_by_attr('device', entry.device): | ||
110 | 76 | return False | ||
111 | 77 | |||
112 | 78 | self.write(str(entry) + '\n') | ||
113 | 79 | self.truncate() | ||
114 | 80 | return entry | ||
115 | 81 | |||
116 | 82 | def remove_entry(self, entry): | ||
117 | 83 | self.seek(0) | ||
118 | 84 | |||
119 | 85 | lines = self.readlines() | ||
120 | 86 | |||
121 | 87 | found = False | ||
122 | 88 | for index, line in enumerate(lines): | ||
123 | 89 | if not line.startswith("#"): | ||
124 | 90 | if self._hydrate_entry(line) == entry: | ||
125 | 91 | found = True | ||
126 | 92 | break | ||
127 | 93 | |||
128 | 94 | if not found: | ||
129 | 95 | return False | ||
130 | 96 | |||
131 | 97 | lines.remove(line) | ||
132 | 98 | |||
133 | 99 | self.seek(0) | ||
134 | 100 | self.write(''.join(lines)) | ||
135 | 101 | self.truncate() | ||
136 | 102 | return True | ||
137 | 103 | |||
138 | 104 | @classmethod | ||
139 | 105 | def remove_by_mountpoint(cls, mountpoint, path=None): | ||
140 | 106 | fstab = cls(path=path) | ||
141 | 107 | entry = fstab.get_entry_by_attr('mountpoint', mountpoint) | ||
142 | 108 | if entry: | ||
143 | 109 | return fstab.remove_entry(entry) | ||
144 | 110 | return False | ||
145 | 111 | |||
146 | 112 | @classmethod | ||
147 | 113 | def add(cls, device, mountpoint, filesystem, options=None, path=None): | ||
148 | 114 | return cls(path=path).add_entry(Fstab.Entry(device, | ||
149 | 115 | mountpoint, filesystem, | ||
150 | 116 | options=options)) | ||
151 | 0 | 117 | ||
152 | === modified file 'hooks/charmhelpers/core/hookenv.py' | |||
153 | --- hooks/charmhelpers/core/hookenv.py 2013-08-29 18:39:36 +0000 | |||
154 | +++ hooks/charmhelpers/core/hookenv.py 2014-09-26 08:01:25 +0000 | |||
155 | @@ -8,7 +8,9 @@ | |||
156 | 8 | import json | 8 | import json |
157 | 9 | import yaml | 9 | import yaml |
158 | 10 | import subprocess | 10 | import subprocess |
159 | 11 | import sys | ||
160 | 11 | import UserDict | 12 | import UserDict |
161 | 13 | from subprocess import CalledProcessError | ||
162 | 12 | 14 | ||
163 | 13 | CRITICAL = "CRITICAL" | 15 | CRITICAL = "CRITICAL" |
164 | 14 | ERROR = "ERROR" | 16 | ERROR = "ERROR" |
165 | @@ -21,9 +23,9 @@ | |||
166 | 21 | 23 | ||
167 | 22 | 24 | ||
168 | 23 | def cached(func): | 25 | def cached(func): |
170 | 24 | ''' Cache return values for multiple executions of func + args | 26 | """Cache return values for multiple executions of func + args |
171 | 25 | 27 | ||
173 | 26 | For example: | 28 | For example:: |
174 | 27 | 29 | ||
175 | 28 | @cached | 30 | @cached |
176 | 29 | def unit_get(attribute): | 31 | def unit_get(attribute): |
177 | @@ -32,7 +34,7 @@ | |||
178 | 32 | unit_get('test') | 34 | unit_get('test') |
179 | 33 | 35 | ||
180 | 34 | will cache the result of unit_get + 'test' for future calls. | 36 | will cache the result of unit_get + 'test' for future calls. |
182 | 35 | ''' | 37 | """ |
183 | 36 | def wrapper(*args, **kwargs): | 38 | def wrapper(*args, **kwargs): |
184 | 37 | global cache | 39 | global cache |
185 | 38 | key = str((func, args, kwargs)) | 40 | key = str((func, args, kwargs)) |
186 | @@ -46,8 +48,8 @@ | |||
187 | 46 | 48 | ||
188 | 47 | 49 | ||
189 | 48 | def flush(key): | 50 | def flush(key): |
192 | 49 | ''' Flushes any entries from function cache where the | 51 | """Flushes any entries from function cache where the |
193 | 50 | key is found in the function+args ''' | 52 | key is found in the function+args """ |
194 | 51 | flush_list = [] | 53 | flush_list = [] |
195 | 52 | for item in cache: | 54 | for item in cache: |
196 | 53 | if key in item: | 55 | if key in item: |
197 | @@ -57,7 +59,7 @@ | |||
198 | 57 | 59 | ||
199 | 58 | 60 | ||
200 | 59 | def log(message, level=None): | 61 | def log(message, level=None): |
202 | 60 | "Write a message to the juju log" | 62 | """Write a message to the juju log""" |
203 | 61 | command = ['juju-log'] | 63 | command = ['juju-log'] |
204 | 62 | if level: | 64 | if level: |
205 | 63 | command += ['-l', level] | 65 | command += ['-l', level] |
206 | @@ -66,7 +68,7 @@ | |||
207 | 66 | 68 | ||
208 | 67 | 69 | ||
209 | 68 | class Serializable(UserDict.IterableUserDict): | 70 | class Serializable(UserDict.IterableUserDict): |
211 | 69 | "Wrapper, an object that can be serialized to yaml or json" | 71 | """Wrapper, an object that can be serialized to yaml or json""" |
212 | 70 | 72 | ||
213 | 71 | def __init__(self, obj): | 73 | def __init__(self, obj): |
214 | 72 | # wrap the object | 74 | # wrap the object |
215 | @@ -96,11 +98,11 @@ | |||
216 | 96 | self.data = state | 98 | self.data = state |
217 | 97 | 99 | ||
218 | 98 | def json(self): | 100 | def json(self): |
220 | 99 | "Serialize the object to json" | 101 | """Serialize the object to json""" |
221 | 100 | return json.dumps(self.data) | 102 | return json.dumps(self.data) |
222 | 101 | 103 | ||
223 | 102 | def yaml(self): | 104 | def yaml(self): |
225 | 103 | "Serialize the object to yaml" | 105 | """Serialize the object to yaml""" |
226 | 104 | return yaml.dump(self.data) | 106 | return yaml.dump(self.data) |
227 | 105 | 107 | ||
228 | 106 | 108 | ||
229 | @@ -119,50 +121,174 @@ | |||
230 | 119 | 121 | ||
231 | 120 | 122 | ||
232 | 121 | def in_relation_hook(): | 123 | def in_relation_hook(): |
234 | 122 | "Determine whether we're running in a relation hook" | 124 | """Determine whether we're running in a relation hook""" |
235 | 123 | return 'JUJU_RELATION' in os.environ | 125 | return 'JUJU_RELATION' in os.environ |
236 | 124 | 126 | ||
237 | 125 | 127 | ||
238 | 126 | def relation_type(): | 128 | def relation_type(): |
240 | 127 | "The scope for the current relation hook" | 129 | """The scope for the current relation hook""" |
241 | 128 | return os.environ.get('JUJU_RELATION', None) | 130 | return os.environ.get('JUJU_RELATION', None) |
242 | 129 | 131 | ||
243 | 130 | 132 | ||
244 | 131 | def relation_id(): | 133 | def relation_id(): |
246 | 132 | "The relation ID for the current relation hook" | 134 | """The relation ID for the current relation hook""" |
247 | 133 | return os.environ.get('JUJU_RELATION_ID', None) | 135 | return os.environ.get('JUJU_RELATION_ID', None) |
248 | 134 | 136 | ||
249 | 135 | 137 | ||
250 | 136 | def local_unit(): | 138 | def local_unit(): |
252 | 137 | "Local unit ID" | 139 | """Local unit ID""" |
253 | 138 | return os.environ['JUJU_UNIT_NAME'] | 140 | return os.environ['JUJU_UNIT_NAME'] |
254 | 139 | 141 | ||
255 | 140 | 142 | ||
256 | 141 | def remote_unit(): | 143 | def remote_unit(): |
258 | 142 | "The remote unit for the current relation hook" | 144 | """The remote unit for the current relation hook""" |
259 | 143 | return os.environ['JUJU_REMOTE_UNIT'] | 145 | return os.environ['JUJU_REMOTE_UNIT'] |
260 | 144 | 146 | ||
261 | 145 | 147 | ||
262 | 146 | def service_name(): | 148 | def service_name(): |
264 | 147 | "The name service group this unit belongs to" | 149 | """The name service group this unit belongs to""" |
265 | 148 | return local_unit().split('/')[0] | 150 | return local_unit().split('/')[0] |
266 | 149 | 151 | ||
267 | 150 | 152 | ||
268 | 153 | def hook_name(): | ||
269 | 154 | """The name of the currently executing hook""" | ||
270 | 155 | return os.path.basename(sys.argv[0]) | ||
271 | 156 | |||
272 | 157 | |||
273 | 158 | class Config(dict): | ||
274 | 159 | """A dictionary representation of the charm's config.yaml, with some | ||
275 | 160 | extra features: | ||
276 | 161 | |||
277 | 162 | - See which values in the dictionary have changed since the previous hook. | ||
278 | 163 | - For values that have changed, see what the previous value was. | ||
279 | 164 | - Store arbitrary data for use in a later hook. | ||
280 | 165 | |||
281 | 166 | NOTE: Do not instantiate this object directly - instead call | ||
282 | 167 | ``hookenv.config()``, which will return an instance of :class:`Config`. | ||
283 | 168 | |||
284 | 169 | Example usage:: | ||
285 | 170 | |||
286 | 171 | >>> # inside a hook | ||
287 | 172 | >>> from charmhelpers.core import hookenv | ||
288 | 173 | >>> config = hookenv.config() | ||
289 | 174 | >>> config['foo'] | ||
290 | 175 | 'bar' | ||
291 | 176 | >>> # store a new key/value for later use | ||
292 | 177 | >>> config['mykey'] = 'myval' | ||
293 | 178 | |||
294 | 179 | |||
295 | 180 | >>> # user runs `juju set mycharm foo=baz` | ||
296 | 181 | >>> # now we're inside subsequent config-changed hook | ||
297 | 182 | >>> config = hookenv.config() | ||
298 | 183 | >>> config['foo'] | ||
299 | 184 | 'baz' | ||
300 | 185 | >>> # test to see if this val has changed since last hook | ||
301 | 186 | >>> config.changed('foo') | ||
302 | 187 | True | ||
303 | 188 | >>> # what was the previous value? | ||
304 | 189 | >>> config.previous('foo') | ||
305 | 190 | 'bar' | ||
306 | 191 | >>> # keys/values that we add are preserved across hooks | ||
307 | 192 | >>> config['mykey'] | ||
308 | 193 | 'myval' | ||
309 | 194 | |||
310 | 195 | """ | ||
311 | 196 | CONFIG_FILE_NAME = '.juju-persistent-config' | ||
312 | 197 | |||
313 | 198 | def __init__(self, *args, **kw): | ||
314 | 199 | super(Config, self).__init__(*args, **kw) | ||
315 | 200 | self.implicit_save = True | ||
316 | 201 | self._prev_dict = None | ||
317 | 202 | self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME) | ||
318 | 203 | if os.path.exists(self.path): | ||
319 | 204 | self.load_previous() | ||
320 | 205 | |||
321 | 206 | def __getitem__(self, key): | ||
322 | 207 | """For regular dict lookups, check the current juju config first, | ||
323 | 208 | then the previous (saved) copy. This ensures that user-saved values | ||
324 | 209 | will be returned by a dict lookup. | ||
325 | 210 | |||
326 | 211 | """ | ||
327 | 212 | try: | ||
328 | 213 | return dict.__getitem__(self, key) | ||
329 | 214 | except KeyError: | ||
330 | 215 | return (self._prev_dict or {})[key] | ||
331 | 216 | |||
332 | 217 | def load_previous(self, path=None): | ||
333 | 218 | """Load previous copy of config from disk. | ||
334 | 219 | |||
335 | 220 | In normal usage you don't need to call this method directly - it | ||
336 | 221 | is called automatically at object initialization. | ||
337 | 222 | |||
338 | 223 | :param path: | ||
339 | 224 | |||
340 | 225 | File path from which to load the previous config. If `None`, | ||
341 | 226 | config is loaded from the default location. If `path` is | ||
342 | 227 | specified, subsequent `save()` calls will write to the same | ||
343 | 228 | path. | ||
344 | 229 | |||
345 | 230 | """ | ||
346 | 231 | self.path = path or self.path | ||
347 | 232 | with open(self.path) as f: | ||
348 | 233 | self._prev_dict = json.load(f) | ||
349 | 234 | |||
350 | 235 | def changed(self, key): | ||
351 | 236 | """Return True if the current value for this key is different from | ||
352 | 237 | the previous value. | ||
353 | 238 | |||
354 | 239 | """ | ||
355 | 240 | if self._prev_dict is None: | ||
356 | 241 | return True | ||
357 | 242 | return self.previous(key) != self.get(key) | ||
358 | 243 | |||
359 | 244 | def previous(self, key): | ||
360 | 245 | """Return previous value for this key, or None if there | ||
361 | 246 | is no previous value. | ||
362 | 247 | |||
363 | 248 | """ | ||
364 | 249 | if self._prev_dict: | ||
365 | 250 | return self._prev_dict.get(key) | ||
366 | 251 | return None | ||
367 | 252 | |||
368 | 253 | def save(self): | ||
369 | 254 | """Save this config to disk. | ||
370 | 255 | |||
371 | 256 | If the charm is using the :mod:`Services Framework <services.base>` | ||
372 | 257 | or :meth:'@hook <Hooks.hook>' decorator, this | ||
373 | 258 | is called automatically at the end of successful hook execution. | ||
374 | 259 | Otherwise, it should be called directly by user code. | ||
375 | 260 | |||
376 | 261 | To disable automatic saves, set ``implicit_save=False`` on this | ||
377 | 262 | instance. | ||
378 | 263 | |||
379 | 264 | """ | ||
380 | 265 | if self._prev_dict: | ||
381 | 266 | for k, v in self._prev_dict.iteritems(): | ||
382 | 267 | if k not in self: | ||
383 | 268 | self[k] = v | ||
384 | 269 | with open(self.path, 'w') as f: | ||
385 | 270 | json.dump(self, f) | ||
386 | 271 | |||
387 | 272 | |||
388 | 151 | @cached | 273 | @cached |
389 | 152 | def config(scope=None): | 274 | def config(scope=None): |
391 | 153 | "Juju charm configuration" | 275 | """Juju charm configuration""" |
392 | 154 | config_cmd_line = ['config-get'] | 276 | config_cmd_line = ['config-get'] |
393 | 155 | if scope is not None: | 277 | if scope is not None: |
394 | 156 | config_cmd_line.append(scope) | 278 | config_cmd_line.append(scope) |
395 | 157 | config_cmd_line.append('--format=json') | 279 | config_cmd_line.append('--format=json') |
396 | 158 | try: | 280 | try: |
398 | 159 | return json.loads(subprocess.check_output(config_cmd_line)) | 281 | config_data = json.loads(subprocess.check_output(config_cmd_line)) |
399 | 282 | if scope is not None: | ||
400 | 283 | return config_data | ||
401 | 284 | return Config(config_data) | ||
402 | 160 | except ValueError: | 285 | except ValueError: |
403 | 161 | return None | 286 | return None |
404 | 162 | 287 | ||
405 | 163 | 288 | ||
406 | 164 | @cached | 289 | @cached |
407 | 165 | def relation_get(attribute=None, unit=None, rid=None): | 290 | def relation_get(attribute=None, unit=None, rid=None): |
408 | 291 | """Get relation information""" | ||
409 | 166 | _args = ['relation-get', '--format=json'] | 292 | _args = ['relation-get', '--format=json'] |
410 | 167 | if rid: | 293 | if rid: |
411 | 168 | _args.append('-r') | 294 | _args.append('-r') |
412 | @@ -174,9 +300,15 @@ | |||
413 | 174 | return json.loads(subprocess.check_output(_args)) | 300 | return json.loads(subprocess.check_output(_args)) |
414 | 175 | except ValueError: | 301 | except ValueError: |
415 | 176 | return None | 302 | return None |
419 | 177 | 303 | except CalledProcessError, e: | |
420 | 178 | 304 | if e.returncode == 2: | |
421 | 179 | def relation_set(relation_id=None, relation_settings={}, **kwargs): | 305 | return None |
422 | 306 | raise | ||
423 | 307 | |||
424 | 308 | |||
425 | 309 | def relation_set(relation_id=None, relation_settings=None, **kwargs): | ||
426 | 310 | """Set relation information for the current unit""" | ||
427 | 311 | relation_settings = relation_settings if relation_settings else {} | ||
428 | 180 | relation_cmd_line = ['relation-set'] | 312 | relation_cmd_line = ['relation-set'] |
429 | 181 | if relation_id is not None: | 313 | if relation_id is not None: |
430 | 182 | relation_cmd_line.extend(('-r', relation_id)) | 314 | relation_cmd_line.extend(('-r', relation_id)) |
431 | @@ -192,7 +324,7 @@ | |||
432 | 192 | 324 | ||
433 | 193 | @cached | 325 | @cached |
434 | 194 | def relation_ids(reltype=None): | 326 | def relation_ids(reltype=None): |
436 | 195 | "A list of relation_ids" | 327 | """A list of relation_ids""" |
437 | 196 | reltype = reltype or relation_type() | 328 | reltype = reltype or relation_type() |
438 | 197 | relid_cmd_line = ['relation-ids', '--format=json'] | 329 | relid_cmd_line = ['relation-ids', '--format=json'] |
439 | 198 | if reltype is not None: | 330 | if reltype is not None: |
440 | @@ -203,7 +335,7 @@ | |||
441 | 203 | 335 | ||
442 | 204 | @cached | 336 | @cached |
443 | 205 | def related_units(relid=None): | 337 | def related_units(relid=None): |
445 | 206 | "A list of related units" | 338 | """A list of related units""" |
446 | 207 | relid = relid or relation_id() | 339 | relid = relid or relation_id() |
447 | 208 | units_cmd_line = ['relation-list', '--format=json'] | 340 | units_cmd_line = ['relation-list', '--format=json'] |
448 | 209 | if relid is not None: | 341 | if relid is not None: |
449 | @@ -213,7 +345,7 @@ | |||
450 | 213 | 345 | ||
451 | 214 | @cached | 346 | @cached |
452 | 215 | def relation_for_unit(unit=None, rid=None): | 347 | def relation_for_unit(unit=None, rid=None): |
454 | 216 | "Get the json represenation of a unit's relation" | 348 | """Get the json represenation of a unit's relation""" |
455 | 217 | unit = unit or remote_unit() | 349 | unit = unit or remote_unit() |
456 | 218 | relation = relation_get(unit=unit, rid=rid) | 350 | relation = relation_get(unit=unit, rid=rid) |
457 | 219 | for key in relation: | 351 | for key in relation: |
458 | @@ -225,7 +357,7 @@ | |||
459 | 225 | 357 | ||
460 | 226 | @cached | 358 | @cached |
461 | 227 | def relations_for_id(relid=None): | 359 | def relations_for_id(relid=None): |
463 | 228 | "Get relations of a specific relation ID" | 360 | """Get relations of a specific relation ID""" |
464 | 229 | relation_data = [] | 361 | relation_data = [] |
465 | 230 | relid = relid or relation_ids() | 362 | relid = relid or relation_ids() |
466 | 231 | for unit in related_units(relid): | 363 | for unit in related_units(relid): |
467 | @@ -237,7 +369,7 @@ | |||
468 | 237 | 369 | ||
469 | 238 | @cached | 370 | @cached |
470 | 239 | def relations_of_type(reltype=None): | 371 | def relations_of_type(reltype=None): |
472 | 240 | "Get relations of a specific type" | 372 | """Get relations of a specific type""" |
473 | 241 | relation_data = [] | 373 | relation_data = [] |
474 | 242 | reltype = reltype or relation_type() | 374 | reltype = reltype or relation_type() |
475 | 243 | for relid in relation_ids(reltype): | 375 | for relid in relation_ids(reltype): |
476 | @@ -249,7 +381,7 @@ | |||
477 | 249 | 381 | ||
478 | 250 | @cached | 382 | @cached |
479 | 251 | def relation_types(): | 383 | def relation_types(): |
481 | 252 | "Get a list of relation types supported by this charm" | 384 | """Get a list of relation types supported by this charm""" |
482 | 253 | charmdir = os.environ.get('CHARM_DIR', '') | 385 | charmdir = os.environ.get('CHARM_DIR', '') |
483 | 254 | mdf = open(os.path.join(charmdir, 'metadata.yaml')) | 386 | mdf = open(os.path.join(charmdir, 'metadata.yaml')) |
484 | 255 | md = yaml.safe_load(mdf) | 387 | md = yaml.safe_load(mdf) |
485 | @@ -264,6 +396,7 @@ | |||
486 | 264 | 396 | ||
487 | 265 | @cached | 397 | @cached |
488 | 266 | def relations(): | 398 | def relations(): |
489 | 399 | """Get a nested dictionary of relation data for all related units""" | ||
490 | 267 | rels = {} | 400 | rels = {} |
491 | 268 | for reltype in relation_types(): | 401 | for reltype in relation_types(): |
492 | 269 | relids = {} | 402 | relids = {} |
493 | @@ -277,15 +410,35 @@ | |||
494 | 277 | return rels | 410 | return rels |
495 | 278 | 411 | ||
496 | 279 | 412 | ||
497 | 413 | @cached | ||
498 | 414 | def is_relation_made(relation, keys='private-address'): | ||
499 | 415 | ''' | ||
500 | 416 | Determine whether a relation is established by checking for | ||
501 | 417 | presence of key(s). If a list of keys is provided, they | ||
502 | 418 | must all be present for the relation to be identified as made | ||
503 | 419 | ''' | ||
504 | 420 | if isinstance(keys, str): | ||
505 | 421 | keys = [keys] | ||
506 | 422 | for r_id in relation_ids(relation): | ||
507 | 423 | for unit in related_units(r_id): | ||
508 | 424 | context = {} | ||
509 | 425 | for k in keys: | ||
510 | 426 | context[k] = relation_get(k, rid=r_id, | ||
511 | 427 | unit=unit) | ||
512 | 428 | if None not in context.values(): | ||
513 | 429 | return True | ||
514 | 430 | return False | ||
515 | 431 | |||
516 | 432 | |||
517 | 280 | def open_port(port, protocol="TCP"): | 433 | def open_port(port, protocol="TCP"): |
519 | 281 | "Open a service network port" | 434 | """Open a service network port""" |
520 | 282 | _args = ['open-port'] | 435 | _args = ['open-port'] |
521 | 283 | _args.append('{}/{}'.format(port, protocol)) | 436 | _args.append('{}/{}'.format(port, protocol)) |
522 | 284 | subprocess.check_call(_args) | 437 | subprocess.check_call(_args) |
523 | 285 | 438 | ||
524 | 286 | 439 | ||
525 | 287 | def close_port(port, protocol="TCP"): | 440 | def close_port(port, protocol="TCP"): |
527 | 288 | "Close a service network port" | 441 | """Close a service network port""" |
528 | 289 | _args = ['close-port'] | 442 | _args = ['close-port'] |
529 | 290 | _args.append('{}/{}'.format(port, protocol)) | 443 | _args.append('{}/{}'.format(port, protocol)) |
530 | 291 | subprocess.check_call(_args) | 444 | subprocess.check_call(_args) |
531 | @@ -293,6 +446,7 @@ | |||
532 | 293 | 446 | ||
533 | 294 | @cached | 447 | @cached |
534 | 295 | def unit_get(attribute): | 448 | def unit_get(attribute): |
535 | 449 | """Get the unit ID for the remote unit""" | ||
536 | 296 | _args = ['unit-get', '--format=json', attribute] | 450 | _args = ['unit-get', '--format=json', attribute] |
537 | 297 | try: | 451 | try: |
538 | 298 | return json.loads(subprocess.check_output(_args)) | 452 | return json.loads(subprocess.check_output(_args)) |
539 | @@ -301,29 +455,60 @@ | |||
540 | 301 | 455 | ||
541 | 302 | 456 | ||
542 | 303 | def unit_private_ip(): | 457 | def unit_private_ip(): |
543 | 458 | """Get this unit's private IP address""" | ||
544 | 304 | return unit_get('private-address') | 459 | return unit_get('private-address') |
545 | 305 | 460 | ||
546 | 306 | 461 | ||
547 | 307 | class UnregisteredHookError(Exception): | 462 | class UnregisteredHookError(Exception): |
548 | 463 | """Raised when an undefined hook is called""" | ||
549 | 308 | pass | 464 | pass |
550 | 309 | 465 | ||
551 | 310 | 466 | ||
552 | 311 | class Hooks(object): | 467 | class Hooks(object): |
554 | 312 | def __init__(self): | 468 | """A convenient handler for hook functions. |
555 | 469 | |||
556 | 470 | Example:: | ||
557 | 471 | |||
558 | 472 | hooks = Hooks() | ||
559 | 473 | |||
560 | 474 | # register a hook, taking its name from the function name | ||
561 | 475 | @hooks.hook() | ||
562 | 476 | def install(): | ||
563 | 477 | pass # your code here | ||
564 | 478 | |||
565 | 479 | # register a hook, providing a custom hook name | ||
566 | 480 | @hooks.hook("config-changed") | ||
567 | 481 | def config_changed(): | ||
568 | 482 | pass # your code here | ||
569 | 483 | |||
570 | 484 | if __name__ == "__main__": | ||
571 | 485 | # execute a hook based on the name the program is called by | ||
572 | 486 | hooks.execute(sys.argv) | ||
573 | 487 | """ | ||
574 | 488 | |||
575 | 489 | def __init__(self, config_save=True): | ||
576 | 313 | super(Hooks, self).__init__() | 490 | super(Hooks, self).__init__() |
577 | 314 | self._hooks = {} | 491 | self._hooks = {} |
578 | 492 | self._config_save = config_save | ||
579 | 315 | 493 | ||
580 | 316 | def register(self, name, function): | 494 | def register(self, name, function): |
581 | 495 | """Register a hook""" | ||
582 | 317 | self._hooks[name] = function | 496 | self._hooks[name] = function |
583 | 318 | 497 | ||
584 | 319 | def execute(self, args): | 498 | def execute(self, args): |
585 | 499 | """Execute a registered hook based on args[0]""" | ||
586 | 320 | hook_name = os.path.basename(args[0]) | 500 | hook_name = os.path.basename(args[0]) |
587 | 321 | if hook_name in self._hooks: | 501 | if hook_name in self._hooks: |
588 | 322 | self._hooks[hook_name]() | 502 | self._hooks[hook_name]() |
589 | 503 | if self._config_save: | ||
590 | 504 | cfg = config() | ||
591 | 505 | if cfg.implicit_save: | ||
592 | 506 | cfg.save() | ||
593 | 323 | else: | 507 | else: |
594 | 324 | raise UnregisteredHookError(hook_name) | 508 | raise UnregisteredHookError(hook_name) |
595 | 325 | 509 | ||
596 | 326 | def hook(self, *hook_names): | 510 | def hook(self, *hook_names): |
597 | 511 | """Decorator, registering them as hooks""" | ||
598 | 327 | def wrapper(decorated): | 512 | def wrapper(decorated): |
599 | 328 | for hook_name in hook_names: | 513 | for hook_name in hook_names: |
600 | 329 | self.register(hook_name, decorated) | 514 | self.register(hook_name, decorated) |
601 | @@ -337,4 +522,5 @@ | |||
602 | 337 | 522 | ||
603 | 338 | 523 | ||
604 | 339 | def charm_dir(): | 524 | def charm_dir(): |
605 | 525 | """Return the root directory of the current charm""" | ||
606 | 340 | return os.environ.get('CHARM_DIR') | 526 | return os.environ.get('CHARM_DIR') |
607 | 341 | 527 | ||
608 | === modified file 'hooks/charmhelpers/core/host.py' | |||
609 | --- hooks/charmhelpers/core/host.py 2013-08-29 18:39:36 +0000 | |||
610 | +++ hooks/charmhelpers/core/host.py 2014-09-26 08:01:25 +0000 | |||
611 | @@ -12,25 +12,33 @@ | |||
612 | 12 | import string | 12 | import string |
613 | 13 | import subprocess | 13 | import subprocess |
614 | 14 | import hashlib | 14 | import hashlib |
615 | 15 | import shutil | ||
616 | 16 | from contextlib import contextmanager | ||
617 | 15 | 17 | ||
618 | 16 | from collections import OrderedDict | 18 | from collections import OrderedDict |
619 | 17 | 19 | ||
620 | 18 | from hookenv import log | 20 | from hookenv import log |
621 | 21 | from fstab import Fstab | ||
622 | 19 | 22 | ||
623 | 20 | 23 | ||
624 | 21 | def service_start(service_name): | 24 | def service_start(service_name): |
625 | 25 | """Start a system service""" | ||
626 | 22 | return service('start', service_name) | 26 | return service('start', service_name) |
627 | 23 | 27 | ||
628 | 24 | 28 | ||
629 | 25 | def service_stop(service_name): | 29 | def service_stop(service_name): |
630 | 30 | """Stop a system service""" | ||
631 | 26 | return service('stop', service_name) | 31 | return service('stop', service_name) |
632 | 27 | 32 | ||
633 | 28 | 33 | ||
634 | 29 | def service_restart(service_name): | 34 | def service_restart(service_name): |
635 | 35 | """Restart a system service""" | ||
636 | 30 | return service('restart', service_name) | 36 | return service('restart', service_name) |
637 | 31 | 37 | ||
638 | 32 | 38 | ||
639 | 33 | def service_reload(service_name, restart_on_failure=False): | 39 | def service_reload(service_name, restart_on_failure=False): |
640 | 40 | """Reload a system service, optionally falling back to restart if | ||
641 | 41 | reload fails""" | ||
642 | 34 | service_result = service('reload', service_name) | 42 | service_result = service('reload', service_name) |
643 | 35 | if not service_result and restart_on_failure: | 43 | if not service_result and restart_on_failure: |
644 | 36 | service_result = service('restart', service_name) | 44 | service_result = service('restart', service_name) |
645 | @@ -38,13 +46,15 @@ | |||
646 | 38 | 46 | ||
647 | 39 | 47 | ||
648 | 40 | def service(action, service_name): | 48 | def service(action, service_name): |
649 | 49 | """Control a system service""" | ||
650 | 41 | cmd = ['service', service_name, action] | 50 | cmd = ['service', service_name, action] |
651 | 42 | return subprocess.call(cmd) == 0 | 51 | return subprocess.call(cmd) == 0 |
652 | 43 | 52 | ||
653 | 44 | 53 | ||
654 | 45 | def service_running(service): | 54 | def service_running(service): |
655 | 55 | """Determine whether a system service is running""" | ||
656 | 46 | try: | 56 | try: |
658 | 47 | output = subprocess.check_output(['service', service, 'status']) | 57 | output = subprocess.check_output(['service', service, 'status'], stderr=subprocess.STDOUT) |
659 | 48 | except subprocess.CalledProcessError: | 58 | except subprocess.CalledProcessError: |
660 | 49 | return False | 59 | return False |
661 | 50 | else: | 60 | else: |
662 | @@ -54,8 +64,18 @@ | |||
663 | 54 | return False | 64 | return False |
664 | 55 | 65 | ||
665 | 56 | 66 | ||
666 | 67 | def service_available(service_name): | ||
667 | 68 | """Determine whether a system service is available""" | ||
668 | 69 | try: | ||
669 | 70 | subprocess.check_output(['service', service_name, 'status'], stderr=subprocess.STDOUT) | ||
670 | 71 | except subprocess.CalledProcessError as e: | ||
671 | 72 | return 'unrecognized service' not in e.output | ||
672 | 73 | else: | ||
673 | 74 | return True | ||
674 | 75 | |||
675 | 76 | |||
676 | 57 | def adduser(username, password=None, shell='/bin/bash', system_user=False): | 77 | def adduser(username, password=None, shell='/bin/bash', system_user=False): |
678 | 58 | """Add a user""" | 78 | """Add a user to the system""" |
679 | 59 | try: | 79 | try: |
680 | 60 | user_info = pwd.getpwnam(username) | 80 | user_info = pwd.getpwnam(username) |
681 | 61 | log('user {0} already exists!'.format(username)) | 81 | log('user {0} already exists!'.format(username)) |
682 | @@ -137,8 +157,20 @@ | |||
683 | 137 | target.write(content) | 157 | target.write(content) |
684 | 138 | 158 | ||
685 | 139 | 159 | ||
688 | 140 | def mount(device, mountpoint, options=None, persist=False): | 160 | def fstab_remove(mp): |
689 | 141 | '''Mount a filesystem''' | 161 | """Remove the given mountpoint entry from /etc/fstab |
690 | 162 | """ | ||
691 | 163 | return Fstab.remove_by_mountpoint(mp) | ||
692 | 164 | |||
693 | 165 | |||
694 | 166 | def fstab_add(dev, mp, fs, options=None): | ||
695 | 167 | """Adds the given device entry to the /etc/fstab file | ||
696 | 168 | """ | ||
697 | 169 | return Fstab.add(dev, mp, fs, options=options) | ||
698 | 170 | |||
699 | 171 | |||
700 | 172 | def mount(device, mountpoint, options=None, persist=False, filesystem="ext3"): | ||
701 | 173 | """Mount a filesystem at a particular mountpoint""" | ||
702 | 142 | cmd_args = ['mount'] | 174 | cmd_args = ['mount'] |
703 | 143 | if options is not None: | 175 | if options is not None: |
704 | 144 | cmd_args.extend(['-o', options]) | 176 | cmd_args.extend(['-o', options]) |
705 | @@ -148,28 +180,28 @@ | |||
706 | 148 | except subprocess.CalledProcessError, e: | 180 | except subprocess.CalledProcessError, e: |
707 | 149 | log('Error mounting {} at {}\n{}'.format(device, mountpoint, e.output)) | 181 | log('Error mounting {} at {}\n{}'.format(device, mountpoint, e.output)) |
708 | 150 | return False | 182 | return False |
709 | 183 | |||
710 | 151 | if persist: | 184 | if persist: |
713 | 152 | # TODO: update fstab | 185 | return fstab_add(device, mountpoint, filesystem, options=options) |
712 | 153 | pass | ||
714 | 154 | return True | 186 | return True |
715 | 155 | 187 | ||
716 | 156 | 188 | ||
717 | 157 | def umount(mountpoint, persist=False): | 189 | def umount(mountpoint, persist=False): |
719 | 158 | '''Unmount a filesystem''' | 190 | """Unmount a filesystem""" |
720 | 159 | cmd_args = ['umount', mountpoint] | 191 | cmd_args = ['umount', mountpoint] |
721 | 160 | try: | 192 | try: |
722 | 161 | subprocess.check_output(cmd_args) | 193 | subprocess.check_output(cmd_args) |
723 | 162 | except subprocess.CalledProcessError, e: | 194 | except subprocess.CalledProcessError, e: |
724 | 163 | log('Error unmounting {}\n{}'.format(mountpoint, e.output)) | 195 | log('Error unmounting {}\n{}'.format(mountpoint, e.output)) |
725 | 164 | return False | 196 | return False |
726 | 197 | |||
727 | 165 | if persist: | 198 | if persist: |
730 | 166 | # TODO: update fstab | 199 | return fstab_remove(mountpoint) |
729 | 167 | pass | ||
731 | 168 | return True | 200 | return True |
732 | 169 | 201 | ||
733 | 170 | 202 | ||
734 | 171 | def mounts(): | 203 | def mounts(): |
736 | 172 | '''List of all mounted volumes as [[mountpoint,device],[...]]''' | 204 | """Get a list of all mounted volumes as [[mountpoint,device],[...]]""" |
737 | 173 | with open('/proc/mounts') as f: | 205 | with open('/proc/mounts') as f: |
738 | 174 | # [['/mount/point','/dev/path'],[...]] | 206 | # [['/mount/point','/dev/path'],[...]] |
739 | 175 | system_mounts = [m[1::-1] for m in [l.strip().split() | 207 | system_mounts = [m[1::-1] for m in [l.strip().split() |
740 | @@ -177,10 +209,15 @@ | |||
741 | 177 | return system_mounts | 209 | return system_mounts |
742 | 178 | 210 | ||
743 | 179 | 211 | ||
746 | 180 | def file_hash(path): | 212 | def file_hash(path, hash_type='md5'): |
747 | 181 | ''' Generate a md5 hash of the contents of 'path' or None if not found ''' | 213 | """ |
748 | 214 | Generate a hash checksum of the contents of 'path' or None if not found. | ||
749 | 215 | |||
750 | 216 | :param str hash_type: Any hash algorithm supported by :mod:`hashlib`, | ||
751 | 217 | such as md5, sha1, sha256, sha512, etc. | ||
752 | 218 | """ | ||
753 | 182 | if os.path.exists(path): | 219 | if os.path.exists(path): |
755 | 183 | h = hashlib.md5() | 220 | h = getattr(hashlib, hash_type)() |
756 | 184 | with open(path, 'r') as source: | 221 | with open(path, 'r') as source: |
757 | 185 | h.update(source.read()) # IGNORE:E1101 - it does have update | 222 | h.update(source.read()) # IGNORE:E1101 - it does have update |
758 | 186 | return h.hexdigest() | 223 | return h.hexdigest() |
759 | @@ -188,21 +225,41 @@ | |||
760 | 188 | return None | 225 | return None |
761 | 189 | 226 | ||
762 | 190 | 227 | ||
767 | 191 | def restart_on_change(restart_map): | 228 | def check_hash(path, checksum, hash_type='md5'): |
768 | 192 | ''' Restart services based on configuration files changing | 229 | """ |
769 | 193 | 230 | Validate a file using a cryptographic checksum. | |
770 | 194 | This function is used as a decorator, for example | 231 | |
771 | 232 | :param str checksum: Value of the checksum used to validate the file. | ||
772 | 233 | :param str hash_type: Hash algorithm used to generate `checksum`. | ||
773 | 234 | Can be any hash algorithm supported by :mod:`hashlib`, | ||
774 | 235 | such as md5, sha1, sha256, sha512, etc. | ||
775 | 236 | :raises ChecksumError: If the file fails the checksum | ||
776 | 237 | |||
777 | 238 | """ | ||
778 | 239 | actual_checksum = file_hash(path, hash_type) | ||
779 | 240 | if checksum != actual_checksum: | ||
780 | 241 | raise ChecksumError("'%s' != '%s'" % (checksum, actual_checksum)) | ||
781 | 242 | |||
782 | 243 | |||
783 | 244 | class ChecksumError(ValueError): | ||
784 | 245 | pass | ||
785 | 246 | |||
786 | 247 | |||
787 | 248 | def restart_on_change(restart_map, stopstart=False): | ||
788 | 249 | """Restart services based on configuration files changing | ||
789 | 250 | |||
790 | 251 | This function is used as a decorator, for example:: | ||
791 | 195 | 252 | ||
792 | 196 | @restart_on_change({ | 253 | @restart_on_change({ |
793 | 197 | '/etc/ceph/ceph.conf': [ 'cinder-api', 'cinder-volume' ] | 254 | '/etc/ceph/ceph.conf': [ 'cinder-api', 'cinder-volume' ] |
794 | 198 | }) | 255 | }) |
795 | 199 | def ceph_client_changed(): | 256 | def ceph_client_changed(): |
797 | 200 | ... | 257 | pass # your code here |
798 | 201 | 258 | ||
799 | 202 | In this example, the cinder-api and cinder-volume services | 259 | In this example, the cinder-api and cinder-volume services |
800 | 203 | would be restarted if /etc/ceph/ceph.conf is changed by the | 260 | would be restarted if /etc/ceph/ceph.conf is changed by the |
801 | 204 | ceph_client_changed function. | 261 | ceph_client_changed function. |
803 | 205 | ''' | 262 | """ |
804 | 206 | def wrap(f): | 263 | def wrap(f): |
805 | 207 | def wrapped_f(*args): | 264 | def wrapped_f(*args): |
806 | 208 | checksums = {} | 265 | checksums = {} |
807 | @@ -213,14 +270,20 @@ | |||
808 | 213 | for path in restart_map: | 270 | for path in restart_map: |
809 | 214 | if checksums[path] != file_hash(path): | 271 | if checksums[path] != file_hash(path): |
810 | 215 | restarts += restart_map[path] | 272 | restarts += restart_map[path] |
813 | 216 | for service_name in list(OrderedDict.fromkeys(restarts)): | 273 | services_list = list(OrderedDict.fromkeys(restarts)) |
814 | 217 | service('restart', service_name) | 274 | if not stopstart: |
815 | 275 | for service_name in services_list: | ||
816 | 276 | service('restart', service_name) | ||
817 | 277 | else: | ||
818 | 278 | for action in ['stop', 'start']: | ||
819 | 279 | for service_name in services_list: | ||
820 | 280 | service(action, service_name) | ||
821 | 218 | return wrapped_f | 281 | return wrapped_f |
822 | 219 | return wrap | 282 | return wrap |
823 | 220 | 283 | ||
824 | 221 | 284 | ||
825 | 222 | def lsb_release(): | 285 | def lsb_release(): |
827 | 223 | '''Return /etc/lsb-release in a dict''' | 286 | """Return /etc/lsb-release in a dict""" |
828 | 224 | d = {} | 287 | d = {} |
829 | 225 | with open('/etc/lsb-release', 'r') as lsb: | 288 | with open('/etc/lsb-release', 'r') as lsb: |
830 | 226 | for l in lsb: | 289 | for l in lsb: |
831 | @@ -230,7 +293,7 @@ | |||
832 | 230 | 293 | ||
833 | 231 | 294 | ||
834 | 232 | def pwgen(length=None): | 295 | def pwgen(length=None): |
836 | 233 | '''Generate a random password.''' | 296 | """Generate a random password.""" |
837 | 234 | if length is None: | 297 | if length is None: |
838 | 235 | length = random.choice(range(35, 45)) | 298 | length = random.choice(range(35, 45)) |
839 | 236 | alphanumeric_chars = [ | 299 | alphanumeric_chars = [ |
840 | @@ -239,3 +302,84 @@ | |||
841 | 239 | random_chars = [ | 302 | random_chars = [ |
842 | 240 | random.choice(alphanumeric_chars) for _ in range(length)] | 303 | random.choice(alphanumeric_chars) for _ in range(length)] |
843 | 241 | return(''.join(random_chars)) | 304 | return(''.join(random_chars)) |
844 | 305 | |||
845 | 306 | |||
846 | 307 | def list_nics(nic_type): | ||
847 | 308 | '''Return a list of nics of given type(s)''' | ||
848 | 309 | if isinstance(nic_type, basestring): | ||
849 | 310 | int_types = [nic_type] | ||
850 | 311 | else: | ||
851 | 312 | int_types = nic_type | ||
852 | 313 | interfaces = [] | ||
853 | 314 | for int_type in int_types: | ||
854 | 315 | cmd = ['ip', 'addr', 'show', 'label', int_type + '*'] | ||
855 | 316 | ip_output = subprocess.check_output(cmd).split('\n') | ||
856 | 317 | ip_output = (line for line in ip_output if line) | ||
857 | 318 | for line in ip_output: | ||
858 | 319 | if line.split()[1].startswith(int_type): | ||
859 | 320 | interfaces.append(line.split()[1].replace(":", "")) | ||
860 | 321 | return interfaces | ||
861 | 322 | |||
862 | 323 | |||
863 | 324 | def set_nic_mtu(nic, mtu): | ||
864 | 325 | '''Set MTU on a network interface''' | ||
865 | 326 | cmd = ['ip', 'link', 'set', nic, 'mtu', mtu] | ||
866 | 327 | subprocess.check_call(cmd) | ||
867 | 328 | |||
868 | 329 | |||
869 | 330 | def get_nic_mtu(nic): | ||
870 | 331 | cmd = ['ip', 'addr', 'show', nic] | ||
871 | 332 | ip_output = subprocess.check_output(cmd).split('\n') | ||
872 | 333 | mtu = "" | ||
873 | 334 | for line in ip_output: | ||
874 | 335 | words = line.split() | ||
875 | 336 | if 'mtu' in words: | ||
876 | 337 | mtu = words[words.index("mtu") + 1] | ||
877 | 338 | return mtu | ||
878 | 339 | |||
879 | 340 | |||
880 | 341 | def get_nic_hwaddr(nic): | ||
881 | 342 | cmd = ['ip', '-o', '-0', 'addr', 'show', nic] | ||
882 | 343 | ip_output = subprocess.check_output(cmd) | ||
883 | 344 | hwaddr = "" | ||
884 | 345 | words = ip_output.split() | ||
885 | 346 | if 'link/ether' in words: | ||
886 | 347 | hwaddr = words[words.index('link/ether') + 1] | ||
887 | 348 | return hwaddr | ||
888 | 349 | |||
889 | 350 | |||
890 | 351 | def cmp_pkgrevno(package, revno, pkgcache=None): | ||
891 | 352 | '''Compare supplied revno with the revno of the installed package | ||
892 | 353 | |||
893 | 354 | * 1 => Installed revno is greater than supplied arg | ||
894 | 355 | * 0 => Installed revno is the same as supplied arg | ||
895 | 356 | * -1 => Installed revno is less than supplied arg | ||
896 | 357 | |||
897 | 358 | ''' | ||
898 | 359 | import apt_pkg | ||
899 | 360 | from charmhelpers.fetch import apt_cache | ||
900 | 361 | if not pkgcache: | ||
901 | 362 | pkgcache = apt_cache() | ||
902 | 363 | pkg = pkgcache[package] | ||
903 | 364 | return apt_pkg.version_compare(pkg.current_ver.ver_str, revno) | ||
904 | 365 | |||
905 | 366 | |||
906 | 367 | @contextmanager | ||
907 | 368 | def chdir(d): | ||
908 | 369 | cur = os.getcwd() | ||
909 | 370 | try: | ||
910 | 371 | yield os.chdir(d) | ||
911 | 372 | finally: | ||
912 | 373 | os.chdir(cur) | ||
913 | 374 | |||
914 | 375 | |||
915 | 376 | def chownr(path, owner, group): | ||
916 | 377 | uid = pwd.getpwnam(owner).pw_uid | ||
917 | 378 | gid = grp.getgrnam(group).gr_gid | ||
918 | 379 | |||
919 | 380 | for root, dirs, files in os.walk(path): | ||
920 | 381 | for name in dirs + files: | ||
921 | 382 | full = os.path.join(root, name) | ||
922 | 383 | broken_symlink = os.path.lexists(full) and not os.path.exists(full) | ||
923 | 384 | if not broken_symlink: | ||
924 | 385 | os.chown(full, uid, gid) | ||
925 | 242 | 386 | ||
926 | === added directory 'hooks/charmhelpers/core/services' | |||
927 | === added file 'hooks/charmhelpers/core/services/__init__.py' | |||
928 | --- hooks/charmhelpers/core/services/__init__.py 1970-01-01 00:00:00 +0000 | |||
929 | +++ hooks/charmhelpers/core/services/__init__.py 2014-09-26 08:01:25 +0000 | |||
930 | @@ -0,0 +1,2 @@ | |||
931 | 1 | from .base import * | ||
932 | 2 | from .helpers import * | ||
933 | 0 | 3 | ||
934 | === added file 'hooks/charmhelpers/core/services/base.py' | |||
935 | --- hooks/charmhelpers/core/services/base.py 1970-01-01 00:00:00 +0000 | |||
936 | +++ hooks/charmhelpers/core/services/base.py 2014-09-26 08:01:25 +0000 | |||
937 | @@ -0,0 +1,313 @@ | |||
938 | 1 | import os | ||
939 | 2 | import re | ||
940 | 3 | import json | ||
941 | 4 | from collections import Iterable | ||
942 | 5 | |||
943 | 6 | from charmhelpers.core import host | ||
944 | 7 | from charmhelpers.core import hookenv | ||
945 | 8 | |||
946 | 9 | |||
947 | 10 | __all__ = ['ServiceManager', 'ManagerCallback', | ||
948 | 11 | 'PortManagerCallback', 'open_ports', 'close_ports', 'manage_ports', | ||
949 | 12 | 'service_restart', 'service_stop'] | ||
950 | 13 | |||
951 | 14 | |||
952 | 15 | class ServiceManager(object): | ||
953 | 16 | def __init__(self, services=None): | ||
954 | 17 | """ | ||
955 | 18 | Register a list of services, given their definitions. | ||
956 | 19 | |||
957 | 20 | Service definitions are dicts in the following formats (all keys except | ||
958 | 21 | 'service' are optional):: | ||
959 | 22 | |||
960 | 23 | { | ||
961 | 24 | "service": <service name>, | ||
962 | 25 | "required_data": <list of required data contexts>, | ||
963 | 26 | "provided_data": <list of provided data contexts>, | ||
964 | 27 | "data_ready": <one or more callbacks>, | ||
965 | 28 | "data_lost": <one or more callbacks>, | ||
966 | 29 | "start": <one or more callbacks>, | ||
967 | 30 | "stop": <one or more callbacks>, | ||
968 | 31 | "ports": <list of ports to manage>, | ||
969 | 32 | } | ||
970 | 33 | |||
971 | 34 | The 'required_data' list should contain dicts of required data (or | ||
972 | 35 | dependency managers that act like dicts and know how to collect the data). | ||
973 | 36 | Only when all items in the 'required_data' list are populated are the list | ||
974 | 37 | of 'data_ready' and 'start' callbacks executed. See `is_ready()` for more | ||
975 | 38 | information. | ||
976 | 39 | |||
977 | 40 | The 'provided_data' list should contain relation data providers, most likely | ||
978 | 41 | a subclass of :class:`charmhelpers.core.services.helpers.RelationContext`, | ||
979 | 42 | that will indicate a set of data to set on a given relation. | ||
980 | 43 | |||
981 | 44 | The 'data_ready' value should be either a single callback, or a list of | ||
982 | 45 | callbacks, to be called when all items in 'required_data' pass `is_ready()`. | ||
983 | 46 | Each callback will be called with the service name as the only parameter. | ||
984 | 47 | After all of the 'data_ready' callbacks are called, the 'start' callbacks | ||
985 | 48 | are fired. | ||
986 | 49 | |||
987 | 50 | The 'data_lost' value should be either a single callback, or a list of | ||
988 | 51 | callbacks, to be called when a 'required_data' item no longer passes | ||
989 | 52 | `is_ready()`. Each callback will be called with the service name as the | ||
990 | 53 | only parameter. After all of the 'data_lost' callbacks are called, | ||
991 | 54 | the 'stop' callbacks are fired. | ||
992 | 55 | |||
993 | 56 | The 'start' value should be either a single callback, or a list of | ||
994 | 57 | callbacks, to be called when starting the service, after the 'data_ready' | ||
995 | 58 | callbacks are complete. Each callback will be called with the service | ||
996 | 59 | name as the only parameter. This defaults to | ||
997 | 60 | `[host.service_start, services.open_ports]`. | ||
998 | 61 | |||
999 | 62 | The 'stop' value should be either a single callback, or a list of | ||
1000 | 63 | callbacks, to be called when stopping the service. If the service is | ||
1001 | 64 | being stopped because it no longer has all of its 'required_data', this | ||
1002 | 65 | will be called after all of the 'data_lost' callbacks are complete. | ||
1003 | 66 | Each callback will be called with the service name as the only parameter. | ||
1004 | 67 | This defaults to `[services.close_ports, host.service_stop]`. | ||
1005 | 68 | |||
1006 | 69 | The 'ports' value should be a list of ports to manage. The default | ||
1007 | 70 | 'start' handler will open the ports after the service is started, | ||
1008 | 71 | and the default 'stop' handler will close the ports prior to stopping | ||
1009 | 72 | the service. | ||
1010 | 73 | |||
1011 | 74 | |||
1012 | 75 | Examples: | ||
1013 | 76 | |||
1014 | 77 | The following registers an Upstart service called bingod that depends on | ||
1015 | 78 | a mongodb relation and which runs a custom `db_migrate` function prior to | ||
1016 | 79 | restarting the service, and a Runit service called spadesd:: | ||
1017 | 80 | |||
1018 | 81 | manager = services.ServiceManager([ | ||
1019 | 82 | { | ||
1020 | 83 | 'service': 'bingod', | ||
1021 | 84 | 'ports': [80, 443], | ||
1022 | 85 | 'required_data': [MongoRelation(), config(), {'my': 'data'}], | ||
1023 | 86 | 'data_ready': [ | ||
1024 | 87 | services.template(source='bingod.conf'), | ||
1025 | 88 | services.template(source='bingod.ini', | ||
1026 | 89 | target='/etc/bingod.ini', | ||
1027 | 90 | owner='bingo', perms=0400), | ||
1028 | 91 | ], | ||
1029 | 92 | }, | ||
1030 | 93 | { | ||
1031 | 94 | 'service': 'spadesd', | ||
1032 | 95 | 'data_ready': services.template(source='spadesd_run.j2', | ||
1033 | 96 | target='/etc/sv/spadesd/run', | ||
1034 | 97 | perms=0555), | ||
1035 | 98 | 'start': runit_start, | ||
1036 | 99 | 'stop': runit_stop, | ||
1037 | 100 | }, | ||
1038 | 101 | ]) | ||
1039 | 102 | manager.manage() | ||
1040 | 103 | """ | ||
1041 | 104 | self._ready_file = os.path.join(hookenv.charm_dir(), 'READY-SERVICES.json') | ||
1042 | 105 | self._ready = None | ||
1043 | 106 | self.services = {} | ||
1044 | 107 | for service in services or []: | ||
1045 | 108 | service_name = service['service'] | ||
1046 | 109 | self.services[service_name] = service | ||
1047 | 110 | |||
1048 | 111 | def manage(self): | ||
1049 | 112 | """ | ||
1050 | 113 | Handle the current hook by doing The Right Thing with the registered services. | ||
1051 | 114 | """ | ||
1052 | 115 | hook_name = hookenv.hook_name() | ||
1053 | 116 | if hook_name == 'stop': | ||
1054 | 117 | self.stop_services() | ||
1055 | 118 | else: | ||
1056 | 119 | self.provide_data() | ||
1057 | 120 | self.reconfigure_services() | ||
1058 | 121 | cfg = hookenv.config() | ||
1059 | 122 | if cfg.implicit_save: | ||
1060 | 123 | cfg.save() | ||
1061 | 124 | |||
1062 | 125 | def provide_data(self): | ||
1063 | 126 | """ | ||
1064 | 127 | Set the relation data for each provider in the ``provided_data`` list. | ||
1065 | 128 | |||
1066 | 129 | A provider must have a `name` attribute, which indicates which relation | ||
1067 | 130 | to set data on, and a `provide_data()` method, which returns a dict of | ||
1068 | 131 | data to set. | ||
1069 | 132 | """ | ||
1070 | 133 | hook_name = hookenv.hook_name() | ||
1071 | 134 | for service in self.services.values(): | ||
1072 | 135 | for provider in service.get('provided_data', []): | ||
1073 | 136 | if re.match(r'{}-relation-(joined|changed)'.format(provider.name), hook_name): | ||
1074 | 137 | data = provider.provide_data() | ||
1075 | 138 | _ready = provider._is_ready(data) if hasattr(provider, '_is_ready') else data | ||
1076 | 139 | if _ready: | ||
1077 | 140 | hookenv.relation_set(None, data) | ||
1078 | 141 | |||
1079 | 142 | def reconfigure_services(self, *service_names): | ||
1080 | 143 | """ | ||
1081 | 144 | Update all files for one or more registered services, and, | ||
1082 | 145 | if ready, optionally restart them. | ||
1083 | 146 | |||
1084 | 147 | If no service names are given, reconfigures all registered services. | ||
1085 | 148 | """ | ||
1086 | 149 | for service_name in service_names or self.services.keys(): | ||
1087 | 150 | if self.is_ready(service_name): | ||
1088 | 151 | self.fire_event('data_ready', service_name) | ||
1089 | 152 | self.fire_event('start', service_name, default=[ | ||
1090 | 153 | service_restart, | ||
1091 | 154 | manage_ports]) | ||
1092 | 155 | self.save_ready(service_name) | ||
1093 | 156 | else: | ||
1094 | 157 | if self.was_ready(service_name): | ||
1095 | 158 | self.fire_event('data_lost', service_name) | ||
1096 | 159 | self.fire_event('stop', service_name, default=[ | ||
1097 | 160 | manage_ports, | ||
1098 | 161 | service_stop]) | ||
1099 | 162 | self.save_lost(service_name) | ||
1100 | 163 | |||
1101 | 164 | def stop_services(self, *service_names): | ||
1102 | 165 | """ | ||
1103 | 166 | Stop one or more registered services, by name. | ||
1104 | 167 | |||
1105 | 168 | If no service names are given, stops all registered services. | ||
1106 | 169 | """ | ||
1107 | 170 | for service_name in service_names or self.services.keys(): | ||
1108 | 171 | self.fire_event('stop', service_name, default=[ | ||
1109 | 172 | manage_ports, | ||
1110 | 173 | service_stop]) | ||
1111 | 174 | |||
1112 | 175 | def get_service(self, service_name): | ||
1113 | 176 | """ | ||
1114 | 177 | Given the name of a registered service, return its service definition. | ||
1115 | 178 | """ | ||
1116 | 179 | service = self.services.get(service_name) | ||
1117 | 180 | if not service: | ||
1118 | 181 | raise KeyError('Service not registered: %s' % service_name) | ||
1119 | 182 | return service | ||
1120 | 183 | |||
1121 | 184 | def fire_event(self, event_name, service_name, default=None): | ||
1122 | 185 | """ | ||
1123 | 186 | Fire a data_ready, data_lost, start, or stop event on a given service. | ||
1124 | 187 | """ | ||
1125 | 188 | service = self.get_service(service_name) | ||
1126 | 189 | callbacks = service.get(event_name, default) | ||
1127 | 190 | if not callbacks: | ||
1128 | 191 | return | ||
1129 | 192 | if not isinstance(callbacks, Iterable): | ||
1130 | 193 | callbacks = [callbacks] | ||
1131 | 194 | for callback in callbacks: | ||
1132 | 195 | if isinstance(callback, ManagerCallback): | ||
1133 | 196 | callback(self, service_name, event_name) | ||
1134 | 197 | else: | ||
1135 | 198 | callback(service_name) | ||
1136 | 199 | |||
1137 | 200 | def is_ready(self, service_name): | ||
1138 | 201 | """ | ||
1139 | 202 | Determine if a registered service is ready, by checking its 'required_data'. | ||
1140 | 203 | |||
1141 | 204 | A 'required_data' item can be any mapping type, and is considered ready | ||
1142 | 205 | if `bool(item)` evaluates as True. | ||
1143 | 206 | """ | ||
1144 | 207 | service = self.get_service(service_name) | ||
1145 | 208 | reqs = service.get('required_data', []) | ||
1146 | 209 | return all(bool(req) for req in reqs) | ||
1147 | 210 | |||
1148 | 211 | def _load_ready_file(self): | ||
1149 | 212 | if self._ready is not None: | ||
1150 | 213 | return | ||
1151 | 214 | if os.path.exists(self._ready_file): | ||
1152 | 215 | with open(self._ready_file) as fp: | ||
1153 | 216 | self._ready = set(json.load(fp)) | ||
1154 | 217 | else: | ||
1155 | 218 | self._ready = set() | ||
1156 | 219 | |||
1157 | 220 | def _save_ready_file(self): | ||
1158 | 221 | if self._ready is None: | ||
1159 | 222 | return | ||
1160 | 223 | with open(self._ready_file, 'w') as fp: | ||
1161 | 224 | json.dump(list(self._ready), fp) | ||
1162 | 225 | |||
1163 | 226 | def save_ready(self, service_name): | ||
1164 | 227 | """ | ||
1165 | 228 | Save an indicator that the given service is now data_ready. | ||
1166 | 229 | """ | ||
1167 | 230 | self._load_ready_file() | ||
1168 | 231 | self._ready.add(service_name) | ||
1169 | 232 | self._save_ready_file() | ||
1170 | 233 | |||
1171 | 234 | def save_lost(self, service_name): | ||
1172 | 235 | """ | ||
1173 | 236 | Save an indicator that the given service is no longer data_ready. | ||
1174 | 237 | """ | ||
1175 | 238 | self._load_ready_file() | ||
1176 | 239 | self._ready.discard(service_name) | ||
1177 | 240 | self._save_ready_file() | ||
1178 | 241 | |||
1179 | 242 | def was_ready(self, service_name): | ||
1180 | 243 | """ | ||
1181 | 244 | Determine if the given service was previously data_ready. | ||
1182 | 245 | """ | ||
1183 | 246 | self._load_ready_file() | ||
1184 | 247 | return service_name in self._ready | ||
1185 | 248 | |||
1186 | 249 | |||
1187 | 250 | class ManagerCallback(object): | ||
1188 | 251 | """ | ||
1189 | 252 | Special case of a callback that takes the `ServiceManager` instance | ||
1190 | 253 | in addition to the service name. | ||
1191 | 254 | |||
1192 | 255 | Subclasses should implement `__call__` which should accept three parameters: | ||
1193 | 256 | |||
1194 | 257 | * `manager` The `ServiceManager` instance | ||
1195 | 258 | * `service_name` The name of the service it's being triggered for | ||
1196 | 259 | * `event_name` The name of the event that this callback is handling | ||
1197 | 260 | """ | ||
1198 | 261 | def __call__(self, manager, service_name, event_name): | ||
1199 | 262 | raise NotImplementedError() | ||
1200 | 263 | |||
1201 | 264 | |||
1202 | 265 | class PortManagerCallback(ManagerCallback): | ||
1203 | 266 | """ | ||
1204 | 267 | Callback class that will open or close ports, for use as either | ||
1205 | 268 | a start or stop action. | ||
1206 | 269 | """ | ||
1207 | 270 | def __call__(self, manager, service_name, event_name): | ||
1208 | 271 | service = manager.get_service(service_name) | ||
1209 | 272 | new_ports = service.get('ports', []) | ||
1210 | 273 | port_file = os.path.join(hookenv.charm_dir(), '.{}.ports'.format(service_name)) | ||
1211 | 274 | if os.path.exists(port_file): | ||
1212 | 275 | with open(port_file) as fp: | ||
1213 | 276 | old_ports = fp.read().split(',') | ||
1214 | 277 | for old_port in old_ports: | ||
1215 | 278 | if bool(old_port): | ||
1216 | 279 | old_port = int(old_port) | ||
1217 | 280 | if old_port not in new_ports: | ||
1218 | 281 | hookenv.close_port(old_port) | ||
1219 | 282 | with open(port_file, 'w') as fp: | ||
1220 | 283 | fp.write(','.join(str(port) for port in new_ports)) | ||
1221 | 284 | for port in new_ports: | ||
1222 | 285 | if event_name == 'start': | ||
1223 | 286 | hookenv.open_port(port) | ||
1224 | 287 | elif event_name == 'stop': | ||
1225 | 288 | hookenv.close_port(port) | ||
1226 | 289 | |||
1227 | 290 | |||
1228 | 291 | def service_stop(service_name): | ||
1229 | 292 | """ | ||
1230 | 293 | Wrapper around host.service_stop to prevent spurious "unknown service" | ||
1231 | 294 | messages in the logs. | ||
1232 | 295 | """ | ||
1233 | 296 | if host.service_running(service_name): | ||
1234 | 297 | host.service_stop(service_name) | ||
1235 | 298 | |||
1236 | 299 | |||
1237 | 300 | def service_restart(service_name): | ||
1238 | 301 | """ | ||
1239 | 302 | Wrapper around host.service_restart to prevent spurious "unknown service" | ||
1240 | 303 | messages in the logs. | ||
1241 | 304 | """ | ||
1242 | 305 | if host.service_available(service_name): | ||
1243 | 306 | if host.service_running(service_name): | ||
1244 | 307 | host.service_restart(service_name) | ||
1245 | 308 | else: | ||
1246 | 309 | host.service_start(service_name) | ||
1247 | 310 | |||
1248 | 311 | |||
1249 | 312 | # Convenience aliases | ||
1250 | 313 | open_ports = close_ports = manage_ports = PortManagerCallback() | ||
1251 | 0 | 314 | ||
1252 | === added file 'hooks/charmhelpers/core/services/helpers.py' | |||
1253 | --- hooks/charmhelpers/core/services/helpers.py 1970-01-01 00:00:00 +0000 | |||
1254 | +++ hooks/charmhelpers/core/services/helpers.py 2014-09-26 08:01:25 +0000 | |||
1255 | @@ -0,0 +1,239 @@ | |||
1256 | 1 | import os | ||
1257 | 2 | import yaml | ||
1258 | 3 | from charmhelpers.core import hookenv | ||
1259 | 4 | from charmhelpers.core import templating | ||
1260 | 5 | |||
1261 | 6 | from charmhelpers.core.services.base import ManagerCallback | ||
1262 | 7 | |||
1263 | 8 | |||
1264 | 9 | __all__ = ['RelationContext', 'TemplateCallback', | ||
1265 | 10 | 'render_template', 'template'] | ||
1266 | 11 | |||
1267 | 12 | |||
1268 | 13 | class RelationContext(dict): | ||
1269 | 14 | """ | ||
1270 | 15 | Base class for a context generator that gets relation data from juju. | ||
1271 | 16 | |||
1272 | 17 | Subclasses must provide the attributes `name`, which is the name of the | ||
1273 | 18 | interface of interest, `interface`, which is the type of the interface of | ||
1274 | 19 | interest, and `required_keys`, which is the set of keys required for the | ||
1275 | 20 | relation to be considered complete. The data for all interfaces matching | ||
1276 | 21 | the `name` attribute that are complete will be used to populate the dictionary | ||
1277 | 22 | values (see `get_data`, below). | ||
1278 | 23 | |||
1279 | 24 | The generated context will be namespaced under the relation :attr:`name`, | ||
1280 | 25 | to prevent potential naming conflicts. | ||
1281 | 26 | |||
1282 | 27 | :param str name: Override the relation :attr:`name`, since it can vary from charm to charm | ||
1283 | 28 | :param list additional_required_keys: Extend the list of :attr:`required_keys` | ||
1284 | 29 | """ | ||
1285 | 30 | name = None | ||
1286 | 31 | interface = None | ||
1287 | 32 | required_keys = [] | ||
1288 | 33 | |||
1289 | 34 | def __init__(self, name=None, additional_required_keys=None): | ||
1290 | 35 | if name is not None: | ||
1291 | 36 | self.name = name | ||
1292 | 37 | if additional_required_keys is not None: | ||
1293 | 38 | self.required_keys.extend(additional_required_keys) | ||
1294 | 39 | self.get_data() | ||
1295 | 40 | |||
1296 | 41 | def __bool__(self): | ||
1297 | 42 | """ | ||
1298 | 43 | Returns True if all of the required_keys are available. | ||
1299 | 44 | """ | ||
1300 | 45 | return self.is_ready() | ||
1301 | 46 | |||
1302 | 47 | __nonzero__ = __bool__ | ||
1303 | 48 | |||
1304 | 49 | def __repr__(self): | ||
1305 | 50 | return super(RelationContext, self).__repr__() | ||
1306 | 51 | |||
1307 | 52 | def is_ready(self): | ||
1308 | 53 | """ | ||
1309 | 54 | Returns True if all of the `required_keys` are available from any units. | ||
1310 | 55 | """ | ||
1311 | 56 | ready = len(self.get(self.name, [])) > 0 | ||
1312 | 57 | if not ready: | ||
1313 | 58 | hookenv.log('Incomplete relation: {}'.format(self.__class__.__name__), hookenv.DEBUG) | ||
1314 | 59 | return ready | ||
1315 | 60 | |||
1316 | 61 | def _is_ready(self, unit_data): | ||
1317 | 62 | """ | ||
1318 | 63 | Helper method that tests a set of relation data and returns True if | ||
1319 | 64 | all of the `required_keys` are present. | ||
1320 | 65 | """ | ||
1321 | 66 | return set(unit_data.keys()).issuperset(set(self.required_keys)) | ||
1322 | 67 | |||
1323 | 68 | def get_data(self): | ||
1324 | 69 | """ | ||
1325 | 70 | Retrieve the relation data for each unit involved in a relation and, | ||
1326 | 71 | if complete, store it in a list under `self[self.name]`. This | ||
1327 | 72 | is automatically called when the RelationContext is instantiated. | ||
1328 | 73 | |||
1329 | 74 | The units are sorted lexicographically first by the service ID, then by | ||
1330 | 75 | the unit ID. Thus, if an interface has two other services, 'db:1' | ||
1331 | 76 | and 'db:2', with 'db:1' having two units, 'wordpress/0' and 'wordpress/1', | ||
1332 | 77 | and 'db:2' having one unit, 'mediawiki/0', all of which have a complete | ||
1333 | 78 | set of data, the relation data for the units will be stored in the | ||
1334 | 79 | order: 'wordpress/0', 'wordpress/1', 'mediawiki/0'. | ||
1335 | 80 | |||
1336 | 81 | If you only care about a single unit on the relation, you can just | ||
1337 | 82 | access it as `{{ interface[0]['key'] }}`. However, if you can at all | ||
1338 | 83 | support multiple units on a relation, you should iterate over the list, | ||
1339 | 84 | like:: | ||
1340 | 85 | |||
1341 | 86 | {% for unit in interface -%} | ||
1342 | 87 | {{ unit['key'] }}{% if not loop.last %},{% endif %} | ||
1343 | 88 | {%- endfor %} | ||
1344 | 89 | |||
1345 | 90 | Note that since all sets of relation data from all related services and | ||
1346 | 91 | units are in a single list, if you need to know which service or unit a | ||
1347 | 92 | set of data came from, you'll need to extend this class to preserve | ||
1348 | 93 | that information. | ||
1349 | 94 | """ | ||
1350 | 95 | if not hookenv.relation_ids(self.name): | ||
1351 | 96 | return | ||
1352 | 97 | |||
1353 | 98 | ns = self.setdefault(self.name, []) | ||
1354 | 99 | for rid in sorted(hookenv.relation_ids(self.name)): | ||
1355 | 100 | for unit in sorted(hookenv.related_units(rid)): | ||
1356 | 101 | reldata = hookenv.relation_get(rid=rid, unit=unit) | ||
1357 | 102 | if self._is_ready(reldata): | ||
1358 | 103 | ns.append(reldata) | ||
1359 | 104 | |||
1360 | 105 | def provide_data(self): | ||
1361 | 106 | """ | ||
1362 | 107 | Return data to be relation_set for this interface. | ||
1363 | 108 | """ | ||
1364 | 109 | return {} | ||
1365 | 110 | |||
1366 | 111 | |||
1367 | 112 | class MysqlRelation(RelationContext): | ||
1368 | 113 | """ | ||
1369 | 114 | Relation context for the `mysql` interface. | ||
1370 | 115 | |||
1371 | 116 | :param str name: Override the relation :attr:`name`, since it can vary from charm to charm | ||
1372 | 117 | :param list additional_required_keys: Extend the list of :attr:`required_keys` | ||
1373 | 118 | """ | ||
1374 | 119 | name = 'db' | ||
1375 | 120 | interface = 'mysql' | ||
1376 | 121 | required_keys = ['host', 'user', 'password', 'database'] | ||
1377 | 122 | |||
1378 | 123 | |||
1379 | 124 | class HttpRelation(RelationContext): | ||
1380 | 125 | """ | ||
1381 | 126 | Relation context for the `http` interface. | ||
1382 | 127 | |||
1383 | 128 | :param str name: Override the relation :attr:`name`, since it can vary from charm to charm | ||
1384 | 129 | :param list additional_required_keys: Extend the list of :attr:`required_keys` | ||
1385 | 130 | """ | ||
1386 | 131 | name = 'website' | ||
1387 | 132 | interface = 'http' | ||
1388 | 133 | required_keys = ['host', 'port'] | ||
1389 | 134 | |||
1390 | 135 | def provide_data(self): | ||
1391 | 136 | return { | ||
1392 | 137 | 'host': hookenv.unit_get('private-address'), | ||
1393 | 138 | 'port': 80, | ||
1394 | 139 | } | ||
1395 | 140 | |||
1396 | 141 | |||
1397 | 142 | class RequiredConfig(dict): | ||
1398 | 143 | """ | ||
1399 | 144 | Data context that loads config options with one or more mandatory options. | ||
1400 | 145 | |||
1401 | 146 | Once the required options have been changed from their default values, all | ||
1402 | 147 | config options will be available, namespaced under `config` to prevent | ||
1403 | 148 | potential naming conflicts (for example, between a config option and a | ||
1404 | 149 | relation property). | ||
1405 | 150 | |||
1406 | 151 | :param list *args: List of options that must be changed from their default values. | ||
1407 | 152 | """ | ||
1408 | 153 | |||
1409 | 154 | def __init__(self, *args): | ||
1410 | 155 | self.required_options = args | ||
1411 | 156 | self['config'] = hookenv.config() | ||
1412 | 157 | with open(os.path.join(hookenv.charm_dir(), 'config.yaml')) as fp: | ||
1413 | 158 | self.config = yaml.load(fp).get('options', {}) | ||
1414 | 159 | |||
1415 | 160 | def __bool__(self): | ||
1416 | 161 | for option in self.required_options: | ||
1417 | 162 | if option not in self['config']: | ||
1418 | 163 | return False | ||
1419 | 164 | current_value = self['config'][option] | ||
1420 | 165 | default_value = self.config[option].get('default') | ||
1421 | 166 | if current_value == default_value: | ||
1422 | 167 | return False | ||
1423 | 168 | if current_value in (None, '') and default_value in (None, ''): | ||
1424 | 169 | return False | ||
1425 | 170 | return True | ||
1426 | 171 | |||
1427 | 172 | def __nonzero__(self): | ||
1428 | 173 | return self.__bool__() | ||
1429 | 174 | |||
1430 | 175 | |||
1431 | 176 | class StoredContext(dict): | ||
1432 | 177 | """ | ||
1433 | 178 | A data context that always returns the data that it was first created with. | ||
1434 | 179 | |||
1435 | 180 | This is useful to do a one-time generation of things like passwords, that | ||
1436 | 181 | will thereafter use the same value that was originally generated, instead | ||
1437 | 182 | of generating a new value each time it is run. | ||
1438 | 183 | """ | ||
1439 | 184 | def __init__(self, file_name, config_data): | ||
1440 | 185 | """ | ||
1441 | 186 | If the file exists, populate `self` with the data from the file. | ||
1442 | 187 | Otherwise, populate with the given data and persist it to the file. | ||
1443 | 188 | """ | ||
1444 | 189 | if os.path.exists(file_name): | ||
1445 | 190 | self.update(self.read_context(file_name)) | ||
1446 | 191 | else: | ||
1447 | 192 | self.store_context(file_name, config_data) | ||
1448 | 193 | self.update(config_data) | ||
1449 | 194 | |||
1450 | 195 | def store_context(self, file_name, config_data): | ||
1451 | 196 | if not os.path.isabs(file_name): | ||
1452 | 197 | file_name = os.path.join(hookenv.charm_dir(), file_name) | ||
1453 | 198 | with open(file_name, 'w') as file_stream: | ||
1454 | 199 | os.fchmod(file_stream.fileno(), 0600) | ||
1455 | 200 | yaml.dump(config_data, file_stream) | ||
1456 | 201 | |||
1457 | 202 | def read_context(self, file_name): | ||
1458 | 203 | if not os.path.isabs(file_name): | ||
1459 | 204 | file_name = os.path.join(hookenv.charm_dir(), file_name) | ||
1460 | 205 | with open(file_name, 'r') as file_stream: | ||
1461 | 206 | data = yaml.load(file_stream) | ||
1462 | 207 | if not data: | ||
1463 | 208 | raise OSError("%s is empty" % file_name) | ||
1464 | 209 | return data | ||
1465 | 210 | |||
1466 | 211 | |||
1467 | 212 | class TemplateCallback(ManagerCallback): | ||
1468 | 213 | """ | ||
1469 | 214 | Callback class that will render a Jinja2 template, for use as a ready action. | ||
1470 | 215 | |||
1471 | 216 | :param str source: The template source file, relative to `$CHARM_DIR/templates` | ||
1472 | 217 | :param str target: The target to write the rendered template to | ||
1473 | 218 | :param str owner: The owner of the rendered file | ||
1474 | 219 | :param str group: The group of the rendered file | ||
1475 | 220 | :param int perms: The permissions of the rendered file | ||
1476 | 221 | """ | ||
1477 | 222 | def __init__(self, source, target, owner='root', group='root', perms=0444): | ||
1478 | 223 | self.source = source | ||
1479 | 224 | self.target = target | ||
1480 | 225 | self.owner = owner | ||
1481 | 226 | self.group = group | ||
1482 | 227 | self.perms = perms | ||
1483 | 228 | |||
1484 | 229 | def __call__(self, manager, service_name, event_name): | ||
1485 | 230 | service = manager.get_service(service_name) | ||
1486 | 231 | context = {} | ||
1487 | 232 | for ctx in service.get('required_data', []): | ||
1488 | 233 | context.update(ctx) | ||
1489 | 234 | templating.render(self.source, self.target, context, | ||
1490 | 235 | self.owner, self.group, self.perms) | ||
1491 | 236 | |||
1492 | 237 | |||
1493 | 238 | # Convenience aliases for templates | ||
1494 | 239 | render_template = template = TemplateCallback | ||
1495 | 0 | 240 | ||
1496 | === added file 'hooks/charmhelpers/core/templating.py' | |||
1497 | --- hooks/charmhelpers/core/templating.py 1970-01-01 00:00:00 +0000 | |||
1498 | +++ hooks/charmhelpers/core/templating.py 2014-09-26 08:01:25 +0000 | |||
1499 | @@ -0,0 +1,51 @@ | |||
1500 | 1 | import os | ||
1501 | 2 | |||
1502 | 3 | from charmhelpers.core import host | ||
1503 | 4 | from charmhelpers.core import hookenv | ||
1504 | 5 | |||
1505 | 6 | |||
1506 | 7 | def render(source, target, context, owner='root', group='root', perms=0444, templates_dir=None): | ||
1507 | 8 | """ | ||
1508 | 9 | Render a template. | ||
1509 | 10 | |||
1510 | 11 | The `source` path, if not absolute, is relative to the `templates_dir`. | ||
1511 | 12 | |||
1512 | 13 | The `target` path should be absolute. | ||
1513 | 14 | |||
1514 | 15 | The context should be a dict containing the values to be replaced in the | ||
1515 | 16 | template. | ||
1516 | 17 | |||
1517 | 18 | The `owner`, `group`, and `perms` options will be passed to `write_file`. | ||
1518 | 19 | |||
1519 | 20 | If omitted, `templates_dir` defaults to the `templates` folder in the charm. | ||
1520 | 21 | |||
1521 | 22 | Note: Using this requires python-jinja2; if it is not installed, calling | ||
1522 | 23 | this will attempt to use charmhelpers.fetch.apt_install to install it. | ||
1523 | 24 | """ | ||
1524 | 25 | try: | ||
1525 | 26 | from jinja2 import FileSystemLoader, Environment, exceptions | ||
1526 | 27 | except ImportError: | ||
1527 | 28 | try: | ||
1528 | 29 | from charmhelpers.fetch import apt_install | ||
1529 | 30 | except ImportError: | ||
1530 | 31 | hookenv.log('Could not import jinja2, and could not import ' | ||
1531 | 32 | 'charmhelpers.fetch to install it', | ||
1532 | 33 | level=hookenv.ERROR) | ||
1533 | 34 | raise | ||
1534 | 35 | apt_install('python-jinja2', fatal=True) | ||
1535 | 36 | from jinja2 import FileSystemLoader, Environment, exceptions | ||
1536 | 37 | |||
1537 | 38 | if templates_dir is None: | ||
1538 | 39 | templates_dir = os.path.join(hookenv.charm_dir(), 'templates') | ||
1539 | 40 | loader = Environment(loader=FileSystemLoader(templates_dir)) | ||
1540 | 41 | try: | ||
1541 | 42 | source = source | ||
1542 | 43 | template = loader.get_template(source) | ||
1543 | 44 | except exceptions.TemplateNotFound as e: | ||
1544 | 45 | hookenv.log('Could not load template %s from %s.' % | ||
1545 | 46 | (source, templates_dir), | ||
1546 | 47 | level=hookenv.ERROR) | ||
1547 | 48 | raise e | ||
1548 | 49 | content = template.render(context) | ||
1549 | 50 | host.mkdir(os.path.dirname(target)) | ||
1550 | 51 | host.write_file(target, content, owner, group, perms) | ||
1551 | 0 | 52 | ||
1552 | === modified file 'hooks/charmhelpers/fetch/__init__.py' | |||
1553 | --- hooks/charmhelpers/fetch/__init__.py 2013-08-29 18:41:54 +0000 | |||
1554 | +++ hooks/charmhelpers/fetch/__init__.py 2014-09-26 08:01:25 +0000 | |||
1555 | @@ -1,4 +1,6 @@ | |||
1556 | 1 | import importlib | 1 | import importlib |
1557 | 2 | from tempfile import NamedTemporaryFile | ||
1558 | 3 | import time | ||
1559 | 2 | from yaml import safe_load | 4 | from yaml import safe_load |
1560 | 3 | from charmhelpers.core.host import ( | 5 | from charmhelpers.core.host import ( |
1561 | 4 | lsb_release | 6 | lsb_release |
1562 | @@ -12,7 +14,8 @@ | |||
1563 | 12 | config, | 14 | config, |
1564 | 13 | log, | 15 | log, |
1565 | 14 | ) | 16 | ) |
1567 | 15 | import apt_pkg | 17 | import os |
1568 | 18 | |||
1569 | 16 | 19 | ||
1570 | 17 | CLOUD_ARCHIVE = """# Ubuntu Cloud Archive | 20 | CLOUD_ARCHIVE = """# Ubuntu Cloud Archive |
1571 | 18 | deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main | 21 | deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main |
1572 | @@ -20,12 +23,101 @@ | |||
1573 | 20 | PROPOSED_POCKET = """# Proposed | 23 | PROPOSED_POCKET = """# Proposed |
1574 | 21 | deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted | 24 | deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted |
1575 | 22 | """ | 25 | """ |
1576 | 26 | CLOUD_ARCHIVE_POCKETS = { | ||
1577 | 27 | # Folsom | ||
1578 | 28 | 'folsom': 'precise-updates/folsom', | ||
1579 | 29 | 'precise-folsom': 'precise-updates/folsom', | ||
1580 | 30 | 'precise-folsom/updates': 'precise-updates/folsom', | ||
1581 | 31 | 'precise-updates/folsom': 'precise-updates/folsom', | ||
1582 | 32 | 'folsom/proposed': 'precise-proposed/folsom', | ||
1583 | 33 | 'precise-folsom/proposed': 'precise-proposed/folsom', | ||
1584 | 34 | 'precise-proposed/folsom': 'precise-proposed/folsom', | ||
1585 | 35 | # Grizzly | ||
1586 | 36 | 'grizzly': 'precise-updates/grizzly', | ||
1587 | 37 | 'precise-grizzly': 'precise-updates/grizzly', | ||
1588 | 38 | 'precise-grizzly/updates': 'precise-updates/grizzly', | ||
1589 | 39 | 'precise-updates/grizzly': 'precise-updates/grizzly', | ||
1590 | 40 | 'grizzly/proposed': 'precise-proposed/grizzly', | ||
1591 | 41 | 'precise-grizzly/proposed': 'precise-proposed/grizzly', | ||
1592 | 42 | 'precise-proposed/grizzly': 'precise-proposed/grizzly', | ||
1593 | 43 | # Havana | ||
1594 | 44 | 'havana': 'precise-updates/havana', | ||
1595 | 45 | 'precise-havana': 'precise-updates/havana', | ||
1596 | 46 | 'precise-havana/updates': 'precise-updates/havana', | ||
1597 | 47 | 'precise-updates/havana': 'precise-updates/havana', | ||
1598 | 48 | 'havana/proposed': 'precise-proposed/havana', | ||
1599 | 49 | 'precise-havana/proposed': 'precise-proposed/havana', | ||
1600 | 50 | 'precise-proposed/havana': 'precise-proposed/havana', | ||
1601 | 51 | # Icehouse | ||
1602 | 52 | 'icehouse': 'precise-updates/icehouse', | ||
1603 | 53 | 'precise-icehouse': 'precise-updates/icehouse', | ||
1604 | 54 | 'precise-icehouse/updates': 'precise-updates/icehouse', | ||
1605 | 55 | 'precise-updates/icehouse': 'precise-updates/icehouse', | ||
1606 | 56 | 'icehouse/proposed': 'precise-proposed/icehouse', | ||
1607 | 57 | 'precise-icehouse/proposed': 'precise-proposed/icehouse', | ||
1608 | 58 | 'precise-proposed/icehouse': 'precise-proposed/icehouse', | ||
1609 | 59 | # Juno | ||
1610 | 60 | 'juno': 'trusty-updates/juno', | ||
1611 | 61 | 'trusty-juno': 'trusty-updates/juno', | ||
1612 | 62 | 'trusty-juno/updates': 'trusty-updates/juno', | ||
1613 | 63 | 'trusty-updates/juno': 'trusty-updates/juno', | ||
1614 | 64 | 'juno/proposed': 'trusty-proposed/juno', | ||
1615 | 65 | 'juno/proposed': 'trusty-proposed/juno', | ||
1616 | 66 | 'trusty-juno/proposed': 'trusty-proposed/juno', | ||
1617 | 67 | 'trusty-proposed/juno': 'trusty-proposed/juno', | ||
1618 | 68 | } | ||
1619 | 69 | |||
1620 | 70 | # The order of this list is very important. Handlers should be listed in from | ||
1621 | 71 | # least- to most-specific URL matching. | ||
1622 | 72 | FETCH_HANDLERS = ( | ||
1623 | 73 | 'charmhelpers.fetch.archiveurl.ArchiveUrlFetchHandler', | ||
1624 | 74 | 'charmhelpers.fetch.bzrurl.BzrUrlFetchHandler', | ||
1625 | 75 | ) | ||
1626 | 76 | |||
1627 | 77 | APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT. | ||
1628 | 78 | APT_NO_LOCK_RETRY_DELAY = 10 # Wait 10 seconds between apt lock checks. | ||
1629 | 79 | APT_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times. | ||
1630 | 80 | |||
1631 | 81 | |||
1632 | 82 | class SourceConfigError(Exception): | ||
1633 | 83 | pass | ||
1634 | 84 | |||
1635 | 85 | |||
1636 | 86 | class UnhandledSource(Exception): | ||
1637 | 87 | pass | ||
1638 | 88 | |||
1639 | 89 | |||
1640 | 90 | class AptLockError(Exception): | ||
1641 | 91 | pass | ||
1642 | 92 | |||
1643 | 93 | |||
1644 | 94 | class BaseFetchHandler(object): | ||
1645 | 95 | |||
1646 | 96 | """Base class for FetchHandler implementations in fetch plugins""" | ||
1647 | 97 | |||
1648 | 98 | def can_handle(self, source): | ||
1649 | 99 | """Returns True if the source can be handled. Otherwise returns | ||
1650 | 100 | a string explaining why it cannot""" | ||
1651 | 101 | return "Wrong source type" | ||
1652 | 102 | |||
1653 | 103 | def install(self, source): | ||
1654 | 104 | """Try to download and unpack the source. Return the path to the | ||
1655 | 105 | unpacked files or raise UnhandledSource.""" | ||
1656 | 106 | raise UnhandledSource("Wrong source type {}".format(source)) | ||
1657 | 107 | |||
1658 | 108 | def parse_url(self, url): | ||
1659 | 109 | return urlparse(url) | ||
1660 | 110 | |||
1661 | 111 | def base_url(self, url): | ||
1662 | 112 | """Return url without querystring or fragment""" | ||
1663 | 113 | parts = list(self.parse_url(url)) | ||
1664 | 114 | parts[4:] = ['' for i in parts[4:]] | ||
1665 | 115 | return urlunparse(parts) | ||
1666 | 23 | 116 | ||
1667 | 24 | 117 | ||
1668 | 25 | def filter_installed_packages(packages): | 118 | def filter_installed_packages(packages): |
1669 | 26 | """Returns a list of packages that require installation""" | 119 | """Returns a list of packages that require installation""" |
1672 | 27 | apt_pkg.init() | 120 | cache = apt_cache() |
1671 | 28 | cache = apt_pkg.Cache() | ||
1673 | 29 | _pkgs = [] | 121 | _pkgs = [] |
1674 | 30 | for package in packages: | 122 | for package in packages: |
1675 | 31 | try: | 123 | try: |
1676 | @@ -38,10 +130,22 @@ | |||
1677 | 38 | return _pkgs | 130 | return _pkgs |
1678 | 39 | 131 | ||
1679 | 40 | 132 | ||
1680 | 133 | def apt_cache(in_memory=True): | ||
1681 | 134 | """Build and return an apt cache""" | ||
1682 | 135 | import apt_pkg | ||
1683 | 136 | apt_pkg.init() | ||
1684 | 137 | if in_memory: | ||
1685 | 138 | apt_pkg.config.set("Dir::Cache::pkgcache", "") | ||
1686 | 139 | apt_pkg.config.set("Dir::Cache::srcpkgcache", "") | ||
1687 | 140 | return apt_pkg.Cache() | ||
1688 | 141 | |||
1689 | 142 | |||
1690 | 41 | def apt_install(packages, options=None, fatal=False): | 143 | def apt_install(packages, options=None, fatal=False): |
1691 | 42 | """Install one or more packages""" | 144 | """Install one or more packages""" |
1694 | 43 | options = options or [] | 145 | if options is None: |
1695 | 44 | cmd = ['apt-get', '-y'] | 146 | options = ['--option=Dpkg::Options::=--force-confold'] |
1696 | 147 | |||
1697 | 148 | cmd = ['apt-get', '--assume-yes'] | ||
1698 | 45 | cmd.extend(options) | 149 | cmd.extend(options) |
1699 | 46 | cmd.append('install') | 150 | cmd.append('install') |
1700 | 47 | if isinstance(packages, basestring): | 151 | if isinstance(packages, basestring): |
1701 | @@ -50,29 +154,50 @@ | |||
1702 | 50 | cmd.extend(packages) | 154 | cmd.extend(packages) |
1703 | 51 | log("Installing {} with options: {}".format(packages, | 155 | log("Installing {} with options: {}".format(packages, |
1704 | 52 | options)) | 156 | options)) |
1707 | 53 | if fatal: | 157 | _run_apt_command(cmd, fatal) |
1708 | 54 | subprocess.check_call(cmd) | 158 | |
1709 | 159 | |||
1710 | 160 | def apt_upgrade(options=None, fatal=False, dist=False): | ||
1711 | 161 | """Upgrade all packages""" | ||
1712 | 162 | if options is None: | ||
1713 | 163 | options = ['--option=Dpkg::Options::=--force-confold'] | ||
1714 | 164 | |||
1715 | 165 | cmd = ['apt-get', '--assume-yes'] | ||
1716 | 166 | cmd.extend(options) | ||
1717 | 167 | if dist: | ||
1718 | 168 | cmd.append('dist-upgrade') | ||
1719 | 55 | else: | 169 | else: |
1721 | 56 | subprocess.call(cmd) | 170 | cmd.append('upgrade') |
1722 | 171 | log("Upgrading with options: {}".format(options)) | ||
1723 | 172 | _run_apt_command(cmd, fatal) | ||
1724 | 57 | 173 | ||
1725 | 58 | 174 | ||
1726 | 59 | def apt_update(fatal=False): | 175 | def apt_update(fatal=False): |
1727 | 60 | """Update local apt cache""" | 176 | """Update local apt cache""" |
1728 | 61 | cmd = ['apt-get', 'update'] | 177 | cmd = ['apt-get', 'update'] |
1733 | 62 | if fatal: | 178 | _run_apt_command(cmd, fatal) |
1730 | 63 | subprocess.check_call(cmd) | ||
1731 | 64 | else: | ||
1732 | 65 | subprocess.call(cmd) | ||
1734 | 66 | 179 | ||
1735 | 67 | 180 | ||
1736 | 68 | def apt_purge(packages, fatal=False): | 181 | def apt_purge(packages, fatal=False): |
1737 | 69 | """Purge one or more packages""" | 182 | """Purge one or more packages""" |
1739 | 70 | cmd = ['apt-get', '-y', 'purge'] | 183 | cmd = ['apt-get', '--assume-yes', 'purge'] |
1740 | 71 | if isinstance(packages, basestring): | 184 | if isinstance(packages, basestring): |
1741 | 72 | cmd.append(packages) | 185 | cmd.append(packages) |
1742 | 73 | else: | 186 | else: |
1743 | 74 | cmd.extend(packages) | 187 | cmd.extend(packages) |
1744 | 75 | log("Purging {}".format(packages)) | 188 | log("Purging {}".format(packages)) |
1745 | 189 | _run_apt_command(cmd, fatal) | ||
1746 | 190 | |||
1747 | 191 | |||
1748 | 192 | def apt_hold(packages, fatal=False): | ||
1749 | 193 | """Hold one or more packages""" | ||
1750 | 194 | cmd = ['apt-mark', 'hold'] | ||
1751 | 195 | if isinstance(packages, basestring): | ||
1752 | 196 | cmd.append(packages) | ||
1753 | 197 | else: | ||
1754 | 198 | cmd.extend(packages) | ||
1755 | 199 | log("Holding {}".format(packages)) | ||
1756 | 200 | |||
1757 | 76 | if fatal: | 201 | if fatal: |
1758 | 77 | subprocess.check_call(cmd) | 202 | subprocess.check_call(cmd) |
1759 | 78 | else: | 203 | else: |
1760 | @@ -80,84 +205,142 @@ | |||
1761 | 80 | 205 | ||
1762 | 81 | 206 | ||
1763 | 82 | def add_source(source, key=None): | 207 | def add_source(source, key=None): |
1766 | 83 | if ((source.startswith('ppa:') or | 208 | """Add a package source to this system. |
1767 | 84 | source.startswith('http:'))): | 209 | |
1768 | 210 | @param source: a URL or sources.list entry, as supported by | ||
1769 | 211 | add-apt-repository(1). Examples:: | ||
1770 | 212 | |||
1771 | 213 | ppa:charmers/example | ||
1772 | 214 | deb https://stub:key@private.example.com/ubuntu trusty main | ||
1773 | 215 | |||
1774 | 216 | In addition: | ||
1775 | 217 | 'proposed:' may be used to enable the standard 'proposed' | ||
1776 | 218 | pocket for the release. | ||
1777 | 219 | 'cloud:' may be used to activate official cloud archive pockets, | ||
1778 | 220 | such as 'cloud:icehouse' | ||
1779 | 221 | |||
1780 | 222 | @param key: A key to be added to the system's APT keyring and used | ||
1781 | 223 | to verify the signatures on packages. Ideally, this should be an | ||
1782 | 224 | ASCII format GPG public key including the block headers. A GPG key | ||
1783 | 225 | id may also be used, but be aware that only insecure protocols are | ||
1784 | 226 | available to retrieve the actual public key from a public keyserver | ||
1785 | 227 | placing your Juju environment at risk. ppa and cloud archive keys | ||
1786 | 228 | are securely added automatically, so should not be provided. | ||
1787 | 229 | """ | ||
1788 | 230 | if source is None: | ||
1789 | 231 | log('Source is not present. Skipping') | ||
1790 | 232 | return | ||
1791 | 233 | |||
1792 | 234 | if (source.startswith('ppa:') or | ||
1793 | 235 | source.startswith('http') or | ||
1794 | 236 | source.startswith('deb ') or | ||
1795 | 237 | source.startswith('cloud-archive:')): | ||
1796 | 85 | subprocess.check_call(['add-apt-repository', '--yes', source]) | 238 | subprocess.check_call(['add-apt-repository', '--yes', source]) |
1797 | 86 | elif source.startswith('cloud:'): | 239 | elif source.startswith('cloud:'): |
1798 | 87 | apt_install(filter_installed_packages(['ubuntu-cloud-keyring']), | 240 | apt_install(filter_installed_packages(['ubuntu-cloud-keyring']), |
1799 | 88 | fatal=True) | 241 | fatal=True) |
1800 | 89 | pocket = source.split(':')[-1] | 242 | pocket = source.split(':')[-1] |
1801 | 243 | if pocket not in CLOUD_ARCHIVE_POCKETS: | ||
1802 | 244 | raise SourceConfigError( | ||
1803 | 245 | 'Unsupported cloud: source option %s' % | ||
1804 | 246 | pocket) | ||
1805 | 247 | actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket] | ||
1806 | 90 | with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt: | 248 | with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt: |
1808 | 91 | apt.write(CLOUD_ARCHIVE.format(pocket)) | 249 | apt.write(CLOUD_ARCHIVE.format(actual_pocket)) |
1809 | 92 | elif source == 'proposed': | 250 | elif source == 'proposed': |
1810 | 93 | release = lsb_release()['DISTRIB_CODENAME'] | 251 | release = lsb_release()['DISTRIB_CODENAME'] |
1811 | 94 | with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt: | 252 | with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt: |
1812 | 95 | apt.write(PROPOSED_POCKET.format(release)) | 253 | apt.write(PROPOSED_POCKET.format(release)) |
1813 | 254 | else: | ||
1814 | 255 | raise SourceConfigError("Unknown source: {!r}".format(source)) | ||
1815 | 256 | |||
1816 | 96 | if key: | 257 | if key: |
1822 | 97 | subprocess.check_call(['apt-key', 'import', key]) | 258 | if '-----BEGIN PGP PUBLIC KEY BLOCK-----' in key: |
1823 | 98 | 259 | with NamedTemporaryFile() as key_file: | |
1824 | 99 | 260 | key_file.write(key) | |
1825 | 100 | class SourceConfigError(Exception): | 261 | key_file.flush() |
1826 | 101 | pass | 262 | key_file.seek(0) |
1827 | 263 | subprocess.check_call(['apt-key', 'add', '-'], stdin=key_file) | ||
1828 | 264 | else: | ||
1829 | 265 | # Note that hkp: is in no way a secure protocol. Using a | ||
1830 | 266 | # GPG key id is pointless from a security POV unless you | ||
1831 | 267 | # absolutely trust your network and DNS. | ||
1832 | 268 | subprocess.check_call(['apt-key', 'adv', '--keyserver', | ||
1833 | 269 | 'hkp://keyserver.ubuntu.com:80', '--recv', | ||
1834 | 270 | key]) | ||
1835 | 102 | 271 | ||
1836 | 103 | 272 | ||
1837 | 104 | def configure_sources(update=False, | 273 | def configure_sources(update=False, |
1838 | 105 | sources_var='install_sources', | 274 | sources_var='install_sources', |
1839 | 106 | keys_var='install_keys'): | 275 | keys_var='install_keys'): |
1840 | 107 | """ | 276 | """ |
1842 | 108 | Configure multiple sources from charm configuration | 277 | Configure multiple sources from charm configuration. |
1843 | 278 | |||
1844 | 279 | The lists are encoded as yaml fragments in the configuration. | ||
1845 | 280 | The fragment needs to be included as a string. Sources and their | ||
1846 | 281 | corresponding keys are of the types supported by add_source(). | ||
1847 | 109 | 282 | ||
1848 | 110 | Example config: | 283 | Example config: |
1850 | 111 | install_sources: | 284 | install_sources: | |
1851 | 112 | - "ppa:foo" | 285 | - "ppa:foo" |
1852 | 113 | - "http://example.com/repo precise main" | 286 | - "http://example.com/repo precise main" |
1854 | 114 | install_keys: | 287 | install_keys: | |
1855 | 115 | - null | 288 | - null |
1856 | 116 | - "a1b2c3d4" | 289 | - "a1b2c3d4" |
1857 | 117 | 290 | ||
1858 | 118 | Note that 'null' (a.k.a. None) should not be quoted. | 291 | Note that 'null' (a.k.a. None) should not be quoted. |
1859 | 119 | """ | 292 | """ |
1864 | 120 | sources = safe_load(config(sources_var)) | 293 | sources = safe_load((config(sources_var) or '').strip()) or [] |
1865 | 121 | keys = safe_load(config(keys_var)) | 294 | keys = safe_load((config(keys_var) or '').strip()) or None |
1866 | 122 | if isinstance(sources, basestring) and isinstance(keys, basestring): | 295 | |
1867 | 123 | add_source(sources, keys) | 296 | if isinstance(sources, basestring): |
1868 | 297 | sources = [sources] | ||
1869 | 298 | |||
1870 | 299 | if keys is None: | ||
1871 | 300 | for source in sources: | ||
1872 | 301 | add_source(source, None) | ||
1873 | 124 | else: | 302 | else: |
1879 | 125 | if not len(sources) == len(keys): | 303 | if isinstance(keys, basestring): |
1880 | 126 | msg = 'Install sources and keys lists are different lengths' | 304 | keys = [keys] |
1881 | 127 | raise SourceConfigError(msg) | 305 | |
1882 | 128 | for src_num in range(len(sources)): | 306 | if len(sources) != len(keys): |
1883 | 129 | add_source(sources[src_num], keys[src_num]) | 307 | raise SourceConfigError( |
1884 | 308 | 'Install sources and keys lists are different lengths') | ||
1885 | 309 | for source, key in zip(sources, keys): | ||
1886 | 310 | add_source(source, key) | ||
1887 | 130 | if update: | 311 | if update: |
1888 | 131 | apt_update(fatal=True) | 312 | apt_update(fatal=True) |
1889 | 132 | 313 | ||
1903 | 133 | # The order of this list is very important. Handlers should be listed in from | 314 | |
1904 | 134 | # least- to most-specific URL matching. | 315 | def install_remote(source, *args, **kwargs): |
1892 | 135 | FETCH_HANDLERS = ( | ||
1893 | 136 | 'charmhelpers.fetch.archiveurl.ArchiveUrlFetchHandler', | ||
1894 | 137 | 'charmhelpers.fetch.bzrurl.BzrUrlFetchHandler', | ||
1895 | 138 | ) | ||
1896 | 139 | |||
1897 | 140 | |||
1898 | 141 | class UnhandledSource(Exception): | ||
1899 | 142 | pass | ||
1900 | 143 | |||
1901 | 144 | |||
1902 | 145 | def install_remote(source): | ||
1905 | 146 | """ | 316 | """ |
1906 | 147 | Install a file tree from a remote source | 317 | Install a file tree from a remote source |
1907 | 148 | 318 | ||
1908 | 149 | The specified source should be a url of the form: | 319 | The specified source should be a url of the form: |
1909 | 150 | scheme://[host]/path[#[option=value][&...]] | 320 | scheme://[host]/path[#[option=value][&...]] |
1910 | 151 | 321 | ||
1913 | 152 | Schemes supported are based on this modules submodules | 322 | Schemes supported are based on this modules submodules. |
1914 | 153 | Options supported are submodule-specific""" | 323 | Options supported are submodule-specific. |
1915 | 324 | Additional arguments are passed through to the submodule. | ||
1916 | 325 | |||
1917 | 326 | For example:: | ||
1918 | 327 | |||
1919 | 328 | dest = install_remote('http://example.com/archive.tgz', | ||
1920 | 329 | checksum='deadbeef', | ||
1921 | 330 | hash_type='sha1') | ||
1922 | 331 | |||
1923 | 332 | This will download `archive.tgz`, validate it using SHA1 and, if | ||
1924 | 333 | the file is ok, extract it and return the directory in which it | ||
1925 | 334 | was extracted. If the checksum fails, it will raise | ||
1926 | 335 | :class:`charmhelpers.core.host.ChecksumError`. | ||
1927 | 336 | """ | ||
1928 | 154 | # We ONLY check for True here because can_handle may return a string | 337 | # We ONLY check for True here because can_handle may return a string |
1929 | 155 | # explaining why it can't handle a given source. | 338 | # explaining why it can't handle a given source. |
1930 | 156 | handlers = [h for h in plugins() if h.can_handle(source) is True] | 339 | handlers = [h for h in plugins() if h.can_handle(source) is True] |
1931 | 157 | installed_to = None | 340 | installed_to = None |
1932 | 158 | for handler in handlers: | 341 | for handler in handlers: |
1933 | 159 | try: | 342 | try: |
1935 | 160 | installed_to = handler.install(source) | 343 | installed_to = handler.install(source, *args, **kwargs) |
1936 | 161 | except UnhandledSource: | 344 | except UnhandledSource: |
1937 | 162 | pass | 345 | pass |
1938 | 163 | if not installed_to: | 346 | if not installed_to: |
1939 | @@ -171,28 +354,6 @@ | |||
1940 | 171 | return install_remote(source) | 354 | return install_remote(source) |
1941 | 172 | 355 | ||
1942 | 173 | 356 | ||
1943 | 174 | class BaseFetchHandler(object): | ||
1944 | 175 | """Base class for FetchHandler implementations in fetch plugins""" | ||
1945 | 176 | def can_handle(self, source): | ||
1946 | 177 | """Returns True if the source can be handled. Otherwise returns | ||
1947 | 178 | a string explaining why it cannot""" | ||
1948 | 179 | return "Wrong source type" | ||
1949 | 180 | |||
1950 | 181 | def install(self, source): | ||
1951 | 182 | """Try to download and unpack the source. Return the path to the | ||
1952 | 183 | unpacked files or raise UnhandledSource.""" | ||
1953 | 184 | raise UnhandledSource("Wrong source type {}".format(source)) | ||
1954 | 185 | |||
1955 | 186 | def parse_url(self, url): | ||
1956 | 187 | return urlparse(url) | ||
1957 | 188 | |||
1958 | 189 | def base_url(self, url): | ||
1959 | 190 | """Return url without querystring or fragment""" | ||
1960 | 191 | parts = list(self.parse_url(url)) | ||
1961 | 192 | parts[4:] = ['' for i in parts[4:]] | ||
1962 | 193 | return urlunparse(parts) | ||
1963 | 194 | |||
1964 | 195 | |||
1965 | 196 | def plugins(fetch_handlers=None): | 357 | def plugins(fetch_handlers=None): |
1966 | 197 | if not fetch_handlers: | 358 | if not fetch_handlers: |
1967 | 198 | fetch_handlers = FETCH_HANDLERS | 359 | fetch_handlers = FETCH_HANDLERS |
1968 | @@ -200,10 +361,50 @@ | |||
1969 | 200 | for handler_name in fetch_handlers: | 361 | for handler_name in fetch_handlers: |
1970 | 201 | package, classname = handler_name.rsplit('.', 1) | 362 | package, classname = handler_name.rsplit('.', 1) |
1971 | 202 | try: | 363 | try: |
1973 | 203 | handler_class = getattr(importlib.import_module(package), classname) | 364 | handler_class = getattr( |
1974 | 365 | importlib.import_module(package), | ||
1975 | 366 | classname) | ||
1976 | 204 | plugin_list.append(handler_class()) | 367 | plugin_list.append(handler_class()) |
1977 | 205 | except (ImportError, AttributeError): | 368 | except (ImportError, AttributeError): |
1978 | 206 | # Skip missing plugins so that they can be omitted from | 369 | # Skip missing plugins so that they can be omitted from |
1979 | 207 | # installation if desired | 370 | # installation if desired |
1981 | 208 | log("FetchHandler {} not found, skipping plugin".format(handler_name)) | 371 | log("FetchHandler {} not found, skipping plugin".format( |
1982 | 372 | handler_name)) | ||
1983 | 209 | return plugin_list | 373 | return plugin_list |
1984 | 374 | |||
1985 | 375 | |||
1986 | 376 | def _run_apt_command(cmd, fatal=False): | ||
1987 | 377 | """ | ||
1988 | 378 | Run an APT command, checking output and retrying if the fatal flag is set | ||
1989 | 379 | to True. | ||
1990 | 380 | |||
1991 | 381 | :param: cmd: str: The apt command to run. | ||
1992 | 382 | :param: fatal: bool: Whether the command's output should be checked and | ||
1993 | 383 | retried. | ||
1994 | 384 | """ | ||
1995 | 385 | env = os.environ.copy() | ||
1996 | 386 | |||
1997 | 387 | if 'DEBIAN_FRONTEND' not in env: | ||
1998 | 388 | env['DEBIAN_FRONTEND'] = 'noninteractive' | ||
1999 | 389 | |||
2000 | 390 | if fatal: | ||
2001 | 391 | retry_count = 0 | ||
2002 | 392 | result = None | ||
2003 | 393 | |||
2004 | 394 | # If the command is considered "fatal", we need to retry if the apt | ||
2005 | 395 | # lock was not acquired. | ||
2006 | 396 | |||
2007 | 397 | while result is None or result == APT_NO_LOCK: | ||
2008 | 398 | try: | ||
2009 | 399 | result = subprocess.check_call(cmd, env=env) | ||
2010 | 400 | except subprocess.CalledProcessError, e: | ||
2011 | 401 | retry_count = retry_count + 1 | ||
2012 | 402 | if retry_count > APT_NO_LOCK_RETRY_COUNT: | ||
2013 | 403 | raise | ||
2014 | 404 | result = e.returncode | ||
2015 | 405 | log("Couldn't acquire DPKG lock. Will retry in {} seconds." | ||
2016 | 406 | "".format(APT_NO_LOCK_RETRY_DELAY)) | ||
2017 | 407 | time.sleep(APT_NO_LOCK_RETRY_DELAY) | ||
2018 | 408 | |||
2019 | 409 | else: | ||
2020 | 410 | subprocess.call(cmd, env=env) | ||
2021 | 210 | 411 | ||
2022 | === modified file 'hooks/charmhelpers/fetch/archiveurl.py' | |||
2023 | --- hooks/charmhelpers/fetch/archiveurl.py 2013-08-29 18:41:54 +0000 | |||
2024 | +++ hooks/charmhelpers/fetch/archiveurl.py 2014-09-26 08:01:25 +0000 | |||
2025 | @@ -1,5 +1,9 @@ | |||
2026 | 1 | import os | 1 | import os |
2027 | 2 | import urllib2 | 2 | import urllib2 |
2028 | 3 | from urllib import urlretrieve | ||
2029 | 4 | import urlparse | ||
2030 | 5 | import hashlib | ||
2031 | 6 | |||
2032 | 3 | from charmhelpers.fetch import ( | 7 | from charmhelpers.fetch import ( |
2033 | 4 | BaseFetchHandler, | 8 | BaseFetchHandler, |
2034 | 5 | UnhandledSource | 9 | UnhandledSource |
2035 | @@ -8,11 +12,19 @@ | |||
2036 | 8 | get_archive_handler, | 12 | get_archive_handler, |
2037 | 9 | extract, | 13 | extract, |
2038 | 10 | ) | 14 | ) |
2040 | 11 | from charmhelpers.core.host import mkdir | 15 | from charmhelpers.core.host import mkdir, check_hash |
2041 | 12 | 16 | ||
2042 | 13 | 17 | ||
2043 | 14 | class ArchiveUrlFetchHandler(BaseFetchHandler): | 18 | class ArchiveUrlFetchHandler(BaseFetchHandler): |
2045 | 15 | """Handler for archives via generic URLs""" | 19 | """ |
2046 | 20 | Handler to download archive files from arbitrary URLs. | ||
2047 | 21 | |||
2048 | 22 | Can fetch from http, https, ftp, and file URLs. | ||
2049 | 23 | |||
2050 | 24 | Can install either tarballs (.tar, .tgz, .tbz2, etc) or zip files. | ||
2051 | 25 | |||
2052 | 26 | Installs the contents of the archive in $CHARM_DIR/fetched/. | ||
2053 | 27 | """ | ||
2054 | 16 | def can_handle(self, source): | 28 | def can_handle(self, source): |
2055 | 17 | url_parts = self.parse_url(source) | 29 | url_parts = self.parse_url(source) |
2056 | 18 | if url_parts.scheme not in ('http', 'https', 'ftp', 'file'): | 30 | if url_parts.scheme not in ('http', 'https', 'ftp', 'file'): |
2057 | @@ -22,8 +34,27 @@ | |||
2058 | 22 | return False | 34 | return False |
2059 | 23 | 35 | ||
2060 | 24 | def download(self, source, dest): | 36 | def download(self, source, dest): |
2061 | 37 | """ | ||
2062 | 38 | Download an archive file. | ||
2063 | 39 | |||
2064 | 40 | :param str source: URL pointing to an archive file. | ||
2065 | 41 | :param str dest: Local path location to download archive file to. | ||
2066 | 42 | """ | ||
2067 | 25 | # propagate all exceptions | 43 | # propagate all exceptions |
2068 | 26 | # URLError, OSError, etc | 44 | # URLError, OSError, etc |
2069 | 45 | proto, netloc, path, params, query, fragment = urlparse.urlparse(source) | ||
2070 | 46 | if proto in ('http', 'https'): | ||
2071 | 47 | auth, barehost = urllib2.splituser(netloc) | ||
2072 | 48 | if auth is not None: | ||
2073 | 49 | source = urlparse.urlunparse((proto, barehost, path, params, query, fragment)) | ||
2074 | 50 | username, password = urllib2.splitpasswd(auth) | ||
2075 | 51 | passman = urllib2.HTTPPasswordMgrWithDefaultRealm() | ||
2076 | 52 | # Realm is set to None in add_password to force the username and password | ||
2077 | 53 | # to be used whatever the realm | ||
2078 | 54 | passman.add_password(None, source, username, password) | ||
2079 | 55 | authhandler = urllib2.HTTPBasicAuthHandler(passman) | ||
2080 | 56 | opener = urllib2.build_opener(authhandler) | ||
2081 | 57 | urllib2.install_opener(opener) | ||
2082 | 27 | response = urllib2.urlopen(source) | 58 | response = urllib2.urlopen(source) |
2083 | 28 | try: | 59 | try: |
2084 | 29 | with open(dest, 'w') as dest_file: | 60 | with open(dest, 'w') as dest_file: |
2085 | @@ -33,7 +64,30 @@ | |||
2086 | 33 | os.unlink(dest) | 64 | os.unlink(dest) |
2087 | 34 | raise e | 65 | raise e |
2088 | 35 | 66 | ||
2090 | 36 | def install(self, source): | 67 | # Mandatory file validation via Sha1 or MD5 hashing. |
2091 | 68 | def download_and_validate(self, url, hashsum, validate="sha1"): | ||
2092 | 69 | tempfile, headers = urlretrieve(url) | ||
2093 | 70 | check_hash(tempfile, hashsum, validate) | ||
2094 | 71 | return tempfile | ||
2095 | 72 | |||
2096 | 73 | def install(self, source, dest=None, checksum=None, hash_type='sha1'): | ||
2097 | 74 | """ | ||
2098 | 75 | Download and install an archive file, with optional checksum validation. | ||
2099 | 76 | |||
2100 | 77 | The checksum can also be given on the `source` URL's fragment. | ||
2101 | 78 | For example:: | ||
2102 | 79 | |||
2103 | 80 | handler.install('http://example.com/file.tgz#sha1=deadbeef') | ||
2104 | 81 | |||
2105 | 82 | :param str source: URL pointing to an archive file. | ||
2106 | 83 | :param str dest: Local destination path to install to. If not given, | ||
2107 | 84 | installs to `$CHARM_DIR/archives/archive_file_name`. | ||
2108 | 85 | :param str checksum: If given, validate the archive file after download. | ||
2109 | 86 | :param str hash_type: Algorithm used to generate `checksum`. | ||
2110 | 87 | Can be any hash algorithm supported by :mod:`hashlib`, | ||
2111 | 88 | such as md5, sha1, sha256, sha512, etc. | ||
2112 | 89 | |||
2113 | 90 | """ | ||
2114 | 37 | url_parts = self.parse_url(source) | 91 | url_parts = self.parse_url(source) |
2115 | 38 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched') | 92 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched') |
2116 | 39 | if not os.path.exists(dest_dir): | 93 | if not os.path.exists(dest_dir): |
2117 | @@ -45,4 +99,10 @@ | |||
2118 | 45 | raise UnhandledSource(e.reason) | 99 | raise UnhandledSource(e.reason) |
2119 | 46 | except OSError as e: | 100 | except OSError as e: |
2120 | 47 | raise UnhandledSource(e.strerror) | 101 | raise UnhandledSource(e.strerror) |
2122 | 48 | return extract(dld_file) | 102 | options = urlparse.parse_qs(url_parts.fragment) |
2123 | 103 | for key, value in options.items(): | ||
2124 | 104 | if key in hashlib.algorithms: | ||
2125 | 105 | check_hash(dld_file, value, key) | ||
2126 | 106 | if checksum: | ||
2127 | 107 | check_hash(dld_file, checksum, hash_type) | ||
2128 | 108 | return extract(dld_file, dest) | ||
2129 | 49 | 109 | ||
2130 | === modified file 'hooks/charmhelpers/fetch/bzrurl.py' | |||
2131 | --- hooks/charmhelpers/fetch/bzrurl.py 2013-08-29 18:41:54 +0000 | |||
2132 | +++ hooks/charmhelpers/fetch/bzrurl.py 2014-09-26 08:01:25 +0000 | |||
2133 | @@ -12,6 +12,7 @@ | |||
2134 | 12 | apt_install("python-bzrlib") | 12 | apt_install("python-bzrlib") |
2135 | 13 | from bzrlib.branch import Branch | 13 | from bzrlib.branch import Branch |
2136 | 14 | 14 | ||
2137 | 15 | |||
2138 | 15 | class BzrUrlFetchHandler(BaseFetchHandler): | 16 | class BzrUrlFetchHandler(BaseFetchHandler): |
2139 | 16 | """Handler for bazaar branches via generic and lp URLs""" | 17 | """Handler for bazaar branches via generic and lp URLs""" |
2140 | 17 | def can_handle(self, source): | 18 | def can_handle(self, source): |
2141 | @@ -38,7 +39,8 @@ | |||
2142 | 38 | def install(self, source): | 39 | def install(self, source): |
2143 | 39 | url_parts = self.parse_url(source) | 40 | url_parts = self.parse_url(source) |
2144 | 40 | branch_name = url_parts.path.strip("/").split("/")[-1] | 41 | branch_name = url_parts.path.strip("/").split("/")[-1] |
2146 | 41 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", branch_name) | 42 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", |
2147 | 43 | branch_name) | ||
2148 | 42 | if not os.path.exists(dest_dir): | 44 | if not os.path.exists(dest_dir): |
2149 | 43 | mkdir(dest_dir, perms=0755) | 45 | mkdir(dest_dir, perms=0755) |
2150 | 44 | try: | 46 | try: |
2151 | @@ -46,4 +48,3 @@ | |||
2152 | 46 | except OSError as e: | 48 | except OSError as e: |
2153 | 47 | raise UnhandledSource(e.strerror) | 49 | raise UnhandledSource(e.strerror) |
2154 | 48 | return dest_dir | 50 | return dest_dir |
2155 | 49 |
Looks great Chris, committed.