Merge lp:~openstack-charmers/charms/precise/ceph-osd/alternatives-config into lp:~charmers/charms/precise/ceph-osd/trunk
- Precise Pangolin (12.04)
- alternatives-config
- Merge into trunk
Proposed by
James Page
Status: | Merged |
---|---|
Merged at revision: | 16 |
Proposed branch: | lp:~openstack-charmers/charms/precise/ceph-osd/alternatives-config |
Merge into: | lp:~charmers/charms/precise/ceph-osd/trunk |
Diff against target: |
1004 lines (+372/-135) 10 files modified
charm-helpers-sync.yaml (+1/-0) hooks/charmhelpers/contrib/openstack/alternatives.py (+17/-0) hooks/charmhelpers/core/hookenv.py (+85/-24) hooks/charmhelpers/core/host.py (+60/-86) hooks/charmhelpers/fetch/__init__.py (+130/-11) hooks/charmhelpers/fetch/archiveurl.py (+12/-7) hooks/charmhelpers/fetch/bzrurl.py (+49/-0) hooks/hooks.py (+16/-5) hooks/utils.py (+1/-1) revision (+1/-1) |
To merge this branch: | bzr merge lp:~openstack-charmers/charms/precise/ceph-osd/alternatives-config |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Michael Nelson (community) | Approve | ||
Review via email: mp+195149@code.launchpad.net |
Commit message
Description of the change
Manage /etc/ceph/ceph.conf via alternatives for co-install with ceph and nova-compute
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'charm-helpers-sync.yaml' |
2 | --- charm-helpers-sync.yaml 2013-07-03 20:53:22 +0000 |
3 | +++ charm-helpers-sync.yaml 2013-11-13 22:24:03 +0000 |
4 | @@ -5,3 +5,4 @@ |
5 | - fetch |
6 | - contrib.storage.linux: |
7 | - utils |
8 | + - contrib.openstack.alternatives |
9 | |
10 | === added directory 'hooks/charmhelpers/contrib/openstack' |
11 | === added file 'hooks/charmhelpers/contrib/openstack/__init__.py' |
12 | === added file 'hooks/charmhelpers/contrib/openstack/alternatives.py' |
13 | --- hooks/charmhelpers/contrib/openstack/alternatives.py 1970-01-01 00:00:00 +0000 |
14 | +++ hooks/charmhelpers/contrib/openstack/alternatives.py 2013-11-13 22:24:03 +0000 |
15 | @@ -0,0 +1,17 @@ |
16 | +''' Helper for managing alternatives for file conflict resolution ''' |
17 | + |
18 | +import subprocess |
19 | +import shutil |
20 | +import os |
21 | + |
22 | + |
23 | +def install_alternative(name, target, source, priority=50): |
24 | + ''' Install alternative configuration ''' |
25 | + if (os.path.exists(target) and not os.path.islink(target)): |
26 | + # Move existing file/directory away before installing |
27 | + shutil.move(target, '{}.bak'.format(target)) |
28 | + cmd = [ |
29 | + 'update-alternatives', '--force', '--install', |
30 | + target, name, source, str(priority) |
31 | + ] |
32 | + subprocess.check_call(cmd) |
33 | |
34 | === modified file 'hooks/charmhelpers/core/hookenv.py' |
35 | --- hooks/charmhelpers/core/hookenv.py 2013-07-03 20:53:22 +0000 |
36 | +++ hooks/charmhelpers/core/hookenv.py 2013-11-13 22:24:03 +0000 |
37 | @@ -9,6 +9,7 @@ |
38 | import yaml |
39 | import subprocess |
40 | import UserDict |
41 | +from subprocess import CalledProcessError |
42 | |
43 | CRITICAL = "CRITICAL" |
44 | ERROR = "ERROR" |
45 | @@ -21,7 +22,7 @@ |
46 | |
47 | |
48 | def cached(func): |
49 | - ''' Cache return values for multiple executions of func + args |
50 | + """Cache return values for multiple executions of func + args |
51 | |
52 | For example: |
53 | |
54 | @@ -32,7 +33,7 @@ |
55 | unit_get('test') |
56 | |
57 | will cache the result of unit_get + 'test' for future calls. |
58 | - ''' |
59 | + """ |
60 | def wrapper(*args, **kwargs): |
61 | global cache |
62 | key = str((func, args, kwargs)) |
63 | @@ -46,8 +47,8 @@ |
64 | |
65 | |
66 | def flush(key): |
67 | - ''' Flushes any entries from function cache where the |
68 | - key is found in the function+args ''' |
69 | + """Flushes any entries from function cache where the |
70 | + key is found in the function+args """ |
71 | flush_list = [] |
72 | for item in cache: |
73 | if key in item: |
74 | @@ -57,7 +58,7 @@ |
75 | |
76 | |
77 | def log(message, level=None): |
78 | - "Write a message to the juju log" |
79 | + """Write a message to the juju log""" |
80 | command = ['juju-log'] |
81 | if level: |
82 | command += ['-l', level] |
83 | @@ -66,7 +67,7 @@ |
84 | |
85 | |
86 | class Serializable(UserDict.IterableUserDict): |
87 | - "Wrapper, an object that can be serialized to yaml or json" |
88 | + """Wrapper, an object that can be serialized to yaml or json""" |
89 | |
90 | def __init__(self, obj): |
91 | # wrap the object |
92 | @@ -96,11 +97,11 @@ |
93 | self.data = state |
94 | |
95 | def json(self): |
96 | - "Serialize the object to json" |
97 | + """Serialize the object to json""" |
98 | return json.dumps(self.data) |
99 | |
100 | def yaml(self): |
101 | - "Serialize the object to yaml" |
102 | + """Serialize the object to yaml""" |
103 | return yaml.dump(self.data) |
104 | |
105 | |
106 | @@ -119,33 +120,38 @@ |
107 | |
108 | |
109 | def in_relation_hook(): |
110 | - "Determine whether we're running in a relation hook" |
111 | + """Determine whether we're running in a relation hook""" |
112 | return 'JUJU_RELATION' in os.environ |
113 | |
114 | |
115 | def relation_type(): |
116 | - "The scope for the current relation hook" |
117 | + """The scope for the current relation hook""" |
118 | return os.environ.get('JUJU_RELATION', None) |
119 | |
120 | |
121 | def relation_id(): |
122 | - "The relation ID for the current relation hook" |
123 | + """The relation ID for the current relation hook""" |
124 | return os.environ.get('JUJU_RELATION_ID', None) |
125 | |
126 | |
127 | def local_unit(): |
128 | - "Local unit ID" |
129 | + """Local unit ID""" |
130 | return os.environ['JUJU_UNIT_NAME'] |
131 | |
132 | |
133 | def remote_unit(): |
134 | - "The remote unit for the current relation hook" |
135 | + """The remote unit for the current relation hook""" |
136 | return os.environ['JUJU_REMOTE_UNIT'] |
137 | |
138 | |
139 | +def service_name(): |
140 | + """The name of the service group this unit belongs to""" |
141 | + return local_unit().split('/')[0] |
142 | + |
143 | + |
144 | @cached |
145 | def config(scope=None): |
146 | - "Juju charm configuration" |
147 | + """Juju charm configuration""" |
148 | config_cmd_line = ['config-get'] |
149 | if scope is not None: |
150 | config_cmd_line.append(scope) |
151 | @@ -158,6 +164,7 @@ |
152 | |
153 | @cached |
154 | def relation_get(attribute=None, unit=None, rid=None): |
155 | + """Get relation information""" |
156 | _args = ['relation-get', '--format=json'] |
157 | if rid: |
158 | _args.append('-r') |
159 | @@ -169,9 +176,14 @@ |
160 | return json.loads(subprocess.check_output(_args)) |
161 | except ValueError: |
162 | return None |
163 | + except CalledProcessError, e: |
164 | + if e.returncode == 2: |
165 | + return None |
166 | + raise |
167 | |
168 | |
169 | def relation_set(relation_id=None, relation_settings={}, **kwargs): |
170 | + """Set relation information for the current unit""" |
171 | relation_cmd_line = ['relation-set'] |
172 | if relation_id is not None: |
173 | relation_cmd_line.extend(('-r', relation_id)) |
174 | @@ -187,28 +199,28 @@ |
175 | |
176 | @cached |
177 | def relation_ids(reltype=None): |
178 | - "A list of relation_ids" |
179 | + """A list of relation_ids""" |
180 | reltype = reltype or relation_type() |
181 | relid_cmd_line = ['relation-ids', '--format=json'] |
182 | if reltype is not None: |
183 | relid_cmd_line.append(reltype) |
184 | - return json.loads(subprocess.check_output(relid_cmd_line)) |
185 | + return json.loads(subprocess.check_output(relid_cmd_line)) or [] |
186 | return [] |
187 | |
188 | |
189 | @cached |
190 | def related_units(relid=None): |
191 | - "A list of related units" |
192 | + """A list of related units""" |
193 | relid = relid or relation_id() |
194 | units_cmd_line = ['relation-list', '--format=json'] |
195 | if relid is not None: |
196 | units_cmd_line.extend(('-r', relid)) |
197 | - return json.loads(subprocess.check_output(units_cmd_line)) |
198 | + return json.loads(subprocess.check_output(units_cmd_line)) or [] |
199 | |
200 | |
201 | @cached |
202 | def relation_for_unit(unit=None, rid=None): |
203 | - "Get the json represenation of a unit's relation" |
204 | + """Get the json representation of a unit's relation""" |
205 | unit = unit or remote_unit() |
206 | relation = relation_get(unit=unit, rid=rid) |
207 | for key in relation: |
208 | @@ -220,7 +232,7 @@ |
209 | |
210 | @cached |
211 | def relations_for_id(relid=None): |
212 | - "Get relations of a specific relation ID" |
213 | + """Get relations of a specific relation ID""" |
214 | relation_data = [] |
215 | relid = relid or relation_ids() |
216 | for unit in related_units(relid): |
217 | @@ -232,7 +244,7 @@ |
218 | |
219 | @cached |
220 | def relations_of_type(reltype=None): |
221 | - "Get relations of a specific type" |
222 | + """Get relations of a specific type""" |
223 | relation_data = [] |
224 | reltype = reltype or relation_type() |
225 | for relid in relation_ids(reltype): |
226 | @@ -244,7 +256,7 @@ |
227 | |
228 | @cached |
229 | def relation_types(): |
230 | - "Get a list of relation types supported by this charm" |
231 | + """Get a list of relation types supported by this charm""" |
232 | charmdir = os.environ.get('CHARM_DIR', '') |
233 | mdf = open(os.path.join(charmdir, 'metadata.yaml')) |
234 | md = yaml.safe_load(mdf) |
235 | @@ -259,6 +271,7 @@ |
236 | |
237 | @cached |
238 | def relations(): |
239 | + """Get a nested dictionary of relation data for all related units""" |
240 | rels = {} |
241 | for reltype in relation_types(): |
242 | relids = {} |
243 | @@ -272,15 +285,35 @@ |
244 | return rels |
245 | |
246 | |
247 | +@cached |
248 | +def is_relation_made(relation, keys='private-address'): |
249 | + ''' |
250 | + Determine whether a relation is established by checking for |
251 | + presence of key(s). If a list of keys is provided, they |
252 | + must all be present for the relation to be identified as made |
253 | + ''' |
254 | + if isinstance(keys, str): |
255 | + keys = [keys] |
256 | + for r_id in relation_ids(relation): |
257 | + for unit in related_units(r_id): |
258 | + context = {} |
259 | + for k in keys: |
260 | + context[k] = relation_get(k, rid=r_id, |
261 | + unit=unit) |
262 | + if None not in context.values(): |
263 | + return True |
264 | + return False |
265 | + |
266 | + |
267 | def open_port(port, protocol="TCP"): |
268 | - "Open a service network port" |
269 | + """Open a service network port""" |
270 | _args = ['open-port'] |
271 | _args.append('{}/{}'.format(port, protocol)) |
272 | subprocess.check_call(_args) |
273 | |
274 | |
275 | def close_port(port, protocol="TCP"): |
276 | - "Close a service network port" |
277 | + """Close a service network port""" |
278 | _args = ['close-port'] |
279 | _args.append('{}/{}'.format(port, protocol)) |
280 | subprocess.check_call(_args) |
281 | @@ -288,6 +321,7 @@ |
282 | |
283 | @cached |
284 | def unit_get(attribute): |
285 | + """Get the unit ID for the remote unit""" |
286 | _args = ['unit-get', '--format=json', attribute] |
287 | try: |
288 | return json.loads(subprocess.check_output(_args)) |
289 | @@ -296,22 +330,46 @@ |
290 | |
291 | |
292 | def unit_private_ip(): |
293 | + """Get this unit's private IP address""" |
294 | return unit_get('private-address') |
295 | |
296 | |
297 | class UnregisteredHookError(Exception): |
298 | + """Raised when an undefined hook is called""" |
299 | pass |
300 | |
301 | |
302 | class Hooks(object): |
303 | + """A convenient handler for hook functions. |
304 | + |
305 | + Example: |
306 | + hooks = Hooks() |
307 | + |
308 | + # register a hook, taking its name from the function name |
309 | + @hooks.hook() |
310 | + def install(): |
311 | + ... |
312 | + |
313 | + # register a hook, providing a custom hook name |
314 | + @hooks.hook("config-changed") |
315 | + def config_changed(): |
316 | + ... |
317 | + |
318 | + if __name__ == "__main__": |
319 | + # execute a hook based on the name the program is called by |
320 | + hooks.execute(sys.argv) |
321 | + """ |
322 | + |
323 | def __init__(self): |
324 | super(Hooks, self).__init__() |
325 | self._hooks = {} |
326 | |
327 | def register(self, name, function): |
328 | + """Register a hook""" |
329 | self._hooks[name] = function |
330 | |
331 | def execute(self, args): |
332 | + """Execute a registered hook based on args[0]""" |
333 | hook_name = os.path.basename(args[0]) |
334 | if hook_name in self._hooks: |
335 | self._hooks[hook_name]() |
336 | @@ -319,6 +377,7 @@ |
337 | raise UnregisteredHookError(hook_name) |
338 | |
339 | def hook(self, *hook_names): |
340 | + """Decorator, registering them as hooks""" |
341 | def wrapper(decorated): |
342 | for hook_name in hook_names: |
343 | self.register(hook_name, decorated) |
344 | @@ -330,5 +389,7 @@ |
345 | return decorated |
346 | return wrapper |
347 | |
348 | + |
349 | def charm_dir(): |
350 | + """Return the root directory of the current charm""" |
351 | return os.environ.get('CHARM_DIR') |
352 | |
353 | === modified file 'hooks/charmhelpers/core/host.py' |
354 | --- hooks/charmhelpers/core/host.py 2013-07-03 11:46:27 +0000 |
355 | +++ hooks/charmhelpers/core/host.py 2013-11-13 22:24:03 +0000 |
356 | @@ -5,42 +5,63 @@ |
357 | # Nick Moffitt <nick.moffitt@canonical.com> |
358 | # Matthew Wedgwood <matthew.wedgwood@canonical.com> |
359 | |
360 | -import apt_pkg |
361 | import os |
362 | import pwd |
363 | import grp |
364 | +import random |
365 | +import string |
366 | import subprocess |
367 | import hashlib |
368 | |
369 | from collections import OrderedDict |
370 | |
371 | -from hookenv import log, execution_environment |
372 | +from hookenv import log |
373 | |
374 | |
375 | def service_start(service_name): |
376 | - service('start', service_name) |
377 | + """Start a system service""" |
378 | + return service('start', service_name) |
379 | |
380 | |
381 | def service_stop(service_name): |
382 | - service('stop', service_name) |
383 | + """Stop a system service""" |
384 | + return service('stop', service_name) |
385 | |
386 | |
387 | def service_restart(service_name): |
388 | - service('restart', service_name) |
389 | + """Restart a system service""" |
390 | + return service('restart', service_name) |
391 | |
392 | |
393 | def service_reload(service_name, restart_on_failure=False): |
394 | - if not service('reload', service_name) and restart_on_failure: |
395 | - service('restart', service_name) |
396 | + """Reload a system service, optionally falling back to restart if reload fails""" |
397 | + service_result = service('reload', service_name) |
398 | + if not service_result and restart_on_failure: |
399 | + service_result = service('restart', service_name) |
400 | + return service_result |
401 | |
402 | |
403 | def service(action, service_name): |
404 | + """Control a system service""" |
405 | cmd = ['service', service_name, action] |
406 | return subprocess.call(cmd) == 0 |
407 | |
408 | |
409 | +def service_running(service): |
410 | + """Determine whether a system service is running""" |
411 | + try: |
412 | + output = subprocess.check_output(['service', service, 'status']) |
413 | + except subprocess.CalledProcessError: |
414 | + return False |
415 | + else: |
416 | + if ("start/running" in output or "is running" in output): |
417 | + return True |
418 | + else: |
419 | + return False |
420 | + |
421 | + |
422 | def adduser(username, password=None, shell='/bin/bash', system_user=False): |
423 | - """Add a user""" |
424 | + """Add a user to the system""" |
425 | try: |
426 | user_info = pwd.getpwnam(username) |
427 | log('user {0} already exists!'.format(username)) |
428 | @@ -74,36 +95,33 @@ |
429 | |
430 | def rsync(from_path, to_path, flags='-r', options=None): |
431 | """Replicate the contents of a path""" |
432 | - context = execution_environment() |
433 | options = options or ['--delete', '--executability'] |
434 | cmd = ['/usr/bin/rsync', flags] |
435 | cmd.extend(options) |
436 | - cmd.append(from_path.format(**context)) |
437 | - cmd.append(to_path.format(**context)) |
438 | + cmd.append(from_path) |
439 | + cmd.append(to_path) |
440 | log(" ".join(cmd)) |
441 | return subprocess.check_output(cmd).strip() |
442 | |
443 | |
444 | def symlink(source, destination): |
445 | """Create a symbolic link""" |
446 | - context = execution_environment() |
447 | log("Symlinking {} as {}".format(source, destination)) |
448 | cmd = [ |
449 | 'ln', |
450 | '-sf', |
451 | - source.format(**context), |
452 | - destination.format(**context) |
453 | + source, |
454 | + destination, |
455 | ] |
456 | subprocess.check_call(cmd) |
457 | |
458 | |
459 | def mkdir(path, owner='root', group='root', perms=0555, force=False): |
460 | """Create a directory""" |
461 | - context = execution_environment() |
462 | log("Making dir {} {}:{} {:o}".format(path, owner, group, |
463 | perms)) |
464 | - uid = pwd.getpwnam(owner.format(**context)).pw_uid |
465 | - gid = grp.getgrnam(group.format(**context)).gr_gid |
466 | + uid = pwd.getpwnam(owner).pw_uid |
467 | + gid = grp.getgrnam(group).gr_gid |
468 | realpath = os.path.abspath(path) |
469 | if os.path.exists(realpath): |
470 | if force and not os.path.isdir(realpath): |
471 | @@ -114,75 +132,19 @@ |
472 | os.chown(realpath, uid, gid) |
473 | |
474 | |
475 | -def write_file(path, fmtstr, owner='root', group='root', perms=0444, **kwargs): |
476 | +def write_file(path, content, owner='root', group='root', perms=0444): |
477 | """Create or overwrite a file with the contents of a string""" |
478 | - context = execution_environment() |
479 | - context.update(kwargs) |
480 | - log("Writing file {} {}:{} {:o}".format(path, owner, group, |
481 | - perms)) |
482 | - uid = pwd.getpwnam(owner.format(**context)).pw_uid |
483 | - gid = grp.getgrnam(group.format(**context)).gr_gid |
484 | - with open(path.format(**context), 'w') as target: |
485 | + log("Writing file {} {}:{} {:o}".format(path, owner, group, perms)) |
486 | + uid = pwd.getpwnam(owner).pw_uid |
487 | + gid = grp.getgrnam(group).gr_gid |
488 | + with open(path, 'w') as target: |
489 | os.fchown(target.fileno(), uid, gid) |
490 | os.fchmod(target.fileno(), perms) |
491 | - target.write(fmtstr.format(**context)) |
492 | - |
493 | - |
494 | -def render_template_file(source, destination, **kwargs): |
495 | - """Create or overwrite a file using a template""" |
496 | - log("Rendering template {} for {}".format(source, |
497 | - destination)) |
498 | - context = execution_environment() |
499 | - with open(source.format(**context), 'r') as template: |
500 | - write_file(destination.format(**context), template.read(), |
501 | - **kwargs) |
502 | - |
503 | - |
504 | -def filter_installed_packages(packages): |
505 | - """Returns a list of packages that require installation""" |
506 | - apt_pkg.init() |
507 | - cache = apt_pkg.Cache() |
508 | - _pkgs = [] |
509 | - for package in packages: |
510 | - try: |
511 | - p = cache[package] |
512 | - p.current_ver or _pkgs.append(package) |
513 | - except KeyError: |
514 | - log('Package {} has no installation candidate.'.format(package), |
515 | - level='WARNING') |
516 | - _pkgs.append(package) |
517 | - return _pkgs |
518 | - |
519 | - |
520 | -def apt_install(packages, options=None, fatal=False): |
521 | - """Install one or more packages""" |
522 | - options = options or [] |
523 | - cmd = ['apt-get', '-y'] |
524 | - cmd.extend(options) |
525 | - cmd.append('install') |
526 | - if isinstance(packages, basestring): |
527 | - cmd.append(packages) |
528 | - else: |
529 | - cmd.extend(packages) |
530 | - log("Installing {} with options: {}".format(packages, |
531 | - options)) |
532 | - if fatal: |
533 | - subprocess.check_call(cmd) |
534 | - else: |
535 | - subprocess.call(cmd) |
536 | - |
537 | - |
538 | -def apt_update(fatal=False): |
539 | - """Update local apt cache""" |
540 | - cmd = ['apt-get', 'update'] |
541 | - if fatal: |
542 | - subprocess.check_call(cmd) |
543 | - else: |
544 | - subprocess.call(cmd) |
545 | + target.write(content) |
546 | |
547 | |
548 | def mount(device, mountpoint, options=None, persist=False): |
549 | - '''Mount a filesystem''' |
550 | + """Mount a filesystem at a particular mountpoint""" |
551 | cmd_args = ['mount'] |
552 | if options is not None: |
553 | cmd_args.extend(['-o', options]) |
554 | @@ -199,7 +161,7 @@ |
555 | |
556 | |
557 | def umount(mountpoint, persist=False): |
558 | - '''Unmount a filesystem''' |
559 | + """Unmount a filesystem""" |
560 | cmd_args = ['umount', mountpoint] |
561 | try: |
562 | subprocess.check_output(cmd_args) |
563 | @@ -213,7 +175,7 @@ |
564 | |
565 | |
566 | def mounts(): |
567 | - '''List of all mounted volumes as [[mountpoint,device],[...]]''' |
568 | + """Get a list of all mounted volumes as [[mountpoint,device],[...]]""" |
569 | with open('/proc/mounts') as f: |
570 | # [['/mount/point','/dev/path'],[...]] |
571 | system_mounts = [m[1::-1] for m in [l.strip().split() |
572 | @@ -222,7 +184,7 @@ |
573 | |
574 | |
575 | def file_hash(path): |
576 | - ''' Generate a md5 hash of the contents of 'path' or None if not found ''' |
577 | + """Generate a md5 hash of the contents of 'path' or None if not found """ |
578 | if os.path.exists(path): |
579 | h = hashlib.md5() |
580 | with open(path, 'r') as source: |
581 | @@ -233,7 +195,7 @@ |
582 | |
583 | |
584 | def restart_on_change(restart_map): |
585 | - ''' Restart services based on configuration files changing |
586 | + """Restart services based on configuration files changing |
587 | |
588 | This function is used a decorator, for example |
589 | |
590 | @@ -246,7 +208,7 @@ |
591 | In this example, the cinder-api and cinder-volume services |
592 | would be restarted if /etc/ceph/ceph.conf is changed by the |
593 | ceph_client_changed function. |
594 | - ''' |
595 | + """ |
596 | def wrap(f): |
597 | def wrapped_f(*args): |
598 | checksums = {} |
599 | @@ -264,10 +226,22 @@ |
600 | |
601 | |
602 | def lsb_release(): |
603 | - '''Return /etc/lsb-release in a dict''' |
604 | + """Return /etc/lsb-release in a dict""" |
605 | d = {} |
606 | with open('/etc/lsb-release', 'r') as lsb: |
607 | for l in lsb: |
608 | k, v = l.split('=') |
609 | d[k.strip()] = v.strip() |
610 | return d |
611 | + |
612 | + |
613 | +def pwgen(length=None): |
614 | + """Generate a random password.""" |
615 | + if length is None: |
616 | + length = random.choice(range(35, 45)) |
617 | + alphanumeric_chars = [ |
618 | + l for l in (string.letters + string.digits) |
619 | + if l not in 'l0QD1vAEIOUaeiou'] |
620 | + random_chars = [ |
621 | + random.choice(alphanumeric_chars) for _ in range(length)] |
622 | + return(''.join(random_chars)) |
623 | |
624 | === modified file 'hooks/charmhelpers/fetch/__init__.py' |
625 | --- hooks/charmhelpers/fetch/__init__.py 2013-07-03 11:46:27 +0000 |
626 | +++ hooks/charmhelpers/fetch/__init__.py 2013-11-13 22:24:03 +0000 |
627 | @@ -1,9 +1,6 @@ |
628 | import importlib |
629 | from yaml import safe_load |
630 | from charmhelpers.core.host import ( |
631 | - apt_install, |
632 | - apt_update, |
633 | - filter_installed_packages, |
634 | lsb_release |
635 | ) |
636 | from urlparse import ( |
637 | @@ -15,6 +12,8 @@ |
638 | config, |
639 | log, |
640 | ) |
641 | +import apt_pkg |
642 | +import os |
643 | |
644 | CLOUD_ARCHIVE = """# Ubuntu Cloud Archive |
645 | deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main |
646 | @@ -22,18 +21,128 @@ |
647 | PROPOSED_POCKET = """# Proposed |
648 | deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted |
649 | """ |
650 | +CLOUD_ARCHIVE_POCKETS = { |
651 | + # Folsom |
652 | + 'folsom': 'precise-updates/folsom', |
653 | + 'precise-folsom': 'precise-updates/folsom', |
654 | + 'precise-folsom/updates': 'precise-updates/folsom', |
655 | + 'precise-updates/folsom': 'precise-updates/folsom', |
656 | + 'folsom/proposed': 'precise-proposed/folsom', |
657 | + 'precise-folsom/proposed': 'precise-proposed/folsom', |
658 | + 'precise-proposed/folsom': 'precise-proposed/folsom', |
659 | + # Grizzly |
660 | + 'grizzly': 'precise-updates/grizzly', |
661 | + 'precise-grizzly': 'precise-updates/grizzly', |
662 | + 'precise-grizzly/updates': 'precise-updates/grizzly', |
663 | + 'precise-updates/grizzly': 'precise-updates/grizzly', |
664 | + 'grizzly/proposed': 'precise-proposed/grizzly', |
665 | + 'precise-grizzly/proposed': 'precise-proposed/grizzly', |
666 | + 'precise-proposed/grizzly': 'precise-proposed/grizzly', |
667 | + # Havana |
668 | + 'havana': 'precise-updates/havana', |
669 | + 'precise-havana': 'precise-updates/havana', |
670 | + 'precise-havana/updates': 'precise-updates/havana', |
671 | + 'precise-updates/havana': 'precise-updates/havana', |
672 | + 'havana/proposed': 'precise-proposed/havana', |
673 | + 'precise-havana/proposed': 'precise-proposed/havana', |
674 | + 'precise-proposed/havana': 'precise-proposed/havana', |
675 | +} |
676 | + |
677 | + |
678 | +def filter_installed_packages(packages): |
679 | + """Returns a list of packages that require installation""" |
680 | + apt_pkg.init() |
681 | + cache = apt_pkg.Cache() |
682 | + _pkgs = [] |
683 | + for package in packages: |
684 | + try: |
685 | + p = cache[package] |
686 | + p.current_ver or _pkgs.append(package) |
687 | + except KeyError: |
688 | + log('Package {} has no installation candidate.'.format(package), |
689 | + level='WARNING') |
690 | + _pkgs.append(package) |
691 | + return _pkgs |
692 | + |
693 | + |
694 | +def apt_install(packages, options=None, fatal=False): |
695 | + """Install one or more packages""" |
696 | + if options is None: |
697 | + options = ['--option=Dpkg::Options::=--force-confold'] |
698 | + |
699 | + cmd = ['apt-get', '--assume-yes'] |
700 | + cmd.extend(options) |
701 | + cmd.append('install') |
702 | + if isinstance(packages, basestring): |
703 | + cmd.append(packages) |
704 | + else: |
705 | + cmd.extend(packages) |
706 | + log("Installing {} with options: {}".format(packages, |
707 | + options)) |
708 | + env = os.environ.copy() |
709 | + if 'DEBIAN_FRONTEND' not in env: |
710 | + env['DEBIAN_FRONTEND'] = 'noninteractive' |
711 | + |
712 | + if fatal: |
713 | + subprocess.check_call(cmd, env=env) |
714 | + else: |
715 | + subprocess.call(cmd, env=env) |
716 | + |
717 | + |
718 | +def apt_update(fatal=False): |
719 | + """Update local apt cache""" |
720 | + cmd = ['apt-get', 'update'] |
721 | + if fatal: |
722 | + subprocess.check_call(cmd) |
723 | + else: |
724 | + subprocess.call(cmd) |
725 | + |
726 | + |
727 | +def apt_purge(packages, fatal=False): |
728 | + """Purge one or more packages""" |
729 | + cmd = ['apt-get', '--assume-yes', 'purge'] |
730 | + if isinstance(packages, basestring): |
731 | + cmd.append(packages) |
732 | + else: |
733 | + cmd.extend(packages) |
734 | + log("Purging {}".format(packages)) |
735 | + if fatal: |
736 | + subprocess.check_call(cmd) |
737 | + else: |
738 | + subprocess.call(cmd) |
739 | + |
740 | + |
741 | +def apt_hold(packages, fatal=False): |
742 | + """Hold one or more packages""" |
743 | + cmd = ['apt-mark', 'hold'] |
744 | + if isinstance(packages, basestring): |
745 | + cmd.append(packages) |
746 | + else: |
747 | + cmd.extend(packages) |
748 | + log("Holding {}".format(packages)) |
749 | + if fatal: |
750 | + subprocess.check_call(cmd) |
751 | + else: |
752 | + subprocess.call(cmd) |
753 | |
754 | |
755 | def add_source(source, key=None): |
756 | - if ((source.startswith('ppa:') or |
757 | - source.startswith('http:'))): |
758 | - subprocess.check_call(['add-apt-repository', source]) |
759 | + if (source.startswith('ppa:') or |
760 | + source.startswith('http:') or |
761 | + source.startswith('deb ') or |
762 | + source.startswith('cloud-archive:')): |
763 | + subprocess.check_call(['add-apt-repository', '--yes', source]) |
764 | elif source.startswith('cloud:'): |
765 | apt_install(filter_installed_packages(['ubuntu-cloud-keyring']), |
766 | fatal=True) |
767 | pocket = source.split(':')[-1] |
768 | + if pocket not in CLOUD_ARCHIVE_POCKETS: |
769 | + raise SourceConfigError( |
770 | + 'Unsupported cloud: source option %s' % |
771 | + pocket) |
772 | + actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket] |
773 | with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt: |
774 | - apt.write(CLOUD_ARCHIVE.format(pocket)) |
775 | + apt.write(CLOUD_ARCHIVE.format(actual_pocket)) |
776 | elif source == 'proposed': |
777 | release = lsb_release()['DISTRIB_CODENAME'] |
778 | with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt: |
779 | @@ -63,8 +172,11 @@ |
780 | Note that 'null' (a.k.a. None) should not be quoted. |
781 | """ |
782 | sources = safe_load(config(sources_var)) |
783 | - keys = safe_load(config(keys_var)) |
784 | - if isinstance(sources, basestring) and isinstance(keys, basestring): |
785 | + keys = config(keys_var) |
786 | + if keys is not None: |
787 | + keys = safe_load(keys) |
788 | + if isinstance(sources, basestring) and ( |
789 | + keys is None or isinstance(keys, basestring)): |
790 | add_source(sources, keys) |
791 | else: |
792 | if not len(sources) == len(keys): |
793 | @@ -79,6 +191,7 @@ |
794 | # least- to most-specific URL matching. |
795 | FETCH_HANDLERS = ( |
796 | 'charmhelpers.fetch.archiveurl.ArchiveUrlFetchHandler', |
797 | + 'charmhelpers.fetch.bzrurl.BzrUrlFetchHandler', |
798 | ) |
799 | |
800 | |
801 | @@ -98,6 +211,7 @@ |
802 | # We ONLY check for True here because can_handle may return a string |
803 | # explaining why it can't handle a given source. |
804 | handlers = [h for h in plugins() if h.can_handle(source) is True] |
805 | + installed_to = None |
806 | for handler in handlers: |
807 | try: |
808 | installed_to = handler.install(source) |
809 | @@ -115,7 +229,9 @@ |
810 | |
811 | |
812 | class BaseFetchHandler(object): |
813 | + |
814 | """Base class for FetchHandler implementations in fetch plugins""" |
815 | + |
816 | def can_handle(self, source): |
817 | """Returns True if the source can be handled. Otherwise returns |
818 | a string explaining why it cannot""" |
819 | @@ -143,10 +259,13 @@ |
820 | for handler_name in fetch_handlers: |
821 | package, classname = handler_name.rsplit('.', 1) |
822 | try: |
823 | - handler_class = getattr(importlib.import_module(package), classname) |
824 | + handler_class = getattr( |
825 | + importlib.import_module(package), |
826 | + classname) |
827 | plugin_list.append(handler_class()) |
828 | except (ImportError, AttributeError): |
829 | # Skip missing plugins so that they can be ommitted from |
830 | # installation if desired |
831 | - log("FetchHandler {} not found, skipping plugin".format(handler_name)) |
832 | + log("FetchHandler {} not found, skipping plugin".format( |
833 | + handler_name)) |
834 | return plugin_list |
835 | |
836 | === modified file 'hooks/charmhelpers/fetch/archiveurl.py' |
837 | --- hooks/charmhelpers/fetch/archiveurl.py 2013-07-03 09:04:42 +0000 |
838 | +++ hooks/charmhelpers/fetch/archiveurl.py 2013-11-13 22:24:03 +0000 |
839 | @@ -8,6 +8,7 @@ |
840 | get_archive_handler, |
841 | extract, |
842 | ) |
843 | +from charmhelpers.core.host import mkdir |
844 | |
845 | |
846 | class ArchiveUrlFetchHandler(BaseFetchHandler): |
847 | @@ -24,20 +25,24 @@ |
848 | # propogate all exceptions |
849 | # URLError, OSError, etc |
850 | response = urllib2.urlopen(source) |
851 | - with open(dest, 'w') as dest_file: |
852 | - dest_file.write(response.read()) |
853 | + try: |
854 | + with open(dest, 'w') as dest_file: |
855 | + dest_file.write(response.read()) |
856 | + except Exception as e: |
857 | + if os.path.isfile(dest): |
858 | + os.unlink(dest) |
859 | + raise e |
860 | |
861 | def install(self, source): |
862 | url_parts = self.parse_url(source) |
863 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched') |
864 | + if not os.path.exists(dest_dir): |
865 | + mkdir(dest_dir, perms=0755) |
866 | dld_file = os.path.join(dest_dir, os.path.basename(url_parts.path)) |
867 | try: |
868 | self.download(source, dld_file) |
869 | except urllib2.URLError as e: |
870 | - return UnhandledSource(e.reason) |
871 | + raise UnhandledSource(e.reason) |
872 | except OSError as e: |
873 | - return UnhandledSource(e.strerror) |
874 | - finally: |
875 | - if os.path.isfile(dld_file): |
876 | - os.unlink(dld_file) |
877 | + raise UnhandledSource(e.strerror) |
878 | return extract(dld_file) |
879 | |
880 | === added file 'hooks/charmhelpers/fetch/bzrurl.py' |
881 | --- hooks/charmhelpers/fetch/bzrurl.py 1970-01-01 00:00:00 +0000 |
882 | +++ hooks/charmhelpers/fetch/bzrurl.py 2013-11-13 22:24:03 +0000 |
883 | @@ -0,0 +1,49 @@ |
884 | +import os |
885 | +from charmhelpers.fetch import ( |
886 | + BaseFetchHandler, |
887 | + UnhandledSource |
888 | +) |
889 | +from charmhelpers.core.host import mkdir |
890 | + |
891 | +try: |
892 | + from bzrlib.branch import Branch |
893 | +except ImportError: |
894 | + from charmhelpers.fetch import apt_install |
895 | + apt_install("python-bzrlib") |
896 | + from bzrlib.branch import Branch |
897 | + |
898 | + |
899 | +class BzrUrlFetchHandler(BaseFetchHandler): |
900 | + """Handler for bazaar branches via generic and lp URLs""" |
901 | + def can_handle(self, source): |
902 | + url_parts = self.parse_url(source) |
903 | + if url_parts.scheme not in ('bzr+ssh', 'lp'): |
904 | + return False |
905 | + else: |
906 | + return True |
907 | + |
908 | + def branch(self, source, dest): |
909 | + url_parts = self.parse_url(source) |
910 | + # If we use lp:branchname scheme we need to load plugins |
911 | + if not self.can_handle(source): |
912 | + raise UnhandledSource("Cannot handle {}".format(source)) |
913 | + if url_parts.scheme == "lp": |
914 | + from bzrlib.plugin import load_plugins |
915 | + load_plugins() |
916 | + try: |
917 | + remote_branch = Branch.open(source) |
918 | + remote_branch.bzrdir.sprout(dest).open_branch() |
919 | + except Exception as e: |
920 | + raise e |
921 | + |
922 | + def install(self, source): |
923 | + url_parts = self.parse_url(source) |
924 | + branch_name = url_parts.path.strip("/").split("/")[-1] |
925 | + dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", branch_name) |
926 | + if not os.path.exists(dest_dir): |
927 | + mkdir(dest_dir, perms=0755) |
928 | + try: |
929 | + self.branch(source, dest_dir) |
930 | + except OSError as e: |
931 | + raise UnhandledSource(e.strerror) |
932 | + return dest_dir |
933 | |
934 | === modified file 'hooks/hooks.py' |
935 | --- hooks/hooks.py 2013-08-27 13:42:48 +0000 |
936 | +++ hooks/hooks.py 2013-11-13 22:24:03 +0000 |
937 | @@ -21,21 +21,27 @@ |
938 | related_units, |
939 | relation_get, |
940 | Hooks, |
941 | - UnregisteredHookError |
942 | + UnregisteredHookError, |
943 | + service_name |
944 | ) |
945 | from charmhelpers.core.host import ( |
946 | + umount, |
947 | + mkdir |
948 | +) |
949 | +from charmhelpers.fetch import ( |
950 | + add_source, |
951 | apt_install, |
952 | apt_update, |
953 | filter_installed_packages, |
954 | - umount |
955 | ) |
956 | -from charmhelpers.fetch import add_source |
957 | |
958 | from utils import ( |
959 | render_template, |
960 | get_host_ip, |
961 | ) |
962 | |
963 | +from charmhelpers.contrib.openstack.alternatives import install_alternative |
964 | + |
965 | hooks = Hooks() |
966 | |
967 | |
968 | @@ -66,9 +72,14 @@ |
969 | 'fsid': get_fsid(), |
970 | 'version': ceph.get_ceph_version() |
971 | } |
972 | - |
973 | - with open('/etc/ceph/ceph.conf', 'w') as cephconf: |
974 | + # Install ceph.conf as an alternative to support |
975 | + # co-existence with other charms that write this file |
976 | + charm_ceph_conf = "/var/lib/charm/{}/ceph.conf".format(service_name()) |
977 | + mkdir(os.path.dirname(charm_ceph_conf)) |
978 | + with open(charm_ceph_conf, 'w') as cephconf: |
979 | cephconf.write(render_template('ceph.conf', cephcontext)) |
980 | + install_alternative('ceph.conf', '/etc/ceph/ceph.conf', |
981 | + charm_ceph_conf, 90) |
982 | |
983 | JOURNAL_ZAPPED = '/var/lib/ceph/journal_zapped' |
984 | |
985 | |
986 | === modified file 'hooks/utils.py' |
987 | --- hooks/utils.py 2013-06-25 11:03:55 +0000 |
988 | +++ hooks/utils.py 2013-11-13 22:24:03 +0000 |
989 | @@ -13,7 +13,7 @@ |
990 | unit_get, |
991 | cached |
992 | ) |
993 | -from charmhelpers.core.host import ( |
994 | +from charmhelpers.fetch import ( |
995 | apt_install, |
996 | filter_installed_packages |
997 | ) |
998 | |
999 | === modified file 'revision' |
1000 | --- revision 2013-08-27 13:57:22 +0000 |
1001 | +++ revision 2013-11-13 22:24:03 +0000 |
1002 | @@ -1,1 +1,1 @@ |
1003 | -11 |
1004 | +13 |
LGTM, again assuming that most of this is just updating the local copy of charm-helpers. The only real changes appear to be in hooks/hooks.py and hooks/utils.py.