Merge lp:~james-page/charms/precise/ceph-radosgw/apache24 into lp:~charmers/charms/precise/ceph-radosgw/trunk
- Precise Pangolin (12.04)
- apache24
- Merge into trunk
Proposed by
James Page
Status: | Merged |
---|---|
Merged at revision: | 15 |
Proposed branch: | lp:~james-page/charms/precise/ceph-radosgw/apache24 |
Merge into: | lp:~charmers/charms/precise/ceph-radosgw/trunk |
Diff against target: |
1986 lines (+1345/-277) 17 files modified
.project (+17/-0) .pydevproject (+8/-0) Makefile (+8/-0) charm-helpers-sync.yaml (+9/-0) hooks/ceph.py (+22/-21) hooks/charmhelpers/contrib/openstack/alternatives.py (+17/-0) hooks/charmhelpers/contrib/storage/linux/utils.py (+25/-0) hooks/charmhelpers/core/hookenv.py (+395/-0) hooks/charmhelpers/core/host.py (+291/-0) hooks/charmhelpers/fetch/__init__.py (+279/-0) hooks/charmhelpers/fetch/archiveurl.py (+48/-0) hooks/charmhelpers/fetch/bzrurl.py (+49/-0) hooks/charmhelpers/payload/__init__.py (+1/-0) hooks/charmhelpers/payload/execd.py (+50/-0) hooks/hooks.py (+109/-90) hooks/utils.py (+15/-166) metadata.yaml (+2/-0) |
To merge this branch: | bzr merge lp:~james-page/charms/precise/ceph-radosgw/apache24 |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Marco Ceppi (community) | Approve | ||
OpenStack Charmers | Pending | ||
Review via email: mp+203129@code.launchpad.net |
Commit message
Description of the change
1) Fixes for compatibility with apache 2.4
2) General refresh to use charm-helpers inline with other ceph charms
To post a comment you must log in.
- 19. By James Page
-
Add pydev stuff
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === added file '.project' |
2 | --- .project 1970-01-01 00:00:00 +0000 |
3 | +++ .project 2014-01-24 17:20:41 +0000 |
4 | @@ -0,0 +1,17 @@ |
5 | +<?xml version="1.0" encoding="UTF-8"?> |
6 | +<projectDescription> |
7 | + <name>ceph-radosgw</name> |
8 | + <comment></comment> |
9 | + <projects> |
10 | + </projects> |
11 | + <buildSpec> |
12 | + <buildCommand> |
13 | + <name>org.python.pydev.PyDevBuilder</name> |
14 | + <arguments> |
15 | + </arguments> |
16 | + </buildCommand> |
17 | + </buildSpec> |
18 | + <natures> |
19 | + <nature>org.python.pydev.pythonNature</nature> |
20 | + </natures> |
21 | +</projectDescription> |
22 | |
23 | === added file '.pydevproject' |
24 | --- .pydevproject 1970-01-01 00:00:00 +0000 |
25 | +++ .pydevproject 2014-01-24 17:20:41 +0000 |
26 | @@ -0,0 +1,8 @@ |
27 | +<?xml version="1.0" encoding="UTF-8" standalone="no"?> |
28 | +<?eclipse-pydev version="1.0"?><pydev_project> |
29 | +<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.7</pydev_property> |
30 | +<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property> |
31 | +<pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH"> |
32 | +<path>/ceph-radosgw/hooks</path> |
33 | +</pydev_pathproperty> |
34 | +</pydev_project> |
35 | |
36 | === added file 'Makefile' |
37 | --- Makefile 1970-01-01 00:00:00 +0000 |
38 | +++ Makefile 2014-01-24 17:20:41 +0000 |
39 | @@ -0,0 +1,8 @@ |
40 | +#!/usr/bin/make |
41 | + |
42 | +lint: |
43 | + @flake8 --exclude hooks/charmhelpers hooks |
44 | + @charm proof |
45 | + |
46 | +sync: |
47 | + @charm-helper-sync -c charm-helpers-sync.yaml |
48 | |
49 | === added file 'charm-helpers-sync.yaml' |
50 | --- charm-helpers-sync.yaml 1970-01-01 00:00:00 +0000 |
51 | +++ charm-helpers-sync.yaml 2014-01-24 17:20:41 +0000 |
52 | @@ -0,0 +1,9 @@ |
53 | +branch: lp:charm-helpers |
54 | +destination: hooks/charmhelpers |
55 | +include: |
56 | + - core |
57 | + - fetch |
58 | + - contrib.storage.linux: |
59 | + - utils |
60 | + - payload.execd |
61 | + - contrib.openstack.alternatives |
62 | |
63 | === modified file 'hooks/ceph.py' |
64 | --- hooks/ceph.py 2013-01-11 09:15:51 +0000 |
65 | +++ hooks/ceph.py 2014-01-24 17:20:41 +0000 |
66 | @@ -10,23 +10,24 @@ |
67 | import json |
68 | import subprocess |
69 | import time |
70 | -import utils |
71 | import os |
72 | import apt_pkg as apt |
73 | |
74 | +from socket import gethostname as get_unit_hostname |
75 | + |
76 | LEADER = 'leader' |
77 | PEON = 'peon' |
78 | QUORUM = [LEADER, PEON] |
79 | |
80 | |
81 | def is_quorum(): |
82 | - asok = "/var/run/ceph/ceph-mon.{}.asok".format(utils.get_unit_hostname()) |
83 | + asok = "/var/run/ceph/ceph-mon.{}.asok".format(get_unit_hostname()) |
84 | cmd = [ |
85 | "ceph", |
86 | "--admin-daemon", |
87 | asok, |
88 | "mon_status" |
89 | - ] |
90 | + ] |
91 | if os.path.exists(asok): |
92 | try: |
93 | result = json.loads(subprocess.check_output(cmd)) |
94 | @@ -44,13 +45,13 @@ |
95 | |
96 | |
97 | def is_leader(): |
98 | - asok = "/var/run/ceph/ceph-mon.{}.asok".format(utils.get_unit_hostname()) |
99 | + asok = "/var/run/ceph/ceph-mon.{}.asok".format(get_unit_hostname()) |
100 | cmd = [ |
101 | "ceph", |
102 | "--admin-daemon", |
103 | asok, |
104 | "mon_status" |
105 | - ] |
106 | + ] |
107 | if os.path.exists(asok): |
108 | try: |
109 | result = json.loads(subprocess.check_output(cmd)) |
110 | @@ -73,14 +74,14 @@ |
111 | |
112 | |
113 | def add_bootstrap_hint(peer): |
114 | - asok = "/var/run/ceph/ceph-mon.{}.asok".format(utils.get_unit_hostname()) |
115 | + asok = "/var/run/ceph/ceph-mon.{}.asok".format(get_unit_hostname()) |
116 | cmd = [ |
117 | "ceph", |
118 | "--admin-daemon", |
119 | asok, |
120 | "add_bootstrap_peer_hint", |
121 | peer |
122 | - ] |
123 | + ] |
124 | if os.path.exists(asok): |
125 | # Ignore any errors for this call |
126 | subprocess.call(cmd) |
127 | @@ -89,7 +90,7 @@ |
128 | 'xfs', |
129 | 'ext4', |
130 | 'btrfs' |
131 | - ] |
132 | +] |
133 | |
134 | |
135 | def is_osd_disk(dev): |
136 | @@ -99,7 +100,7 @@ |
137 | for line in info: |
138 | if line.startswith( |
139 | 'Partition GUID code: 4FBD7E29-9D25-41B8-AFD0-062C0CEFF05D' |
140 | - ): |
141 | + ): |
142 | return True |
143 | except subprocess.CalledProcessError: |
144 | pass |
145 | @@ -110,7 +111,7 @@ |
146 | cmd = [ |
147 | 'udevadm', 'trigger', |
148 | '--subsystem-match=block', '--action=add' |
149 | - ] |
150 | + ] |
151 | |
152 | subprocess.call(cmd) |
153 | |
154 | @@ -140,7 +141,7 @@ |
155 | '--create-keyring', |
156 | '--name=client.bootstrap-osd', |
157 | '--add-key={}'.format(key) |
158 | - ] |
159 | + ] |
160 | subprocess.check_call(cmd) |
161 | |
162 | # OSD caps taken from ceph-create-keys |
163 | @@ -148,10 +149,10 @@ |
164 | 'mon': [ |
165 | 'allow command osd create ...', |
166 | 'allow command osd crush set ...', |
167 | - r'allow command auth add * osd allow\ * mon allow\ rwx', |
168 | + r'allow command auth add * osd allow\ * mon allow\ rwx', |
169 | 'allow command mon getmap' |
170 | - ] |
171 | - } |
172 | + ] |
173 | +} |
174 | |
175 | |
176 | def get_osd_bootstrap_key(): |
177 | @@ -169,14 +170,14 @@ |
178 | '--create-keyring', |
179 | '--name=client.radosgw.gateway', |
180 | '--add-key={}'.format(key) |
181 | - ] |
182 | + ] |
183 | subprocess.check_call(cmd) |
184 | |
185 | # OSD caps taken from ceph-create-keys |
186 | _radosgw_caps = { |
187 | 'mon': ['allow r'], |
188 | 'osd': ['allow rwx'] |
189 | - } |
190 | +} |
191 | |
192 | |
193 | def get_radosgw_key(): |
194 | @@ -186,7 +187,7 @@ |
195 | _default_caps = { |
196 | 'mon': ['allow r'], |
197 | 'osd': ['allow rwx'] |
198 | - } |
199 | +} |
200 | |
201 | |
202 | def get_named_key(name, caps=None): |
203 | @@ -196,16 +197,16 @@ |
204 | '--name', 'mon.', |
205 | '--keyring', |
206 | '/var/lib/ceph/mon/ceph-{}/keyring'.format( |
207 | - utils.get_unit_hostname() |
208 | - ), |
209 | + get_unit_hostname() |
210 | + ), |
211 | 'auth', 'get-or-create', 'client.{}'.format(name), |
212 | - ] |
213 | + ] |
214 | # Add capabilities |
215 | for subsystem, subcaps in caps.iteritems(): |
216 | cmd.extend([ |
217 | subsystem, |
218 | '; '.join(subcaps), |
219 | - ]) |
220 | + ]) |
221 | output = subprocess.check_output(cmd).strip() # IGNORE:E1103 |
222 | # get-or-create appears to have different output depending |
223 | # on whether its 'get' or 'create' |
224 | |
225 | === added directory 'hooks/charmhelpers' |
226 | === added file 'hooks/charmhelpers/__init__.py' |
227 | === added directory 'hooks/charmhelpers/contrib' |
228 | === added file 'hooks/charmhelpers/contrib/__init__.py' |
229 | === added directory 'hooks/charmhelpers/contrib/openstack' |
230 | === added file 'hooks/charmhelpers/contrib/openstack/__init__.py' |
231 | === added file 'hooks/charmhelpers/contrib/openstack/alternatives.py' |
232 | --- hooks/charmhelpers/contrib/openstack/alternatives.py 1970-01-01 00:00:00 +0000 |
233 | +++ hooks/charmhelpers/contrib/openstack/alternatives.py 2014-01-24 17:20:41 +0000 |
234 | @@ -0,0 +1,17 @@ |
235 | +''' Helper for managing alternatives for file conflict resolution ''' |
236 | + |
237 | +import subprocess |
238 | +import shutil |
239 | +import os |
240 | + |
241 | + |
242 | +def install_alternative(name, target, source, priority=50): |
243 | + ''' Install alternative configuration ''' |
244 | + if (os.path.exists(target) and not os.path.islink(target)): |
245 | + # Move existing file/directory away before installing |
246 | + shutil.move(target, '{}.bak'.format(target)) |
247 | + cmd = [ |
248 | + 'update-alternatives', '--force', '--install', |
249 | + target, name, source, str(priority) |
250 | + ] |
251 | + subprocess.check_call(cmd) |
252 | |
253 | === added directory 'hooks/charmhelpers/contrib/storage' |
254 | === added file 'hooks/charmhelpers/contrib/storage/__init__.py' |
255 | === added directory 'hooks/charmhelpers/contrib/storage/linux' |
256 | === added file 'hooks/charmhelpers/contrib/storage/linux/__init__.py' |
257 | === added file 'hooks/charmhelpers/contrib/storage/linux/utils.py' |
258 | --- hooks/charmhelpers/contrib/storage/linux/utils.py 1970-01-01 00:00:00 +0000 |
259 | +++ hooks/charmhelpers/contrib/storage/linux/utils.py 2014-01-24 17:20:41 +0000 |
260 | @@ -0,0 +1,25 @@ |
261 | +from os import stat |
262 | +from stat import S_ISBLK |
263 | + |
264 | +from subprocess import ( |
265 | + check_call |
266 | +) |
267 | + |
268 | + |
269 | +def is_block_device(path): |
270 | + ''' |
271 | + Confirm device at path is a valid block device node. |
272 | + |
273 | + :returns: boolean: True if path is a block device, False if not. |
274 | + ''' |
275 | + return S_ISBLK(stat(path).st_mode) |
276 | + |
277 | + |
278 | +def zap_disk(block_device): |
279 | + ''' |
280 | + Clear a block device of partition table. Relies on sgdisk, which is |
281 | + installed as part of the 'gdisk' package in Ubuntu. |
282 | + |
283 | + :param block_device: str: Full path of block device to clean. |
284 | + ''' |
285 | + check_call(['sgdisk', '--zap-all', '--mbrtogpt', block_device]) |
286 | |
287 | === added directory 'hooks/charmhelpers/core' |
288 | === added file 'hooks/charmhelpers/core/__init__.py' |
289 | === added file 'hooks/charmhelpers/core/hookenv.py' |
290 | --- hooks/charmhelpers/core/hookenv.py 1970-01-01 00:00:00 +0000 |
291 | +++ hooks/charmhelpers/core/hookenv.py 2014-01-24 17:20:41 +0000 |
292 | @@ -0,0 +1,395 @@ |
293 | +"Interactions with the Juju environment" |
294 | +# Copyright 2013 Canonical Ltd. |
295 | +# |
296 | +# Authors: |
297 | +# Charm Helpers Developers <juju@lists.ubuntu.com> |
298 | + |
299 | +import os |
300 | +import json |
301 | +import yaml |
302 | +import subprocess |
303 | +import UserDict |
304 | +from subprocess import CalledProcessError |
305 | + |
306 | +CRITICAL = "CRITICAL" |
307 | +ERROR = "ERROR" |
308 | +WARNING = "WARNING" |
309 | +INFO = "INFO" |
310 | +DEBUG = "DEBUG" |
311 | +MARKER = object() |
312 | + |
313 | +cache = {} |
314 | + |
315 | + |
316 | +def cached(func): |
317 | + """Cache return values for multiple executions of func + args |
318 | + |
319 | + For example: |
320 | + |
321 | + @cached |
322 | + def unit_get(attribute): |
323 | + pass |
324 | + |
325 | + unit_get('test') |
326 | + |
327 | + will cache the result of unit_get + 'test' for future calls. |
328 | + """ |
329 | + def wrapper(*args, **kwargs): |
330 | + global cache |
331 | + key = str((func, args, kwargs)) |
332 | + try: |
333 | + return cache[key] |
334 | + except KeyError: |
335 | + res = func(*args, **kwargs) |
336 | + cache[key] = res |
337 | + return res |
338 | + return wrapper |
339 | + |
340 | + |
341 | +def flush(key): |
342 | + """Flushes any entries from function cache where the |
343 | + key is found in the function+args """ |
344 | + flush_list = [] |
345 | + for item in cache: |
346 | + if key in item: |
347 | + flush_list.append(item) |
348 | + for item in flush_list: |
349 | + del cache[item] |
350 | + |
351 | + |
352 | +def log(message, level=None): |
353 | + """Write a message to the juju log""" |
354 | + command = ['juju-log'] |
355 | + if level: |
356 | + command += ['-l', level] |
357 | + command += [message] |
358 | + subprocess.call(command) |
359 | + |
360 | + |
361 | +class Serializable(UserDict.IterableUserDict): |
362 | + """Wrapper, an object that can be serialized to yaml or json""" |
363 | + |
364 | + def __init__(self, obj): |
365 | + # wrap the object |
366 | + UserDict.IterableUserDict.__init__(self) |
367 | + self.data = obj |
368 | + |
369 | + def __getattr__(self, attr): |
370 | + # See if this object has attribute. |
371 | + if attr in ("json", "yaml", "data"): |
372 | + return self.__dict__[attr] |
373 | + # Check for attribute in wrapped object. |
374 | + got = getattr(self.data, attr, MARKER) |
375 | + if got is not MARKER: |
376 | + return got |
377 | + # Proxy to the wrapped object via dict interface. |
378 | + try: |
379 | + return self.data[attr] |
380 | + except KeyError: |
381 | + raise AttributeError(attr) |
382 | + |
383 | + def __getstate__(self): |
384 | + # Pickle as a standard dictionary. |
385 | + return self.data |
386 | + |
387 | + def __setstate__(self, state): |
388 | + # Unpickle into our wrapper. |
389 | + self.data = state |
390 | + |
391 | + def json(self): |
392 | + """Serialize the object to json""" |
393 | + return json.dumps(self.data) |
394 | + |
395 | + def yaml(self): |
396 | + """Serialize the object to yaml""" |
397 | + return yaml.dump(self.data) |
398 | + |
399 | + |
400 | +def execution_environment(): |
401 | + """A convenient bundling of the current execution context""" |
402 | + context = {} |
403 | + context['conf'] = config() |
404 | + if relation_id(): |
405 | + context['reltype'] = relation_type() |
406 | + context['relid'] = relation_id() |
407 | + context['rel'] = relation_get() |
408 | + context['unit'] = local_unit() |
409 | + context['rels'] = relations() |
410 | + context['env'] = os.environ |
411 | + return context |
412 | + |
413 | + |
414 | +def in_relation_hook(): |
415 | + """Determine whether we're running in a relation hook""" |
416 | + return 'JUJU_RELATION' in os.environ |
417 | + |
418 | + |
419 | +def relation_type(): |
420 | + """The scope for the current relation hook""" |
421 | + return os.environ.get('JUJU_RELATION', None) |
422 | + |
423 | + |
424 | +def relation_id(): |
425 | + """The relation ID for the current relation hook""" |
426 | + return os.environ.get('JUJU_RELATION_ID', None) |
427 | + |
428 | + |
429 | +def local_unit(): |
430 | + """Local unit ID""" |
431 | + return os.environ['JUJU_UNIT_NAME'] |
432 | + |
433 | + |
434 | +def remote_unit(): |
435 | + """The remote unit for the current relation hook""" |
436 | + return os.environ['JUJU_REMOTE_UNIT'] |
437 | + |
438 | + |
439 | +def service_name(): |
440 | + """The name service group this unit belongs to""" |
441 | + return local_unit().split('/')[0] |
442 | + |
443 | + |
444 | +@cached |
445 | +def config(scope=None): |
446 | + """Juju charm configuration""" |
447 | + config_cmd_line = ['config-get'] |
448 | + if scope is not None: |
449 | + config_cmd_line.append(scope) |
450 | + config_cmd_line.append('--format=json') |
451 | + try: |
452 | + return json.loads(subprocess.check_output(config_cmd_line)) |
453 | + except ValueError: |
454 | + return None |
455 | + |
456 | + |
457 | +@cached |
458 | +def relation_get(attribute=None, unit=None, rid=None): |
459 | + """Get relation information""" |
460 | + _args = ['relation-get', '--format=json'] |
461 | + if rid: |
462 | + _args.append('-r') |
463 | + _args.append(rid) |
464 | + _args.append(attribute or '-') |
465 | + if unit: |
466 | + _args.append(unit) |
467 | + try: |
468 | + return json.loads(subprocess.check_output(_args)) |
469 | + except ValueError: |
470 | + return None |
471 | + except CalledProcessError, e: |
472 | + if e.returncode == 2: |
473 | + return None |
474 | + raise |
475 | + |
476 | + |
477 | +def relation_set(relation_id=None, relation_settings={}, **kwargs): |
478 | + """Set relation information for the current unit""" |
479 | + relation_cmd_line = ['relation-set'] |
480 | + if relation_id is not None: |
481 | + relation_cmd_line.extend(('-r', relation_id)) |
482 | + for k, v in (relation_settings.items() + kwargs.items()): |
483 | + if v is None: |
484 | + relation_cmd_line.append('{}='.format(k)) |
485 | + else: |
486 | + relation_cmd_line.append('{}={}'.format(k, v)) |
487 | + subprocess.check_call(relation_cmd_line) |
488 | + # Flush cache of any relation-gets for local unit |
489 | + flush(local_unit()) |
490 | + |
491 | + |
492 | +@cached |
493 | +def relation_ids(reltype=None): |
494 | + """A list of relation_ids""" |
495 | + reltype = reltype or relation_type() |
496 | + relid_cmd_line = ['relation-ids', '--format=json'] |
497 | + if reltype is not None: |
498 | + relid_cmd_line.append(reltype) |
499 | + return json.loads(subprocess.check_output(relid_cmd_line)) or [] |
500 | + return [] |
501 | + |
502 | + |
503 | +@cached |
504 | +def related_units(relid=None): |
505 | + """A list of related units""" |
506 | + relid = relid or relation_id() |
507 | + units_cmd_line = ['relation-list', '--format=json'] |
508 | + if relid is not None: |
509 | + units_cmd_line.extend(('-r', relid)) |
510 | + return json.loads(subprocess.check_output(units_cmd_line)) or [] |
511 | + |
512 | + |
513 | +@cached |
514 | +def relation_for_unit(unit=None, rid=None): |
515 | + """Get the json representation of a unit's relation""" |
516 | + unit = unit or remote_unit() |
517 | + relation = relation_get(unit=unit, rid=rid) |
518 | + for key in relation: |
519 | + if key.endswith('-list'): |
520 | + relation[key] = relation[key].split() |
521 | + relation['__unit__'] = unit |
522 | + return relation |
523 | + |
524 | + |
525 | +@cached |
526 | +def relations_for_id(relid=None): |
527 | + """Get relations of a specific relation ID""" |
528 | + relation_data = [] |
529 | + relid = relid or relation_ids() |
530 | + for unit in related_units(relid): |
531 | + unit_data = relation_for_unit(unit, relid) |
532 | + unit_data['__relid__'] = relid |
533 | + relation_data.append(unit_data) |
534 | + return relation_data |
535 | + |
536 | + |
537 | +@cached |
538 | +def relations_of_type(reltype=None): |
539 | + """Get relations of a specific type""" |
540 | + relation_data = [] |
541 | + reltype = reltype or relation_type() |
542 | + for relid in relation_ids(reltype): |
543 | + for relation in relations_for_id(relid): |
544 | + relation['__relid__'] = relid |
545 | + relation_data.append(relation) |
546 | + return relation_data |
547 | + |
548 | + |
549 | +@cached |
550 | +def relation_types(): |
551 | + """Get a list of relation types supported by this charm""" |
552 | + charmdir = os.environ.get('CHARM_DIR', '') |
553 | + mdf = open(os.path.join(charmdir, 'metadata.yaml')) |
554 | + md = yaml.safe_load(mdf) |
555 | + rel_types = [] |
556 | + for key in ('provides', 'requires', 'peers'): |
557 | + section = md.get(key) |
558 | + if section: |
559 | + rel_types.extend(section.keys()) |
560 | + mdf.close() |
561 | + return rel_types |
562 | + |
563 | + |
564 | +@cached |
565 | +def relations(): |
566 | + """Get a nested dictionary of relation data for all related units""" |
567 | + rels = {} |
568 | + for reltype in relation_types(): |
569 | + relids = {} |
570 | + for relid in relation_ids(reltype): |
571 | + units = {local_unit(): relation_get(unit=local_unit(), rid=relid)} |
572 | + for unit in related_units(relid): |
573 | + reldata = relation_get(unit=unit, rid=relid) |
574 | + units[unit] = reldata |
575 | + relids[relid] = units |
576 | + rels[reltype] = relids |
577 | + return rels |
578 | + |
579 | + |
580 | +@cached |
581 | +def is_relation_made(relation, keys='private-address'): |
582 | + ''' |
583 | + Determine whether a relation is established by checking for |
584 | + presence of key(s). If a list of keys is provided, they |
585 | + must all be present for the relation to be identified as made |
586 | + ''' |
587 | + if isinstance(keys, str): |
588 | + keys = [keys] |
589 | + for r_id in relation_ids(relation): |
590 | + for unit in related_units(r_id): |
591 | + context = {} |
592 | + for k in keys: |
593 | + context[k] = relation_get(k, rid=r_id, |
594 | + unit=unit) |
595 | + if None not in context.values(): |
596 | + return True |
597 | + return False |
598 | + |
599 | + |
600 | +def open_port(port, protocol="TCP"): |
601 | + """Open a service network port""" |
602 | + _args = ['open-port'] |
603 | + _args.append('{}/{}'.format(port, protocol)) |
604 | + subprocess.check_call(_args) |
605 | + |
606 | + |
607 | +def close_port(port, protocol="TCP"): |
608 | + """Close a service network port""" |
609 | + _args = ['close-port'] |
610 | + _args.append('{}/{}'.format(port, protocol)) |
611 | + subprocess.check_call(_args) |
612 | + |
613 | + |
614 | +@cached |
615 | +def unit_get(attribute): |
616 | + """Get the unit ID for the remote unit""" |
617 | + _args = ['unit-get', '--format=json', attribute] |
618 | + try: |
619 | + return json.loads(subprocess.check_output(_args)) |
620 | + except ValueError: |
621 | + return None |
622 | + |
623 | + |
624 | +def unit_private_ip(): |
625 | + """Get this unit's private IP address""" |
626 | + return unit_get('private-address') |
627 | + |
628 | + |
629 | +class UnregisteredHookError(Exception): |
630 | + """Raised when an undefined hook is called""" |
631 | + pass |
632 | + |
633 | + |
634 | +class Hooks(object): |
635 | + """A convenient handler for hook functions. |
636 | + |
637 | + Example: |
638 | + hooks = Hooks() |
639 | + |
640 | + # register a hook, taking its name from the function name |
641 | + @hooks.hook() |
642 | + def install(): |
643 | + ... |
644 | + |
645 | + # register a hook, providing a custom hook name |
646 | + @hooks.hook("config-changed") |
647 | + def config_changed(): |
648 | + ... |
649 | + |
650 | + if __name__ == "__main__": |
651 | + # execute a hook based on the name the program is called by |
652 | + hooks.execute(sys.argv) |
653 | + """ |
654 | + |
655 | + def __init__(self): |
656 | + super(Hooks, self).__init__() |
657 | + self._hooks = {} |
658 | + |
659 | + def register(self, name, function): |
660 | + """Register a hook""" |
661 | + self._hooks[name] = function |
662 | + |
663 | + def execute(self, args): |
664 | + """Execute a registered hook based on args[0]""" |
665 | + hook_name = os.path.basename(args[0]) |
666 | + if hook_name in self._hooks: |
667 | + self._hooks[hook_name]() |
668 | + else: |
669 | + raise UnregisteredHookError(hook_name) |
670 | + |
671 | + def hook(self, *hook_names): |
672 | + """Decorator, registering them as hooks""" |
673 | + def wrapper(decorated): |
674 | + for hook_name in hook_names: |
675 | + self.register(hook_name, decorated) |
676 | + else: |
677 | + self.register(decorated.__name__, decorated) |
678 | + if '_' in decorated.__name__: |
679 | + self.register( |
680 | + decorated.__name__.replace('_', '-'), decorated) |
681 | + return decorated |
682 | + return wrapper |
683 | + |
684 | + |
685 | +def charm_dir(): |
686 | + """Return the root directory of the current charm""" |
687 | + return os.environ.get('CHARM_DIR') |
688 | |
689 | === added file 'hooks/charmhelpers/core/host.py' |
690 | --- hooks/charmhelpers/core/host.py 1970-01-01 00:00:00 +0000 |
691 | +++ hooks/charmhelpers/core/host.py 2014-01-24 17:20:41 +0000 |
692 | @@ -0,0 +1,291 @@ |
693 | +"""Tools for working with the host system""" |
694 | +# Copyright 2012 Canonical Ltd. |
695 | +# |
696 | +# Authors: |
697 | +# Nick Moffitt <nick.moffitt@canonical.com> |
698 | +# Matthew Wedgwood <matthew.wedgwood@canonical.com> |
699 | + |
700 | +import os |
701 | +import pwd |
702 | +import grp |
703 | +import random |
704 | +import string |
705 | +import subprocess |
706 | +import hashlib |
707 | + |
708 | +from collections import OrderedDict |
709 | + |
710 | +from hookenv import log |
711 | + |
712 | + |
713 | +def service_start(service_name): |
714 | + """Start a system service""" |
715 | + return service('start', service_name) |
716 | + |
717 | + |
718 | +def service_stop(service_name): |
719 | + """Stop a system service""" |
720 | + return service('stop', service_name) |
721 | + |
722 | + |
723 | +def service_restart(service_name): |
724 | + """Restart a system service""" |
725 | + return service('restart', service_name) |
726 | + |
727 | + |
728 | +def service_reload(service_name, restart_on_failure=False): |
729 | + """Reload a system service, optionally falling back to restart if reload fails""" |
730 | + service_result = service('reload', service_name) |
731 | + if not service_result and restart_on_failure: |
732 | + service_result = service('restart', service_name) |
733 | + return service_result |
734 | + |
735 | + |
736 | +def service(action, service_name): |
737 | + """Control a system service""" |
738 | + cmd = ['service', service_name, action] |
739 | + return subprocess.call(cmd) == 0 |
740 | + |
741 | + |
742 | +def service_running(service): |
743 | + """Determine whether a system service is running""" |
744 | + try: |
745 | + output = subprocess.check_output(['service', service, 'status']) |
746 | + except subprocess.CalledProcessError: |
747 | + return False |
748 | + else: |
749 | + if ("start/running" in output or "is running" in output): |
750 | + return True |
751 | + else: |
752 | + return False |
753 | + |
754 | + |
755 | +def adduser(username, password=None, shell='/bin/bash', system_user=False): |
756 | + """Add a user to the system""" |
757 | + try: |
758 | + user_info = pwd.getpwnam(username) |
759 | + log('user {0} already exists!'.format(username)) |
760 | + except KeyError: |
761 | + log('creating user {0}'.format(username)) |
762 | + cmd = ['useradd'] |
763 | + if system_user or password is None: |
764 | + cmd.append('--system') |
765 | + else: |
766 | + cmd.extend([ |
767 | + '--create-home', |
768 | + '--shell', shell, |
769 | + '--password', password, |
770 | + ]) |
771 | + cmd.append(username) |
772 | + subprocess.check_call(cmd) |
773 | + user_info = pwd.getpwnam(username) |
774 | + return user_info |
775 | + |
776 | + |
777 | +def add_user_to_group(username, group): |
778 | + """Add a user to a group""" |
779 | + cmd = [ |
780 | + 'gpasswd', '-a', |
781 | + username, |
782 | + group |
783 | + ] |
784 | + log("Adding user {} to group {}".format(username, group)) |
785 | + subprocess.check_call(cmd) |
786 | + |
787 | + |
788 | +def rsync(from_path, to_path, flags='-r', options=None): |
789 | + """Replicate the contents of a path""" |
790 | + options = options or ['--delete', '--executability'] |
791 | + cmd = ['/usr/bin/rsync', flags] |
792 | + cmd.extend(options) |
793 | + cmd.append(from_path) |
794 | + cmd.append(to_path) |
795 | + log(" ".join(cmd)) |
796 | + return subprocess.check_output(cmd).strip() |
797 | + |
798 | + |
799 | +def symlink(source, destination): |
800 | + """Create a symbolic link""" |
801 | + log("Symlinking {} as {}".format(source, destination)) |
802 | + cmd = [ |
803 | + 'ln', |
804 | + '-sf', |
805 | + source, |
806 | + destination, |
807 | + ] |
808 | + subprocess.check_call(cmd) |
809 | + |
810 | + |
811 | +def mkdir(path, owner='root', group='root', perms=0555, force=False): |
812 | + """Create a directory""" |
813 | + log("Making dir {} {}:{} {:o}".format(path, owner, group, |
814 | + perms)) |
815 | + uid = pwd.getpwnam(owner).pw_uid |
816 | + gid = grp.getgrnam(group).gr_gid |
817 | + realpath = os.path.abspath(path) |
818 | + if os.path.exists(realpath): |
819 | + if force and not os.path.isdir(realpath): |
820 | + log("Removing non-directory file {} prior to mkdir()".format(path)) |
821 | + os.unlink(realpath) |
822 | + else: |
823 | + os.makedirs(realpath, perms) |
824 | + os.chown(realpath, uid, gid) |
825 | + |
826 | + |
827 | +def write_file(path, content, owner='root', group='root', perms=0444): |
828 | + """Create or overwrite a file with the contents of a string""" |
829 | + log("Writing file {} {}:{} {:o}".format(path, owner, group, perms)) |
830 | + uid = pwd.getpwnam(owner).pw_uid |
831 | + gid = grp.getgrnam(group).gr_gid |
832 | + with open(path, 'w') as target: |
833 | + os.fchown(target.fileno(), uid, gid) |
834 | + os.fchmod(target.fileno(), perms) |
835 | + target.write(content) |
836 | + |
837 | + |
838 | +def mount(device, mountpoint, options=None, persist=False): |
839 | + """Mount a filesystem at a particular mountpoint""" |
840 | + cmd_args = ['mount'] |
841 | + if options is not None: |
842 | + cmd_args.extend(['-o', options]) |
843 | + cmd_args.extend([device, mountpoint]) |
844 | + try: |
845 | + subprocess.check_output(cmd_args) |
846 | + except subprocess.CalledProcessError, e: |
847 | + log('Error mounting {} at {}\n{}'.format(device, mountpoint, e.output)) |
848 | + return False |
849 | + if persist: |
850 | + # TODO: update fstab |
851 | + pass |
852 | + return True |
853 | + |
854 | + |
855 | +def umount(mountpoint, persist=False): |
856 | + """Unmount a filesystem""" |
857 | + cmd_args = ['umount', mountpoint] |
858 | + try: |
859 | + subprocess.check_output(cmd_args) |
860 | + except subprocess.CalledProcessError, e: |
861 | + log('Error unmounting {}\n{}'.format(mountpoint, e.output)) |
862 | + return False |
863 | + if persist: |
864 | + # TODO: update fstab |
865 | + pass |
866 | + return True |
867 | + |
868 | + |
869 | +def mounts(): |
870 | + """Get a list of all mounted volumes as [[mountpoint,device],[...]]""" |
871 | + with open('/proc/mounts') as f: |
872 | + # [['/mount/point','/dev/path'],[...]] |
873 | + system_mounts = [m[1::-1] for m in [l.strip().split() |
874 | + for l in f.readlines()]] |
875 | + return system_mounts |
876 | + |
877 | + |
878 | +def file_hash(path): |
879 | + """Generate a md5 hash of the contents of 'path' or None if not found """ |
880 | + if os.path.exists(path): |
881 | + h = hashlib.md5() |
882 | + with open(path, 'r') as source: |
883 | + h.update(source.read()) # IGNORE:E1101 - it does have update |
884 | + return h.hexdigest() |
885 | + else: |
886 | + return None |
887 | + |
888 | + |
889 | +def restart_on_change(restart_map): |
890 | + """Restart services based on configuration files changing |
891 | + |
892 | + This function is used as a decorator, for example |
893 | + |
894 | + @restart_on_change({ |
895 | + '/etc/ceph/ceph.conf': [ 'cinder-api', 'cinder-volume' ] |
896 | + }) |
897 | + def ceph_client_changed(): |
898 | + ... |
899 | + |
900 | + In this example, the cinder-api and cinder-volume services |
901 | + would be restarted if /etc/ceph/ceph.conf is changed by the |
902 | + ceph_client_changed function. |
903 | + """ |
904 | + def wrap(f): |
905 | + def wrapped_f(*args): |
906 | + checksums = {} |
907 | + for path in restart_map: |
908 | + checksums[path] = file_hash(path) |
909 | + f(*args) |
910 | + restarts = [] |
911 | + for path in restart_map: |
912 | + if checksums[path] != file_hash(path): |
913 | + restarts += restart_map[path] |
914 | + for service_name in list(OrderedDict.fromkeys(restarts)): |
915 | + service('restart', service_name) |
916 | + return wrapped_f |
917 | + return wrap |
918 | + |
919 | + |
def lsb_release():
    """Return the key/value pairs from /etc/lsb-release as a dict."""
    d = {}
    with open('/etc/lsb-release', 'r') as lsb:
        for line in lsb:
            # Split on the first '=' only: values such as
            # DISTRIB_DESCRIPTION may themselves contain '='.
            k, v = line.split('=', 1)
            d[k.strip()] = v.strip()
    return d
928 | + |
929 | + |
def pwgen(length=None):
    """Generate a random password.

    :param length: desired length; defaults to a random length in [35, 45).
    :returns: password string of alphanumeric characters, excluding
        ambiguous glyphs and vowels.
    """
    if length is None:
        length = random.choice(range(35, 45))
    # ascii_letters (not the Python-2-only string.letters) works on both
    # Python 2 and 3.  Ambiguous characters and vowels are excluded.
    alphanumeric_chars = [
        l for l in (string.ascii_letters + string.digits)
        if l not in 'l0QD1vAEIOUaeiou']
    # Passwords should come from the OS entropy pool, not the
    # deterministic (seedable) default PRNG.
    rnd = random.SystemRandom()
    random_chars = [rnd.choice(alphanumeric_chars) for _ in range(length)]
    return ''.join(random_chars)
940 | + |
941 | + |
def list_nics(nic_type):
    """Return the names of network interfaces of the given type(s)."""
    if isinstance(nic_type, basestring):
        int_types = [nic_type]
    else:
        int_types = nic_type
    interfaces = []
    for int_type in int_types:
        cmd = ['ip', 'addr', 'show', 'label', int_type + '*']
        for line in subprocess.check_output(cmd).split('\n'):
            if not line:
                continue
            name = line.split()[1]
            if name.startswith(int_type):
                # Strip the trailing ':' that `ip addr` appends.
                interfaces.append(name.replace(":", ""))
    return interfaces
957 | + |
958 | + |
def set_nic_mtu(nic, mtu):
    """Set the MTU of network interface *nic* via iproute2."""
    subprocess.check_call(['ip', 'link', 'set', nic, 'mtu', mtu])
963 | + |
964 | + |
def get_nic_mtu(nic):
    """Return the MTU of *nic* as reported by `ip addr show` ('' if absent)."""
    ip_output = subprocess.check_output(['ip', 'addr', 'show', nic])
    mtu = ""
    for words in (line.split() for line in ip_output.split('\n')):
        if 'mtu' in words:
            # MTU value immediately follows the 'mtu' token.
            mtu = words[words.index("mtu") + 1]
    return mtu
974 | + |
975 | + |
def get_nic_hwaddr(nic):
    """Return the MAC address of *nic* ('' when none is reported)."""
    ip_output = subprocess.check_output(['ip', '-o', '-0', 'addr', 'show', nic])
    words = ip_output.split()
    if 'link/ether' in words:
        # The MAC immediately follows the 'link/ether' token.
        return words[words.index('link/ether') + 1]
    return ""
984 | |
985 | === added directory 'hooks/charmhelpers/fetch' |
986 | === added file 'hooks/charmhelpers/fetch/__init__.py' |
987 | --- hooks/charmhelpers/fetch/__init__.py 1970-01-01 00:00:00 +0000 |
988 | +++ hooks/charmhelpers/fetch/__init__.py 2014-01-24 17:20:41 +0000 |
989 | @@ -0,0 +1,279 @@ |
990 | +import importlib |
991 | +from yaml import safe_load |
992 | +from charmhelpers.core.host import ( |
993 | + lsb_release |
994 | +) |
995 | +from urlparse import ( |
996 | + urlparse, |
997 | + urlunparse, |
998 | +) |
999 | +import subprocess |
1000 | +from charmhelpers.core.hookenv import ( |
1001 | + config, |
1002 | + log, |
1003 | +) |
1004 | +import apt_pkg |
1005 | +import os |
1006 | + |
1007 | +CLOUD_ARCHIVE = """# Ubuntu Cloud Archive |
1008 | +deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main |
1009 | +""" |
1010 | +PROPOSED_POCKET = """# Proposed |
1011 | +deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted |
1012 | +""" |
1013 | +CLOUD_ARCHIVE_POCKETS = { |
1014 | + # Folsom |
1015 | + 'folsom': 'precise-updates/folsom', |
1016 | + 'precise-folsom': 'precise-updates/folsom', |
1017 | + 'precise-folsom/updates': 'precise-updates/folsom', |
1018 | + 'precise-updates/folsom': 'precise-updates/folsom', |
1019 | + 'folsom/proposed': 'precise-proposed/folsom', |
1020 | + 'precise-folsom/proposed': 'precise-proposed/folsom', |
1021 | + 'precise-proposed/folsom': 'precise-proposed/folsom', |
1022 | + # Grizzly |
1023 | + 'grizzly': 'precise-updates/grizzly', |
1024 | + 'precise-grizzly': 'precise-updates/grizzly', |
1025 | + 'precise-grizzly/updates': 'precise-updates/grizzly', |
1026 | + 'precise-updates/grizzly': 'precise-updates/grizzly', |
1027 | + 'grizzly/proposed': 'precise-proposed/grizzly', |
1028 | + 'precise-grizzly/proposed': 'precise-proposed/grizzly', |
1029 | + 'precise-proposed/grizzly': 'precise-proposed/grizzly', |
1030 | + # Havana |
1031 | + 'havana': 'precise-updates/havana', |
1032 | + 'precise-havana': 'precise-updates/havana', |
1033 | + 'precise-havana/updates': 'precise-updates/havana', |
1034 | + 'precise-updates/havana': 'precise-updates/havana', |
1035 | + 'havana/proposed': 'precise-proposed/havana', |
1036 | + 'precise-havana/proposed': 'precise-proposed/havana', |
1037 | + 'precise-proposed/havana': 'precise-proposed/havana', |
1038 | + # Icehouse |
1039 | + 'icehouse': 'precise-updates/icehouse', |
1040 | + 'precise-icehouse': 'precise-updates/icehouse', |
1041 | + 'precise-icehouse/updates': 'precise-updates/icehouse', |
1042 | + 'precise-updates/icehouse': 'precise-updates/icehouse', |
1043 | + 'icehouse/proposed': 'precise-proposed/icehouse', |
1044 | + 'precise-icehouse/proposed': 'precise-proposed/icehouse', |
1045 | + 'precise-proposed/icehouse': 'precise-proposed/icehouse', |
1046 | +} |
1047 | + |
1048 | + |
def filter_installed_packages(packages):
    """Return the subset of *packages* that still require installation."""
    apt_pkg.init()
    cache = apt_pkg.Cache()
    missing = []
    for package in packages:
        try:
            entry = cache[package]
        except KeyError:
            # Unknown to apt: report it so the caller still attempts (and
            # fails loudly on) the install.
            log('Package {} has no installation candidate.'.format(package),
                level='WARNING')
            missing.append(package)
        else:
            if not entry.current_ver:
                missing.append(package)
    return missing
1063 | + |
1064 | + |
def apt_install(packages, options=None, fatal=False):
    """Install one or more packages with apt-get.

    :param packages: a package name or an iterable of package names.
    :param options: extra apt-get options; defaults to keeping old conffiles.
    :param fatal: when True, raise CalledProcessError on apt-get failure.
    """
    if options is None:
        options = ['--option=Dpkg::Options::=--force-confold']
    cmd = ['apt-get', '--assume-yes']
    cmd.extend(options)
    cmd.append('install')
    if isinstance(packages, basestring):
        cmd.append(packages)
    else:
        cmd.extend(packages)
    log("Installing {} with options: {}".format(packages,
                                                options))
    env = os.environ.copy()
    # Suppress debconf prompts unless the caller configured otherwise.
    if 'DEBIAN_FRONTEND' not in env:
        env['DEBIAN_FRONTEND'] = 'noninteractive'
    runner = subprocess.check_call if fatal else subprocess.call
    runner(cmd, env=env)
1087 | + |
1088 | + |
def apt_update(fatal=False):
    """Refresh the local apt package cache (raise on failure when fatal)."""
    runner = subprocess.check_call if fatal else subprocess.call
    runner(['apt-get', 'update'])
1096 | + |
1097 | + |
def apt_purge(packages, fatal=False):
    """Purge one or more packages (raise on apt-get failure when fatal)."""
    cmd = ['apt-get', '--assume-yes', 'purge']
    if isinstance(packages, basestring):
        cmd.append(packages)
    else:
        cmd.extend(packages)
    log("Purging {}".format(packages))
    runner = subprocess.check_call if fatal else subprocess.call
    runner(cmd)
1110 | + |
1111 | + |
def apt_hold(packages, fatal=False):
    """Mark one or more packages as held back from upgrades."""
    cmd = ['apt-mark', 'hold']
    if isinstance(packages, basestring):
        cmd.append(packages)
    else:
        cmd.extend(packages)
    log("Holding {}".format(packages))
    runner = subprocess.check_call if fatal else subprocess.call
    runner(cmd)
1124 | + |
1125 | + |
def add_source(source, key=None):
    """Configure an apt source and optionally import its signing key.

    Supports ppa:/http:/deb /cloud-archive: (via add-apt-repository),
    cloud:<pocket> (Ubuntu Cloud Archive) and 'proposed'.
    """
    direct_prefixes = ('ppa:', 'http:', 'deb ', 'cloud-archive:')
    if source.startswith(direct_prefixes):
        subprocess.check_call(['add-apt-repository', '--yes', source])
    elif source.startswith('cloud:'):
        # The UCA keyring must be present before its sources are trusted.
        apt_install(filter_installed_packages(['ubuntu-cloud-keyring']),
                    fatal=True)
        pocket = source.split(':')[-1]
        if pocket not in CLOUD_ARCHIVE_POCKETS:
            raise SourceConfigError(
                'Unsupported cloud: source option %s' %
                pocket)
        actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket]
        with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt:
            apt.write(CLOUD_ARCHIVE.format(actual_pocket))
    elif source == 'proposed':
        release = lsb_release()['DISTRIB_CODENAME']
        with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt:
            apt.write(PROPOSED_POCKET.format(release))
    if key:
        subprocess.check_call(['apt-key', 'import', key])
1149 | + |
1150 | + |
class SourceConfigError(Exception):
    """Raised when the charm's install source configuration is invalid."""
    pass
1153 | + |
1154 | + |
def configure_sources(update=False,
                      sources_var='install_sources',
                      keys_var='install_keys'):
    """
    Configure multiple sources from charm configuration

    Example config:
        install_sources:
          - "ppa:foo"
          - "http://example.com/repo precise main"
        install_keys:
          - null
          - "a1b2c3d4"

    Note that 'null' (a.k.a. None) should not be quoted.

    :param update: when True, run apt-get update after adding the sources.
    :raises SourceConfigError: if sources and keys are lists of
        different lengths.
    """
    sources = safe_load(config(sources_var))
    keys = config(keys_var)
    if keys is not None:
        keys = safe_load(keys)
    if isinstance(sources, basestring) and (
            keys is None or isinstance(keys, basestring)):
        add_source(sources, keys)
    else:
        if not isinstance(sources, list):
            sources = [sources]
        # Bug fix: a list of sources with no install_keys configured used
        # to crash on len(None); treat that as "no key for any source".
        if keys is None:
            keys = [None] * len(sources)
        elif not isinstance(keys, list):
            keys = [keys]
        if not len(sources) == len(keys):
            msg = 'Install sources and keys lists are different lengths'
            raise SourceConfigError(msg)
        for source, key in zip(sources, keys):
            add_source(source, key)
    if update:
        apt_update(fatal=True)
1186 | + |
1187 | +# The order of this list is very important. Handlers should be listed in from |
1188 | +# least- to most-specific URL matching. |
1189 | +FETCH_HANDLERS = ( |
1190 | + 'charmhelpers.fetch.archiveurl.ArchiveUrlFetchHandler', |
1191 | + 'charmhelpers.fetch.bzrurl.BzrUrlFetchHandler', |
1192 | +) |
1193 | + |
1194 | + |
class UnhandledSource(Exception):
    """Raised when no fetch handler can process a given source URL."""
    pass
1197 | + |
1198 | + |
def install_remote(source):
    """
    Install a file tree from a remote source

    The specified source should be a url of the form:
        scheme://[host]/path[#[option=value][&...]]

    Schemes supported are based on this modules submodules
    Options supported are submodule-specific"""
    installed_to = None
    for handler in plugins():
        # We ONLY compare against True because can_handle may return a
        # string explaining why it can't handle a given source.
        if handler.can_handle(source) is not True:
            continue
        try:
            installed_to = handler.install(source)
        except UnhandledSource:
            pass
    if not installed_to:
        raise UnhandledSource("No handler found for source {}".format(source))
    return installed_to
1220 | + |
1221 | + |
def install_from_config(config_var_name):
    """Fetch and install the source named by the given config option."""
    return install_remote(config()[config_var_name])
1226 | + |
1227 | + |
class BaseFetchHandler(object):

    """Common interface implemented by fetch plugin handlers."""

    def can_handle(self, source):
        """Return True when this handler supports *source*; otherwise a
        string describing why it does not."""
        return "Wrong source type"

    def install(self, source):
        """Download and unpack *source*, returning the resulting path.

        Subclasses override this; the base implementation always raises
        UnhandledSource.
        """
        raise UnhandledSource("Wrong source type {}".format(source))

    def parse_url(self, url):
        """Return the urlparse result for *url*."""
        return urlparse(url)

    def base_url(self, url):
        """Return *url* stripped of its query string and fragment."""
        pieces = list(self.parse_url(url))
        # Blank out query and fragment (indices 4+) but keep
        # scheme/netloc/path/params intact.
        pieces[4:] = [''] * len(pieces[4:])
        return urlunparse(pieces)
1250 | + |
1251 | + |
def plugins(fetch_handlers=None):
    """Instantiate one handler per dotted class path in *fetch_handlers*.

    Defaults to FETCH_HANDLERS.  Handlers whose module or class cannot
    be imported are logged and skipped, so optional plugins may be
    omitted from an installation.
    """
    handler_names = fetch_handlers or FETCH_HANDLERS
    plugin_list = []
    for name in handler_names:
        module_name, class_name = name.rsplit('.', 1)
        try:
            module = importlib.import_module(module_name)
            plugin_list.append(getattr(module, class_name)())
        except (ImportError, AttributeError):
            log("FetchHandler {} not found, skipping plugin".format(
                name))
    return plugin_list
1269 | |
1270 | === added file 'hooks/charmhelpers/fetch/archiveurl.py' |
1271 | --- hooks/charmhelpers/fetch/archiveurl.py 1970-01-01 00:00:00 +0000 |
1272 | +++ hooks/charmhelpers/fetch/archiveurl.py 2014-01-24 17:20:41 +0000 |
1273 | @@ -0,0 +1,48 @@ |
1274 | +import os |
1275 | +import urllib2 |
1276 | +from charmhelpers.fetch import ( |
1277 | + BaseFetchHandler, |
1278 | + UnhandledSource |
1279 | +) |
1280 | +from charmhelpers.payload.archive import ( |
1281 | + get_archive_handler, |
1282 | + extract, |
1283 | +) |
1284 | +from charmhelpers.core.host import mkdir |
1285 | + |
1286 | + |
class ArchiveUrlFetchHandler(BaseFetchHandler):
    """Handler for archives via generic URLs"""

    def can_handle(self, source):
        """Accept http/https/ftp/file URLs that point at a recognised
        archive format; decline anything else."""
        url_parts = self.parse_url(source)
        if url_parts.scheme not in ('http', 'https', 'ftp', 'file'):
            return "Wrong source type"
        if get_archive_handler(self.base_url(source)):
            return True
        return False

    def download(self, source, dest):
        """Fetch *source* into the local file *dest*.

        Propagates all exceptions (URLError, OSError, ...); a partially
        written *dest* is removed on failure.
        """
        response = urllib2.urlopen(source)
        try:
            # Bug fix: archives are binary data, so write with 'wb' --
            # text mode would corrupt them on platforms that translate
            # line endings.
            with open(dest, 'wb') as dest_file:
                dest_file.write(response.read())
        except Exception as e:
            if os.path.isfile(dest):
                os.unlink(dest)
            raise e

    def install(self, source):
        """Download *source* into $CHARM_DIR/fetched and extract it."""
        url_parts = self.parse_url(source)
        dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched')
        if not os.path.exists(dest_dir):
            # 0o755 (not the Python-2-only literal 0755) keeps the module
            # importable under Python 3.
            mkdir(dest_dir, perms=0o755)
        dld_file = os.path.join(dest_dir, os.path.basename(url_parts.path))
        try:
            self.download(source, dld_file)
        except urllib2.URLError as e:
            raise UnhandledSource(e.reason)
        except OSError as e:
            raise UnhandledSource(e.strerror)
        return extract(dld_file)
1322 | |
1323 | === added file 'hooks/charmhelpers/fetch/bzrurl.py' |
1324 | --- hooks/charmhelpers/fetch/bzrurl.py 1970-01-01 00:00:00 +0000 |
1325 | +++ hooks/charmhelpers/fetch/bzrurl.py 2014-01-24 17:20:41 +0000 |
1326 | @@ -0,0 +1,49 @@ |
1327 | +import os |
1328 | +from charmhelpers.fetch import ( |
1329 | + BaseFetchHandler, |
1330 | + UnhandledSource |
1331 | +) |
1332 | +from charmhelpers.core.host import mkdir |
1333 | + |
1334 | +try: |
1335 | + from bzrlib.branch import Branch |
1336 | +except ImportError: |
1337 | + from charmhelpers.fetch import apt_install |
1338 | + apt_install("python-bzrlib") |
1339 | + from bzrlib.branch import Branch |
1340 | + |
1341 | + |
class BzrUrlFetchHandler(BaseFetchHandler):
    """Handler for bazaar branches via generic and lp URLs"""

    def can_handle(self, source):
        """Only bzr+ssh:// and lp: URLs are supported."""
        return self.parse_url(source).scheme in ('bzr+ssh', 'lp')

    def branch(self, source, dest):
        """Branch *source* into the directory *dest*.

        :raises UnhandledSource: if *source* is not a supported URL.
        """
        if not self.can_handle(source):
            raise UnhandledSource("Cannot handle {}".format(source))
        # If we use lp:branchname scheme we need to load plugins
        if self.parse_url(source).scheme == "lp":
            from bzrlib.plugin import load_plugins
            load_plugins()
        # Bug fix: the original wrapped these calls in a no-op
        # ``except Exception as e: raise e`` which only served to
        # truncate the traceback; let errors propagate untouched.
        remote_branch = Branch.open(source)
        remote_branch.bzrdir.sprout(dest).open_branch()

    def install(self, source):
        """Branch *source* under $CHARM_DIR/fetched/<branch-name>."""
        url_parts = self.parse_url(source)
        branch_name = url_parts.path.strip("/").split("/")[-1]
        dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched",
                                branch_name)
        if not os.path.exists(dest_dir):
            # 0o755: Python-3-compatible octal literal.
            mkdir(dest_dir, perms=0o755)
        try:
            self.branch(source, dest_dir)
        except OSError as e:
            raise UnhandledSource(e.strerror)
        return dest_dir
1376 | |
1377 | === added directory 'hooks/charmhelpers/payload' |
1378 | === added file 'hooks/charmhelpers/payload/__init__.py' |
1379 | --- hooks/charmhelpers/payload/__init__.py 1970-01-01 00:00:00 +0000 |
1380 | +++ hooks/charmhelpers/payload/__init__.py 2014-01-24 17:20:41 +0000 |
1381 | @@ -0,0 +1,1 @@ |
1382 | +"Tools for working with files injected into a charm just before deployment." |
1383 | |
1384 | === added file 'hooks/charmhelpers/payload/execd.py' |
1385 | --- hooks/charmhelpers/payload/execd.py 1970-01-01 00:00:00 +0000 |
1386 | +++ hooks/charmhelpers/payload/execd.py 2014-01-24 17:20:41 +0000 |
1387 | @@ -0,0 +1,50 @@ |
1388 | +#!/usr/bin/env python |
1389 | + |
1390 | +import os |
1391 | +import sys |
1392 | +import subprocess |
1393 | +from charmhelpers.core import hookenv |
1394 | + |
1395 | + |
def default_execd_dir():
    """Return $CHARM_DIR/exec.d, the conventional exec.d location."""
    return os.path.join(os.environ['CHARM_DIR'], 'exec.d')
1398 | + |
1399 | + |
def execd_module_paths(execd_dir=None):
    """Yield the full path of each module directory under *execd_dir*.

    Falls back to the default exec.d location; yields nothing when the
    directory does not exist.
    """
    base = execd_dir or default_execd_dir()
    if not os.path.exists(base):
        return
    for entry in os.listdir(base):
        candidate = os.path.join(base, entry)
        if os.path.isdir(candidate):
            yield candidate
1412 | + |
1413 | + |
def execd_submodule_paths(command, execd_dir=None):
    """Yield each module's *command* script that exists and is executable."""
    for module_path in execd_module_paths(execd_dir):
        candidate = os.path.join(module_path, command)
        # Only regular, executable files count as runnable submodules.
        if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
            yield candidate
1421 | + |
1422 | + |
def execd_run(command, execd_dir=None, die_on_error=False, stderr=None):
    """Run *command* for every module under *execd_dir* that provides it.

    Failures are logged; when die_on_error is True the process exits
    with the failing script's return code.
    """
    for script in execd_submodule_paths(command, execd_dir):
        try:
            subprocess.check_call(script, shell=True, stderr=stderr)
        except subprocess.CalledProcessError as e:
            hookenv.log("Error ({}) running {}. Output: {}".format(
                e.returncode, e.cmd, e.output))
            if die_on_error:
                sys.exit(e.returncode)
1433 | + |
1434 | + |
def execd_preinstall(execd_dir=None):
    """Execute each module's charm-pre-install script under *execd_dir*."""
    execd_run('charm-pre-install', execd_dir)
1438 | |
1439 | === modified file 'hooks/hooks.py' |
1440 | --- hooks/hooks.py 2013-01-11 09:15:51 +0000 |
1441 | +++ hooks/hooks.py 2014-01-24 17:20:41 +0000 |
1442 | @@ -14,7 +14,33 @@ |
1443 | import os |
1444 | import ceph |
1445 | |
1446 | -import utils |
1447 | +from charmhelpers.core.hookenv import ( |
1448 | + relation_get, |
1449 | + relation_ids, |
1450 | + related_units, |
1451 | + config, |
1452 | + unit_get, |
1453 | + open_port, |
1454 | + relation_set, |
1455 | + log, |
1456 | + Hooks, UnregisteredHookError, |
1457 | +) |
1458 | +from charmhelpers.fetch import ( |
1459 | + apt_update, |
1460 | + apt_install, |
1461 | + add_source, |
1462 | +) |
1463 | +from utils import ( |
1464 | + render_template, |
1465 | + get_host_ip, |
1466 | + enable_pocket, |
1467 | + is_apache_24 |
1468 | +) |
1469 | + |
1470 | +from charmhelpers.payload.execd import execd_preinstall |
1471 | +from socket import gethostname as get_unit_hostname |
1472 | + |
1473 | +hooks = Hooks() |
1474 | |
1475 | |
1476 | def install_www_scripts(): |
1477 | @@ -22,19 +48,20 @@ |
1478 | shutil.copy(x, '/var/www/') |
1479 | |
1480 | |
1481 | -NSS_DIR='/var/lib/ceph/nss' |
1482 | - |
1483 | - |
1484 | +NSS_DIR = '/var/lib/ceph/nss' |
1485 | + |
1486 | + |
1487 | +@hooks.hook('install') |
1488 | def install(): |
1489 | - utils.juju_log('INFO', 'Begin install hook.') |
1490 | - utils.enable_pocket('multiverse') |
1491 | - utils.configure_source() |
1492 | - utils.install('radosgw', |
1493 | - 'libapache2-mod-fastcgi', |
1494 | - 'apache2', |
1495 | - 'ntp') |
1496 | + execd_preinstall() |
1497 | + enable_pocket('multiverse') |
1498 | + add_source(config('source'), config('key')) |
1499 | + apt_update(fatal=True) |
1500 | + apt_install(['radosgw', |
1501 | + 'libapache2-mod-fastcgi', |
1502 | + 'apache2', |
1503 | + 'ntp'], fatal=True) |
1504 | os.makedirs(NSS_DIR) |
1505 | - utils.juju_log('INFO', 'End install hook.') |
1506 | |
1507 | |
1508 | def emit_cephconf(): |
1509 | @@ -45,68 +72,70 @@ |
1510 | cephcontext = { |
1511 | 'auth_supported': get_auth() or 'none', |
1512 | 'mon_hosts': ' '.join(get_mon_hosts()), |
1513 | - 'hostname': utils.get_unit_hostname(), |
1514 | + 'hostname': get_unit_hostname(), |
1515 | 'version': ceph.get_ceph_version('radosgw') |
1516 | - } |
1517 | - |
1518 | - # Check to ensure that correct version of ceph is |
1519 | + } |
1520 | + |
1521 | + # Check to ensure that correct version of ceph is |
1522 | # in use |
1523 | - if ceph.get_ceph_version('radosgw') >= "0.55": |
1524 | + if ceph.get_ceph_version('radosgw') >= "0.55": |
1525 | # Add keystone configuration if found |
1526 | ks_conf = get_keystone_conf() |
1527 | if ks_conf: |
1528 | cephcontext.update(ks_conf) |
1529 | |
1530 | with open('/etc/ceph/ceph.conf', 'w') as cephconf: |
1531 | - cephconf.write(utils.render_template('ceph.conf', cephcontext)) |
1532 | + cephconf.write(render_template('ceph.conf', cephcontext)) |
1533 | |
1534 | |
1535 | def emit_apacheconf(): |
1536 | apachecontext = { |
1537 | - "hostname": utils.unit_get('private-address') |
1538 | - } |
1539 | - with open('/etc/apache2/sites-available/rgw', 'w') as apacheconf: |
1540 | - apacheconf.write(utils.render_template('rgw', apachecontext)) |
1541 | + "hostname": unit_get('private-address') |
1542 | + } |
1543 | + site_conf = '/etc/apache2/sites-available/rgw' |
1544 | + if is_apache_24(): |
1545 | + site_conf = '/etc/apache2/sites-available/rgw.conf' |
1546 | + with open(site_conf, 'w') as apacheconf: |
1547 | + apacheconf.write(render_template('rgw', apachecontext)) |
1548 | |
1549 | |
1550 | def apache_sites(): |
1551 | - utils.juju_log('INFO', 'Begin apache_sites.') |
1552 | - subprocess.check_call(['a2dissite', 'default']) |
1553 | + if is_apache_24(): |
1554 | + subprocess.check_call(['a2dissite', '000-default']) |
1555 | + else: |
1556 | + subprocess.check_call(['a2dissite', 'default']) |
1557 | subprocess.check_call(['a2ensite', 'rgw']) |
1558 | - utils.juju_log('INFO', 'End apache_sites.') |
1559 | |
1560 | |
1561 | def apache_modules(): |
1562 | - utils.juju_log('INFO', 'Begin apache_sites.') |
1563 | subprocess.check_call(['a2enmod', 'fastcgi']) |
1564 | subprocess.check_call(['a2enmod', 'rewrite']) |
1565 | - utils.juju_log('INFO', 'End apache_sites.') |
1566 | |
1567 | |
1568 | def apache_reload(): |
1569 | subprocess.call(['service', 'apache2', 'reload']) |
1570 | |
1571 | |
1572 | +@hooks.hook('upgrade-charm', |
1573 | + 'config-changed') |
1574 | def config_changed(): |
1575 | - utils.juju_log('INFO', 'Begin config-changed hook.') |
1576 | emit_cephconf() |
1577 | emit_apacheconf() |
1578 | install_www_scripts() |
1579 | apache_sites() |
1580 | apache_modules() |
1581 | apache_reload() |
1582 | - utils.juju_log('INFO', 'End config-changed hook.') |
1583 | |
1584 | |
1585 | def get_mon_hosts(): |
1586 | hosts = [] |
1587 | - for relid in utils.relation_ids('mon'): |
1588 | - for unit in utils.relation_list(relid): |
1589 | + for relid in relation_ids('mon'): |
1590 | + for unit in related_units(relid): |
1591 | hosts.append( |
1592 | - '{}:6789'.format(utils.get_host_ip( |
1593 | - utils.relation_get('private-address', |
1594 | - unit, relid))) |
1595 | - ) |
1596 | + '{}:6789'.format(get_host_ip( |
1597 | + relation_get('private-address', |
1598 | + unit, relid))) |
1599 | + ) |
1600 | |
1601 | hosts.sort() |
1602 | return hosts |
1603 | @@ -117,100 +146,90 @@ |
1604 | |
1605 | |
1606 | def get_conf(name): |
1607 | - for relid in utils.relation_ids('mon'): |
1608 | - for unit in utils.relation_list(relid): |
1609 | - conf = utils.relation_get(name, |
1610 | - unit, relid) |
1611 | + for relid in relation_ids('mon'): |
1612 | + for unit in related_units(relid): |
1613 | + conf = relation_get(name, |
1614 | + unit, relid) |
1615 | if conf: |
1616 | return conf |
1617 | return None |
1618 | |
1619 | + |
1620 | def get_keystone_conf(): |
1621 | - for relid in utils.relation_ids('identity-service'): |
1622 | - for unit in utils.relation_list(relid): |
1623 | + for relid in relation_ids('identity-service'): |
1624 | + for unit in related_units(relid): |
1625 | ks_auth = { |
1626 | 'auth_type': 'keystone', |
1627 | 'auth_protocol': 'http', |
1628 | - 'auth_host': utils.relation_get('auth_host', unit, relid), |
1629 | - 'auth_port': utils.relation_get('auth_port', unit, relid), |
1630 | - 'admin_token': utils.relation_get('admin_token', unit, relid), |
1631 | - 'user_roles': utils.config_get('operator-roles'), |
1632 | - 'cache_size': utils.config_get('cache-size'), |
1633 | - 'revocation_check_interval': utils.config_get('revocation-check-interval') |
1634 | + 'auth_host': relation_get('auth_host', unit, relid), |
1635 | + 'auth_port': relation_get('auth_port', unit, relid), |
1636 | + 'admin_token': relation_get('admin_token', unit, relid), |
1637 | + 'user_roles': config('operator-roles'), |
1638 | + 'cache_size': config('cache-size'), |
1639 | + 'revocation_check_interval': |
1640 | + config('revocation-check-interval') |
1641 | } |
1642 | if None not in ks_auth.itervalues(): |
1643 | return ks_auth |
1644 | return None |
1645 | |
1646 | |
1647 | +@hooks.hook('mon-relation-departed', |
1648 | + 'mon-relation-changed') |
1649 | def mon_relation(): |
1650 | - utils.juju_log('INFO', 'Begin mon-relation hook.') |
1651 | emit_cephconf() |
1652 | - key = utils.relation_get('radosgw_key') |
1653 | + key = relation_get('radosgw_key') |
1654 | if key: |
1655 | ceph.import_radosgw_key(key) |
1656 | restart() # TODO figure out a better way todo this |
1657 | - utils.juju_log('INFO', 'End mon-relation hook.') |
1658 | - |
1659 | - |
1660 | + |
1661 | + |
1662 | +@hooks.hook('gateway-relation-joined') |
1663 | def gateway_relation(): |
1664 | - utils.juju_log('INFO', 'Begin gateway-relation hook.') |
1665 | - utils.relation_set(hostname=utils.unit_get('private-address'), |
1666 | - port=80) |
1667 | - utils.juju_log('INFO', 'Begin gateway-relation hook.') |
1668 | - |
1669 | - |
1670 | -def upgrade_charm(): |
1671 | - utils.juju_log('INFO', 'Begin upgrade-charm hook.') |
1672 | - utils.juju_log('INFO', 'End upgrade-charm hook.') |
1673 | + relation_set(hostname=unit_get('private-address'), |
1674 | + port=80) |
1675 | |
1676 | |
1677 | def start(): |
1678 | subprocess.call(['service', 'radosgw', 'start']) |
1679 | - utils.expose(port=80) |
1680 | + open_port(port=80) |
1681 | |
1682 | |
1683 | def stop(): |
1684 | subprocess.call(['service', 'radosgw', 'stop']) |
1685 | - utils.expose(port=80) |
1686 | + open_port(port=80) |
1687 | |
1688 | |
1689 | def restart(): |
1690 | subprocess.call(['service', 'radosgw', 'restart']) |
1691 | - utils.expose(port=80) |
1692 | - |
1693 | - |
1694 | + open_port(port=80) |
1695 | + |
1696 | + |
1697 | +@hooks.hook('identity-service-relation-joined', |
1698 | + 'identity-service-relation-changed') |
1699 | def identity_joined(relid=None): |
1700 | if ceph.get_ceph_version('radosgw') < "0.55": |
1701 | - utils.juju_log('ERROR', |
1702 | - 'Integration with keystone requires ceph >= 0.55') |
1703 | + log('Integration with keystone requires ceph >= 0.55') |
1704 | sys.exit(1) |
1705 | |
1706 | - hostname = utils.unit_get('private-address') |
1707 | + hostname = unit_get('private-address') |
1708 | admin_url = 'http://{}:80/swift'.format(hostname) |
1709 | internal_url = public_url = '{}/v1'.format(admin_url) |
1710 | - utils.relation_set(service='swift', |
1711 | - region=utils.config_get('region'), |
1712 | - public_url=public_url, internal_url=internal_url, |
1713 | - admin_url=admin_url, |
1714 | - requested_roles=utils.config_get('operator-roles'), |
1715 | - rid=relid) |
1716 | + relation_set(service='swift', |
1717 | + region=config('region'), |
1718 | + public_url=public_url, internal_url=internal_url, |
1719 | + admin_url=admin_url, |
1720 | + requested_roles=config('operator-roles'), |
1721 | + rid=relid) |
1722 | |
1723 | |
1724 | def identity_changed(): |
1725 | emit_cephconf() |
1726 | - restart() |
1727 | - |
1728 | - |
1729 | -utils.do_hooks({ |
1730 | - 'install': install, |
1731 | - 'config-changed': config_changed, |
1732 | - 'mon-relation-departed': mon_relation, |
1733 | - 'mon-relation-changed': mon_relation, |
1734 | - 'gateway-relation-joined': gateway_relation, |
1735 | - 'upgrade-charm': config_changed, # same function ATM |
1736 | - 'identity-service-relation-joined': identity_joined, |
1737 | - 'identity-service-relation-changed': identity_changed |
1738 | - }) |
1739 | - |
1740 | -sys.exit(0) |
1741 | + restart() |
1742 | + |
1743 | + |
1744 | +if __name__ == '__main__': |
1745 | + try: |
1746 | + hooks.execute(sys.argv) |
1747 | + except UnregisteredHookError as e: |
1748 | + log('Unknown hook {} - skipping.'.format(e)) |
1749 | |
1750 | === modified file 'hooks/utils.py' |
1751 | --- hooks/utils.py 2013-09-24 11:29:07 +0000 |
1752 | +++ hooks/utils.py 2014-01-24 17:20:41 +0000 |
1753 | @@ -7,97 +7,36 @@ |
1754 | # Paul Collins <paul.collins@canonical.com> |
1755 | # |
1756 | |
1757 | -import os |
1758 | -import subprocess |
1759 | import socket |
1760 | -import sys |
1761 | import re |
1762 | - |
1763 | - |
1764 | -def do_hooks(hooks): |
1765 | - hook = os.path.basename(sys.argv[0]) |
1766 | - |
1767 | - try: |
1768 | - hook_func = hooks[hook] |
1769 | - except KeyError: |
1770 | - juju_log('INFO', |
1771 | - "This charm doesn't know how to handle '{}'.".format(hook)) |
1772 | - else: |
1773 | - hook_func() |
1774 | - |
1775 | - |
1776 | -def install(*pkgs): |
1777 | - cmd = [ |
1778 | - 'apt-get', |
1779 | - '-y', |
1780 | - 'install' |
1781 | - ] |
1782 | - for pkg in pkgs: |
1783 | - cmd.append(pkg) |
1784 | - subprocess.check_call(cmd) |
1785 | +import os |
1786 | + |
1787 | +from charmhelpers.core.hookenv import unit_get |
1788 | +from charmhelpers.fetch import apt_install |
1789 | |
1790 | TEMPLATES_DIR = 'templates' |
1791 | |
1792 | try: |
1793 | import jinja2 |
1794 | except ImportError: |
1795 | - install('python-jinja2') |
1796 | + apt_install('python-jinja2', fatal=True) |
1797 | import jinja2 |
1798 | |
1799 | try: |
1800 | import dns.resolver |
1801 | except ImportError: |
1802 | - install('python-dnspython') |
1803 | + apt_install('python-dnspython', fatal=True) |
1804 | import dns.resolver |
1805 | |
1806 | |
1807 | def render_template(template_name, context, template_dir=TEMPLATES_DIR): |
1808 | templates = jinja2.Environment( |
1809 | - loader=jinja2.FileSystemLoader(template_dir) |
1810 | - ) |
1811 | + loader=jinja2.FileSystemLoader(template_dir) |
1812 | + ) |
1813 | template = templates.get_template(template_name) |
1814 | return template.render(context) |
1815 | |
1816 | |
1817 | -CLOUD_ARCHIVE = \ |
1818 | -""" # Ubuntu Cloud Archive |
1819 | -deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main |
1820 | -""" |
1821 | - |
1822 | - |
1823 | -def configure_source(): |
1824 | - source = str(config_get('source')) |
1825 | - if not source: |
1826 | - return |
1827 | - if source.startswith('ppa:'): |
1828 | - cmd = [ |
1829 | - 'add-apt-repository', |
1830 | - source |
1831 | - ] |
1832 | - subprocess.check_call(cmd) |
1833 | - if source.startswith('cloud:'): |
1834 | - install('ubuntu-cloud-keyring') |
1835 | - pocket = source.split(':')[1] |
1836 | - with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt: |
1837 | - apt.write(CLOUD_ARCHIVE.format(pocket)) |
1838 | - if source.startswith('http:'): |
1839 | - with open('/etc/apt/sources.list.d/quantum.list', 'w') as apt: |
1840 | - apt.write("deb " + source + "\n") |
1841 | - key = config_get('key') |
1842 | - if key: |
1843 | - cmd = [ |
1844 | - 'apt-key', |
1845 | - 'adv', '--keyserver keyserver.ubuntu.com', |
1846 | - '--recv-keys', key |
1847 | - ] |
1848 | - subprocess.check_call(cmd) |
1849 | - cmd = [ |
1850 | - 'apt-get', |
1851 | - 'update' |
1852 | - ] |
1853 | - subprocess.check_call(cmd) |
1854 | - |
1855 | - |
1856 | def enable_pocket(pocket): |
1857 | apt_sources = "/etc/apt/sources.list" |
1858 | with open(apt_sources, "r") as sources: |
1859 | @@ -109,103 +48,6 @@ |
1860 | else: |
1861 | sources.write(line) |
1862 | |
1863 | -# Protocols |
1864 | -TCP = 'TCP' |
1865 | -UDP = 'UDP' |
1866 | - |
1867 | - |
1868 | -def expose(port, protocol='TCP'): |
1869 | - cmd = [ |
1870 | - 'open-port', |
1871 | - '{}/{}'.format(port, protocol) |
1872 | - ] |
1873 | - subprocess.check_call(cmd) |
1874 | - |
1875 | - |
1876 | -def juju_log(severity, message): |
1877 | - cmd = [ |
1878 | - 'juju-log', |
1879 | - '--log-level', severity, |
1880 | - message |
1881 | - ] |
1882 | - subprocess.check_call(cmd) |
1883 | - |
1884 | - |
1885 | -def relation_ids(relation): |
1886 | - cmd = [ |
1887 | - 'relation-ids', |
1888 | - relation |
1889 | - ] |
1890 | - return subprocess.check_output(cmd).split() # IGNORE:E1103 |
1891 | - |
1892 | - |
1893 | -def relation_list(rid): |
1894 | - cmd = [ |
1895 | - 'relation-list', |
1896 | - '-r', rid, |
1897 | - ] |
1898 | - return subprocess.check_output(cmd).split() # IGNORE:E1103 |
1899 | - |
1900 | - |
1901 | -def relation_get(attribute, unit=None, rid=None): |
1902 | - cmd = [ |
1903 | - 'relation-get', |
1904 | - ] |
1905 | - if rid: |
1906 | - cmd.append('-r') |
1907 | - cmd.append(rid) |
1908 | - cmd.append(attribute) |
1909 | - if unit: |
1910 | - cmd.append(unit) |
1911 | - value = str(subprocess.check_output(cmd)).strip() |
1912 | - if value == "": |
1913 | - return None |
1914 | - else: |
1915 | - return value |
1916 | - |
1917 | - |
1918 | -def relation_set(**kwargs): |
1919 | - cmd = [ |
1920 | - 'relation-set' |
1921 | - ] |
1922 | - args = [] |
1923 | - for k, v in kwargs.items(): |
1924 | - if k == 'rid' and v: |
1925 | - cmd.append('-r') |
1926 | - cmd.append(v) |
1927 | - elif k != 'rid': |
1928 | - args.append('{}={}'.format(k, v)) |
1929 | - cmd += args |
1930 | - subprocess.check_call(cmd) |
1931 | - |
1932 | - |
1933 | -def unit_get(attribute): |
1934 | - cmd = [ |
1935 | - 'unit-get', |
1936 | - attribute |
1937 | - ] |
1938 | - value = str(subprocess.check_output(cmd)).strip() |
1939 | - if value == "": |
1940 | - return None |
1941 | - else: |
1942 | - return value |
1943 | - |
1944 | - |
1945 | -def config_get(attribute): |
1946 | - cmd = [ |
1947 | - 'config-get', |
1948 | - attribute |
1949 | - ] |
1950 | - value = str(subprocess.check_output(cmd)).strip() |
1951 | - if value == "": |
1952 | - return None |
1953 | - else: |
1954 | - return value |
1955 | - |
1956 | - |
1957 | -def get_unit_hostname(): |
1958 | - return socket.gethostname() |
1959 | - |
1960 | |
1961 | def get_host_ip(hostname=unit_get('private-address')): |
1962 | try: |
1963 | @@ -218,3 +60,10 @@ |
1964 | answers = dns.resolver.query(hostname, 'A') |
1965 | if answers: |
1966 | return answers[0].address |
1967 | + |
1968 | + |
1969 | +def is_apache_24(): |
1970 | + if os.path.exists('/etc/apache2/conf-available'): |
1971 | + return True |
1972 | + else: |
1973 | + return False |
1974 | |
1975 | === modified file 'metadata.yaml' |
1976 | --- metadata.yaml 2013-01-11 09:15:51 +0000 |
1977 | +++ metadata.yaml 2014-01-24 17:20:41 +0000 |
1978 | @@ -7,6 +7,8 @@ |
1979 | . |
1980 | This charm provides the RADOS HTTP gateway supporting S3 and Swift protocols |
1981 | for object storage. |
1982 | +categories: |
1983 | + - misc |
1984 | requires: |
1985 | mon: |
1986 | interface: ceph-radosgw |
Deferring to openstack-charmers