Merge lp:~james-page/charms/precise/ceph-radosgw/apache24 into lp:~charmers/charms/precise/ceph-radosgw/trunk
- Precise Pangolin (12.04)
- apache24
- Merge into trunk
Proposed by
James Page
Status: | Merged |
---|---|
Merged at revision: | 15 |
Proposed branch: | lp:~james-page/charms/precise/ceph-radosgw/apache24 |
Merge into: | lp:~charmers/charms/precise/ceph-radosgw/trunk |
Diff against target: |
1986 lines (+1345/-277) 17 files modified
.project (+17/-0) .pydevproject (+8/-0) Makefile (+8/-0) charm-helpers-sync.yaml (+9/-0) hooks/ceph.py (+22/-21) hooks/charmhelpers/contrib/openstack/alternatives.py (+17/-0) hooks/charmhelpers/contrib/storage/linux/utils.py (+25/-0) hooks/charmhelpers/core/hookenv.py (+395/-0) hooks/charmhelpers/core/host.py (+291/-0) hooks/charmhelpers/fetch/__init__.py (+279/-0) hooks/charmhelpers/fetch/archiveurl.py (+48/-0) hooks/charmhelpers/fetch/bzrurl.py (+49/-0) hooks/charmhelpers/payload/__init__.py (+1/-0) hooks/charmhelpers/payload/execd.py (+50/-0) hooks/hooks.py (+109/-90) hooks/utils.py (+15/-166) metadata.yaml (+2/-0) |
To merge this branch: | bzr merge lp:~james-page/charms/precise/ceph-radosgw/apache24 |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Marco Ceppi (community) | Approve | ||
OpenStack Charmers | Pending | ||
Review via email: mp+203129@code.launchpad.net |
Commit message
Description of the change
1) Fixes for compatibility with apache 2.4
2) General refresh to use charm-helpers inline with other ceph charms
To post a comment you must log in.
- 19. By James Page
-
Add pydev stuff
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === added file '.project' | |||
2 | --- .project 1970-01-01 00:00:00 +0000 | |||
3 | +++ .project 2014-01-24 17:20:41 +0000 | |||
4 | @@ -0,0 +1,17 @@ | |||
5 | 1 | <?xml version="1.0" encoding="UTF-8"?> | ||
6 | 2 | <projectDescription> | ||
7 | 3 | <name>ceph-radosgw</name> | ||
8 | 4 | <comment></comment> | ||
9 | 5 | <projects> | ||
10 | 6 | </projects> | ||
11 | 7 | <buildSpec> | ||
12 | 8 | <buildCommand> | ||
13 | 9 | <name>org.python.pydev.PyDevBuilder</name> | ||
14 | 10 | <arguments> | ||
15 | 11 | </arguments> | ||
16 | 12 | </buildCommand> | ||
17 | 13 | </buildSpec> | ||
18 | 14 | <natures> | ||
19 | 15 | <nature>org.python.pydev.pythonNature</nature> | ||
20 | 16 | </natures> | ||
21 | 17 | </projectDescription> | ||
22 | 0 | 18 | ||
23 | === added file '.pydevproject' | |||
24 | --- .pydevproject 1970-01-01 00:00:00 +0000 | |||
25 | +++ .pydevproject 2014-01-24 17:20:41 +0000 | |||
26 | @@ -0,0 +1,8 @@ | |||
27 | 1 | <?xml version="1.0" encoding="UTF-8" standalone="no"?> | ||
28 | 2 | <?eclipse-pydev version="1.0"?><pydev_project> | ||
29 | 3 | <pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.7</pydev_property> | ||
30 | 4 | <pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property> | ||
31 | 5 | <pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH"> | ||
32 | 6 | <path>/ceph-radosgw/hooks</path> | ||
33 | 7 | </pydev_pathproperty> | ||
34 | 8 | </pydev_project> | ||
35 | 0 | 9 | ||
36 | === added file 'Makefile' | |||
37 | --- Makefile 1970-01-01 00:00:00 +0000 | |||
38 | +++ Makefile 2014-01-24 17:20:41 +0000 | |||
39 | @@ -0,0 +1,8 @@ | |||
40 | 1 | #!/usr/bin/make | ||
41 | 2 | |||
42 | 3 | lint: | ||
43 | 4 | @flake8 --exclude hooks/charmhelpers hooks | ||
44 | 5 | @charm proof | ||
45 | 6 | |||
46 | 7 | sync: | ||
47 | 8 | @charm-helper-sync -c charm-helpers-sync.yaml | ||
48 | 0 | 9 | ||
49 | === added file 'charm-helpers-sync.yaml' | |||
50 | --- charm-helpers-sync.yaml 1970-01-01 00:00:00 +0000 | |||
51 | +++ charm-helpers-sync.yaml 2014-01-24 17:20:41 +0000 | |||
52 | @@ -0,0 +1,9 @@ | |||
53 | 1 | branch: lp:charm-helpers | ||
54 | 2 | destination: hooks/charmhelpers | ||
55 | 3 | include: | ||
56 | 4 | - core | ||
57 | 5 | - fetch | ||
58 | 6 | - contrib.storage.linux: | ||
59 | 7 | - utils | ||
60 | 8 | - payload.execd | ||
61 | 9 | - contrib.openstack.alternatives | ||
62 | 0 | 10 | ||
63 | === modified file 'hooks/ceph.py' | |||
64 | --- hooks/ceph.py 2013-01-11 09:15:51 +0000 | |||
65 | +++ hooks/ceph.py 2014-01-24 17:20:41 +0000 | |||
66 | @@ -10,23 +10,24 @@ | |||
67 | 10 | import json | 10 | import json |
68 | 11 | import subprocess | 11 | import subprocess |
69 | 12 | import time | 12 | import time |
70 | 13 | import utils | ||
71 | 14 | import os | 13 | import os |
72 | 15 | import apt_pkg as apt | 14 | import apt_pkg as apt |
73 | 16 | 15 | ||
74 | 16 | from socket import gethostname as get_unit_hostname | ||
75 | 17 | |||
76 | 17 | LEADER = 'leader' | 18 | LEADER = 'leader' |
77 | 18 | PEON = 'peon' | 19 | PEON = 'peon' |
78 | 19 | QUORUM = [LEADER, PEON] | 20 | QUORUM = [LEADER, PEON] |
79 | 20 | 21 | ||
80 | 21 | 22 | ||
81 | 22 | def is_quorum(): | 23 | def is_quorum(): |
83 | 23 | asok = "/var/run/ceph/ceph-mon.{}.asok".format(utils.get_unit_hostname()) | 24 | asok = "/var/run/ceph/ceph-mon.{}.asok".format(get_unit_hostname()) |
84 | 24 | cmd = [ | 25 | cmd = [ |
85 | 25 | "ceph", | 26 | "ceph", |
86 | 26 | "--admin-daemon", | 27 | "--admin-daemon", |
87 | 27 | asok, | 28 | asok, |
88 | 28 | "mon_status" | 29 | "mon_status" |
90 | 29 | ] | 30 | ] |
91 | 30 | if os.path.exists(asok): | 31 | if os.path.exists(asok): |
92 | 31 | try: | 32 | try: |
93 | 32 | result = json.loads(subprocess.check_output(cmd)) | 33 | result = json.loads(subprocess.check_output(cmd)) |
94 | @@ -44,13 +45,13 @@ | |||
95 | 44 | 45 | ||
96 | 45 | 46 | ||
97 | 46 | def is_leader(): | 47 | def is_leader(): |
99 | 47 | asok = "/var/run/ceph/ceph-mon.{}.asok".format(utils.get_unit_hostname()) | 48 | asok = "/var/run/ceph/ceph-mon.{}.asok".format(get_unit_hostname()) |
100 | 48 | cmd = [ | 49 | cmd = [ |
101 | 49 | "ceph", | 50 | "ceph", |
102 | 50 | "--admin-daemon", | 51 | "--admin-daemon", |
103 | 51 | asok, | 52 | asok, |
104 | 52 | "mon_status" | 53 | "mon_status" |
106 | 53 | ] | 54 | ] |
107 | 54 | if os.path.exists(asok): | 55 | if os.path.exists(asok): |
108 | 55 | try: | 56 | try: |
109 | 56 | result = json.loads(subprocess.check_output(cmd)) | 57 | result = json.loads(subprocess.check_output(cmd)) |
110 | @@ -73,14 +74,14 @@ | |||
111 | 73 | 74 | ||
112 | 74 | 75 | ||
113 | 75 | def add_bootstrap_hint(peer): | 76 | def add_bootstrap_hint(peer): |
115 | 76 | asok = "/var/run/ceph/ceph-mon.{}.asok".format(utils.get_unit_hostname()) | 77 | asok = "/var/run/ceph/ceph-mon.{}.asok".format(get_unit_hostname()) |
116 | 77 | cmd = [ | 78 | cmd = [ |
117 | 78 | "ceph", | 79 | "ceph", |
118 | 79 | "--admin-daemon", | 80 | "--admin-daemon", |
119 | 80 | asok, | 81 | asok, |
120 | 81 | "add_bootstrap_peer_hint", | 82 | "add_bootstrap_peer_hint", |
121 | 82 | peer | 83 | peer |
123 | 83 | ] | 84 | ] |
124 | 84 | if os.path.exists(asok): | 85 | if os.path.exists(asok): |
125 | 85 | # Ignore any errors for this call | 86 | # Ignore any errors for this call |
126 | 86 | subprocess.call(cmd) | 87 | subprocess.call(cmd) |
127 | @@ -89,7 +90,7 @@ | |||
128 | 89 | 'xfs', | 90 | 'xfs', |
129 | 90 | 'ext4', | 91 | 'ext4', |
130 | 91 | 'btrfs' | 92 | 'btrfs' |
132 | 92 | ] | 93 | ] |
133 | 93 | 94 | ||
134 | 94 | 95 | ||
135 | 95 | def is_osd_disk(dev): | 96 | def is_osd_disk(dev): |
136 | @@ -99,7 +100,7 @@ | |||
137 | 99 | for line in info: | 100 | for line in info: |
138 | 100 | if line.startswith( | 101 | if line.startswith( |
139 | 101 | 'Partition GUID code: 4FBD7E29-9D25-41B8-AFD0-062C0CEFF05D' | 102 | 'Partition GUID code: 4FBD7E29-9D25-41B8-AFD0-062C0CEFF05D' |
141 | 102 | ): | 103 | ): |
142 | 103 | return True | 104 | return True |
143 | 104 | except subprocess.CalledProcessError: | 105 | except subprocess.CalledProcessError: |
144 | 105 | pass | 106 | pass |
145 | @@ -110,7 +111,7 @@ | |||
146 | 110 | cmd = [ | 111 | cmd = [ |
147 | 111 | 'udevadm', 'trigger', | 112 | 'udevadm', 'trigger', |
148 | 112 | '--subsystem-match=block', '--action=add' | 113 | '--subsystem-match=block', '--action=add' |
150 | 113 | ] | 114 | ] |
151 | 114 | 115 | ||
152 | 115 | subprocess.call(cmd) | 116 | subprocess.call(cmd) |
153 | 116 | 117 | ||
154 | @@ -140,7 +141,7 @@ | |||
155 | 140 | '--create-keyring', | 141 | '--create-keyring', |
156 | 141 | '--name=client.bootstrap-osd', | 142 | '--name=client.bootstrap-osd', |
157 | 142 | '--add-key={}'.format(key) | 143 | '--add-key={}'.format(key) |
159 | 143 | ] | 144 | ] |
160 | 144 | subprocess.check_call(cmd) | 145 | subprocess.check_call(cmd) |
161 | 145 | 146 | ||
162 | 146 | # OSD caps taken from ceph-create-keys | 147 | # OSD caps taken from ceph-create-keys |
163 | @@ -148,10 +149,10 @@ | |||
164 | 148 | 'mon': [ | 149 | 'mon': [ |
165 | 149 | 'allow command osd create ...', | 150 | 'allow command osd create ...', |
166 | 150 | 'allow command osd crush set ...', | 151 | 'allow command osd crush set ...', |
168 | 151 | r'allow command auth add * osd allow\ * mon allow\ rwx', | 152 | r'allow command auth add * osd allow\ * mon allow\ rwx', |
169 | 152 | 'allow command mon getmap' | 153 | 'allow command mon getmap' |
172 | 153 | ] | 154 | ] |
173 | 154 | } | 155 | } |
174 | 155 | 156 | ||
175 | 156 | 157 | ||
176 | 157 | def get_osd_bootstrap_key(): | 158 | def get_osd_bootstrap_key(): |
177 | @@ -169,14 +170,14 @@ | |||
178 | 169 | '--create-keyring', | 170 | '--create-keyring', |
179 | 170 | '--name=client.radosgw.gateway', | 171 | '--name=client.radosgw.gateway', |
180 | 171 | '--add-key={}'.format(key) | 172 | '--add-key={}'.format(key) |
182 | 172 | ] | 173 | ] |
183 | 173 | subprocess.check_call(cmd) | 174 | subprocess.check_call(cmd) |
184 | 174 | 175 | ||
185 | 175 | # OSD caps taken from ceph-create-keys | 176 | # OSD caps taken from ceph-create-keys |
186 | 176 | _radosgw_caps = { | 177 | _radosgw_caps = { |
187 | 177 | 'mon': ['allow r'], | 178 | 'mon': ['allow r'], |
188 | 178 | 'osd': ['allow rwx'] | 179 | 'osd': ['allow rwx'] |
190 | 179 | } | 180 | } |
191 | 180 | 181 | ||
192 | 181 | 182 | ||
193 | 182 | def get_radosgw_key(): | 183 | def get_radosgw_key(): |
194 | @@ -186,7 +187,7 @@ | |||
195 | 186 | _default_caps = { | 187 | _default_caps = { |
196 | 187 | 'mon': ['allow r'], | 188 | 'mon': ['allow r'], |
197 | 188 | 'osd': ['allow rwx'] | 189 | 'osd': ['allow rwx'] |
199 | 189 | } | 190 | } |
200 | 190 | 191 | ||
201 | 191 | 192 | ||
202 | 192 | def get_named_key(name, caps=None): | 193 | def get_named_key(name, caps=None): |
203 | @@ -196,16 +197,16 @@ | |||
204 | 196 | '--name', 'mon.', | 197 | '--name', 'mon.', |
205 | 197 | '--keyring', | 198 | '--keyring', |
206 | 198 | '/var/lib/ceph/mon/ceph-{}/keyring'.format( | 199 | '/var/lib/ceph/mon/ceph-{}/keyring'.format( |
209 | 199 | utils.get_unit_hostname() | 200 | get_unit_hostname() |
210 | 200 | ), | 201 | ), |
211 | 201 | 'auth', 'get-or-create', 'client.{}'.format(name), | 202 | 'auth', 'get-or-create', 'client.{}'.format(name), |
213 | 202 | ] | 203 | ] |
214 | 203 | # Add capabilities | 204 | # Add capabilities |
215 | 204 | for subsystem, subcaps in caps.iteritems(): | 205 | for subsystem, subcaps in caps.iteritems(): |
216 | 205 | cmd.extend([ | 206 | cmd.extend([ |
217 | 206 | subsystem, | 207 | subsystem, |
218 | 207 | '; '.join(subcaps), | 208 | '; '.join(subcaps), |
220 | 208 | ]) | 209 | ]) |
221 | 209 | output = subprocess.check_output(cmd).strip() # IGNORE:E1103 | 210 | output = subprocess.check_output(cmd).strip() # IGNORE:E1103 |
222 | 210 | # get-or-create appears to have different output depending | 211 | # get-or-create appears to have different output depending |
223 | 211 | # on whether its 'get' or 'create' | 212 | # on whether its 'get' or 'create' |
224 | 212 | 213 | ||
225 | === added directory 'hooks/charmhelpers' | |||
226 | === added file 'hooks/charmhelpers/__init__.py' | |||
227 | === added directory 'hooks/charmhelpers/contrib' | |||
228 | === added file 'hooks/charmhelpers/contrib/__init__.py' | |||
229 | === added directory 'hooks/charmhelpers/contrib/openstack' | |||
230 | === added file 'hooks/charmhelpers/contrib/openstack/__init__.py' | |||
231 | === added file 'hooks/charmhelpers/contrib/openstack/alternatives.py' | |||
232 | --- hooks/charmhelpers/contrib/openstack/alternatives.py 1970-01-01 00:00:00 +0000 | |||
233 | +++ hooks/charmhelpers/contrib/openstack/alternatives.py 2014-01-24 17:20:41 +0000 | |||
234 | @@ -0,0 +1,17 @@ | |||
235 | 1 | ''' Helper for managing alternatives for file conflict resolution ''' | ||
236 | 2 | |||
237 | 3 | import subprocess | ||
238 | 4 | import shutil | ||
239 | 5 | import os | ||
240 | 6 | |||
241 | 7 | |||
242 | 8 | def install_alternative(name, target, source, priority=50): | ||
243 | 9 | ''' Install alternative configuration ''' | ||
244 | 10 | if (os.path.exists(target) and not os.path.islink(target)): | ||
245 | 11 | # Move existing file/directory away before installing | ||
246 | 12 | shutil.move(target, '{}.bak'.format(target)) | ||
247 | 13 | cmd = [ | ||
248 | 14 | 'update-alternatives', '--force', '--install', | ||
249 | 15 | target, name, source, str(priority) | ||
250 | 16 | ] | ||
251 | 17 | subprocess.check_call(cmd) | ||
252 | 0 | 18 | ||
253 | === added directory 'hooks/charmhelpers/contrib/storage' | |||
254 | === added file 'hooks/charmhelpers/contrib/storage/__init__.py' | |||
255 | === added directory 'hooks/charmhelpers/contrib/storage/linux' | |||
256 | === added file 'hooks/charmhelpers/contrib/storage/linux/__init__.py' | |||
257 | === added file 'hooks/charmhelpers/contrib/storage/linux/utils.py' | |||
258 | --- hooks/charmhelpers/contrib/storage/linux/utils.py 1970-01-01 00:00:00 +0000 | |||
259 | +++ hooks/charmhelpers/contrib/storage/linux/utils.py 2014-01-24 17:20:41 +0000 | |||
260 | @@ -0,0 +1,25 @@ | |||
261 | 1 | from os import stat | ||
262 | 2 | from stat import S_ISBLK | ||
263 | 3 | |||
264 | 4 | from subprocess import ( | ||
265 | 5 | check_call | ||
266 | 6 | ) | ||
267 | 7 | |||
268 | 8 | |||
269 | 9 | def is_block_device(path): | ||
270 | 10 | ''' | ||
271 | 11 | Confirm device at path is a valid block device node. | ||
272 | 12 | |||
273 | 13 | :returns: boolean: True if path is a block device, False if not. | ||
274 | 14 | ''' | ||
275 | 15 | return S_ISBLK(stat(path).st_mode) | ||
276 | 16 | |||
277 | 17 | |||
278 | 18 | def zap_disk(block_device): | ||
279 | 19 | ''' | ||
280 | 20 | Clear a block device of partition table. Relies on sgdisk, which is | ||
281 | 21 | installed as part of the 'gdisk' package in Ubuntu. | ||
282 | 22 | |||
283 | 23 | :param block_device: str: Full path of block device to clean. | ||
284 | 24 | ''' | ||
285 | 25 | check_call(['sgdisk', '--zap-all', '--mbrtogpt', block_device]) | ||
286 | 0 | 26 | ||
287 | === added directory 'hooks/charmhelpers/core' | |||
288 | === added file 'hooks/charmhelpers/core/__init__.py' | |||
289 | === added file 'hooks/charmhelpers/core/hookenv.py' | |||
290 | --- hooks/charmhelpers/core/hookenv.py 1970-01-01 00:00:00 +0000 | |||
291 | +++ hooks/charmhelpers/core/hookenv.py 2014-01-24 17:20:41 +0000 | |||
292 | @@ -0,0 +1,395 @@ | |||
293 | 1 | "Interactions with the Juju environment" | ||
294 | 2 | # Copyright 2013 Canonical Ltd. | ||
295 | 3 | # | ||
296 | 4 | # Authors: | ||
297 | 5 | # Charm Helpers Developers <juju@lists.ubuntu.com> | ||
298 | 6 | |||
299 | 7 | import os | ||
300 | 8 | import json | ||
301 | 9 | import yaml | ||
302 | 10 | import subprocess | ||
303 | 11 | import UserDict | ||
304 | 12 | from subprocess import CalledProcessError | ||
305 | 13 | |||
306 | 14 | CRITICAL = "CRITICAL" | ||
307 | 15 | ERROR = "ERROR" | ||
308 | 16 | WARNING = "WARNING" | ||
309 | 17 | INFO = "INFO" | ||
310 | 18 | DEBUG = "DEBUG" | ||
311 | 19 | MARKER = object() | ||
312 | 20 | |||
313 | 21 | cache = {} | ||
314 | 22 | |||
315 | 23 | |||
316 | 24 | def cached(func): | ||
317 | 25 | """Cache return values for multiple executions of func + args | ||
318 | 26 | |||
319 | 27 | For example: | ||
320 | 28 | |||
321 | 29 | @cached | ||
322 | 30 | def unit_get(attribute): | ||
323 | 31 | pass | ||
324 | 32 | |||
325 | 33 | unit_get('test') | ||
326 | 34 | |||
327 | 35 | will cache the result of unit_get + 'test' for future calls. | ||
328 | 36 | """ | ||
329 | 37 | def wrapper(*args, **kwargs): | ||
330 | 38 | global cache | ||
331 | 39 | key = str((func, args, kwargs)) | ||
332 | 40 | try: | ||
333 | 41 | return cache[key] | ||
334 | 42 | except KeyError: | ||
335 | 43 | res = func(*args, **kwargs) | ||
336 | 44 | cache[key] = res | ||
337 | 45 | return res | ||
338 | 46 | return wrapper | ||
339 | 47 | |||
340 | 48 | |||
341 | 49 | def flush(key): | ||
342 | 50 | """Flushes any entries from function cache where the | ||
343 | 51 | key is found in the function+args """ | ||
344 | 52 | flush_list = [] | ||
345 | 53 | for item in cache: | ||
346 | 54 | if key in item: | ||
347 | 55 | flush_list.append(item) | ||
348 | 56 | for item in flush_list: | ||
349 | 57 | del cache[item] | ||
350 | 58 | |||
351 | 59 | |||
352 | 60 | def log(message, level=None): | ||
353 | 61 | """Write a message to the juju log""" | ||
354 | 62 | command = ['juju-log'] | ||
355 | 63 | if level: | ||
356 | 64 | command += ['-l', level] | ||
357 | 65 | command += [message] | ||
358 | 66 | subprocess.call(command) | ||
359 | 67 | |||
360 | 68 | |||
361 | 69 | class Serializable(UserDict.IterableUserDict): | ||
362 | 70 | """Wrapper, an object that can be serialized to yaml or json""" | ||
363 | 71 | |||
364 | 72 | def __init__(self, obj): | ||
365 | 73 | # wrap the object | ||
366 | 74 | UserDict.IterableUserDict.__init__(self) | ||
367 | 75 | self.data = obj | ||
368 | 76 | |||
369 | 77 | def __getattr__(self, attr): | ||
370 | 78 | # See if this object has attribute. | ||
371 | 79 | if attr in ("json", "yaml", "data"): | ||
372 | 80 | return self.__dict__[attr] | ||
373 | 81 | # Check for attribute in wrapped object. | ||
374 | 82 | got = getattr(self.data, attr, MARKER) | ||
375 | 83 | if got is not MARKER: | ||
376 | 84 | return got | ||
377 | 85 | # Proxy to the wrapped object via dict interface. | ||
378 | 86 | try: | ||
379 | 87 | return self.data[attr] | ||
380 | 88 | except KeyError: | ||
381 | 89 | raise AttributeError(attr) | ||
382 | 90 | |||
383 | 91 | def __getstate__(self): | ||
384 | 92 | # Pickle as a standard dictionary. | ||
385 | 93 | return self.data | ||
386 | 94 | |||
387 | 95 | def __setstate__(self, state): | ||
388 | 96 | # Unpickle into our wrapper. | ||
389 | 97 | self.data = state | ||
390 | 98 | |||
391 | 99 | def json(self): | ||
392 | 100 | """Serialize the object to json""" | ||
393 | 101 | return json.dumps(self.data) | ||
394 | 102 | |||
395 | 103 | def yaml(self): | ||
396 | 104 | """Serialize the object to yaml""" | ||
397 | 105 | return yaml.dump(self.data) | ||
398 | 106 | |||
399 | 107 | |||
400 | 108 | def execution_environment(): | ||
401 | 109 | """A convenient bundling of the current execution context""" | ||
402 | 110 | context = {} | ||
403 | 111 | context['conf'] = config() | ||
404 | 112 | if relation_id(): | ||
405 | 113 | context['reltype'] = relation_type() | ||
406 | 114 | context['relid'] = relation_id() | ||
407 | 115 | context['rel'] = relation_get() | ||
408 | 116 | context['unit'] = local_unit() | ||
409 | 117 | context['rels'] = relations() | ||
410 | 118 | context['env'] = os.environ | ||
411 | 119 | return context | ||
412 | 120 | |||
413 | 121 | |||
414 | 122 | def in_relation_hook(): | ||
415 | 123 | """Determine whether we're running in a relation hook""" | ||
416 | 124 | return 'JUJU_RELATION' in os.environ | ||
417 | 125 | |||
418 | 126 | |||
419 | 127 | def relation_type(): | ||
420 | 128 | """The scope for the current relation hook""" | ||
421 | 129 | return os.environ.get('JUJU_RELATION', None) | ||
422 | 130 | |||
423 | 131 | |||
424 | 132 | def relation_id(): | ||
425 | 133 | """The relation ID for the current relation hook""" | ||
426 | 134 | return os.environ.get('JUJU_RELATION_ID', None) | ||
427 | 135 | |||
428 | 136 | |||
429 | 137 | def local_unit(): | ||
430 | 138 | """Local unit ID""" | ||
431 | 139 | return os.environ['JUJU_UNIT_NAME'] | ||
432 | 140 | |||
433 | 141 | |||
434 | 142 | def remote_unit(): | ||
435 | 143 | """The remote unit for the current relation hook""" | ||
436 | 144 | return os.environ['JUJU_REMOTE_UNIT'] | ||
437 | 145 | |||
438 | 146 | |||
439 | 147 | def service_name(): | ||
440 | 148 | """The name service group this unit belongs to""" | ||
441 | 149 | return local_unit().split('/')[0] | ||
442 | 150 | |||
443 | 151 | |||
444 | 152 | @cached | ||
445 | 153 | def config(scope=None): | ||
446 | 154 | """Juju charm configuration""" | ||
447 | 155 | config_cmd_line = ['config-get'] | ||
448 | 156 | if scope is not None: | ||
449 | 157 | config_cmd_line.append(scope) | ||
450 | 158 | config_cmd_line.append('--format=json') | ||
451 | 159 | try: | ||
452 | 160 | return json.loads(subprocess.check_output(config_cmd_line)) | ||
453 | 161 | except ValueError: | ||
454 | 162 | return None | ||
455 | 163 | |||
456 | 164 | |||
457 | 165 | @cached | ||
458 | 166 | def relation_get(attribute=None, unit=None, rid=None): | ||
459 | 167 | """Get relation information""" | ||
460 | 168 | _args = ['relation-get', '--format=json'] | ||
461 | 169 | if rid: | ||
462 | 170 | _args.append('-r') | ||
463 | 171 | _args.append(rid) | ||
464 | 172 | _args.append(attribute or '-') | ||
465 | 173 | if unit: | ||
466 | 174 | _args.append(unit) | ||
467 | 175 | try: | ||
468 | 176 | return json.loads(subprocess.check_output(_args)) | ||
469 | 177 | except ValueError: | ||
470 | 178 | return None | ||
471 | 179 | except CalledProcessError, e: | ||
472 | 180 | if e.returncode == 2: | ||
473 | 181 | return None | ||
474 | 182 | raise | ||
475 | 183 | |||
476 | 184 | |||
477 | 185 | def relation_set(relation_id=None, relation_settings={}, **kwargs): | ||
478 | 186 | """Set relation information for the current unit""" | ||
479 | 187 | relation_cmd_line = ['relation-set'] | ||
480 | 188 | if relation_id is not None: | ||
481 | 189 | relation_cmd_line.extend(('-r', relation_id)) | ||
482 | 190 | for k, v in (relation_settings.items() + kwargs.items()): | ||
483 | 191 | if v is None: | ||
484 | 192 | relation_cmd_line.append('{}='.format(k)) | ||
485 | 193 | else: | ||
486 | 194 | relation_cmd_line.append('{}={}'.format(k, v)) | ||
487 | 195 | subprocess.check_call(relation_cmd_line) | ||
488 | 196 | # Flush cache of any relation-gets for local unit | ||
489 | 197 | flush(local_unit()) | ||
490 | 198 | |||
491 | 199 | |||
492 | 200 | @cached | ||
493 | 201 | def relation_ids(reltype=None): | ||
494 | 202 | """A list of relation_ids""" | ||
495 | 203 | reltype = reltype or relation_type() | ||
496 | 204 | relid_cmd_line = ['relation-ids', '--format=json'] | ||
497 | 205 | if reltype is not None: | ||
498 | 206 | relid_cmd_line.append(reltype) | ||
499 | 207 | return json.loads(subprocess.check_output(relid_cmd_line)) or [] | ||
500 | 208 | return [] | ||
501 | 209 | |||
502 | 210 | |||
503 | 211 | @cached | ||
504 | 212 | def related_units(relid=None): | ||
505 | 213 | """A list of related units""" | ||
506 | 214 | relid = relid or relation_id() | ||
507 | 215 | units_cmd_line = ['relation-list', '--format=json'] | ||
508 | 216 | if relid is not None: | ||
509 | 217 | units_cmd_line.extend(('-r', relid)) | ||
510 | 218 | return json.loads(subprocess.check_output(units_cmd_line)) or [] | ||
511 | 219 | |||
512 | 220 | |||
513 | 221 | @cached | ||
514 | 222 | def relation_for_unit(unit=None, rid=None): | ||
515 | 223 | """Get the json representation of a unit's relation""" | ||
516 | 224 | unit = unit or remote_unit() | ||
517 | 225 | relation = relation_get(unit=unit, rid=rid) | ||
518 | 226 | for key in relation: | ||
519 | 227 | if key.endswith('-list'): | ||
520 | 228 | relation[key] = relation[key].split() | ||
521 | 229 | relation['__unit__'] = unit | ||
522 | 230 | return relation | ||
523 | 231 | |||
524 | 232 | |||
525 | 233 | @cached | ||
526 | 234 | def relations_for_id(relid=None): | ||
527 | 235 | """Get relations of a specific relation ID""" | ||
528 | 236 | relation_data = [] | ||
529 | 237 | relid = relid or relation_ids() | ||
530 | 238 | for unit in related_units(relid): | ||
531 | 239 | unit_data = relation_for_unit(unit, relid) | ||
532 | 240 | unit_data['__relid__'] = relid | ||
533 | 241 | relation_data.append(unit_data) | ||
534 | 242 | return relation_data | ||
535 | 243 | |||
536 | 244 | |||
537 | 245 | @cached | ||
538 | 246 | def relations_of_type(reltype=None): | ||
539 | 247 | """Get relations of a specific type""" | ||
540 | 248 | relation_data = [] | ||
541 | 249 | reltype = reltype or relation_type() | ||
542 | 250 | for relid in relation_ids(reltype): | ||
543 | 251 | for relation in relations_for_id(relid): | ||
544 | 252 | relation['__relid__'] = relid | ||
545 | 253 | relation_data.append(relation) | ||
546 | 254 | return relation_data | ||
547 | 255 | |||
548 | 256 | |||
549 | 257 | @cached | ||
550 | 258 | def relation_types(): | ||
551 | 259 | """Get a list of relation types supported by this charm""" | ||
552 | 260 | charmdir = os.environ.get('CHARM_DIR', '') | ||
553 | 261 | mdf = open(os.path.join(charmdir, 'metadata.yaml')) | ||
554 | 262 | md = yaml.safe_load(mdf) | ||
555 | 263 | rel_types = [] | ||
556 | 264 | for key in ('provides', 'requires', 'peers'): | ||
557 | 265 | section = md.get(key) | ||
558 | 266 | if section: | ||
559 | 267 | rel_types.extend(section.keys()) | ||
560 | 268 | mdf.close() | ||
561 | 269 | return rel_types | ||
562 | 270 | |||
563 | 271 | |||
564 | 272 | @cached | ||
565 | 273 | def relations(): | ||
566 | 274 | """Get a nested dictionary of relation data for all related units""" | ||
567 | 275 | rels = {} | ||
568 | 276 | for reltype in relation_types(): | ||
569 | 277 | relids = {} | ||
570 | 278 | for relid in relation_ids(reltype): | ||
571 | 279 | units = {local_unit(): relation_get(unit=local_unit(), rid=relid)} | ||
572 | 280 | for unit in related_units(relid): | ||
573 | 281 | reldata = relation_get(unit=unit, rid=relid) | ||
574 | 282 | units[unit] = reldata | ||
575 | 283 | relids[relid] = units | ||
576 | 284 | rels[reltype] = relids | ||
577 | 285 | return rels | ||
578 | 286 | |||
579 | 287 | |||
580 | 288 | @cached | ||
581 | 289 | def is_relation_made(relation, keys='private-address'): | ||
582 | 290 | ''' | ||
583 | 291 | Determine whether a relation is established by checking for | ||
584 | 292 | presence of key(s). If a list of keys is provided, they | ||
585 | 293 | must all be present for the relation to be identified as made | ||
586 | 294 | ''' | ||
587 | 295 | if isinstance(keys, str): | ||
588 | 296 | keys = [keys] | ||
589 | 297 | for r_id in relation_ids(relation): | ||
590 | 298 | for unit in related_units(r_id): | ||
591 | 299 | context = {} | ||
592 | 300 | for k in keys: | ||
593 | 301 | context[k] = relation_get(k, rid=r_id, | ||
594 | 302 | unit=unit) | ||
595 | 303 | if None not in context.values(): | ||
596 | 304 | return True | ||
597 | 305 | return False | ||
598 | 306 | |||
599 | 307 | |||
600 | 308 | def open_port(port, protocol="TCP"): | ||
601 | 309 | """Open a service network port""" | ||
602 | 310 | _args = ['open-port'] | ||
603 | 311 | _args.append('{}/{}'.format(port, protocol)) | ||
604 | 312 | subprocess.check_call(_args) | ||
605 | 313 | |||
606 | 314 | |||
607 | 315 | def close_port(port, protocol="TCP"): | ||
608 | 316 | """Close a service network port""" | ||
609 | 317 | _args = ['close-port'] | ||
610 | 318 | _args.append('{}/{}'.format(port, protocol)) | ||
611 | 319 | subprocess.check_call(_args) | ||
612 | 320 | |||
613 | 321 | |||
614 | 322 | @cached | ||
615 | 323 | def unit_get(attribute): | ||
616 | 324 | """Get the unit ID for the remote unit""" | ||
617 | 325 | _args = ['unit-get', '--format=json', attribute] | ||
618 | 326 | try: | ||
619 | 327 | return json.loads(subprocess.check_output(_args)) | ||
620 | 328 | except ValueError: | ||
621 | 329 | return None | ||
622 | 330 | |||
623 | 331 | |||
624 | 332 | def unit_private_ip(): | ||
625 | 333 | """Get this unit's private IP address""" | ||
626 | 334 | return unit_get('private-address') | ||
627 | 335 | |||
628 | 336 | |||
629 | 337 | class UnregisteredHookError(Exception): | ||
630 | 338 | """Raised when an undefined hook is called""" | ||
631 | 339 | pass | ||
632 | 340 | |||
633 | 341 | |||
634 | 342 | class Hooks(object): | ||
635 | 343 | """A convenient handler for hook functions. | ||
636 | 344 | |||
637 | 345 | Example: | ||
638 | 346 | hooks = Hooks() | ||
639 | 347 | |||
640 | 348 | # register a hook, taking its name from the function name | ||
641 | 349 | @hooks.hook() | ||
642 | 350 | def install(): | ||
643 | 351 | ... | ||
644 | 352 | |||
645 | 353 | # register a hook, providing a custom hook name | ||
646 | 354 | @hooks.hook("config-changed") | ||
647 | 355 | def config_changed(): | ||
648 | 356 | ... | ||
649 | 357 | |||
650 | 358 | if __name__ == "__main__": | ||
651 | 359 | # execute a hook based on the name the program is called by | ||
652 | 360 | hooks.execute(sys.argv) | ||
653 | 361 | """ | ||
654 | 362 | |||
655 | 363 | def __init__(self): | ||
656 | 364 | super(Hooks, self).__init__() | ||
657 | 365 | self._hooks = {} | ||
658 | 366 | |||
659 | 367 | def register(self, name, function): | ||
660 | 368 | """Register a hook""" | ||
661 | 369 | self._hooks[name] = function | ||
662 | 370 | |||
663 | 371 | def execute(self, args): | ||
664 | 372 | """Execute a registered hook based on args[0]""" | ||
665 | 373 | hook_name = os.path.basename(args[0]) | ||
666 | 374 | if hook_name in self._hooks: | ||
667 | 375 | self._hooks[hook_name]() | ||
668 | 376 | else: | ||
669 | 377 | raise UnregisteredHookError(hook_name) | ||
670 | 378 | |||
671 | 379 | def hook(self, *hook_names): | ||
672 | 380 | """Decorator, registering them as hooks""" | ||
673 | 381 | def wrapper(decorated): | ||
674 | 382 | for hook_name in hook_names: | ||
675 | 383 | self.register(hook_name, decorated) | ||
676 | 384 | else: | ||
677 | 385 | self.register(decorated.__name__, decorated) | ||
678 | 386 | if '_' in decorated.__name__: | ||
679 | 387 | self.register( | ||
680 | 388 | decorated.__name__.replace('_', '-'), decorated) | ||
681 | 389 | return decorated | ||
682 | 390 | return wrapper | ||
683 | 391 | |||
684 | 392 | |||
685 | 393 | def charm_dir(): | ||
686 | 394 | """Return the root directory of the current charm""" | ||
687 | 395 | return os.environ.get('CHARM_DIR') | ||
688 | 0 | 396 | ||
689 | === added file 'hooks/charmhelpers/core/host.py' | |||
690 | --- hooks/charmhelpers/core/host.py 1970-01-01 00:00:00 +0000 | |||
691 | +++ hooks/charmhelpers/core/host.py 2014-01-24 17:20:41 +0000 | |||
692 | @@ -0,0 +1,291 @@ | |||
693 | 1 | """Tools for working with the host system""" | ||
694 | 2 | # Copyright 2012 Canonical Ltd. | ||
695 | 3 | # | ||
696 | 4 | # Authors: | ||
697 | 5 | # Nick Moffitt <nick.moffitt@canonical.com> | ||
698 | 6 | # Matthew Wedgwood <matthew.wedgwood@canonical.com> | ||
699 | 7 | |||
700 | 8 | import os | ||
701 | 9 | import pwd | ||
702 | 10 | import grp | ||
703 | 11 | import random | ||
704 | 12 | import string | ||
705 | 13 | import subprocess | ||
706 | 14 | import hashlib | ||
707 | 15 | |||
708 | 16 | from collections import OrderedDict | ||
709 | 17 | |||
710 | 18 | from hookenv import log | ||
711 | 19 | |||
712 | 20 | |||
def service_start(service_name):
    """Start a system service; True if the command succeeded."""
    return service('start', service_name)


def service_stop(service_name):
    """Stop a system service; True if the command succeeded."""
    return service('stop', service_name)


def service_restart(service_name):
    """Restart a system service; True if the command succeeded."""
    return service('restart', service_name)


def service_reload(service_name, restart_on_failure=False):
    """Reload a system service, optionally falling back to restart if reload fails"""
    reloaded = service('reload', service_name)
    if not reloaded and restart_on_failure:
        reloaded = service('restart', service_name)
    return reloaded


def service(action, service_name):
    """Run `service <name> <action>`; True when it exits 0."""
    return subprocess.call(['service', service_name, action]) == 0


def service_running(service):
    """Determine whether a system service is running.

    Parses the upstart/sysv `service <name> status` output.
    """
    try:
        output = subprocess.check_output(['service', service, 'status'])
    except subprocess.CalledProcessError:
        return False
    return ("start/running" in output or "is running" in output)
753 | 61 | |||
754 | 62 | |||
def adduser(username, password=None, shell='/bin/bash', system_user=False):
    """Ensure a system user exists; return its pwd entry.

    A user with no password (or system_user=True) is created as a
    --system account with no home directory.
    """
    try:
        user_info = pwd.getpwnam(username)
        log('user {0} already exists!'.format(username))
    except KeyError:
        # Not present yet; build the useradd invocation.
        log('creating user {0}'.format(username))
        cmd = ['useradd']
        if system_user or password is None:
            cmd.append('--system')
        else:
            cmd.extend([
                '--create-home',
                '--shell', shell,
                '--password', password,
            ])
        cmd.append(username)
        subprocess.check_call(cmd)
        user_info = pwd.getpwnam(username)
    return user_info


def add_user_to_group(username, group):
    """Append a user to a secondary group via gpasswd."""
    log("Adding user {} to group {}".format(username, group))
    subprocess.check_call(['gpasswd', '-a', username, group])
786 | 94 | |||
787 | 95 | |||
def rsync(from_path, to_path, flags='-r', options=None):
    """Replicate the contents of a path with rsync; return its output."""
    # Note: a falsy options value (None or []) selects the defaults,
    # matching the original `options or [...]` semantics.
    options = options or ['--delete', '--executability']
    cmd = ['/usr/bin/rsync', flags] + list(options) + [from_path, to_path]
    log(" ".join(cmd))
    return subprocess.check_output(cmd).strip()


def symlink(source, destination):
    """Force-create a symbolic link at destination pointing at source."""
    log("Symlinking {} as {}".format(source, destination))
    subprocess.check_call(['ln', '-sf', source, destination])
809 | 117 | |||
810 | 118 | |||
def mkdir(path, owner='root', group='root', perms=0o555, force=False):
    """Create a directory owned by owner:group with mode perms.

    If path exists as a directory it is left in place (ownership is still
    re-applied).  If it exists as a non-directory and force is True, the
    file is removed and replaced with a directory; without force the
    existing file is kept (and chowned).

    :raises KeyError: if owner/group are unknown to pwd/grp.
    """
    log("Making dir {} {}:{} {:o}".format(path, owner, group,
                                          perms))
    uid = pwd.getpwnam(owner).pw_uid
    gid = grp.getgrnam(group).gr_gid
    realpath = os.path.abspath(path)
    if os.path.exists(realpath):
        if force and not os.path.isdir(realpath):
            log("Removing non-directory file {} prior to mkdir()".format(path))
            os.unlink(realpath)
            # Bug fix: previously the directory was never recreated after
            # removing the conflicting file, so the chown below raised
            # OSError on the now-missing path.
            os.makedirs(realpath, perms)
    else:
        os.makedirs(realpath, perms)
    os.chown(realpath, uid, gid)
825 | 133 | |||
826 | 134 | |||
def write_file(path, content, owner='root', group='root', perms=0o444):
    """Create or overwrite a file with the contents of a string.

    Ownership and mode are applied to the open descriptor before the
    content is written.
    """
    log("Writing file {} {}:{} {:o}".format(path, owner, group, perms))
    uid = pwd.getpwnam(owner).pw_uid
    gid = grp.getgrnam(group).gr_gid
    with open(path, 'w') as target:
        fd = target.fileno()
        os.fchown(fd, uid, gid)
        os.fchmod(fd, perms)
        target.write(content)
836 | 144 | |||
837 | 145 | |||
def mount(device, mountpoint, options=None, persist=False):
    """Mount a filesystem at a particular mountpoint.

    :param options: string passed through to `mount -o`
    :param persist: reserved for fstab persistence (not yet implemented)
    :returns: True on success; False on failure (the error is logged)
    """
    cmd_args = ['mount']
    if options is not None:
        cmd_args.extend(['-o', options])
    cmd_args.extend([device, mountpoint])
    try:
        subprocess.check_output(cmd_args)
    # Fix: PEP 3110 'as' form — the old `except X, e` comma syntax is
    # deprecated since 2.6 and a SyntaxError on Python 3.
    except subprocess.CalledProcessError as e:
        log('Error mounting {} at {}\n{}'.format(device, mountpoint, e.output))
        return False
    if persist:
        # TODO: update fstab
        pass
    return True


def umount(mountpoint, persist=False):
    """Unmount the filesystem mounted at mountpoint.

    :returns: True on success; False on failure (the error is logged)
    """
    try:
        subprocess.check_output(['umount', mountpoint])
    except subprocess.CalledProcessError as e:
        log('Error unmounting {}\n{}'.format(mountpoint, e.output))
        return False
    if persist:
        # TODO: update fstab
        pass
    return True
867 | 175 | |||
868 | 176 | |||
def mounts():
    """Get a list of all mounted volumes as [[mountpoint,device],[...]]"""
    entries = []
    with open('/proc/mounts') as proc_mounts:
        for line in proc_mounts:
            fields = line.strip().split()
            # /proc/mounts is "device mountpoint ..."; we report
            # [mountpoint, device].
            entries.append([fields[1], fields[0]])
    return entries
876 | 184 | |||
877 | 185 | |||
def file_hash(path):
    """Return the md5 hex digest of the file at path, or None if missing.

    Fix: the file is read in binary mode so the digest reflects the raw
    bytes — text mode would newline-translate on some platforms and
    hashlib requires bytes on Python 3.
    """
    if os.path.exists(path):
        h = hashlib.md5()
        with open(path, 'rb') as source:
            h.update(source.read())
        return h.hexdigest()
    return None
887 | 195 | |||
888 | 196 | |||
def restart_on_change(restart_map):
    """Restart services based on configuration files changing

    This function is used a decorator, for example

        @restart_on_change({
            '/etc/ceph/ceph.conf': [ 'cinder-api', 'cinder-volume' ]
        })
        def ceph_client_changed():
            ...

    In this example, the cinder-api and cinder-volume services
    would be restarted if /etc/ceph/ceph.conf is changed by the
    ceph_client_changed function.
    """
    def wrap(f):
        def wrapped_f(*args):
            # Snapshot every watched file's hash before running the hook.
            pre = dict((path, file_hash(path)) for path in restart_map)
            f(*args)
            # Collect the services of every file whose hash changed.
            to_restart = []
            for path, svcs in restart_map.items():
                if pre[path] != file_hash(path):
                    to_restart.extend(svcs)
            # De-duplicate while keeping first-seen order.
            for name in OrderedDict.fromkeys(to_restart):
                service('restart', name)
        return wrapped_f
    return wrap
918 | 226 | |||
919 | 227 | |||
def lsb_release(path='/etc/lsb-release'):
    """Return an lsb-release style KEY=VALUE file as a dict.

    :param path: file to parse (defaults to /etc/lsb-release; parameter
        added for testability, backward compatible).

    Fix: split on the FIRST '=' only — values such as
    DISTRIB_DESCRIPTION="Ubuntu 12.04" may themselves contain '=' and the
    old unbounded split raised ValueError on them.  Lines without '='
    (e.g. blanks) are skipped instead of crashing.
    """
    d = {}
    with open(path, 'r') as lsb:
        for line in lsb:
            if '=' not in line:
                continue
            k, v = line.split('=', 1)
            d[k.strip()] = v.strip()
    return d
928 | 236 | |||
929 | 237 | |||
def pwgen(length=None):
    """Generate a random alphanumeric password.

    :param length: password length; a random length in [35, 44] if None.

    Easily-confused characters and vowels ('l0QD1vAEIOUaeiou') are
    excluded from the alphabet.  Fix: use string.ascii_letters instead of
    string.letters — the latter is locale-dependent and was removed in
    Python 3, so the alphabet is now deterministic everywhere.
    """
    if length is None:
        length = random.choice(range(35, 45))
    alphanumeric_chars = [
        c for c in (string.ascii_letters + string.digits)
        if c not in 'l0QD1vAEIOUaeiou']
    return ''.join(random.choice(alphanumeric_chars) for _ in range(length))
940 | 248 | |||
941 | 249 | |||
def list_nics(nic_type):
    '''Return a list of nics of given type(s)'''
    # Accept a single type string or a list of them.
    if isinstance(nic_type, basestring):
        int_types = [nic_type]
    else:
        int_types = nic_type
    interfaces = []
    for int_type in int_types:
        cmd = ['ip', 'addr', 'show', 'label', int_type + '*']
        for line in subprocess.check_output(cmd).split('\n'):
            if not line:
                continue
            label = line.split()[1]
            if label.startswith(int_type):
                interfaces.append(label.replace(":", ""))
    return interfaces


def set_nic_mtu(nic, mtu):
    '''Set MTU on a network interface'''
    subprocess.check_call(['ip', 'link', 'set', nic, 'mtu', mtu])


def get_nic_mtu(nic):
    '''Return the MTU reported by `ip addr show` ("" if not found).'''
    mtu = ""
    output = subprocess.check_output(['ip', 'addr', 'show', nic])
    for line in output.split('\n'):
        words = line.split()
        if 'mtu' in words:
            mtu = words[words.index("mtu") + 1]
    return mtu


def get_nic_hwaddr(nic):
    '''Return the hardware (MAC) address of a nic ("" if not found).'''
    words = subprocess.check_output(
        ['ip', '-o', '-0', 'addr', 'show', nic]).split()
    if 'link/ether' in words:
        return words[words.index('link/ether') + 1]
    return ""
984 | 0 | 292 | ||
985 | === added directory 'hooks/charmhelpers/fetch' | |||
986 | === added file 'hooks/charmhelpers/fetch/__init__.py' | |||
987 | --- hooks/charmhelpers/fetch/__init__.py 1970-01-01 00:00:00 +0000 | |||
988 | +++ hooks/charmhelpers/fetch/__init__.py 2014-01-24 17:20:41 +0000 | |||
989 | @@ -0,0 +1,279 @@ | |||
990 | 1 | import importlib | ||
991 | 2 | from yaml import safe_load | ||
992 | 3 | from charmhelpers.core.host import ( | ||
993 | 4 | lsb_release | ||
994 | 5 | ) | ||
995 | 6 | from urlparse import ( | ||
996 | 7 | urlparse, | ||
997 | 8 | urlunparse, | ||
998 | 9 | ) | ||
999 | 10 | import subprocess | ||
1000 | 11 | from charmhelpers.core.hookenv import ( | ||
1001 | 12 | config, | ||
1002 | 13 | log, | ||
1003 | 14 | ) | ||
1004 | 15 | import apt_pkg | ||
1005 | 16 | import os | ||
1006 | 17 | |||
1007 | 18 | CLOUD_ARCHIVE = """# Ubuntu Cloud Archive | ||
1008 | 19 | deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main | ||
1009 | 20 | """ | ||
1010 | 21 | PROPOSED_POCKET = """# Proposed | ||
1011 | 22 | deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted | ||
1012 | 23 | """ | ||
1013 | 24 | CLOUD_ARCHIVE_POCKETS = { | ||
1014 | 25 | # Folsom | ||
1015 | 26 | 'folsom': 'precise-updates/folsom', | ||
1016 | 27 | 'precise-folsom': 'precise-updates/folsom', | ||
1017 | 28 | 'precise-folsom/updates': 'precise-updates/folsom', | ||
1018 | 29 | 'precise-updates/folsom': 'precise-updates/folsom', | ||
1019 | 30 | 'folsom/proposed': 'precise-proposed/folsom', | ||
1020 | 31 | 'precise-folsom/proposed': 'precise-proposed/folsom', | ||
1021 | 32 | 'precise-proposed/folsom': 'precise-proposed/folsom', | ||
1022 | 33 | # Grizzly | ||
1023 | 34 | 'grizzly': 'precise-updates/grizzly', | ||
1024 | 35 | 'precise-grizzly': 'precise-updates/grizzly', | ||
1025 | 36 | 'precise-grizzly/updates': 'precise-updates/grizzly', | ||
1026 | 37 | 'precise-updates/grizzly': 'precise-updates/grizzly', | ||
1027 | 38 | 'grizzly/proposed': 'precise-proposed/grizzly', | ||
1028 | 39 | 'precise-grizzly/proposed': 'precise-proposed/grizzly', | ||
1029 | 40 | 'precise-proposed/grizzly': 'precise-proposed/grizzly', | ||
1030 | 41 | # Havana | ||
1031 | 42 | 'havana': 'precise-updates/havana', | ||
1032 | 43 | 'precise-havana': 'precise-updates/havana', | ||
1033 | 44 | 'precise-havana/updates': 'precise-updates/havana', | ||
1034 | 45 | 'precise-updates/havana': 'precise-updates/havana', | ||
1035 | 46 | 'havana/proposed': 'precise-proposed/havana', | ||
1036 | 47 | 'precise-havana/proposed': 'precise-proposed/havana', | ||
1037 | 48 | 'precise-proposed/havana': 'precise-proposed/havana', | ||
1038 | 49 | # Icehouse | ||
1039 | 50 | 'icehouse': 'precise-updates/icehouse', | ||
1040 | 51 | 'precise-icehouse': 'precise-updates/icehouse', | ||
1041 | 52 | 'precise-icehouse/updates': 'precise-updates/icehouse', | ||
1042 | 53 | 'precise-updates/icehouse': 'precise-updates/icehouse', | ||
1043 | 54 | 'icehouse/proposed': 'precise-proposed/icehouse', | ||
1044 | 55 | 'precise-icehouse/proposed': 'precise-proposed/icehouse', | ||
1045 | 56 | 'precise-proposed/icehouse': 'precise-proposed/icehouse', | ||
1046 | 57 | } | ||
1047 | 58 | |||
1048 | 59 | |||
def filter_installed_packages(packages):
    """Returns a list of packages that require installation"""
    apt_pkg.init()
    cache = apt_pkg.Cache()
    missing = []
    for package in packages:
        try:
            # current_ver is falsy when the package is not installed.
            if not cache[package].current_ver:
                missing.append(package)
        except KeyError:
            # Unknown to apt: report it so install can fail loudly later.
            log('Package {} has no installation candidate.'.format(package),
                level='WARNING')
            missing.append(package)
    return missing
1063 | 74 | |||
1064 | 75 | |||
def apt_install(packages, options=None, fatal=False):
    """Install one or more packages with apt-get.

    :param fatal: when True, raise CalledProcessError on failure.
    """
    if options is None:
        options = ['--option=Dpkg::Options::=--force-confold']
    cmd = ['apt-get', '--assume-yes'] + list(options) + ['install']
    if isinstance(packages, basestring):
        cmd.append(packages)
    else:
        cmd.extend(packages)
    log("Installing {} with options: {}".format(packages,
                                                options))
    env = os.environ.copy()
    # Keep dpkg from prompting inside a hook.
    env.setdefault('DEBIAN_FRONTEND', 'noninteractive')
    runner = subprocess.check_call if fatal else subprocess.call
    runner(cmd, env=env)
1087 | 98 | |||
1088 | 99 | |||
def apt_update(fatal=False):
    """Update local apt cache"""
    runner = subprocess.check_call if fatal else subprocess.call
    runner(['apt-get', 'update'])


def apt_purge(packages, fatal=False):
    """Purge one or more packages"""
    cmd = ['apt-get', '--assume-yes', 'purge']
    if isinstance(packages, basestring):
        cmd.append(packages)
    else:
        cmd.extend(packages)
    log("Purging {}".format(packages))
    runner = subprocess.check_call if fatal else subprocess.call
    runner(cmd)


def apt_hold(packages, fatal=False):
    """Hold one or more packages"""
    cmd = ['apt-mark', 'hold']
    if isinstance(packages, basestring):
        cmd.append(packages)
    else:
        cmd.extend(packages)
    log("Holding {}".format(packages))
    runner = subprocess.check_call if fatal else subprocess.call
    runner(cmd)
1124 | 135 | |||
1125 | 136 | |||
def add_source(source, key=None):
    """Add a package source to the system.

    :param source: one of
        - 'ppa:...', 'http:...', 'deb ...', 'cloud-archive:...'
          (handed to add-apt-repository)
        - 'cloud:<pocket>' (Ubuntu Cloud Archive pocket)
        - 'proposed' (current release's -proposed pocket)
    :param key: optional GPG key id fetched from the Ubuntu keyserver.
    :raises SourceConfigError: for an unknown cloud: pocket.

    Fix: a None source (unset charm config) is now a logged no-op instead
    of crashing with AttributeError on source.startswith.
    """
    if source is None:
        log('Source is None, skipping add_source')
        return
    if (source.startswith('ppa:') or
            source.startswith('http:') or
            source.startswith('deb ') or
            source.startswith('cloud-archive:')):
        subprocess.check_call(['add-apt-repository', '--yes', source])
    elif source.startswith('cloud:'):
        apt_install(filter_installed_packages(['ubuntu-cloud-keyring']),
                    fatal=True)
        pocket = source.split(':')[-1]
        if pocket not in CLOUD_ARCHIVE_POCKETS:
            raise SourceConfigError(
                'Unsupported cloud: source option %s' %
                pocket)
        actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket]
        with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt:
            apt.write(CLOUD_ARCHIVE.format(actual_pocket))
    elif source == 'proposed':
        release = lsb_release()['DISTRIB_CODENAME']
        with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt:
            apt.write(PROPOSED_POCKET.format(release))
    if key:
        # Fix: 'import' is not an apt-key subcommand (see apt-key(8));
        # fetch the key id from the Ubuntu keyserver instead.
        subprocess.check_call(['apt-key', 'adv', '--keyserver',
                               'keyserver.ubuntu.com', '--recv', key])
1149 | 160 | |||
1150 | 161 | |||
class SourceConfigError(Exception):
    """Raised when charm install source/key configuration is invalid."""
1153 | 164 | |||
1154 | 165 | |||
def configure_sources(update=False,
                      sources_var='install_sources',
                      keys_var='install_keys'):
    """
    Configure multiple sources from charm configuration

    Example config:
        install_sources:
        - "ppa:foo"
        - "http://example.com/repo precise main"
        install_keys:
        - null
        - "a1b2c3d4"

    Note that 'null' (a.k.a. None) should not be quoted.

    :raises SourceConfigError: when sources and keys lists differ in
        length.

    Fix: a list of sources with no keys configured previously raised
    TypeError at len(None); keys now default to None per source.
    """
    sources = safe_load(config(sources_var))
    keys = config(keys_var)
    if keys is not None:
        keys = safe_load(keys)
    # Normalise both to parallel lists.
    if isinstance(sources, basestring):
        sources = [sources]
    if keys is None:
        keys = [None] * len(sources)
    elif isinstance(keys, basestring):
        keys = [keys]
    if len(sources) != len(keys):
        msg = 'Install sources and keys lists are different lengths'
        raise SourceConfigError(msg)
    for source, key in zip(sources, keys):
        add_source(source, key)
    if update:
        apt_update(fatal=True)
1186 | 197 | |||
# The order of this list is very important. Handlers should be listed in from
# least- to most-specific URL matching.
FETCH_HANDLERS = (
    'charmhelpers.fetch.archiveurl.ArchiveUrlFetchHandler',
    'charmhelpers.fetch.bzrurl.BzrUrlFetchHandler',
)


class UnhandledSource(Exception):
    """Raised when no fetch handler can process a given source URL."""
1197 | 208 | |||
1198 | 209 | |||
def install_remote(source):
    """
    Install a file tree from a remote source

    The specified source should be a url of the form:
        scheme://[host]/path[#[option=value][&...]]

    Schemes supported are based on this modules submodules
    Options supported are submodule-specific"""
    # We ONLY check for True here because can_handle may return a string
    # explaining why it can't handle a given source.
    capable = [h for h in plugins() if h.can_handle(source) is True]
    installed_to = None
    for handler in capable:
        try:
            installed_to = handler.install(source)
        except UnhandledSource:
            pass
    if not installed_to:
        raise UnhandledSource("No handler found for source {}".format(source))
    return installed_to
1220 | 231 | |||
1221 | 232 | |||
def install_from_config(config_var_name):
    """Fetch and install the source named by a charm config option."""
    return install_remote(config()[config_var_name])
1226 | 237 | |||
1227 | 238 | |||
class BaseFetchHandler(object):
    """Base class for FetchHandler implementations in fetch plugins"""

    def can_handle(self, source):
        """Returns True if the source can be handled. Otherwise returns
        a string explaining why it cannot"""
        return "Wrong source type"

    def install(self, source):
        """Try to download and unpack the source. Return the path to the
        unpacked files or raise UnhandledSource."""
        raise UnhandledSource("Wrong source type {}".format(source))

    def parse_url(self, url):
        """Split *url* into its urlparse components."""
        return urlparse(url)

    def base_url(self, url):
        """Return url without querystring or fragment"""
        parts = list(self.parse_url(url))
        # Blank out query (index 4) and fragment (index 5).
        parts[4] = ''
        parts[5] = ''
        return urlunparse(parts)
1250 | 261 | |||
1251 | 262 | |||
def plugins(fetch_handlers=None):
    """Instantiate the configured fetch handler classes.

    Handlers whose module or class cannot be resolved are logged and
    skipped rather than failing the whole lookup.
    """
    handler_names = fetch_handlers or FETCH_HANDLERS
    plugin_list = []
    for handler_name in handler_names:
        package, classname = handler_name.rsplit('.', 1)
        try:
            module = importlib.import_module(package)
            plugin_list.append(getattr(module, classname)())
        except (ImportError, AttributeError):
            # Skip missing plugins so that they can be ommitted from
            # installation if desired
            log("FetchHandler {} not found, skipping plugin".format(
                handler_name))
    return plugin_list
1269 | 0 | 280 | ||
1270 | === added file 'hooks/charmhelpers/fetch/archiveurl.py' | |||
1271 | --- hooks/charmhelpers/fetch/archiveurl.py 1970-01-01 00:00:00 +0000 | |||
1272 | +++ hooks/charmhelpers/fetch/archiveurl.py 2014-01-24 17:20:41 +0000 | |||
1273 | @@ -0,0 +1,48 @@ | |||
1274 | 1 | import os | ||
1275 | 2 | import urllib2 | ||
1276 | 3 | from charmhelpers.fetch import ( | ||
1277 | 4 | BaseFetchHandler, | ||
1278 | 5 | UnhandledSource | ||
1279 | 6 | ) | ||
1280 | 7 | from charmhelpers.payload.archive import ( | ||
1281 | 8 | get_archive_handler, | ||
1282 | 9 | extract, | ||
1283 | 10 | ) | ||
1284 | 11 | from charmhelpers.core.host import mkdir | ||
1285 | 12 | |||
1286 | 13 | |||
class ArchiveUrlFetchHandler(BaseFetchHandler):
    """Handler for archives via generic URLs"""

    def can_handle(self, source):
        """True for http/https/ftp/file URLs with a known archive type."""
        url_parts = self.parse_url(source)
        if url_parts.scheme not in ('http', 'https', 'ftp', 'file'):
            return "Wrong source type"
        return bool(get_archive_handler(self.base_url(source)))

    def download(self, source, dest):
        """Fetch *source* to the local file *dest*.

        Propagates all exceptions (URLError, OSError, etc); a partial
        download is removed before re-raising.
        """
        response = urllib2.urlopen(source)
        try:
            with open(dest, 'w') as dest_file:
                dest_file.write(response.read())
        except Exception as e:
            if os.path.isfile(dest):
                os.unlink(dest)
            raise e

    def install(self, source):
        """Download the archive into $CHARM_DIR/fetched and extract it."""
        url_parts = self.parse_url(source)
        dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched')
        if not os.path.exists(dest_dir):
            mkdir(dest_dir, perms=0o755)
        dld_file = os.path.join(dest_dir, os.path.basename(url_parts.path))
        try:
            self.download(source, dld_file)
        except urllib2.URLError as e:
            raise UnhandledSource(e.reason)
        except OSError as e:
            raise UnhandledSource(e.strerror)
        return extract(dld_file)
1322 | 0 | 49 | ||
1323 | === added file 'hooks/charmhelpers/fetch/bzrurl.py' | |||
1324 | --- hooks/charmhelpers/fetch/bzrurl.py 1970-01-01 00:00:00 +0000 | |||
1325 | +++ hooks/charmhelpers/fetch/bzrurl.py 2014-01-24 17:20:41 +0000 | |||
1326 | @@ -0,0 +1,49 @@ | |||
1327 | 1 | import os | ||
1328 | 2 | from charmhelpers.fetch import ( | ||
1329 | 3 | BaseFetchHandler, | ||
1330 | 4 | UnhandledSource | ||
1331 | 5 | ) | ||
1332 | 6 | from charmhelpers.core.host import mkdir | ||
1333 | 7 | |||
1334 | 8 | try: | ||
1335 | 9 | from bzrlib.branch import Branch | ||
1336 | 10 | except ImportError: | ||
1337 | 11 | from charmhelpers.fetch import apt_install | ||
1338 | 12 | apt_install("python-bzrlib") | ||
1339 | 13 | from bzrlib.branch import Branch | ||
1340 | 14 | |||
1341 | 15 | |||
class BzrUrlFetchHandler(BaseFetchHandler):
    """Handler for bazaar branches via generic and lp URLs"""

    def can_handle(self, source):
        """True only for bzr+ssh:// and lp: URLs."""
        return self.parse_url(source).scheme in ('bzr+ssh', 'lp')

    def branch(self, source, dest):
        """Branch *source* into the directory *dest*."""
        if not self.can_handle(source):
            raise UnhandledSource("Cannot handle {}".format(source))
        # If we use lp:branchname scheme we need to load plugins
        if self.parse_url(source).scheme == "lp":
            from bzrlib.plugin import load_plugins
            load_plugins()
        remote_branch = Branch.open(source)
        remote_branch.bzrdir.sprout(dest).open_branch()

    def install(self, source):
        """Branch the source under $CHARM_DIR/fetched; return the path."""
        branch_name = self.parse_url(source).path.strip("/").split("/")[-1]
        dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched",
                                branch_name)
        if not os.path.exists(dest_dir):
            mkdir(dest_dir, perms=0o755)
        try:
            self.branch(source, dest_dir)
        except OSError as e:
            raise UnhandledSource(e.strerror)
        return dest_dir
1376 | 0 | 50 | ||
1377 | === added directory 'hooks/charmhelpers/payload' | |||
1378 | === added file 'hooks/charmhelpers/payload/__init__.py' | |||
1379 | --- hooks/charmhelpers/payload/__init__.py 1970-01-01 00:00:00 +0000 | |||
1380 | +++ hooks/charmhelpers/payload/__init__.py 2014-01-24 17:20:41 +0000 | |||
1381 | @@ -0,0 +1,1 @@ | |||
1382 | 1 | "Tools for working with files injected into a charm just before deployment." | ||
1383 | 0 | 2 | ||
1384 | === added file 'hooks/charmhelpers/payload/execd.py' | |||
1385 | --- hooks/charmhelpers/payload/execd.py 1970-01-01 00:00:00 +0000 | |||
1386 | +++ hooks/charmhelpers/payload/execd.py 2014-01-24 17:20:41 +0000 | |||
1387 | @@ -0,0 +1,50 @@ | |||
1388 | 1 | #!/usr/bin/env python | ||
1389 | 2 | |||
1390 | 3 | import os | ||
1391 | 4 | import sys | ||
1392 | 5 | import subprocess | ||
1393 | 6 | from charmhelpers.core import hookenv | ||
1394 | 7 | |||
1395 | 8 | |||
def default_execd_dir():
    """Return $CHARM_DIR/exec.d, the default execd module location."""
    return os.path.join(os.environ['CHARM_DIR'], 'exec.d')


def execd_module_paths(execd_dir=None):
    """Generate a list of full paths to modules within execd_dir."""
    execd_dir = execd_dir or default_execd_dir()
    # A missing directory simply yields nothing.
    if os.path.exists(execd_dir):
        for subpath in os.listdir(execd_dir):
            module = os.path.join(execd_dir, subpath)
            if os.path.isdir(module):
                yield module
1412 | 25 | |||
1413 | 26 | |||
def execd_submodule_paths(command, execd_dir=None):
    """Generate a list of full paths to the specified command within exec_dir.
    """
    for module_path in execd_module_paths(execd_dir):
        candidate = os.path.join(module_path, command)
        # Only executable regular files qualify.
        if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
            yield candidate


def execd_run(command, execd_dir=None, die_on_error=False, stderr=None):
    """Run command for each module within execd_dir which defines it."""
    for submodule_path in execd_submodule_paths(command, execd_dir):
        try:
            subprocess.check_call(submodule_path, shell=True, stderr=stderr)
        except subprocess.CalledProcessError as e:
            hookenv.log("Error ({}) running {}. Output: {}".format(
                e.returncode, e.cmd, e.output))
            if die_on_error:
                sys.exit(e.returncode)
1433 | 46 | |||
1434 | 47 | |||
def execd_preinstall(execd_dir=None):
    """Run charm-pre-install for each module within execd_dir."""
    # Thin convenience wrapper around execd_run.
    execd_run('charm-pre-install', execd_dir=execd_dir)
1438 | 0 | 51 | ||
1439 | === modified file 'hooks/hooks.py' | |||
1440 | --- hooks/hooks.py 2013-01-11 09:15:51 +0000 | |||
1441 | +++ hooks/hooks.py 2014-01-24 17:20:41 +0000 | |||
1442 | @@ -14,7 +14,33 @@ | |||
1443 | 14 | import os | 14 | import os |
1444 | 15 | import ceph | 15 | import ceph |
1445 | 16 | 16 | ||
1447 | 17 | import utils | 17 | from charmhelpers.core.hookenv import ( |
1448 | 18 | relation_get, | ||
1449 | 19 | relation_ids, | ||
1450 | 20 | related_units, | ||
1451 | 21 | config, | ||
1452 | 22 | unit_get, | ||
1453 | 23 | open_port, | ||
1454 | 24 | relation_set, | ||
1455 | 25 | log, | ||
1456 | 26 | Hooks, UnregisteredHookError, | ||
1457 | 27 | ) | ||
1458 | 28 | from charmhelpers.fetch import ( | ||
1459 | 29 | apt_update, | ||
1460 | 30 | apt_install, | ||
1461 | 31 | add_source, | ||
1462 | 32 | ) | ||
1463 | 33 | from utils import ( | ||
1464 | 34 | render_template, | ||
1465 | 35 | get_host_ip, | ||
1466 | 36 | enable_pocket, | ||
1467 | 37 | is_apache_24 | ||
1468 | 38 | ) | ||
1469 | 39 | |||
1470 | 40 | from charmhelpers.payload.execd import execd_preinstall | ||
1471 | 41 | from socket import gethostname as get_unit_hostname | ||
1472 | 42 | |||
1473 | 43 | hooks = Hooks() | ||
1474 | 18 | 44 | ||
1475 | 19 | 45 | ||
1476 | 20 | def install_www_scripts(): | 46 | def install_www_scripts(): |
1477 | @@ -22,19 +48,20 @@ | |||
1478 | 22 | shutil.copy(x, '/var/www/') | 48 | shutil.copy(x, '/var/www/') |
1479 | 23 | 49 | ||
1480 | 24 | 50 | ||
1484 | 25 | NSS_DIR='/var/lib/ceph/nss' | 51 | NSS_DIR = '/var/lib/ceph/nss' |
1485 | 26 | 52 | ||
1486 | 27 | 53 | ||
1487 | 54 | @hooks.hook('install') | ||
1488 | 28 | def install(): | 55 | def install(): |
1496 | 29 | utils.juju_log('INFO', 'Begin install hook.') | 56 | execd_preinstall() |
1497 | 30 | utils.enable_pocket('multiverse') | 57 | enable_pocket('multiverse') |
1498 | 31 | utils.configure_source() | 58 | add_source(config('source'), config('key')) |
1499 | 32 | utils.install('radosgw', | 59 | apt_update(fatal=True) |
1500 | 33 | 'libapache2-mod-fastcgi', | 60 | apt_install(['radosgw', |
1501 | 34 | 'apache2', | 61 | 'libapache2-mod-fastcgi', |
1502 | 35 | 'ntp') | 62 | 'apache2', |
1503 | 63 | 'ntp'], fatal=True) | ||
1504 | 36 | os.makedirs(NSS_DIR) | 64 | os.makedirs(NSS_DIR) |
1505 | 37 | utils.juju_log('INFO', 'End install hook.') | ||
1506 | 38 | 65 | ||
1507 | 39 | 66 | ||
1508 | 40 | def emit_cephconf(): | 67 | def emit_cephconf(): |
1509 | @@ -45,68 +72,70 @@ | |||
1510 | 45 | cephcontext = { | 72 | cephcontext = { |
1511 | 46 | 'auth_supported': get_auth() or 'none', | 73 | 'auth_supported': get_auth() or 'none', |
1512 | 47 | 'mon_hosts': ' '.join(get_mon_hosts()), | 74 | 'mon_hosts': ' '.join(get_mon_hosts()), |
1514 | 48 | 'hostname': utils.get_unit_hostname(), | 75 | 'hostname': get_unit_hostname(), |
1515 | 49 | 'version': ceph.get_ceph_version('radosgw') | 76 | 'version': ceph.get_ceph_version('radosgw') |
1519 | 50 | } | 77 | } |
1520 | 51 | 78 | ||
1521 | 52 | # Check to ensure that correct version of ceph is | 79 | # Check to ensure that correct version of ceph is |
1522 | 53 | # in use | 80 | # in use |
1524 | 54 | if ceph.get_ceph_version('radosgw') >= "0.55": | 81 | if ceph.get_ceph_version('radosgw') >= "0.55": |
1525 | 55 | # Add keystone configuration if found | 82 | # Add keystone configuration if found |
1526 | 56 | ks_conf = get_keystone_conf() | 83 | ks_conf = get_keystone_conf() |
1527 | 57 | if ks_conf: | 84 | if ks_conf: |
1528 | 58 | cephcontext.update(ks_conf) | 85 | cephcontext.update(ks_conf) |
1529 | 59 | 86 | ||
1530 | 60 | with open('/etc/ceph/ceph.conf', 'w') as cephconf: | 87 | with open('/etc/ceph/ceph.conf', 'w') as cephconf: |
1532 | 61 | cephconf.write(utils.render_template('ceph.conf', cephcontext)) | 88 | cephconf.write(render_template('ceph.conf', cephcontext)) |
1533 | 62 | 89 | ||
1534 | 63 | 90 | ||
1535 | 64 | def emit_apacheconf(): | 91 | def emit_apacheconf(): |
1536 | 65 | apachecontext = { | 92 | apachecontext = { |
1541 | 66 | "hostname": utils.unit_get('private-address') | 93 | "hostname": unit_get('private-address') |
1542 | 67 | } | 94 | } |
1543 | 68 | with open('/etc/apache2/sites-available/rgw', 'w') as apacheconf: | 95 | site_conf = '/etc/apache2/sites-available/rgw' |
1544 | 69 | apacheconf.write(utils.render_template('rgw', apachecontext)) | 96 | if is_apache_24(): |
1545 | 97 | site_conf = '/etc/apache2/sites-available/rgw.conf' | ||
1546 | 98 | with open(site_conf, 'w') as apacheconf: | ||
1547 | 99 | apacheconf.write(render_template('rgw', apachecontext)) | ||
1548 | 70 | 100 | ||
1549 | 71 | 101 | ||
1550 | 72 | def apache_sites(): | 102 | def apache_sites(): |
1553 | 73 | utils.juju_log('INFO', 'Begin apache_sites.') | 103 | if is_apache_24(): |
1554 | 74 | subprocess.check_call(['a2dissite', 'default']) | 104 | subprocess.check_call(['a2dissite', '000-default']) |
1555 | 105 | else: | ||
1556 | 106 | subprocess.check_call(['a2dissite', 'default']) | ||
1557 | 75 | subprocess.check_call(['a2ensite', 'rgw']) | 107 | subprocess.check_call(['a2ensite', 'rgw']) |
1558 | 76 | utils.juju_log('INFO', 'End apache_sites.') | ||
1559 | 77 | 108 | ||
1560 | 78 | 109 | ||
1561 | 79 | def apache_modules(): | 110 | def apache_modules(): |
1562 | 80 | utils.juju_log('INFO', 'Begin apache_sites.') | ||
1563 | 81 | subprocess.check_call(['a2enmod', 'fastcgi']) | 111 | subprocess.check_call(['a2enmod', 'fastcgi']) |
1564 | 82 | subprocess.check_call(['a2enmod', 'rewrite']) | 112 | subprocess.check_call(['a2enmod', 'rewrite']) |
1565 | 83 | utils.juju_log('INFO', 'End apache_sites.') | ||
1566 | 84 | 113 | ||
1567 | 85 | 114 | ||
1568 | 86 | def apache_reload(): | 115 | def apache_reload(): |
1569 | 87 | subprocess.call(['service', 'apache2', 'reload']) | 116 | subprocess.call(['service', 'apache2', 'reload']) |
1570 | 88 | 117 | ||
1571 | 89 | 118 | ||
1572 | 119 | @hooks.hook('upgrade-charm', | ||
1573 | 120 | 'config-changed') | ||
1574 | 90 | def config_changed(): | 121 | def config_changed(): |
1575 | 91 | utils.juju_log('INFO', 'Begin config-changed hook.') | ||
1576 | 92 | emit_cephconf() | 122 | emit_cephconf() |
1577 | 93 | emit_apacheconf() | 123 | emit_apacheconf() |
1578 | 94 | install_www_scripts() | 124 | install_www_scripts() |
1579 | 95 | apache_sites() | 125 | apache_sites() |
1580 | 96 | apache_modules() | 126 | apache_modules() |
1581 | 97 | apache_reload() | 127 | apache_reload() |
1582 | 98 | utils.juju_log('INFO', 'End config-changed hook.') | ||
1583 | 99 | 128 | ||
1584 | 100 | 129 | ||
1585 | 101 | def get_mon_hosts(): | 130 | def get_mon_hosts(): |
1586 | 102 | hosts = [] | 131 | hosts = [] |
1589 | 103 | for relid in utils.relation_ids('mon'): | 132 | for relid in relation_ids('mon'): |
1590 | 104 | for unit in utils.relation_list(relid): | 133 | for unit in related_units(relid): |
1591 | 105 | hosts.append( | 134 | hosts.append( |
1596 | 106 | '{}:6789'.format(utils.get_host_ip( | 135 | '{}:6789'.format(get_host_ip( |
1597 | 107 | utils.relation_get('private-address', | 136 | relation_get('private-address', |
1598 | 108 | unit, relid))) | 137 | unit, relid))) |
1599 | 109 | ) | 138 | ) |
1600 | 110 | 139 | ||
1601 | 111 | hosts.sort() | 140 | hosts.sort() |
1602 | 112 | return hosts | 141 | return hosts |
1603 | @@ -117,100 +146,90 @@ | |||
1604 | 117 | 146 | ||
1605 | 118 | 147 | ||
1606 | 119 | def get_conf(name): | 148 | def get_conf(name): |
1611 | 120 | for relid in utils.relation_ids('mon'): | 149 | for relid in relation_ids('mon'): |
1612 | 121 | for unit in utils.relation_list(relid): | 150 | for unit in related_units(relid): |
1613 | 122 | conf = utils.relation_get(name, | 151 | conf = relation_get(name, |
1614 | 123 | unit, relid) | 152 | unit, relid) |
1615 | 124 | if conf: | 153 | if conf: |
1616 | 125 | return conf | 154 | return conf |
1617 | 126 | return None | 155 | return None |
1618 | 127 | 156 | ||
1619 | 157 | |||
1620 | 128 | def get_keystone_conf(): | 158 | def get_keystone_conf(): |
1623 | 129 | for relid in utils.relation_ids('identity-service'): | 159 | for relid in relation_ids('identity-service'): |
1624 | 130 | for unit in utils.relation_list(relid): | 160 | for unit in related_units(relid): |
1625 | 131 | ks_auth = { | 161 | ks_auth = { |
1626 | 132 | 'auth_type': 'keystone', | 162 | 'auth_type': 'keystone', |
1627 | 133 | 'auth_protocol': 'http', | 163 | 'auth_protocol': 'http', |
1634 | 134 | 'auth_host': utils.relation_get('auth_host', unit, relid), | 164 | 'auth_host': relation_get('auth_host', unit, relid), |
1635 | 135 | 'auth_port': utils.relation_get('auth_port', unit, relid), | 165 | 'auth_port': relation_get('auth_port', unit, relid), |
1636 | 136 | 'admin_token': utils.relation_get('admin_token', unit, relid), | 166 | 'admin_token': relation_get('admin_token', unit, relid), |
1637 | 137 | 'user_roles': utils.config_get('operator-roles'), | 167 | 'user_roles': config('operator-roles'), |
1638 | 138 | 'cache_size': utils.config_get('cache-size'), | 168 | 'cache_size': config('cache-size'), |
1639 | 139 | 'revocation_check_interval': utils.config_get('revocation-check-interval') | 169 | 'revocation_check_interval': |
1640 | 170 | config('revocation-check-interval') | ||
1641 | 140 | } | 171 | } |
1642 | 141 | if None not in ks_auth.itervalues(): | 172 | if None not in ks_auth.itervalues(): |
1643 | 142 | return ks_auth | 173 | return ks_auth |
1644 | 143 | return None | 174 | return None |
1645 | 144 | 175 | ||
1646 | 145 | 176 | ||
1647 | 177 | @hooks.hook('mon-relation-departed', | ||
1648 | 178 | 'mon-relation-changed') | ||
1649 | 146 | def mon_relation(): | 179 | def mon_relation(): |
1650 | 147 | utils.juju_log('INFO', 'Begin mon-relation hook.') | ||
1651 | 148 | emit_cephconf() | 180 | emit_cephconf() |
1653 | 149 | key = utils.relation_get('radosgw_key') | 181 | key = relation_get('radosgw_key') |
1654 | 150 | if key: | 182 | if key: |
1655 | 151 | ceph.import_radosgw_key(key) | 183 | ceph.import_radosgw_key(key) |
1656 | 152 | restart() # TODO figure out a better way todo this | 184 | restart() # TODO figure out a better way todo this |
1660 | 153 | utils.juju_log('INFO', 'End mon-relation hook.') | 185 | |
1661 | 154 | 186 | ||
1662 | 155 | 187 | @hooks.hook('gateway-relation-joined') | |
1663 | 156 | def gateway_relation(): | 188 | def gateway_relation(): |
1673 | 157 | utils.juju_log('INFO', 'Begin gateway-relation hook.') | 189 | relation_set(hostname=unit_get('private-address'), |
1674 | 158 | utils.relation_set(hostname=utils.unit_get('private-address'), | 190 | port=80) |
1666 | 159 | port=80) | ||
1667 | 160 | utils.juju_log('INFO', 'Begin gateway-relation hook.') | ||
1668 | 161 | |||
1669 | 162 | |||
1670 | 163 | def upgrade_charm(): | ||
1671 | 164 | utils.juju_log('INFO', 'Begin upgrade-charm hook.') | ||
1672 | 165 | utils.juju_log('INFO', 'End upgrade-charm hook.') | ||
1675 | 166 | 191 | ||
1676 | 167 | 192 | ||
1677 | 168 | def start(): | 193 | def start(): |
1678 | 169 | subprocess.call(['service', 'radosgw', 'start']) | 194 | subprocess.call(['service', 'radosgw', 'start']) |
1680 | 170 | utils.expose(port=80) | 195 | open_port(port=80) |
1681 | 171 | 196 | ||
1682 | 172 | 197 | ||
1683 | 173 | def stop(): | 198 | def stop(): |
1684 | 174 | subprocess.call(['service', 'radosgw', 'stop']) | 199 | subprocess.call(['service', 'radosgw', 'stop']) |
1686 | 175 | utils.expose(port=80) | 200 | open_port(port=80) |
1687 | 176 | 201 | ||
1688 | 177 | 202 | ||
1689 | 178 | def restart(): | 203 | def restart(): |
1690 | 179 | subprocess.call(['service', 'radosgw', 'restart']) | 204 | subprocess.call(['service', 'radosgw', 'restart']) |
1694 | 180 | utils.expose(port=80) | 205 | open_port(port=80) |
1695 | 181 | 206 | ||
1696 | 182 | 207 | ||
1697 | 208 | @hooks.hook('identity-service-relation-joined', | ||
1698 | 209 | 'identity-service-relation-changed') | ||
1699 | 183 | def identity_joined(relid=None): | 210 | def identity_joined(relid=None): |
1700 | 184 | if ceph.get_ceph_version('radosgw') < "0.55": | 211 | if ceph.get_ceph_version('radosgw') < "0.55": |
1703 | 185 | utils.juju_log('ERROR', | 212 | log('Integration with keystone requires ceph >= 0.55') |
1702 | 186 | 'Integration with keystone requires ceph >= 0.55') | ||
1704 | 187 | sys.exit(1) | 213 | sys.exit(1) |
1705 | 188 | 214 | ||
1707 | 189 | hostname = utils.unit_get('private-address') | 215 | hostname = unit_get('private-address') |
1708 | 190 | admin_url = 'http://{}:80/swift'.format(hostname) | 216 | admin_url = 'http://{}:80/swift'.format(hostname) |
1709 | 191 | internal_url = public_url = '{}/v1'.format(admin_url) | 217 | internal_url = public_url = '{}/v1'.format(admin_url) |
1716 | 192 | utils.relation_set(service='swift', | 218 | relation_set(service='swift', |
1717 | 193 | region=utils.config_get('region'), | 219 | region=config('region'), |
1718 | 194 | public_url=public_url, internal_url=internal_url, | 220 | public_url=public_url, internal_url=internal_url, |
1719 | 195 | admin_url=admin_url, | 221 | admin_url=admin_url, |
1720 | 196 | requested_roles=utils.config_get('operator-roles'), | 222 | requested_roles=config('operator-roles'), |
1721 | 197 | rid=relid) | 223 | rid=relid) |
1722 | 198 | 224 | ||
1723 | 199 | 225 | ||
1724 | 200 | def identity_changed(): | 226 | def identity_changed(): |
1725 | 201 | emit_cephconf() | 227 | emit_cephconf() |
1741 | 202 | restart() | 228 | restart() |
1742 | 203 | 229 | ||
1743 | 204 | 230 | ||
1744 | 205 | utils.do_hooks({ | 231 | if __name__ == '__main__': |
1745 | 206 | 'install': install, | 232 | try: |
1746 | 207 | 'config-changed': config_changed, | 233 | hooks.execute(sys.argv) |
1747 | 208 | 'mon-relation-departed': mon_relation, | 234 | except UnregisteredHookError as e: |
1748 | 209 | 'mon-relation-changed': mon_relation, | 235 | log('Unknown hook {} - skipping.'.format(e)) |
1734 | 210 | 'gateway-relation-joined': gateway_relation, | ||
1735 | 211 | 'upgrade-charm': config_changed, # same function ATM | ||
1736 | 212 | 'identity-service-relation-joined': identity_joined, | ||
1737 | 213 | 'identity-service-relation-changed': identity_changed | ||
1738 | 214 | }) | ||
1739 | 215 | |||
1740 | 216 | sys.exit(0) | ||
1749 | 217 | 236 | ||
1750 | === modified file 'hooks/utils.py' | |||
1751 | --- hooks/utils.py 2013-09-24 11:29:07 +0000 | |||
1752 | +++ hooks/utils.py 2014-01-24 17:20:41 +0000 | |||
1753 | @@ -7,97 +7,36 @@ | |||
1754 | 7 | # Paul Collins <paul.collins@canonical.com> | 7 | # Paul Collins <paul.collins@canonical.com> |
1755 | 8 | # | 8 | # |
1756 | 9 | 9 | ||
1757 | 10 | import os | ||
1758 | 11 | import subprocess | ||
1759 | 12 | import socket | 10 | import socket |
1760 | 13 | import sys | ||
1761 | 14 | import re | 11 | import re |
1785 | 15 | 12 | import os | |
1786 | 16 | 13 | ||
1787 | 17 | def do_hooks(hooks): | 14 | from charmhelpers.core.hookenv import unit_get |
1788 | 18 | hook = os.path.basename(sys.argv[0]) | 15 | from charmhelpers.fetch import apt_install |
1766 | 19 | |||
1767 | 20 | try: | ||
1768 | 21 | hook_func = hooks[hook] | ||
1769 | 22 | except KeyError: | ||
1770 | 23 | juju_log('INFO', | ||
1771 | 24 | "This charm doesn't know how to handle '{}'.".format(hook)) | ||
1772 | 25 | else: | ||
1773 | 26 | hook_func() | ||
1774 | 27 | |||
1775 | 28 | |||
1776 | 29 | def install(*pkgs): | ||
1777 | 30 | cmd = [ | ||
1778 | 31 | 'apt-get', | ||
1779 | 32 | '-y', | ||
1780 | 33 | 'install' | ||
1781 | 34 | ] | ||
1782 | 35 | for pkg in pkgs: | ||
1783 | 36 | cmd.append(pkg) | ||
1784 | 37 | subprocess.check_call(cmd) | ||
1789 | 38 | 16 | ||
1790 | 39 | TEMPLATES_DIR = 'templates' | 17 | TEMPLATES_DIR = 'templates' |
1791 | 40 | 18 | ||
1792 | 41 | try: | 19 | try: |
1793 | 42 | import jinja2 | 20 | import jinja2 |
1794 | 43 | except ImportError: | 21 | except ImportError: |
1796 | 44 | install('python-jinja2') | 22 | apt_install('python-jinja2', fatal=True) |
1797 | 45 | import jinja2 | 23 | import jinja2 |
1798 | 46 | 24 | ||
1799 | 47 | try: | 25 | try: |
1800 | 48 | import dns.resolver | 26 | import dns.resolver |
1801 | 49 | except ImportError: | 27 | except ImportError: |
1803 | 50 | install('python-dnspython') | 28 | apt_install('python-dnspython', fatal=True) |
1804 | 51 | import dns.resolver | 29 | import dns.resolver |
1805 | 52 | 30 | ||
1806 | 53 | 31 | ||
1807 | 54 | def render_template(template_name, context, template_dir=TEMPLATES_DIR): | 32 | def render_template(template_name, context, template_dir=TEMPLATES_DIR): |
1808 | 55 | templates = jinja2.Environment( | 33 | templates = jinja2.Environment( |
1811 | 56 | loader=jinja2.FileSystemLoader(template_dir) | 34 | loader=jinja2.FileSystemLoader(template_dir) |
1812 | 57 | ) | 35 | ) |
1813 | 58 | template = templates.get_template(template_name) | 36 | template = templates.get_template(template_name) |
1814 | 59 | return template.render(context) | 37 | return template.render(context) |
1815 | 60 | 38 | ||
1816 | 61 | 39 | ||
1817 | 62 | CLOUD_ARCHIVE = \ | ||
1818 | 63 | """ # Ubuntu Cloud Archive | ||
1819 | 64 | deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main | ||
1820 | 65 | """ | ||
1821 | 66 | |||
1822 | 67 | |||
1823 | 68 | def configure_source(): | ||
1824 | 69 | source = str(config_get('source')) | ||
1825 | 70 | if not source: | ||
1826 | 71 | return | ||
1827 | 72 | if source.startswith('ppa:'): | ||
1828 | 73 | cmd = [ | ||
1829 | 74 | 'add-apt-repository', | ||
1830 | 75 | source | ||
1831 | 76 | ] | ||
1832 | 77 | subprocess.check_call(cmd) | ||
1833 | 78 | if source.startswith('cloud:'): | ||
1834 | 79 | install('ubuntu-cloud-keyring') | ||
1835 | 80 | pocket = source.split(':')[1] | ||
1836 | 81 | with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt: | ||
1837 | 82 | apt.write(CLOUD_ARCHIVE.format(pocket)) | ||
1838 | 83 | if source.startswith('http:'): | ||
1839 | 84 | with open('/etc/apt/sources.list.d/quantum.list', 'w') as apt: | ||
1840 | 85 | apt.write("deb " + source + "\n") | ||
1841 | 86 | key = config_get('key') | ||
1842 | 87 | if key: | ||
1843 | 88 | cmd = [ | ||
1844 | 89 | 'apt-key', | ||
1845 | 90 | 'adv', '--keyserver keyserver.ubuntu.com', | ||
1846 | 91 | '--recv-keys', key | ||
1847 | 92 | ] | ||
1848 | 93 | subprocess.check_call(cmd) | ||
1849 | 94 | cmd = [ | ||
1850 | 95 | 'apt-get', | ||
1851 | 96 | 'update' | ||
1852 | 97 | ] | ||
1853 | 98 | subprocess.check_call(cmd) | ||
1854 | 99 | |||
1855 | 100 | |||
1856 | 101 | def enable_pocket(pocket): | 40 | def enable_pocket(pocket): |
1857 | 102 | apt_sources = "/etc/apt/sources.list" | 41 | apt_sources = "/etc/apt/sources.list" |
1858 | 103 | with open(apt_sources, "r") as sources: | 42 | with open(apt_sources, "r") as sources: |
1859 | @@ -109,103 +48,6 @@ | |||
1860 | 109 | else: | 48 | else: |
1861 | 110 | sources.write(line) | 49 | sources.write(line) |
1862 | 111 | 50 | ||
1863 | 112 | # Protocols | ||
1864 | 113 | TCP = 'TCP' | ||
1865 | 114 | UDP = 'UDP' | ||
1866 | 115 | |||
1867 | 116 | |||
1868 | 117 | def expose(port, protocol='TCP'): | ||
1869 | 118 | cmd = [ | ||
1870 | 119 | 'open-port', | ||
1871 | 120 | '{}/{}'.format(port, protocol) | ||
1872 | 121 | ] | ||
1873 | 122 | subprocess.check_call(cmd) | ||
1874 | 123 | |||
1875 | 124 | |||
1876 | 125 | def juju_log(severity, message): | ||
1877 | 126 | cmd = [ | ||
1878 | 127 | 'juju-log', | ||
1879 | 128 | '--log-level', severity, | ||
1880 | 129 | message | ||
1881 | 130 | ] | ||
1882 | 131 | subprocess.check_call(cmd) | ||
1883 | 132 | |||
1884 | 133 | |||
1885 | 134 | def relation_ids(relation): | ||
1886 | 135 | cmd = [ | ||
1887 | 136 | 'relation-ids', | ||
1888 | 137 | relation | ||
1889 | 138 | ] | ||
1890 | 139 | return subprocess.check_output(cmd).split() # IGNORE:E1103 | ||
1891 | 140 | |||
1892 | 141 | |||
1893 | 142 | def relation_list(rid): | ||
1894 | 143 | cmd = [ | ||
1895 | 144 | 'relation-list', | ||
1896 | 145 | '-r', rid, | ||
1897 | 146 | ] | ||
1898 | 147 | return subprocess.check_output(cmd).split() # IGNORE:E1103 | ||
1899 | 148 | |||
1900 | 149 | |||
1901 | 150 | def relation_get(attribute, unit=None, rid=None): | ||
1902 | 151 | cmd = [ | ||
1903 | 152 | 'relation-get', | ||
1904 | 153 | ] | ||
1905 | 154 | if rid: | ||
1906 | 155 | cmd.append('-r') | ||
1907 | 156 | cmd.append(rid) | ||
1908 | 157 | cmd.append(attribute) | ||
1909 | 158 | if unit: | ||
1910 | 159 | cmd.append(unit) | ||
1911 | 160 | value = str(subprocess.check_output(cmd)).strip() | ||
1912 | 161 | if value == "": | ||
1913 | 162 | return None | ||
1914 | 163 | else: | ||
1915 | 164 | return value | ||
1916 | 165 | |||
1917 | 166 | |||
1918 | 167 | def relation_set(**kwargs): | ||
1919 | 168 | cmd = [ | ||
1920 | 169 | 'relation-set' | ||
1921 | 170 | ] | ||
1922 | 171 | args = [] | ||
1923 | 172 | for k, v in kwargs.items(): | ||
1924 | 173 | if k == 'rid' and v: | ||
1925 | 174 | cmd.append('-r') | ||
1926 | 175 | cmd.append(v) | ||
1927 | 176 | elif k != 'rid': | ||
1928 | 177 | args.append('{}={}'.format(k, v)) | ||
1929 | 178 | cmd += args | ||
1930 | 179 | subprocess.check_call(cmd) | ||
1931 | 180 | |||
1932 | 181 | |||
1933 | 182 | def unit_get(attribute): | ||
1934 | 183 | cmd = [ | ||
1935 | 184 | 'unit-get', | ||
1936 | 185 | attribute | ||
1937 | 186 | ] | ||
1938 | 187 | value = str(subprocess.check_output(cmd)).strip() | ||
1939 | 188 | if value == "": | ||
1940 | 189 | return None | ||
1941 | 190 | else: | ||
1942 | 191 | return value | ||
1943 | 192 | |||
1944 | 193 | |||
1945 | 194 | def config_get(attribute): | ||
1946 | 195 | cmd = [ | ||
1947 | 196 | 'config-get', | ||
1948 | 197 | attribute | ||
1949 | 198 | ] | ||
1950 | 199 | value = str(subprocess.check_output(cmd)).strip() | ||
1951 | 200 | if value == "": | ||
1952 | 201 | return None | ||
1953 | 202 | else: | ||
1954 | 203 | return value | ||
1955 | 204 | |||
1956 | 205 | |||
1957 | 206 | def get_unit_hostname(): | ||
1958 | 207 | return socket.gethostname() | ||
1959 | 208 | |||
1960 | 209 | 51 | ||
1961 | 210 | def get_host_ip(hostname=unit_get('private-address')): | 52 | def get_host_ip(hostname=unit_get('private-address')): |
1962 | 211 | try: | 53 | try: |
1963 | @@ -218,3 +60,10 @@ | |||
1964 | 218 | answers = dns.resolver.query(hostname, 'A') | 60 | answers = dns.resolver.query(hostname, 'A') |
1965 | 219 | if answers: | 61 | if answers: |
1966 | 220 | return answers[0].address | 62 | return answers[0].address |
1967 | 63 | |||
1968 | 64 | |||
1969 | 65 | def is_apache_24(): | ||
1970 | 66 | if os.path.exists('/etc/apache2/conf-available'): | ||
1971 | 67 | return True | ||
1972 | 68 | else: | ||
1973 | 69 | return False | ||
1974 | 221 | 70 | ||
1975 | === modified file 'metadata.yaml' | |||
1976 | --- metadata.yaml 2013-01-11 09:15:51 +0000 | |||
1977 | +++ metadata.yaml 2014-01-24 17:20:41 +0000 | |||
1978 | @@ -7,6 +7,8 @@ | |||
1979 | 7 | . | 7 | . |
1980 | 8 | This charm provides the RADOS HTTP gateway supporting S3 and Swift protocols | 8 | This charm provides the RADOS HTTP gateway supporting S3 and Swift protocols |
1981 | 9 | for object storage. | 9 | for object storage. |
1982 | 10 | categories: | ||
1983 | 11 | - misc | ||
1984 | 10 | requires: | 12 | requires: |
1985 | 11 | mon: | 13 | mon: |
1986 | 12 | interface: ceph-radosgw | 14 | interface: ceph-radosgw |
Deferring to openstack-charmers