Merge lp:~lazypower/charms/trusty/hadoop2-devel/python_rewrite into lp:~asanjar/charms/trusty/hadoop2-devel/trunk
Proposed by: Charles Butler
Status: Merged
Merged at revision: 2
Proposed branch: lp:~lazypower/charms/trusty/hadoop2-devel/python_rewrite
Merge into: lp:~asanjar/charms/trusty/hadoop2-devel/trunk
Diff against target: 2006 lines (+1779/-101), 22 files modified:
- Makefile (+19/-0)
- charm-helpers.yaml (+5/-0)
- config.yaml (+0/-9)
- files/hadoop/hdfs_format.sh (+15/-0)
- files/template.py (+56/-0)
- files/templates/defaults.j2 (+10/-0)
- files/upstart/setenv.sh (+0/-13)
- hooks/charmhelpers/core/fstab.py (+114/-0)
- hooks/charmhelpers/core/hookenv.py (+498/-0)
- hooks/charmhelpers/core/host.py (+325/-0)
- hooks/charmhelpers/fetch/__init__.py (+349/-0)
- hooks/charmhelpers/fetch/archiveurl.py (+63/-0)
- hooks/charmhelpers/fetch/bzrurl.py (+50/-0)
- hooks/charmhelpers/setup.py (+12/-0)
- hooks/common.py (+231/-0)
- hooks/install (+0/-58)
- hooks/relation-name-relation-broken (+0/-2)
- hooks/relation-name-relation-changed (+0/-9)
- hooks/relation-name-relation-departed (+0/-5)
- hooks/relation-name-relation-joined (+0/-5)
- tests/01_test_install_hook.py (+22/-0)
- tests/tests.yaml (+10/-0)
To merge this branch: bzr merge lp:~lazypower/charms/trusty/hadoop2-devel/python_rewrite
Related bugs: none
Reviewer: amir sanjar (Approve)
Review via email: mp+224647@code.launchpad.net
Commit message
Description of the change
Removes the class-based structure from the hook code, leaving it as a straightforward, self-contained procedural Python script.
Adds argparse to ease future modifications of the charm, and uses it to define the hook call stack.
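For context, below is a minimal sketch of the kind of argparse-driven hook dispatch described here. It is illustrative only: the hook names, function bodies, and dispatch table are placeholders, not the contents of the merged hooks/common.py.

```python
#!/usr/bin/env python
# Illustrative sketch only: placeholder hooks, not the merged hooks/common.py.
import argparse


def install():
    """Placeholder install hook."""
    print("installing hadoop")


def start():
    """Placeholder start hook."""
    print("starting hadoop services")


# Map hook names to the procedural functions that implement them.
HOOKS = {
    'install': install,
    'start': start,
}


def main():
    parser = argparse.ArgumentParser(description='hadoop2-devel charm hooks')
    parser.add_argument('hook', choices=sorted(HOOKS),
                        help='name of the hook to run')
    args = parser.parse_args()
    HOOKS[args.hook]()  # dispatch to the selected procedural hook


if __name__ == '__main__':
    main()
```

Invoked, for example, as `python common.py install` (the file name here is an assumption for illustration).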
10. By Charles Butler: Removed the old_install archived hook
amir sanjar (asanjar):
review: Approve
Preview Diff
1 | === added file 'Makefile' | |||
2 | --- Makefile 1970-01-01 00:00:00 +0000 | |||
3 | +++ Makefile 2014-06-26 20:05:25 +0000 | |||
4 | @@ -0,0 +1,19 @@ | |||
5 | 1 | #!/usr/bin/make | ||
6 | 2 | PYTHON := /usr/bin/env python | ||
7 | 3 | |||
8 | 4 | sync-charm-helpers: bin/charm_helpers_sync.py | ||
9 | 5 | @$(PYTHON) bin/charm_helpers_sync.py -c charm-helpers.yaml | ||
10 | 6 | |||
11 | 7 | bin/charm_helpers_sync.py: | ||
12 | 8 | @mkdir -p bin | ||
13 | 9 | @bzr cat lp:charm-helpers/tools/charm_helpers_sync/charm_helpers_sync.py > bin/charm_helpers_sync.py | ||
14 | 10 | |||
15 | 11 | autotest: | ||
16 | 12 | watchmedo shell-command --patterns="*.py" --recursive --command="make test" | ||
17 | 13 | |||
18 | 14 | test: | ||
19 | 15 | @nosetests tests/*.py | ||
20 | 16 | |||
21 | 17 | clean: | ||
22 | 18 | @find -name *.pyc | xargs rm | ||
23 | 19 | @rm .coverage | ||
24 | 0 | 20 | ||
25 | === added file 'charm-helpers.yaml' | |||
26 | --- charm-helpers.yaml 1970-01-01 00:00:00 +0000 | |||
27 | +++ charm-helpers.yaml 2014-06-26 20:05:25 +0000 | |||
28 | @@ -0,0 +1,5 @@ | |||
29 | 1 | branch: lp:charm-helpers | ||
30 | 2 | destination: hooks/charmhelpers | ||
31 | 3 | include: | ||
32 | 4 | - core | ||
33 | 5 | - fetch | ||
34 | 0 | 6 | ||
35 | === modified file 'config.yaml' | |||
36 | --- config.yaml 2014-05-22 17:00:22 +0000 | |||
37 | +++ config.yaml 2014-06-26 20:05:25 +0000 | |||
38 | @@ -1,14 +1,5 @@ | |||
39 | 1 | options: | 1 | options: |
40 | 2 | string-option: | ||
41 | 3 | type: string | ||
42 | 4 | default: "Default Value" | ||
43 | 5 | description: "A short description of the configuration option" | ||
44 | 6 | boolean-option: | 2 | boolean-option: |
45 | 7 | type: boolean | 3 | type: boolean |
46 | 8 | default: False | 4 | default: False |
47 | 9 | description: "A short description of the configuration option" | 5 | description: "A short description of the configuration option" |
48 | 10 | int-option: | ||
49 | 11 | type: int | ||
50 | 12 | default: 9001 | ||
51 | 13 | description: "A short description of the configuration option" | ||
52 | 14 | |||
53 | 15 | 6 | ||
54 | === added file 'files/__init__.py' | |||
55 | === added file 'files/hadoop/hdfs_format.sh' | |||
56 | --- files/hadoop/hdfs_format.sh 1970-01-01 00:00:00 +0000 | |||
57 | +++ files/hadoop/hdfs_format.sh 2014-06-26 20:05:25 +0000 | |||
58 | @@ -0,0 +1,15 @@ | |||
59 | 1 | #!/bin/sh | ||
60 | 2 | HADOOP_VERSION="hadoop-2.2.0" | ||
61 | 3 | HOME_DIR="/home/ubuntu" | ||
62 | 4 | HADOOP_DIR="/home/ubuntu/hadoop" | ||
63 | 5 | HADOOP_TMP_DIR=$HADOOP_DIR/tmp | ||
64 | 6 | export HADOOP_INSTALL=$HADOOP_DIR/$HADOOP_VERSION | ||
65 | 7 | export HADOOP_HOME=$HADOOP_INSTALL | ||
66 | 8 | export HADOOP_COMMON_HOME=$HADOOP_INSTALL | ||
67 | 9 | export HADOOP_HDFS_HOME=$HADOOP_INSTALL | ||
68 | 10 | export HADOOP_MAPRED_HOME=$HADOOP_INSTALL | ||
69 | 11 | export HADOOP_YARN_HOME=$HADOOP_INSTALL | ||
70 | 12 | export PATH=$PATH:$HADOOP_INSTALL/bin:$HADOOP_INSTALL/sbin | ||
71 | 13 | export YARN_HOME=$HADOOP_INSTALL | ||
72 | 14 | export HADOOP_CONF_DIR=$HADOOP_INSTALL/etc/hadoop | ||
73 | 15 | hdfs namenode -format | ||
74 | 0 | 16 | ||
75 | === added symlink 'files/template' | |||
76 | === target is u'template.py' | |||
77 | === added file 'files/template.py' | |||
78 | --- files/template.py 1970-01-01 00:00:00 +0000 | |||
79 | +++ files/template.py 2014-06-26 20:05:25 +0000 | |||
80 | @@ -0,0 +1,56 @@ | |||
81 | 1 | #!/usr/bin/env python | ||
82 | 2 | |||
83 | 3 | import argparse | ||
84 | 4 | import logging | ||
85 | 5 | import os | ||
86 | 6 | import shutil | ||
87 | 7 | import subprocess | ||
88 | 8 | |||
89 | 9 | # Provide failout cases to install jinja2 templating and argparse. | ||
90 | 10 | # This will not work in offline environments. | ||
91 | 11 | try: | ||
92 | 12 | from jinja2 import Template | ||
93 | 13 | except: | ||
94 | 14 | subprocess.call(['pip', 'install', 'jinja2']) | ||
95 | 15 | raise("Attempted to install missing dependencies. Try re-running") | ||
96 | 16 | |||
97 | 17 | logging.basicConfig(level=logging.INFO) | ||
98 | 18 | |||
99 | 19 | |||
100 | 20 | class TemplateWiz: | ||
101 | 21 | |||
102 | 22 | def __init__(self, args=None): | ||
103 | 23 | self.values = {} | ||
104 | 24 | parser = argparse.ArgumentParser() | ||
105 | 25 | parser.add_argument("-t", "--template", help="Template Path") | ||
106 | 26 | parser.add_argument("-o", "--output", help="Fullpath to output file") | ||
107 | 27 | self.paths, data = parser.parse_known_args(args) | ||
108 | 28 | if data: | ||
109 | 29 | # Process key = value keys | ||
110 | 30 | for item in data: | ||
111 | 31 | kv = item.split('=') | ||
112 | 32 | self.values[kv[0]] = kv[-1] | ||
113 | 33 | |||
114 | 34 | def read_template(self): | ||
115 | 35 | if not os.path.exists(self.paths.template): | ||
116 | 36 | raise IOError("Error loading template: %s" % self.paths.template) | ||
117 | 37 | else: | ||
118 | 38 | with open(self.paths.template, 'r') as f: | ||
119 | 39 | self.template = Template(f.read()) | ||
120 | 40 | |||
121 | 41 | def write_template(self): | ||
122 | 42 | if os.path.exists(self.paths.output): | ||
123 | 43 | shutil.copy2(self.paths.output, "%s.bak" % self.paths.output) | ||
124 | 44 | logging.info("Saving {f} as {f}.bak".format(f=self.paths.output)) | ||
125 | 45 | with open(self.paths.output, 'w') as f: | ||
126 | 46 | f.write(self.template.render(self.values)) | ||
127 | 47 | logging.info('Rendered %s' % self.paths.output) | ||
128 | 48 | |||
129 | 49 | def run(self): | ||
130 | 50 | self.read_template() | ||
131 | 51 | self.write_template() | ||
132 | 52 | |||
133 | 53 | |||
134 | 54 | if __name__ == "__main__": | ||
135 | 55 | tw = TemplateWiz() | ||
136 | 56 | tw.run() | ||
137 | 0 | 57 | ||
138 | === added directory 'files/templates' | |||
139 | === added file 'files/templates/defaults.j2' | |||
140 | --- files/templates/defaults.j2 1970-01-01 00:00:00 +0000 | |||
141 | +++ files/templates/defaults.j2 2014-06-26 20:05:25 +0000 | |||
142 | @@ -0,0 +1,10 @@ | |||
143 | 1 | export JAVA_HOME={{java_home}} | ||
144 | 2 | export HADOOP_INSTALL={{hadoop_home}} | ||
145 | 3 | export HADOOP_HOME=$HADOOP_INSTALL | ||
146 | 4 | export HADOOP_COMMON_HOME=$HADOOP_INSTALL | ||
147 | 5 | export HADOOP_HDFS_HOME=$HADOOP_INSTALL | ||
148 | 6 | export HADOOP_MAPRED_HOME=$HADOOP_INSTALL | ||
149 | 7 | export HADOOP_YARN_HOME=$HADOOP_INSTALL | ||
150 | 8 | export PATH=$JAVA_HOME/bin:$PATH:$HADOOP_INSTALL/bin:$HADOOP_INSTALL/sbin | ||
151 | 9 | export YARN_HOME=$HADOOP_INSTALL | ||
152 | 10 | export HADOOP_CONF_DIR={{conf}} | ||
153 | 0 | 11 | ||
154 | === removed file 'files/upstart/setenv.sh' | |||
155 | --- files/upstart/setenv.sh 2014-05-22 17:00:22 +0000 | |||
156 | +++ files/upstart/setenv.sh 1970-01-01 00:00:00 +0000 | |||
157 | @@ -1,13 +0,0 @@ | |||
158 | 1 | HADOOP_VERSION="hadoop-2.2.0" | ||
159 | 2 | HOME_DIR="/home/ubuntu" | ||
160 | 3 | HADOOP_DIR="/home/ubuntu/hadoop" | ||
161 | 4 | HADOOP_TMP_DIR=$HADOOP_DIR/tmp | ||
162 | 5 | export HADOOP_INSTALL=$HADOOP_DIR/$HADOOP_VERSION | ||
163 | 6 | export HADOOP_HOME=$HADOOP_INSTALL | ||
164 | 7 | export HADOOP_COMMON_HOME=$HADOOP_INSTALL | ||
165 | 8 | export HADOOP_HDFS_HOME=$HADOOP_INSTALL | ||
166 | 9 | export HADOOP_MAPRED_HOME=$HADOOP_INSTALL | ||
167 | 10 | export HADOOP_YARN_HOME=$HADOOP_INSTALL | ||
168 | 11 | export PATH=$JAVA_HOME/bin:$PATH:$HADOOP_INSTALL/bin:$HADOOP_INSTALL/sbin | ||
169 | 12 | export YARN_HOME=$HADOOP_INSTALL | ||
170 | 13 | export HADOOP_CONF_DIR=/etc/hadoop/conf.juju | ||
171 | 14 | 0 | ||
172 | === added file 'hooks/__init__.py' | |||
173 | === added directory 'hooks/charmhelpers' | |||
174 | === added file 'hooks/charmhelpers/__init__.py' | |||
175 | === added directory 'hooks/charmhelpers/core' | |||
176 | === added file 'hooks/charmhelpers/core/__init__.py' | |||
177 | === added file 'hooks/charmhelpers/core/fstab.py' | |||
178 | --- hooks/charmhelpers/core/fstab.py 1970-01-01 00:00:00 +0000 | |||
179 | +++ hooks/charmhelpers/core/fstab.py 2014-06-26 20:05:25 +0000 | |||
180 | @@ -0,0 +1,114 @@ | |||
181 | 1 | #!/usr/bin/env python | ||
182 | 2 | # -*- coding: utf-8 -*- | ||
183 | 3 | |||
184 | 4 | __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>' | ||
185 | 5 | |||
186 | 6 | import os | ||
187 | 7 | |||
188 | 8 | |||
189 | 9 | class Fstab(file): | ||
190 | 10 | """This class extends file in order to implement a file reader/writer | ||
191 | 11 | for file `/etc/fstab` | ||
192 | 12 | """ | ||
193 | 13 | |||
194 | 14 | class Entry(object): | ||
195 | 15 | """Entry class represents a non-comment line on the `/etc/fstab` file | ||
196 | 16 | """ | ||
197 | 17 | def __init__(self, device, mountpoint, filesystem, | ||
198 | 18 | options, d=0, p=0): | ||
199 | 19 | self.device = device | ||
200 | 20 | self.mountpoint = mountpoint | ||
201 | 21 | self.filesystem = filesystem | ||
202 | 22 | |||
203 | 23 | if not options: | ||
204 | 24 | options = "defaults" | ||
205 | 25 | |||
206 | 26 | self.options = options | ||
207 | 27 | self.d = d | ||
208 | 28 | self.p = p | ||
209 | 29 | |||
210 | 30 | def __eq__(self, o): | ||
211 | 31 | return str(self) == str(o) | ||
212 | 32 | |||
213 | 33 | def __str__(self): | ||
214 | 34 | return "{} {} {} {} {} {}".format(self.device, | ||
215 | 35 | self.mountpoint, | ||
216 | 36 | self.filesystem, | ||
217 | 37 | self.options, | ||
218 | 38 | self.d, | ||
219 | 39 | self.p) | ||
220 | 40 | |||
221 | 41 | DEFAULT_PATH = os.path.join(os.path.sep, 'etc', 'fstab') | ||
222 | 42 | |||
223 | 43 | def __init__(self, path=None): | ||
224 | 44 | if path: | ||
225 | 45 | self._path = path | ||
226 | 46 | else: | ||
227 | 47 | self._path = self.DEFAULT_PATH | ||
228 | 48 | file.__init__(self, self._path, 'r+') | ||
229 | 49 | |||
230 | 50 | def _hydrate_entry(self, line): | ||
231 | 51 | return Fstab.Entry(*filter( | ||
232 | 52 | lambda x: x not in ('', None), | ||
233 | 53 | line.strip("\n").split(" "))) | ||
234 | 54 | |||
235 | 55 | @property | ||
236 | 56 | def entries(self): | ||
237 | 57 | self.seek(0) | ||
238 | 58 | for line in self.readlines(): | ||
239 | 59 | try: | ||
240 | 60 | if not line.startswith("#"): | ||
241 | 61 | yield self._hydrate_entry(line) | ||
242 | 62 | except ValueError: | ||
243 | 63 | pass | ||
244 | 64 | |||
245 | 65 | def get_entry_by_attr(self, attr, value): | ||
246 | 66 | for entry in self.entries: | ||
247 | 67 | e_attr = getattr(entry, attr) | ||
248 | 68 | if e_attr == value: | ||
249 | 69 | return entry | ||
250 | 70 | return None | ||
251 | 71 | |||
252 | 72 | def add_entry(self, entry): | ||
253 | 73 | if self.get_entry_by_attr('device', entry.device): | ||
254 | 74 | return False | ||
255 | 75 | |||
256 | 76 | self.write(str(entry) + '\n') | ||
257 | 77 | self.truncate() | ||
258 | 78 | return entry | ||
259 | 79 | |||
260 | 80 | def remove_entry(self, entry): | ||
261 | 81 | self.seek(0) | ||
262 | 82 | |||
263 | 83 | lines = self.readlines() | ||
264 | 84 | |||
265 | 85 | found = False | ||
266 | 86 | for index, line in enumerate(lines): | ||
267 | 87 | if not line.startswith("#"): | ||
268 | 88 | if self._hydrate_entry(line) == entry: | ||
269 | 89 | found = True | ||
270 | 90 | break | ||
271 | 91 | |||
272 | 92 | if not found: | ||
273 | 93 | return False | ||
274 | 94 | |||
275 | 95 | lines.remove(line) | ||
276 | 96 | |||
277 | 97 | self.seek(0) | ||
278 | 98 | self.write(''.join(lines)) | ||
279 | 99 | self.truncate() | ||
280 | 100 | return True | ||
281 | 101 | |||
282 | 102 | @classmethod | ||
283 | 103 | def remove_by_mountpoint(cls, mountpoint, path=None): | ||
284 | 104 | fstab = cls(path=path) | ||
285 | 105 | entry = fstab.get_entry_by_attr('mountpoint', mountpoint) | ||
286 | 106 | if entry: | ||
287 | 107 | return fstab.remove_entry(entry) | ||
288 | 108 | return False | ||
289 | 109 | |||
290 | 110 | @classmethod | ||
291 | 111 | def add(cls, device, mountpoint, filesystem, options=None, path=None): | ||
292 | 112 | return cls(path=path).add_entry(Fstab.Entry(device, | ||
293 | 113 | mountpoint, filesystem, | ||
294 | 114 | options=options)) | ||
295 | 0 | 115 | ||
296 | === added file 'hooks/charmhelpers/core/hookenv.py' | |||
297 | --- hooks/charmhelpers/core/hookenv.py 1970-01-01 00:00:00 +0000 | |||
298 | +++ hooks/charmhelpers/core/hookenv.py 2014-06-26 20:05:25 +0000 | |||
299 | @@ -0,0 +1,498 @@ | |||
300 | 1 | "Interactions with the Juju environment" | ||
301 | 2 | # Copyright 2013 Canonical Ltd. | ||
302 | 3 | # | ||
303 | 4 | # Authors: | ||
304 | 5 | # Charm Helpers Developers <juju@lists.ubuntu.com> | ||
305 | 6 | |||
306 | 7 | import os | ||
307 | 8 | import json | ||
308 | 9 | import yaml | ||
309 | 10 | import subprocess | ||
310 | 11 | import sys | ||
311 | 12 | import UserDict | ||
312 | 13 | from subprocess import CalledProcessError | ||
313 | 14 | |||
314 | 15 | CRITICAL = "CRITICAL" | ||
315 | 16 | ERROR = "ERROR" | ||
316 | 17 | WARNING = "WARNING" | ||
317 | 18 | INFO = "INFO" | ||
318 | 19 | DEBUG = "DEBUG" | ||
319 | 20 | MARKER = object() | ||
320 | 21 | |||
321 | 22 | cache = {} | ||
322 | 23 | |||
323 | 24 | |||
324 | 25 | def cached(func): | ||
325 | 26 | """Cache return values for multiple executions of func + args | ||
326 | 27 | |||
327 | 28 | For example: | ||
328 | 29 | |||
329 | 30 | @cached | ||
330 | 31 | def unit_get(attribute): | ||
331 | 32 | pass | ||
332 | 33 | |||
333 | 34 | unit_get('test') | ||
334 | 35 | |||
335 | 36 | will cache the result of unit_get + 'test' for future calls. | ||
336 | 37 | """ | ||
337 | 38 | def wrapper(*args, **kwargs): | ||
338 | 39 | global cache | ||
339 | 40 | key = str((func, args, kwargs)) | ||
340 | 41 | try: | ||
341 | 42 | return cache[key] | ||
342 | 43 | except KeyError: | ||
343 | 44 | res = func(*args, **kwargs) | ||
344 | 45 | cache[key] = res | ||
345 | 46 | return res | ||
346 | 47 | return wrapper | ||
347 | 48 | |||
348 | 49 | |||
349 | 50 | def flush(key): | ||
350 | 51 | """Flushes any entries from function cache where the | ||
351 | 52 | key is found in the function+args """ | ||
352 | 53 | flush_list = [] | ||
353 | 54 | for item in cache: | ||
354 | 55 | if key in item: | ||
355 | 56 | flush_list.append(item) | ||
356 | 57 | for item in flush_list: | ||
357 | 58 | del cache[item] | ||
358 | 59 | |||
359 | 60 | |||
360 | 61 | def log(message, level=None): | ||
361 | 62 | """Write a message to the juju log""" | ||
362 | 63 | command = ['juju-log'] | ||
363 | 64 | if level: | ||
364 | 65 | command += ['-l', level] | ||
365 | 66 | command += [message] | ||
366 | 67 | subprocess.call(command) | ||
367 | 68 | |||
368 | 69 | |||
369 | 70 | class Serializable(UserDict.IterableUserDict): | ||
370 | 71 | """Wrapper, an object that can be serialized to yaml or json""" | ||
371 | 72 | |||
372 | 73 | def __init__(self, obj): | ||
373 | 74 | # wrap the object | ||
374 | 75 | UserDict.IterableUserDict.__init__(self) | ||
375 | 76 | self.data = obj | ||
376 | 77 | |||
377 | 78 | def __getattr__(self, attr): | ||
378 | 79 | # See if this object has attribute. | ||
379 | 80 | if attr in ("json", "yaml", "data"): | ||
380 | 81 | return self.__dict__[attr] | ||
381 | 82 | # Check for attribute in wrapped object. | ||
382 | 83 | got = getattr(self.data, attr, MARKER) | ||
383 | 84 | if got is not MARKER: | ||
384 | 85 | return got | ||
385 | 86 | # Proxy to the wrapped object via dict interface. | ||
386 | 87 | try: | ||
387 | 88 | return self.data[attr] | ||
388 | 89 | except KeyError: | ||
389 | 90 | raise AttributeError(attr) | ||
390 | 91 | |||
391 | 92 | def __getstate__(self): | ||
392 | 93 | # Pickle as a standard dictionary. | ||
393 | 94 | return self.data | ||
394 | 95 | |||
395 | 96 | def __setstate__(self, state): | ||
396 | 97 | # Unpickle into our wrapper. | ||
397 | 98 | self.data = state | ||
398 | 99 | |||
399 | 100 | def json(self): | ||
400 | 101 | """Serialize the object to json""" | ||
401 | 102 | return json.dumps(self.data) | ||
402 | 103 | |||
403 | 104 | def yaml(self): | ||
404 | 105 | """Serialize the object to yaml""" | ||
405 | 106 | return yaml.dump(self.data) | ||
406 | 107 | |||
407 | 108 | |||
408 | 109 | def execution_environment(): | ||
409 | 110 | """A convenient bundling of the current execution context""" | ||
410 | 111 | context = {} | ||
411 | 112 | context['conf'] = config() | ||
412 | 113 | if relation_id(): | ||
413 | 114 | context['reltype'] = relation_type() | ||
414 | 115 | context['relid'] = relation_id() | ||
415 | 116 | context['rel'] = relation_get() | ||
416 | 117 | context['unit'] = local_unit() | ||
417 | 118 | context['rels'] = relations() | ||
418 | 119 | context['env'] = os.environ | ||
419 | 120 | return context | ||
420 | 121 | |||
421 | 122 | |||
422 | 123 | def in_relation_hook(): | ||
423 | 124 | """Determine whether we're running in a relation hook""" | ||
424 | 125 | return 'JUJU_RELATION' in os.environ | ||
425 | 126 | |||
426 | 127 | |||
427 | 128 | def relation_type(): | ||
428 | 129 | """The scope for the current relation hook""" | ||
429 | 130 | return os.environ.get('JUJU_RELATION', None) | ||
430 | 131 | |||
431 | 132 | |||
432 | 133 | def relation_id(): | ||
433 | 134 | """The relation ID for the current relation hook""" | ||
434 | 135 | return os.environ.get('JUJU_RELATION_ID', None) | ||
435 | 136 | |||
436 | 137 | |||
437 | 138 | def local_unit(): | ||
438 | 139 | """Local unit ID""" | ||
439 | 140 | return os.environ['JUJU_UNIT_NAME'] | ||
440 | 141 | |||
441 | 142 | |||
442 | 143 | def remote_unit(): | ||
443 | 144 | """The remote unit for the current relation hook""" | ||
444 | 145 | return os.environ['JUJU_REMOTE_UNIT'] | ||
445 | 146 | |||
446 | 147 | |||
447 | 148 | def service_name(): | ||
448 | 149 | """The name service group this unit belongs to""" | ||
449 | 150 | return local_unit().split('/')[0] | ||
450 | 151 | |||
451 | 152 | |||
452 | 153 | def hook_name(): | ||
453 | 154 | """The name of the currently executing hook""" | ||
454 | 155 | return os.path.basename(sys.argv[0]) | ||
455 | 156 | |||
456 | 157 | |||
457 | 158 | class Config(dict): | ||
458 | 159 | """A Juju charm config dictionary that can write itself to | ||
459 | 160 | disk (as json) and track which values have changed since | ||
460 | 161 | the previous hook invocation. | ||
461 | 162 | |||
462 | 163 | Do not instantiate this object directly - instead call | ||
463 | 164 | ``hookenv.config()`` | ||
464 | 165 | |||
465 | 166 | Example usage:: | ||
466 | 167 | |||
467 | 168 | >>> # inside a hook | ||
468 | 169 | >>> from charmhelpers.core import hookenv | ||
469 | 170 | >>> config = hookenv.config() | ||
470 | 171 | >>> config['foo'] | ||
471 | 172 | 'bar' | ||
472 | 173 | >>> config['mykey'] = 'myval' | ||
473 | 174 | >>> config.save() | ||
474 | 175 | |||
475 | 176 | |||
476 | 177 | >>> # user runs `juju set mycharm foo=baz` | ||
477 | 178 | >>> # now we're inside subsequent config-changed hook | ||
478 | 179 | >>> config = hookenv.config() | ||
479 | 180 | >>> config['foo'] | ||
480 | 181 | 'baz' | ||
481 | 182 | >>> # test to see if this val has changed since last hook | ||
482 | 183 | >>> config.changed('foo') | ||
483 | 184 | True | ||
484 | 185 | >>> # what was the previous value? | ||
485 | 186 | >>> config.previous('foo') | ||
486 | 187 | 'bar' | ||
487 | 188 | >>> # keys/values that we add are preserved across hooks | ||
488 | 189 | >>> config['mykey'] | ||
489 | 190 | 'myval' | ||
490 | 191 | >>> # don't forget to save at the end of hook! | ||
491 | 192 | >>> config.save() | ||
492 | 193 | |||
493 | 194 | """ | ||
494 | 195 | CONFIG_FILE_NAME = '.juju-persistent-config' | ||
495 | 196 | |||
496 | 197 | def __init__(self, *args, **kw): | ||
497 | 198 | super(Config, self).__init__(*args, **kw) | ||
498 | 199 | self._prev_dict = None | ||
499 | 200 | self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME) | ||
500 | 201 | if os.path.exists(self.path): | ||
501 | 202 | self.load_previous() | ||
502 | 203 | |||
503 | 204 | def load_previous(self, path=None): | ||
504 | 205 | """Load previous copy of config from disk so that current values | ||
505 | 206 | can be compared to previous values. | ||
506 | 207 | |||
507 | 208 | :param path: | ||
508 | 209 | |||
509 | 210 | File path from which to load the previous config. If `None`, | ||
510 | 211 | config is loaded from the default location. If `path` is | ||
511 | 212 | specified, subsequent `save()` calls will write to the same | ||
512 | 213 | path. | ||
513 | 214 | |||
514 | 215 | """ | ||
515 | 216 | self.path = path or self.path | ||
516 | 217 | with open(self.path) as f: | ||
517 | 218 | self._prev_dict = json.load(f) | ||
518 | 219 | |||
519 | 220 | def changed(self, key): | ||
520 | 221 | """Return true if the value for this key has changed since | ||
521 | 222 | the last save. | ||
522 | 223 | |||
523 | 224 | """ | ||
524 | 225 | if self._prev_dict is None: | ||
525 | 226 | return True | ||
526 | 227 | return self.previous(key) != self.get(key) | ||
527 | 228 | |||
528 | 229 | def previous(self, key): | ||
529 | 230 | """Return previous value for this key, or None if there | ||
530 | 231 | is no "previous" value. | ||
531 | 232 | |||
532 | 233 | """ | ||
533 | 234 | if self._prev_dict: | ||
534 | 235 | return self._prev_dict.get(key) | ||
535 | 236 | return None | ||
536 | 237 | |||
537 | 238 | def save(self): | ||
538 | 239 | """Save this config to disk. | ||
539 | 240 | |||
540 | 241 | Preserves items in _prev_dict that do not exist in self. | ||
541 | 242 | |||
542 | 243 | """ | ||
543 | 244 | if self._prev_dict: | ||
544 | 245 | for k, v in self._prev_dict.iteritems(): | ||
545 | 246 | if k not in self: | ||
546 | 247 | self[k] = v | ||
547 | 248 | with open(self.path, 'w') as f: | ||
548 | 249 | json.dump(self, f) | ||
549 | 250 | |||
550 | 251 | |||
551 | 252 | @cached | ||
552 | 253 | def config(scope=None): | ||
553 | 254 | """Juju charm configuration""" | ||
554 | 255 | config_cmd_line = ['config-get'] | ||
555 | 256 | if scope is not None: | ||
556 | 257 | config_cmd_line.append(scope) | ||
557 | 258 | config_cmd_line.append('--format=json') | ||
558 | 259 | try: | ||
559 | 260 | config_data = json.loads(subprocess.check_output(config_cmd_line)) | ||
560 | 261 | if scope is not None: | ||
561 | 262 | return config_data | ||
562 | 263 | return Config(config_data) | ||
563 | 264 | except ValueError: | ||
564 | 265 | return None | ||
565 | 266 | |||
566 | 267 | |||
567 | 268 | @cached | ||
568 | 269 | def relation_get(attribute=None, unit=None, rid=None): | ||
569 | 270 | """Get relation information""" | ||
570 | 271 | _args = ['relation-get', '--format=json'] | ||
571 | 272 | if rid: | ||
572 | 273 | _args.append('-r') | ||
573 | 274 | _args.append(rid) | ||
574 | 275 | _args.append(attribute or '-') | ||
575 | 276 | if unit: | ||
576 | 277 | _args.append(unit) | ||
577 | 278 | try: | ||
578 | 279 | return json.loads(subprocess.check_output(_args)) | ||
579 | 280 | except ValueError: | ||
580 | 281 | return None | ||
581 | 282 | except CalledProcessError, e: | ||
582 | 283 | if e.returncode == 2: | ||
583 | 284 | return None | ||
584 | 285 | raise | ||
585 | 286 | |||
586 | 287 | |||
587 | 288 | def relation_set(relation_id=None, relation_settings={}, **kwargs): | ||
588 | 289 | """Set relation information for the current unit""" | ||
589 | 290 | relation_cmd_line = ['relation-set'] | ||
590 | 291 | if relation_id is not None: | ||
591 | 292 | relation_cmd_line.extend(('-r', relation_id)) | ||
592 | 293 | for k, v in (relation_settings.items() + kwargs.items()): | ||
593 | 294 | if v is None: | ||
594 | 295 | relation_cmd_line.append('{}='.format(k)) | ||
595 | 296 | else: | ||
596 | 297 | relation_cmd_line.append('{}={}'.format(k, v)) | ||
597 | 298 | subprocess.check_call(relation_cmd_line) | ||
598 | 299 | # Flush cache of any relation-gets for local unit | ||
599 | 300 | flush(local_unit()) | ||
600 | 301 | |||
601 | 302 | |||
602 | 303 | @cached | ||
603 | 304 | def relation_ids(reltype=None): | ||
604 | 305 | """A list of relation_ids""" | ||
605 | 306 | reltype = reltype or relation_type() | ||
606 | 307 | relid_cmd_line = ['relation-ids', '--format=json'] | ||
607 | 308 | if reltype is not None: | ||
608 | 309 | relid_cmd_line.append(reltype) | ||
609 | 310 | return json.loads(subprocess.check_output(relid_cmd_line)) or [] | ||
610 | 311 | return [] | ||
611 | 312 | |||
612 | 313 | |||
613 | 314 | @cached | ||
614 | 315 | def related_units(relid=None): | ||
615 | 316 | """A list of related units""" | ||
616 | 317 | relid = relid or relation_id() | ||
617 | 318 | units_cmd_line = ['relation-list', '--format=json'] | ||
618 | 319 | if relid is not None: | ||
619 | 320 | units_cmd_line.extend(('-r', relid)) | ||
620 | 321 | return json.loads(subprocess.check_output(units_cmd_line)) or [] | ||
621 | 322 | |||
622 | 323 | |||
623 | 324 | @cached | ||
624 | 325 | def relation_for_unit(unit=None, rid=None): | ||
625 | 326 | """Get the json represenation of a unit's relation""" | ||
626 | 327 | unit = unit or remote_unit() | ||
627 | 328 | relation = relation_get(unit=unit, rid=rid) | ||
628 | 329 | for key in relation: | ||
629 | 330 | if key.endswith('-list'): | ||
630 | 331 | relation[key] = relation[key].split() | ||
631 | 332 | relation['__unit__'] = unit | ||
632 | 333 | return relation | ||
633 | 334 | |||
634 | 335 | |||
635 | 336 | @cached | ||
636 | 337 | def relations_for_id(relid=None): | ||
637 | 338 | """Get relations of a specific relation ID""" | ||
638 | 339 | relation_data = [] | ||
639 | 340 | relid = relid or relation_ids() | ||
640 | 341 | for unit in related_units(relid): | ||
641 | 342 | unit_data = relation_for_unit(unit, relid) | ||
642 | 343 | unit_data['__relid__'] = relid | ||
643 | 344 | relation_data.append(unit_data) | ||
644 | 345 | return relation_data | ||
645 | 346 | |||
646 | 347 | |||
647 | 348 | @cached | ||
648 | 349 | def relations_of_type(reltype=None): | ||
649 | 350 | """Get relations of a specific type""" | ||
650 | 351 | relation_data = [] | ||
651 | 352 | reltype = reltype or relation_type() | ||
652 | 353 | for relid in relation_ids(reltype): | ||
653 | 354 | for relation in relations_for_id(relid): | ||
654 | 355 | relation['__relid__'] = relid | ||
655 | 356 | relation_data.append(relation) | ||
656 | 357 | return relation_data | ||
657 | 358 | |||
658 | 359 | |||
659 | 360 | @cached | ||
660 | 361 | def relation_types(): | ||
661 | 362 | """Get a list of relation types supported by this charm""" | ||
662 | 363 | charmdir = os.environ.get('CHARM_DIR', '') | ||
663 | 364 | mdf = open(os.path.join(charmdir, 'metadata.yaml')) | ||
664 | 365 | md = yaml.safe_load(mdf) | ||
665 | 366 | rel_types = [] | ||
666 | 367 | for key in ('provides', 'requires', 'peers'): | ||
667 | 368 | section = md.get(key) | ||
668 | 369 | if section: | ||
669 | 370 | rel_types.extend(section.keys()) | ||
670 | 371 | mdf.close() | ||
671 | 372 | return rel_types | ||
672 | 373 | |||
673 | 374 | |||
674 | 375 | @cached | ||
675 | 376 | def relations(): | ||
676 | 377 | """Get a nested dictionary of relation data for all related units""" | ||
677 | 378 | rels = {} | ||
678 | 379 | for reltype in relation_types(): | ||
679 | 380 | relids = {} | ||
680 | 381 | for relid in relation_ids(reltype): | ||
681 | 382 | units = {local_unit(): relation_get(unit=local_unit(), rid=relid)} | ||
682 | 383 | for unit in related_units(relid): | ||
683 | 384 | reldata = relation_get(unit=unit, rid=relid) | ||
684 | 385 | units[unit] = reldata | ||
685 | 386 | relids[relid] = units | ||
686 | 387 | rels[reltype] = relids | ||
687 | 388 | return rels | ||
688 | 389 | |||
689 | 390 | |||
690 | 391 | @cached | ||
691 | 392 | def is_relation_made(relation, keys='private-address'): | ||
692 | 393 | ''' | ||
693 | 394 | Determine whether a relation is established by checking for | ||
694 | 395 | presence of key(s). If a list of keys is provided, they | ||
695 | 396 | must all be present for the relation to be identified as made | ||
696 | 397 | ''' | ||
697 | 398 | if isinstance(keys, str): | ||
698 | 399 | keys = [keys] | ||
699 | 400 | for r_id in relation_ids(relation): | ||
700 | 401 | for unit in related_units(r_id): | ||
701 | 402 | context = {} | ||
702 | 403 | for k in keys: | ||
703 | 404 | context[k] = relation_get(k, rid=r_id, | ||
704 | 405 | unit=unit) | ||
705 | 406 | if None not in context.values(): | ||
706 | 407 | return True | ||
707 | 408 | return False | ||
708 | 409 | |||
709 | 410 | |||
710 | 411 | def open_port(port, protocol="TCP"): | ||
711 | 412 | """Open a service network port""" | ||
712 | 413 | _args = ['open-port'] | ||
713 | 414 | _args.append('{}/{}'.format(port, protocol)) | ||
714 | 415 | subprocess.check_call(_args) | ||
715 | 416 | |||
716 | 417 | |||
717 | 418 | def close_port(port, protocol="TCP"): | ||
718 | 419 | """Close a service network port""" | ||
719 | 420 | _args = ['close-port'] | ||
720 | 421 | _args.append('{}/{}'.format(port, protocol)) | ||
721 | 422 | subprocess.check_call(_args) | ||
722 | 423 | |||
723 | 424 | |||
724 | 425 | @cached | ||
725 | 426 | def unit_get(attribute): | ||
726 | 427 | """Get the unit ID for the remote unit""" | ||
727 | 428 | _args = ['unit-get', '--format=json', attribute] | ||
728 | 429 | try: | ||
729 | 430 | return json.loads(subprocess.check_output(_args)) | ||
730 | 431 | except ValueError: | ||
731 | 432 | return None | ||
732 | 433 | |||
733 | 434 | |||
734 | 435 | def unit_private_ip(): | ||
735 | 436 | """Get this unit's private IP address""" | ||
736 | 437 | return unit_get('private-address') | ||
737 | 438 | |||
738 | 439 | |||
739 | 440 | class UnregisteredHookError(Exception): | ||
740 | 441 | """Raised when an undefined hook is called""" | ||
741 | 442 | pass | ||
742 | 443 | |||
743 | 444 | |||
744 | 445 | class Hooks(object): | ||
745 | 446 | """A convenient handler for hook functions. | ||
746 | 447 | |||
747 | 448 | Example: | ||
748 | 449 | hooks = Hooks() | ||
749 | 450 | |||
750 | 451 | # register a hook, taking its name from the function name | ||
751 | 452 | @hooks.hook() | ||
752 | 453 | def install(): | ||
753 | 454 | ... | ||
754 | 455 | |||
755 | 456 | # register a hook, providing a custom hook name | ||
756 | 457 | @hooks.hook("config-changed") | ||
757 | 458 | def config_changed(): | ||
758 | 459 | ... | ||
759 | 460 | |||
760 | 461 | if __name__ == "__main__": | ||
761 | 462 | # execute a hook based on the name the program is called by | ||
762 | 463 | hooks.execute(sys.argv) | ||
763 | 464 | """ | ||
764 | 465 | |||
765 | 466 | def __init__(self): | ||
766 | 467 | super(Hooks, self).__init__() | ||
767 | 468 | self._hooks = {} | ||
768 | 469 | |||
769 | 470 | def register(self, name, function): | ||
770 | 471 | """Register a hook""" | ||
771 | 472 | self._hooks[name] = function | ||
772 | 473 | |||
773 | 474 | def execute(self, args): | ||
774 | 475 | """Execute a registered hook based on args[0]""" | ||
775 | 476 | hook_name = os.path.basename(args[0]) | ||
776 | 477 | if hook_name in self._hooks: | ||
777 | 478 | self._hooks[hook_name]() | ||
778 | 479 | else: | ||
779 | 480 | raise UnregisteredHookError(hook_name) | ||
780 | 481 | |||
781 | 482 | def hook(self, *hook_names): | ||
782 | 483 | """Decorator, registering them as hooks""" | ||
783 | 484 | def wrapper(decorated): | ||
784 | 485 | for hook_name in hook_names: | ||
785 | 486 | self.register(hook_name, decorated) | ||
786 | 487 | else: | ||
787 | 488 | self.register(decorated.__name__, decorated) | ||
788 | 489 | if '_' in decorated.__name__: | ||
789 | 490 | self.register( | ||
790 | 491 | decorated.__name__.replace('_', '-'), decorated) | ||
791 | 492 | return decorated | ||
792 | 493 | return wrapper | ||
793 | 494 | |||
794 | 495 | |||
795 | 496 | def charm_dir(): | ||
796 | 497 | """Return the root directory of the current charm""" | ||
797 | 498 | return os.environ.get('CHARM_DIR') | ||
798 | 0 | 499 | ||
799 | === added file 'hooks/charmhelpers/core/host.py' | |||
800 | --- hooks/charmhelpers/core/host.py 1970-01-01 00:00:00 +0000 | |||
801 | +++ hooks/charmhelpers/core/host.py 2014-06-26 20:05:25 +0000 | |||
802 | @@ -0,0 +1,325 @@ | |||
803 | 1 | """Tools for working with the host system""" | ||
804 | 2 | # Copyright 2012 Canonical Ltd. | ||
805 | 3 | # | ||
806 | 4 | # Authors: | ||
807 | 5 | # Nick Moffitt <nick.moffitt@canonical.com> | ||
808 | 6 | # Matthew Wedgwood <matthew.wedgwood@canonical.com> | ||
809 | 7 | |||
810 | 8 | import os | ||
811 | 9 | import pwd | ||
812 | 10 | import grp | ||
813 | 11 | import random | ||
814 | 12 | import string | ||
815 | 13 | import subprocess | ||
816 | 14 | import hashlib | ||
817 | 15 | import apt_pkg | ||
818 | 16 | |||
819 | 17 | from collections import OrderedDict | ||
820 | 18 | |||
821 | 19 | from hookenv import log | ||
822 | 20 | from fstab import Fstab | ||
823 | 21 | |||
824 | 22 | |||
825 | 23 | def service_start(service_name): | ||
826 | 24 | """Start a system service""" | ||
827 | 25 | return service('start', service_name) | ||
828 | 26 | |||
829 | 27 | |||
830 | 28 | def service_stop(service_name): | ||
831 | 29 | """Stop a system service""" | ||
832 | 30 | return service('stop', service_name) | ||
833 | 31 | |||
834 | 32 | |||
835 | 33 | def service_restart(service_name): | ||
836 | 34 | """Restart a system service""" | ||
837 | 35 | return service('restart', service_name) | ||
838 | 36 | |||
839 | 37 | |||
840 | 38 | def service_reload(service_name, restart_on_failure=False): | ||
841 | 39 | """Reload a system service, optionally falling back to restart if | ||
842 | 40 | reload fails""" | ||
843 | 41 | service_result = service('reload', service_name) | ||
844 | 42 | if not service_result and restart_on_failure: | ||
845 | 43 | service_result = service('restart', service_name) | ||
846 | 44 | return service_result | ||
847 | 45 | |||
848 | 46 | |||
849 | 47 | def service(action, service_name): | ||
850 | 48 | """Control a system service""" | ||
851 | 49 | cmd = ['service', service_name, action] | ||
852 | 50 | return subprocess.call(cmd) == 0 | ||
853 | 51 | |||
854 | 52 | |||
855 | 53 | def service_running(service): | ||
856 | 54 | """Determine whether a system service is running""" | ||
857 | 55 | try: | ||
858 | 56 | output = subprocess.check_output(['service', service, 'status']) | ||
859 | 57 | except subprocess.CalledProcessError: | ||
860 | 58 | return False | ||
861 | 59 | else: | ||
862 | 60 | if ("start/running" in output or "is running" in output): | ||
863 | 61 | return True | ||
864 | 62 | else: | ||
865 | 63 | return False | ||
866 | 64 | |||
867 | 65 | |||
868 | 66 | def adduser(username, password=None, shell='/bin/bash', system_user=False): | ||
869 | 67 | """Add a user to the system""" | ||
870 | 68 | try: | ||
871 | 69 | user_info = pwd.getpwnam(username) | ||
872 | 70 | log('user {0} already exists!'.format(username)) | ||
873 | 71 | except KeyError: | ||
874 | 72 | log('creating user {0}'.format(username)) | ||
875 | 73 | cmd = ['useradd'] | ||
876 | 74 | if system_user or password is None: | ||
877 | 75 | cmd.append('--system') | ||
878 | 76 | else: | ||
879 | 77 | cmd.extend([ | ||
880 | 78 | '--create-home', | ||
881 | 79 | '--shell', shell, | ||
882 | 80 | '--password', password, | ||
883 | 81 | ]) | ||
884 | 82 | cmd.append(username) | ||
885 | 83 | subprocess.check_call(cmd) | ||
886 | 84 | user_info = pwd.getpwnam(username) | ||
887 | 85 | return user_info | ||
888 | 86 | |||
889 | 87 | |||
890 | 88 | def add_user_to_group(username, group): | ||
891 | 89 | """Add a user to a group""" | ||
892 | 90 | cmd = [ | ||
893 | 91 | 'gpasswd', '-a', | ||
894 | 92 | username, | ||
895 | 93 | group | ||
896 | 94 | ] | ||
897 | 95 | log("Adding user {} to group {}".format(username, group)) | ||
898 | 96 | subprocess.check_call(cmd) | ||
899 | 97 | |||
900 | 98 | |||
901 | 99 | def rsync(from_path, to_path, flags='-r', options=None): | ||
902 | 100 | """Replicate the contents of a path""" | ||
903 | 101 | options = options or ['--delete', '--executability'] | ||
904 | 102 | cmd = ['/usr/bin/rsync', flags] | ||
905 | 103 | cmd.extend(options) | ||
906 | 104 | cmd.append(from_path) | ||
907 | 105 | cmd.append(to_path) | ||
908 | 106 | log(" ".join(cmd)) | ||
909 | 107 | return subprocess.check_output(cmd).strip() | ||
910 | 108 | |||
911 | 109 | |||
912 | 110 | def symlink(source, destination): | ||
913 | 111 | """Create a symbolic link""" | ||
914 | 112 | log("Symlinking {} as {}".format(source, destination)) | ||
915 | 113 | cmd = [ | ||
916 | 114 | 'ln', | ||
917 | 115 | '-sf', | ||
918 | 116 | source, | ||
919 | 117 | destination, | ||
920 | 118 | ] | ||
921 | 119 | subprocess.check_call(cmd) | ||
922 | 120 | |||
923 | 121 | |||
924 | 122 | def mkdir(path, owner='root', group='root', perms=0555, force=False): | ||
925 | 123 | """Create a directory""" | ||
926 | 124 | log("Making dir {} {}:{} {:o}".format(path, owner, group, | ||
927 | 125 | perms)) | ||
928 | 126 | uid = pwd.getpwnam(owner).pw_uid | ||
929 | 127 | gid = grp.getgrnam(group).gr_gid | ||
930 | 128 | realpath = os.path.abspath(path) | ||
931 | 129 | if os.path.exists(realpath): | ||
932 | 130 | if force and not os.path.isdir(realpath): | ||
933 | 131 | log("Removing non-directory file {} prior to mkdir()".format(path)) | ||
934 | 132 | os.unlink(realpath) | ||
935 | 133 | else: | ||
936 | 134 | os.makedirs(realpath, perms) | ||
937 | 135 | os.chown(realpath, uid, gid) | ||
938 | 136 | |||
939 | 137 | |||
940 | 138 | def write_file(path, content, owner='root', group='root', perms=0444): | ||
941 | 139 | """Create or overwrite a file with the contents of a string""" | ||
942 | 140 | log("Writing file {} {}:{} {:o}".format(path, owner, group, perms)) | ||
943 | 141 | uid = pwd.getpwnam(owner).pw_uid | ||
944 | 142 | gid = grp.getgrnam(group).gr_gid | ||
945 | 143 | with open(path, 'w') as target: | ||
946 | 144 | os.fchown(target.fileno(), uid, gid) | ||
947 | 145 | os.fchmod(target.fileno(), perms) | ||
948 | 146 | target.write(content) | ||
949 | 147 | |||
950 | 148 | |||
951 | 149 | def fstab_remove(mp): | ||
952 | 150 | """Remove the given mountpoint entry from /etc/fstab | ||
953 | 151 | """ | ||
954 | 152 | return Fstab.remove_by_mountpoint(mp) | ||
955 | 153 | |||
956 | 154 | |||
957 | 155 | def fstab_add(dev, mp, fs, options=None): | ||
958 | 156 | """Adds the given device entry to the /etc/fstab file | ||
959 | 157 | """ | ||
960 | 158 | return Fstab.add(dev, mp, fs, options=options) | ||
961 | 159 | |||
962 | 160 | |||
963 | 161 | def mount(device, mountpoint, options=None, persist=False, filesystem="ext3"): | ||
964 | 162 | """Mount a filesystem at a particular mountpoint""" | ||
965 | 163 | cmd_args = ['mount'] | ||
966 | 164 | if options is not None: | ||
967 | 165 | cmd_args.extend(['-o', options]) | ||
968 | 166 | cmd_args.extend([device, mountpoint]) | ||
969 | 167 | try: | ||
970 | 168 | subprocess.check_output(cmd_args) | ||
971 | 169 | except subprocess.CalledProcessError, e: | ||
972 | 170 | log('Error mounting {} at {}\n{}'.format(device, mountpoint, e.output)) | ||
973 | 171 | return False | ||
974 | 172 | |||
975 | 173 | if persist: | ||
976 | 174 | return fstab_add(device, mountpoint, filesystem, options=options) | ||
977 | 175 | return True | ||
978 | 176 | |||
979 | 177 | |||
980 | 178 | def umount(mountpoint, persist=False): | ||
981 | 179 | """Unmount a filesystem""" | ||
982 | 180 | cmd_args = ['umount', mountpoint] | ||
983 | 181 | try: | ||
984 | 182 | subprocess.check_output(cmd_args) | ||
985 | 183 | except subprocess.CalledProcessError, e: | ||
986 | 184 | log('Error unmounting {}\n{}'.format(mountpoint, e.output)) | ||
987 | 185 | return False | ||
988 | 186 | |||
989 | 187 | if persist: | ||
990 | 188 | return fstab_remove(mountpoint) | ||
991 | 189 | return True | ||
992 | 190 | |||
993 | 191 | |||
994 | 192 | def mounts(): | ||
995 | 193 | """Get a list of all mounted volumes as [[mountpoint,device],[...]]""" | ||
996 | 194 | with open('/proc/mounts') as f: | ||
997 | 195 | # [['/mount/point','/dev/path'],[...]] | ||
998 | 196 | system_mounts = [m[1::-1] for m in [l.strip().split() | ||
999 | 197 | for l in f.readlines()]] | ||
1000 | 198 | return system_mounts | ||
1001 | 199 | |||
1002 | 200 | |||
1003 | 201 | def file_hash(path): | ||
1004 | 202 | """Generate a md5 hash of the contents of 'path' or None if not found """ | ||
1005 | 203 | if os.path.exists(path): | ||
1006 | 204 | h = hashlib.md5() | ||
1007 | 205 | with open(path, 'r') as source: | ||
1008 | 206 | h.update(source.read()) # IGNORE:E1101 - it does have update | ||
1009 | 207 | return h.hexdigest() | ||
1010 | 208 | else: | ||
1011 | 209 | return None | ||
1012 | 210 | |||
1013 | 211 | |||
1014 | 212 | def restart_on_change(restart_map, stopstart=False): | ||
1015 | 213 | """Restart services based on configuration files changing | ||
1016 | 214 | |||
1017 | 215 | This function is used a decorator, for example | ||
1018 | 216 | |||
1019 | 217 | @restart_on_change({ | ||
1020 | 218 | '/etc/ceph/ceph.conf': [ 'cinder-api', 'cinder-volume' ] | ||
1021 | 219 | }) | ||
1022 | 220 | def ceph_client_changed(): | ||
1023 | 221 | ... | ||
1024 | 222 | |||
1025 | 223 | In this example, the cinder-api and cinder-volume services | ||
1026 | 224 | would be restarted if /etc/ceph/ceph.conf is changed by the | ||
1027 | 225 | ceph_client_changed function. | ||
1028 | 226 | """ | ||
1029 | 227 | def wrap(f): | ||
1030 | 228 | def wrapped_f(*args): | ||
1031 | 229 | checksums = {} | ||
1032 | 230 | for path in restart_map: | ||
1033 | 231 | checksums[path] = file_hash(path) | ||
1034 | 232 | f(*args) | ||
1035 | 233 | restarts = [] | ||
1036 | 234 | for path in restart_map: | ||
1037 | 235 | if checksums[path] != file_hash(path): | ||
1038 | 236 | restarts += restart_map[path] | ||
1039 | 237 | services_list = list(OrderedDict.fromkeys(restarts)) | ||
1040 | 238 | if not stopstart: | ||
1041 | 239 | for service_name in services_list: | ||
1042 | 240 | service('restart', service_name) | ||
1043 | 241 | else: | ||
1044 | 242 | for action in ['stop', 'start']: | ||
1045 | 243 | for service_name in services_list: | ||
1046 | 244 | service(action, service_name) | ||
1047 | 245 | return wrapped_f | ||
1048 | 246 | return wrap | ||
1049 | 247 | |||
1050 | 248 | |||
1051 | 249 | def lsb_release(): | ||
1052 | 250 | """Return /etc/lsb-release in a dict""" | ||
1053 | 251 | d = {} | ||
1054 | 252 | with open('/etc/lsb-release', 'r') as lsb: | ||
1055 | 253 | for l in lsb: | ||
1056 | 254 | k, v = l.split('=') | ||
1057 | 255 | d[k.strip()] = v.strip() | ||
1058 | 256 | return d | ||
1059 | 257 | |||
1060 | 258 | |||
1061 | 259 | def pwgen(length=None): | ||
1062 | 260 | """Generate a random pasword.""" | ||
1063 | 261 | if length is None: | ||
1064 | 262 | length = random.choice(range(35, 45)) | ||
1065 | 263 | alphanumeric_chars = [ | ||
1066 | 264 | l for l in (string.letters + string.digits) | ||
1067 | 265 | if l not in 'l0QD1vAEIOUaeiou'] | ||
1068 | 266 | random_chars = [ | ||
1069 | 267 | random.choice(alphanumeric_chars) for _ in range(length)] | ||
1070 | 268 | return(''.join(random_chars)) | ||
1071 | 269 | |||
1072 | 270 | |||
1073 | 271 | def list_nics(nic_type): | ||
1074 | 272 | '''Return a list of nics of given type(s)''' | ||
1075 | 273 | if isinstance(nic_type, basestring): | ||
1076 | 274 | int_types = [nic_type] | ||
1077 | 275 | else: | ||
1078 | 276 | int_types = nic_type | ||
1079 | 277 | interfaces = [] | ||
1080 | 278 | for int_type in int_types: | ||
1081 | 279 | cmd = ['ip', 'addr', 'show', 'label', int_type + '*'] | ||
1082 | 280 | ip_output = subprocess.check_output(cmd).split('\n') | ||
1083 | 281 | ip_output = (line for line in ip_output if line) | ||
1084 | 282 | for line in ip_output: | ||
1085 | 283 | if line.split()[1].startswith(int_type): | ||
1086 | 284 | interfaces.append(line.split()[1].replace(":", "")) | ||
1087 | 285 | return interfaces | ||
1088 | 286 | |||
1089 | 287 | |||
1090 | 288 | def set_nic_mtu(nic, mtu): | ||
1091 | 289 | '''Set MTU on a network interface''' | ||
1092 | 290 | cmd = ['ip', 'link', 'set', nic, 'mtu', mtu] | ||
1093 | 291 | subprocess.check_call(cmd) | ||
1094 | 292 | |||
1095 | 293 | |||
1096 | 294 | def get_nic_mtu(nic): | ||
1097 | 295 | cmd = ['ip', 'addr', 'show', nic] | ||
1098 | 296 | ip_output = subprocess.check_output(cmd).split('\n') | ||
1099 | 297 | mtu = "" | ||
1100 | 298 | for line in ip_output: | ||
1101 | 299 | words = line.split() | ||
1102 | 300 | if 'mtu' in words: | ||
1103 | 301 | mtu = words[words.index("mtu") + 1] | ||
1104 | 302 | return mtu | ||
1105 | 303 | |||
1106 | 304 | |||
1107 | 305 | def get_nic_hwaddr(nic): | ||
1108 | 306 | cmd = ['ip', '-o', '-0', 'addr', 'show', nic] | ||
1109 | 307 | ip_output = subprocess.check_output(cmd) | ||
1110 | 308 | hwaddr = "" | ||
1111 | 309 | words = ip_output.split() | ||
1112 | 310 | if 'link/ether' in words: | ||
1113 | 311 | hwaddr = words[words.index('link/ether') + 1] | ||
1114 | 312 | return hwaddr | ||
1115 | 313 | |||
1116 | 314 | |||
1117 | 315 | def cmp_pkgrevno(package, revno, pkgcache=None): | ||
1118 | 316 | '''Compare supplied revno with the revno of the installed package | ||
1119 | 317 | 1 => Installed revno is greater than supplied arg | ||
1120 | 318 | 0 => Installed revno is the same as supplied arg | ||
1121 | 319 | -1 => Installed revno is less than supplied arg | ||
1122 | 320 | ''' | ||
1123 | 321 | if not pkgcache: | ||
1124 | 322 | apt_pkg.init() | ||
1125 | 323 | pkgcache = apt_pkg.Cache() | ||
1126 | 324 | pkg = pkgcache[package] | ||
1127 | 325 | return apt_pkg.version_compare(pkg.current_ver.ver_str, revno) | ||
1128 | 0 | 326 | ||
1129 | === added directory 'hooks/charmhelpers/fetch' | |||
1130 | === added file 'hooks/charmhelpers/fetch/__init__.py' | |||
1131 | --- hooks/charmhelpers/fetch/__init__.py 1970-01-01 00:00:00 +0000 | |||
1132 | +++ hooks/charmhelpers/fetch/__init__.py 2014-06-26 20:05:25 +0000 | |||
1133 | @@ -0,0 +1,349 @@ | |||
1134 | 1 | import importlib | ||
1135 | 2 | import time | ||
1136 | 3 | from yaml import safe_load | ||
1137 | 4 | from charmhelpers.core.host import ( | ||
1138 | 5 | lsb_release | ||
1139 | 6 | ) | ||
1140 | 7 | from urlparse import ( | ||
1141 | 8 | urlparse, | ||
1142 | 9 | urlunparse, | ||
1143 | 10 | ) | ||
1144 | 11 | import subprocess | ||
1145 | 12 | from charmhelpers.core.hookenv import ( | ||
1146 | 13 | config, | ||
1147 | 14 | log, | ||
1148 | 15 | ) | ||
1149 | 16 | import apt_pkg | ||
1150 | 17 | import os | ||
1151 | 18 | |||
1152 | 19 | |||
1153 | 20 | CLOUD_ARCHIVE = """# Ubuntu Cloud Archive | ||
1154 | 21 | deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main | ||
1155 | 22 | """ | ||
1156 | 23 | PROPOSED_POCKET = """# Proposed | ||
1157 | 24 | deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted | ||
1158 | 25 | """ | ||
1159 | 26 | CLOUD_ARCHIVE_POCKETS = { | ||
1160 | 27 | # Folsom | ||
1161 | 28 | 'folsom': 'precise-updates/folsom', | ||
1162 | 29 | 'precise-folsom': 'precise-updates/folsom', | ||
1163 | 30 | 'precise-folsom/updates': 'precise-updates/folsom', | ||
1164 | 31 | 'precise-updates/folsom': 'precise-updates/folsom', | ||
1165 | 32 | 'folsom/proposed': 'precise-proposed/folsom', | ||
1166 | 33 | 'precise-folsom/proposed': 'precise-proposed/folsom', | ||
1167 | 34 | 'precise-proposed/folsom': 'precise-proposed/folsom', | ||
1168 | 35 | # Grizzly | ||
1169 | 36 | 'grizzly': 'precise-updates/grizzly', | ||
1170 | 37 | 'precise-grizzly': 'precise-updates/grizzly', | ||
1171 | 38 | 'precise-grizzly/updates': 'precise-updates/grizzly', | ||
1172 | 39 | 'precise-updates/grizzly': 'precise-updates/grizzly', | ||
1173 | 40 | 'grizzly/proposed': 'precise-proposed/grizzly', | ||
1174 | 41 | 'precise-grizzly/proposed': 'precise-proposed/grizzly', | ||
1175 | 42 | 'precise-proposed/grizzly': 'precise-proposed/grizzly', | ||
1176 | 43 | # Havana | ||
1177 | 44 | 'havana': 'precise-updates/havana', | ||
1178 | 45 | 'precise-havana': 'precise-updates/havana', | ||
1179 | 46 | 'precise-havana/updates': 'precise-updates/havana', | ||
1180 | 47 | 'precise-updates/havana': 'precise-updates/havana', | ||
1181 | 48 | 'havana/proposed': 'precise-proposed/havana', | ||
1182 | 49 | 'precise-havana/proposed': 'precise-proposed/havana', | ||
1183 | 50 | 'precise-proposed/havana': 'precise-proposed/havana', | ||
1184 | 51 | # Icehouse | ||
1185 | 52 | 'icehouse': 'precise-updates/icehouse', | ||
1186 | 53 | 'precise-icehouse': 'precise-updates/icehouse', | ||
1187 | 54 | 'precise-icehouse/updates': 'precise-updates/icehouse', | ||
1188 | 55 | 'precise-updates/icehouse': 'precise-updates/icehouse', | ||
1189 | 56 | 'icehouse/proposed': 'precise-proposed/icehouse', | ||
1190 | 57 | 'precise-icehouse/proposed': 'precise-proposed/icehouse', | ||
1191 | 58 | 'precise-proposed/icehouse': 'precise-proposed/icehouse', | ||
1192 | 59 | # Juno | ||
1193 | 60 | 'juno': 'trusty-updates/juno', | ||
1194 | 61 | 'trusty-juno': 'trusty-updates/juno', | ||
1195 | 62 | 'trusty-juno/updates': 'trusty-updates/juno', | ||
1196 | 63 | 'trusty-updates/juno': 'trusty-updates/juno', | ||
1197 | 64 | 'juno/proposed': 'trusty-proposed/juno', | ||
1198 | 65 | 'juno/proposed': 'trusty-proposed/juno', | ||
1199 | 66 | 'trusty-juno/proposed': 'trusty-proposed/juno', | ||
1200 | 67 | 'trusty-proposed/juno': 'trusty-proposed/juno', | ||
1201 | 68 | } | ||
1202 | 69 | |||
1203 | 70 | # The order of this list is very important. Handlers should be listed in from | ||
1204 | 71 | # least- to most-specific URL matching. | ||
1205 | 72 | FETCH_HANDLERS = ( | ||
1206 | 73 | 'charmhelpers.fetch.archiveurl.ArchiveUrlFetchHandler', | ||
1207 | 74 | 'charmhelpers.fetch.bzrurl.BzrUrlFetchHandler', | ||
1208 | 75 | ) | ||
1209 | 76 | |||
1210 | 77 | APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT. | ||
1211 | 78 | APT_NO_LOCK_RETRY_DELAY = 10 # Wait 10 seconds between apt lock checks. | ||
1212 | 79 | APT_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times. | ||
1213 | 80 | |||
1214 | 81 | |||
1215 | 82 | class SourceConfigError(Exception): | ||
1216 | 83 | pass | ||
1217 | 84 | |||
1218 | 85 | |||
1219 | 86 | class UnhandledSource(Exception): | ||
1220 | 87 | pass | ||
1221 | 88 | |||
1222 | 89 | |||
1223 | 90 | class AptLockError(Exception): | ||
1224 | 91 | pass | ||
1225 | 92 | |||
1226 | 93 | |||
1227 | 94 | class BaseFetchHandler(object): | ||
1228 | 95 | |||
1229 | 96 | """Base class for FetchHandler implementations in fetch plugins""" | ||
1230 | 97 | |||
1231 | 98 | def can_handle(self, source): | ||
1232 | 99 | """Returns True if the source can be handled. Otherwise returns | ||
1233 | 100 | a string explaining why it cannot""" | ||
1234 | 101 | return "Wrong source type" | ||
1235 | 102 | |||
1236 | 103 | def install(self, source): | ||
1237 | 104 | """Try to download and unpack the source. Return the path to the | ||
1238 | 105 | unpacked files or raise UnhandledSource.""" | ||
1239 | 106 | raise UnhandledSource("Wrong source type {}".format(source)) | ||
1240 | 107 | |||
1241 | 108 | def parse_url(self, url): | ||
1242 | 109 | return urlparse(url) | ||
1243 | 110 | |||
1244 | 111 | def base_url(self, url): | ||
1245 | 112 | """Return url without querystring or fragment""" | ||
1246 | 113 | parts = list(self.parse_url(url)) | ||
1247 | 114 | parts[4:] = ['' for i in parts[4:]] | ||
1248 | 115 | return urlunparse(parts) | ||
1249 | 116 | |||
1250 | 117 | |||
1251 | 118 | def filter_installed_packages(packages): | ||
1252 | 119 | """Returns a list of packages that require installation""" | ||
1253 | 120 | apt_pkg.init() | ||
1254 | 121 | |||
1255 | 122 | # Tell apt to build an in-memory cache to prevent race conditions (if | ||
1256 | 123 | # another process is already building the cache). | ||
1257 | 124 | apt_pkg.config.set("Dir::Cache::pkgcache", "") | ||
1258 | 125 | |||
1259 | 126 | cache = apt_pkg.Cache() | ||
1260 | 127 | _pkgs = [] | ||
1261 | 128 | for package in packages: | ||
1262 | 129 | try: | ||
1263 | 130 | p = cache[package] | ||
1264 | 131 | p.current_ver or _pkgs.append(package) | ||
1265 | 132 | except KeyError: | ||
1266 | 133 | log('Package {} has no installation candidate.'.format(package), | ||
1267 | 134 | level='WARNING') | ||
1268 | 135 | _pkgs.append(package) | ||
1269 | 136 | return _pkgs | ||
1270 | 137 | |||
1271 | 138 | |||
1272 | 139 | def apt_install(packages, options=None, fatal=False): | ||
1273 | 140 | """Install one or more packages""" | ||
1274 | 141 | if options is None: | ||
1275 | 142 | options = ['--option=Dpkg::Options::=--force-confold'] | ||
1276 | 143 | |||
1277 | 144 | cmd = ['apt-get', '--assume-yes'] | ||
1278 | 145 | cmd.extend(options) | ||
1279 | 146 | cmd.append('install') | ||
1280 | 147 | if isinstance(packages, basestring): | ||
1281 | 148 | cmd.append(packages) | ||
1282 | 149 | else: | ||
1283 | 150 | cmd.extend(packages) | ||
1284 | 151 | log("Installing {} with options: {}".format(packages, | ||
1285 | 152 | options)) | ||
1286 | 153 | _run_apt_command(cmd, fatal) | ||
1287 | 154 | |||
1288 | 155 | |||
1289 | 156 | def apt_upgrade(options=None, fatal=False, dist=False): | ||
1290 | 157 | """Upgrade all packages""" | ||
1291 | 158 | if options is None: | ||
1292 | 159 | options = ['--option=Dpkg::Options::=--force-confold'] | ||
1293 | 160 | |||
1294 | 161 | cmd = ['apt-get', '--assume-yes'] | ||
1295 | 162 | cmd.extend(options) | ||
1296 | 163 | if dist: | ||
1297 | 164 | cmd.append('dist-upgrade') | ||
1298 | 165 | else: | ||
1299 | 166 | cmd.append('upgrade') | ||
1300 | 167 | log("Upgrading with options: {}".format(options)) | ||
1301 | 168 | _run_apt_command(cmd, fatal) | ||
1302 | 169 | |||
1303 | 170 | |||
1304 | 171 | def apt_update(fatal=False): | ||
1305 | 172 | """Update local apt cache""" | ||
1306 | 173 | cmd = ['apt-get', 'update'] | ||
1307 | 174 | _run_apt_command(cmd, fatal) | ||
1308 | 175 | |||
1309 | 176 | |||
1310 | 177 | def apt_purge(packages, fatal=False): | ||
1311 | 178 | """Purge one or more packages""" | ||
1312 | 179 | cmd = ['apt-get', '--assume-yes', 'purge'] | ||
1313 | 180 | if isinstance(packages, basestring): | ||
1314 | 181 | cmd.append(packages) | ||
1315 | 182 | else: | ||
1316 | 183 | cmd.extend(packages) | ||
1317 | 184 | log("Purging {}".format(packages)) | ||
1318 | 185 | _run_apt_command(cmd, fatal) | ||
1319 | 186 | |||
1320 | 187 | |||
1321 | 188 | def apt_hold(packages, fatal=False): | ||
1322 | 189 | """Hold one or more packages""" | ||
1323 | 190 | cmd = ['apt-mark', 'hold'] | ||
1324 | 191 | if isinstance(packages, basestring): | ||
1325 | 192 | cmd.append(packages) | ||
1326 | 193 | else: | ||
1327 | 194 | cmd.extend(packages) | ||
1328 | 195 | log("Holding {}".format(packages)) | ||
1329 | 196 | |||
1330 | 197 | if fatal: | ||
1331 | 198 | subprocess.check_call(cmd) | ||
1332 | 199 | else: | ||
1333 | 200 | subprocess.call(cmd) | ||
1334 | 201 | |||
1335 | 202 | |||
1336 | 203 | def add_source(source, key=None): | ||
1337 | 204 | if source is None: | ||
1338 | 205 | log('Source is not present. Skipping') | ||
1339 | 206 | return | ||
1340 | 207 | |||
1341 | 208 | if (source.startswith('ppa:') or | ||
1342 | 209 | source.startswith('http') or | ||
1343 | 210 | source.startswith('deb ') or | ||
1344 | 211 | source.startswith('cloud-archive:')): | ||
1345 | 212 | subprocess.check_call(['add-apt-repository', '--yes', source]) | ||
1346 | 213 | elif source.startswith('cloud:'): | ||
1347 | 214 | apt_install(filter_installed_packages(['ubuntu-cloud-keyring']), | ||
1348 | 215 | fatal=True) | ||
1349 | 216 | pocket = source.split(':')[-1] | ||
1350 | 217 | if pocket not in CLOUD_ARCHIVE_POCKETS: | ||
1351 | 218 | raise SourceConfigError( | ||
1352 | 219 | 'Unsupported cloud: source option %s' % | ||
1353 | 220 | pocket) | ||
1354 | 221 | actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket] | ||
1355 | 222 | with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt: | ||
1356 | 223 | apt.write(CLOUD_ARCHIVE.format(actual_pocket)) | ||
1357 | 224 | elif source == 'proposed': | ||
1358 | 225 | release = lsb_release()['DISTRIB_CODENAME'] | ||
1359 | 226 | with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt: | ||
1360 | 227 | apt.write(PROPOSED_POCKET.format(release)) | ||
1361 | 228 | if key: | ||
1362 | 229 | subprocess.check_call(['apt-key', 'adv', '--keyserver', | ||
1363 | 230 | 'hkp://keyserver.ubuntu.com:80', '--recv', | ||
1364 | 231 | key]) | ||
1365 | 232 | |||
1366 | 233 | |||
1367 | 234 | def configure_sources(update=False, | ||
1368 | 235 | sources_var='install_sources', | ||
1369 | 236 | keys_var='install_keys'): | ||
1370 | 237 | """ | ||
1371 | 238 | Configure multiple sources from charm configuration | ||
1372 | 239 | |||
1373 | 240 | Example config: | ||
1374 | 241 | install_sources: | ||
1375 | 242 | - "ppa:foo" | ||
1376 | 243 | - "http://example.com/repo precise main" | ||
1377 | 244 | install_keys: | ||
1378 | 245 | - null | ||
1379 | 246 | - "a1b2c3d4" | ||
1380 | 247 | |||
1381 | 248 | Note that 'null' (a.k.a. None) should not be quoted. | ||
1382 | 249 | """ | ||
1383 | 250 | sources = safe_load(config(sources_var)) | ||
1384 | 251 | keys = config(keys_var) | ||
1385 | 252 | if keys is not None: | ||
1386 | 253 | keys = safe_load(keys) | ||
1387 | 254 | if isinstance(sources, basestring) and ( | ||
1388 | 255 | keys is None or isinstance(keys, basestring)): | ||
1389 | 256 | add_source(sources, keys) | ||
1390 | 257 | else: | ||
1391 | 258 | if not len(sources) == len(keys): | ||
1392 | 259 | msg = 'Install sources and keys lists are different lengths' | ||
1393 | 260 | raise SourceConfigError(msg) | ||
1394 | 261 | for src_num in range(len(sources)): | ||
1395 | 262 | add_source(sources[src_num], keys[src_num]) | ||
1396 | 263 | if update: | ||
1397 | 264 | apt_update(fatal=True) | ||
1398 | 265 | |||
1399 | 266 | |||
1400 | 267 | def install_remote(source): | ||
1401 | 268 | """ | ||
1402 | 269 | Install a file tree from a remote source | ||
1403 | 270 | |||
1404 | 271 | The specified source should be a url of the form: | ||
1405 | 272 | scheme://[host]/path[#[option=value][&...]] | ||
1406 | 273 | |||
1407 | 274 | Schemes supported are based on this module's submodules. | ||
1408 | 275 | Options supported are submodule-specific.""" | ||
1409 | 276 | # We ONLY check for True here because can_handle may return a string | ||
1410 | 277 | # explaining why it can't handle a given source. | ||
1411 | 278 | handlers = [h for h in plugins() if h.can_handle(source) is True] | ||
1412 | 279 | installed_to = None | ||
1413 | 280 | for handler in handlers: | ||
1414 | 281 | try: | ||
1415 | 282 | installed_to = handler.install(source) | ||
1416 | 283 | except UnhandledSource: | ||
1417 | 284 | pass | ||
1418 | 285 | if not installed_to: | ||
1419 | 286 | raise UnhandledSource("No handler found for source {}".format(source)) | ||
1420 | 287 | return installed_to | ||
1421 | 288 | |||
1422 | 289 | |||
1423 | 290 | def install_from_config(config_var_name): | ||
1424 | 291 | charm_config = config() | ||
1425 | 292 | source = charm_config[config_var_name] | ||
1426 | 293 | return install_remote(source) | ||
1427 | 294 | |||
1428 | 295 | |||
1429 | 296 | def plugins(fetch_handlers=None): | ||
1430 | 297 | if not fetch_handlers: | ||
1431 | 298 | fetch_handlers = FETCH_HANDLERS | ||
1432 | 299 | plugin_list = [] | ||
1433 | 300 | for handler_name in fetch_handlers: | ||
1434 | 301 | package, classname = handler_name.rsplit('.', 1) | ||
1435 | 302 | try: | ||
1436 | 303 | handler_class = getattr( | ||
1437 | 304 | importlib.import_module(package), | ||
1438 | 305 | classname) | ||
1439 | 306 | plugin_list.append(handler_class()) | ||
1440 | 307 | except (ImportError, AttributeError): | ||
1441 | 308 | # Skip missing plugins so that they can be omitted from | ||
1442 | 309 | # installation if desired | ||
1443 | 310 | log("FetchHandler {} not found, skipping plugin".format( | ||
1444 | 311 | handler_name)) | ||
1445 | 312 | return plugin_list | ||
1446 | 313 | |||
1447 | 314 | |||
1448 | 315 | def _run_apt_command(cmd, fatal=False): | ||
1449 | 316 | """ | ||
1450 | 317 | Run an APT command, checking output and retrying if the fatal flag is set | ||
1451 | 318 | to True. | ||
1452 | 319 | |||
1453 | 320 | :param: cmd: str: The apt command to run. | ||
1454 | 321 | :param: fatal: bool: Whether the command's output should be checked and | ||
1455 | 322 | retried. | ||
1456 | 323 | """ | ||
1457 | 324 | env = os.environ.copy() | ||
1458 | 325 | |||
1459 | 326 | if 'DEBIAN_FRONTEND' not in env: | ||
1460 | 327 | env['DEBIAN_FRONTEND'] = 'noninteractive' | ||
1461 | 328 | |||
1462 | 329 | if fatal: | ||
1463 | 330 | retry_count = 0 | ||
1464 | 331 | result = None | ||
1465 | 332 | |||
1466 | 333 | # If the command is considered "fatal", we need to retry if the apt | ||
1467 | 334 | # lock was not acquired. | ||
1468 | 335 | |||
1469 | 336 | while result is None or result == APT_NO_LOCK: | ||
1470 | 337 | try: | ||
1471 | 338 | result = subprocess.check_call(cmd, env=env) | ||
1472 | 339 | except subprocess.CalledProcessError, e: | ||
1473 | 340 | retry_count = retry_count + 1 | ||
1474 | 341 | if retry_count > APT_NO_LOCK_RETRY_COUNT: | ||
1475 | 342 | raise | ||
1476 | 343 | result = e.returncode | ||
1477 | 344 | log("Couldn't acquire DPKG lock. Will retry in {} seconds." | ||
1478 | 345 | "".format(APT_NO_LOCK_RETRY_DELAY)) | ||
1479 | 346 | time.sleep(APT_NO_LOCK_RETRY_DELAY) | ||
1480 | 347 | |||
1481 | 348 | else: | ||
1482 | 349 | subprocess.call(cmd, env=env) | ||
1483 | 0 | 350 | ||
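A quick note on how the apt/source helpers above are meant to be driven from a hook. The sketch below only uses functions defined in this file; the package names are illustrative, not what the charm installs:

    from charmhelpers.fetch import (
        apt_install,
        configure_sources,
        filter_installed_packages,
    )

    def example_install():
        # Add any install_sources / install_keys from charm config and
        # refresh the apt cache (update=True calls apt_update(fatal=True)).
        configure_sources(update=True)
        # Only hand apt the packages that are not already installed.
        missing = filter_installed_packages(['openjdk-7-jdk', 'maven'])
        if missing:
            apt_install(missing, fatal=True)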
1484 | === added file 'hooks/charmhelpers/fetch/archiveurl.py' | |||
1485 | --- hooks/charmhelpers/fetch/archiveurl.py 1970-01-01 00:00:00 +0000 | |||
1486 | +++ hooks/charmhelpers/fetch/archiveurl.py 2014-06-26 20:05:25 +0000 | |||
1487 | @@ -0,0 +1,63 @@ | |||
1488 | 1 | import os | ||
1489 | 2 | import urllib2 | ||
1490 | 3 | import urlparse | ||
1491 | 4 | |||
1492 | 5 | from charmhelpers.fetch import ( | ||
1493 | 6 | BaseFetchHandler, | ||
1494 | 7 | UnhandledSource | ||
1495 | 8 | ) | ||
1496 | 9 | from charmhelpers.payload.archive import ( | ||
1497 | 10 | get_archive_handler, | ||
1498 | 11 | extract, | ||
1499 | 12 | ) | ||
1500 | 13 | from charmhelpers.core.host import mkdir | ||
1501 | 14 | |||
1502 | 15 | |||
1503 | 16 | class ArchiveUrlFetchHandler(BaseFetchHandler): | ||
1504 | 17 | """Handler for archives via generic URLs""" | ||
1505 | 18 | def can_handle(self, source): | ||
1506 | 19 | url_parts = self.parse_url(source) | ||
1507 | 20 | if url_parts.scheme not in ('http', 'https', 'ftp', 'file'): | ||
1508 | 21 | return "Wrong source type" | ||
1509 | 22 | if get_archive_handler(self.base_url(source)): | ||
1510 | 23 | return True | ||
1511 | 24 | return False | ||
1512 | 25 | |||
1513 | 26 | def download(self, source, dest): | ||
1514 | 27 | # propagate all exceptions | ||
1515 | 28 | # URLError, OSError, etc | ||
1516 | 29 | proto, netloc, path, params, query, fragment = urlparse.urlparse(source) | ||
1517 | 30 | if proto in ('http', 'https'): | ||
1518 | 31 | auth, barehost = urllib2.splituser(netloc) | ||
1519 | 32 | if auth is not None: | ||
1520 | 33 | source = urlparse.urlunparse((proto, barehost, path, params, query, fragment)) | ||
1521 | 34 | username, password = urllib2.splitpasswd(auth) | ||
1522 | 35 | passman = urllib2.HTTPPasswordMgrWithDefaultRealm() | ||
1523 | 36 | # Realm is set to None in add_password to force the username and password | ||
1524 | 37 | # to be used whatever the realm | ||
1525 | 38 | passman.add_password(None, source, username, password) | ||
1526 | 39 | authhandler = urllib2.HTTPBasicAuthHandler(passman) | ||
1527 | 40 | opener = urllib2.build_opener(authhandler) | ||
1528 | 41 | urllib2.install_opener(opener) | ||
1529 | 42 | response = urllib2.urlopen(source) | ||
1530 | 43 | try: | ||
1531 | 44 | with open(dest, 'w') as dest_file: | ||
1532 | 45 | dest_file.write(response.read()) | ||
1533 | 46 | except Exception as e: | ||
1534 | 47 | if os.path.isfile(dest): | ||
1535 | 48 | os.unlink(dest) | ||
1536 | 49 | raise e | ||
1537 | 50 | |||
1538 | 51 | def install(self, source): | ||
1539 | 52 | url_parts = self.parse_url(source) | ||
1540 | 53 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched') | ||
1541 | 54 | if not os.path.exists(dest_dir): | ||
1542 | 55 | mkdir(dest_dir, perms=0755) | ||
1543 | 56 | dld_file = os.path.join(dest_dir, os.path.basename(url_parts.path)) | ||
1544 | 57 | try: | ||
1545 | 58 | self.download(source, dld_file) | ||
1546 | 59 | except urllib2.URLError as e: | ||
1547 | 60 | raise UnhandledSource(e.reason) | ||
1548 | 61 | except OSError as e: | ||
1549 | 62 | raise UnhandledSource(e.strerror) | ||
1550 | 63 | return extract(dld_file) | ||
1551 | 0 | 64 | ||
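This handler is normally reached through install_remote() in fetch/__init__.py rather than instantiated directly: the first handler whose can_handle() returns True downloads the file into $CHARM_DIR/fetched and extracts it. A minimal sketch with a hypothetical tarball URL:

    from charmhelpers.fetch import install_remote

    # Downloads to $CHARM_DIR/fetched/<basename> and returns the extracted path.
    extracted = install_remote('http://example.com/hadoop-2.2.0.tar.gz')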
1552 | === added file 'hooks/charmhelpers/fetch/bzrurl.py' | |||
1553 | --- hooks/charmhelpers/fetch/bzrurl.py 1970-01-01 00:00:00 +0000 | |||
1554 | +++ hooks/charmhelpers/fetch/bzrurl.py 2014-06-26 20:05:25 +0000 | |||
1555 | @@ -0,0 +1,50 @@ | |||
1556 | 1 | import os | ||
1557 | 2 | from charmhelpers.fetch import ( | ||
1558 | 3 | BaseFetchHandler, | ||
1559 | 4 | UnhandledSource | ||
1560 | 5 | ) | ||
1561 | 6 | from charmhelpers.core.host import mkdir | ||
1562 | 7 | |||
1563 | 8 | try: | ||
1564 | 9 | from bzrlib.branch import Branch | ||
1565 | 10 | except ImportError: | ||
1566 | 11 | from charmhelpers.fetch import apt_install | ||
1567 | 12 | apt_install("python-bzrlib") | ||
1568 | 13 | from bzrlib.branch import Branch | ||
1569 | 14 | |||
1570 | 15 | |||
1571 | 16 | class BzrUrlFetchHandler(BaseFetchHandler): | ||
1572 | 17 | """Handler for bazaar branches via generic and lp URLs""" | ||
1573 | 18 | def can_handle(self, source): | ||
1574 | 19 | url_parts = self.parse_url(source) | ||
1575 | 20 | if url_parts.scheme not in ('bzr+ssh', 'lp'): | ||
1576 | 21 | return False | ||
1577 | 22 | else: | ||
1578 | 23 | return True | ||
1579 | 24 | |||
1580 | 25 | def branch(self, source, dest): | ||
1581 | 26 | url_parts = self.parse_url(source) | ||
1582 | 27 | # If we use lp:branchname scheme we need to load plugins | ||
1583 | 28 | if not self.can_handle(source): | ||
1584 | 29 | raise UnhandledSource("Cannot handle {}".format(source)) | ||
1585 | 30 | if url_parts.scheme == "lp": | ||
1586 | 31 | from bzrlib.plugin import load_plugins | ||
1587 | 32 | load_plugins() | ||
1588 | 33 | try: | ||
1589 | 34 | remote_branch = Branch.open(source) | ||
1590 | 35 | remote_branch.bzrdir.sprout(dest).open_branch() | ||
1591 | 36 | except Exception as e: | ||
1592 | 37 | raise e | ||
1593 | 38 | |||
1594 | 39 | def install(self, source): | ||
1595 | 40 | url_parts = self.parse_url(source) | ||
1596 | 41 | branch_name = url_parts.path.strip("/").split("/")[-1] | ||
1597 | 42 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", | ||
1598 | 43 | branch_name) | ||
1599 | 44 | if not os.path.exists(dest_dir): | ||
1600 | 45 | mkdir(dest_dir, perms=0755) | ||
1601 | 46 | try: | ||
1602 | 47 | self.branch(source, dest_dir) | ||
1603 | 48 | except OSError as e: | ||
1604 | 49 | raise UnhandledSource(e.strerror) | ||
1605 | 50 | return dest_dir | ||
1606 | 0 | 51 | ||
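For contrast, the bzr handler only claims 'bzr+ssh' and 'lp' URLs, so plain http sources fall through to the archive handler above. Assuming python-bzrlib is importable, the selection logic can be checked directly:

    from charmhelpers.fetch.bzrurl import BzrUrlFetchHandler

    handler = BzrUrlFetchHandler()
    assert handler.can_handle('lp:charm-helpers') is True
    assert handler.can_handle('http://example.com/some-branch') is False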
1607 | === added file 'hooks/charmhelpers/setup.py' | |||
1608 | --- hooks/charmhelpers/setup.py 1970-01-01 00:00:00 +0000 | |||
1609 | +++ hooks/charmhelpers/setup.py 2014-06-26 20:05:25 +0000 | |||
1610 | @@ -0,0 +1,12 @@ | |||
1611 | 1 | #!/usr/bin/env python | ||
1612 | 2 | |||
1613 | 3 | from distutils.core import setup | ||
1614 | 4 | |||
1615 | 5 | setup(name='charmhelpers', | ||
1616 | 6 | version='1.0', | ||
1617 | 7 | description='this is dumb', | ||
1618 | 8 | author='nobody', | ||
1619 | 9 | author_email='dummy@amulet', | ||
1620 | 10 | url='http://google.com', | ||
1621 | 11 | packages=[], | ||
1622 | 12 | ) | ||
1623 | 0 | 13 | ||
1624 | === added file 'hooks/common.py' | |||
1625 | --- hooks/common.py 1970-01-01 00:00:00 +0000 | |||
1626 | +++ hooks/common.py 2014-06-26 20:05:25 +0000 | |||
1627 | @@ -0,0 +1,231 @@ | |||
1628 | 1 | #!/usr/bin/env python | ||
1629 | 2 | |||
1630 | 3 | import grp | ||
1631 | 4 | import os | ||
1632 | 5 | import pwd | ||
1633 | 6 | import subprocess | ||
1634 | 7 | import sys | ||
1635 | 8 | import tarfile | ||
1636 | 9 | import argparse | ||
1637 | 10 | import logging | ||
1638 | 11 | |||
1639 | 12 | from shutil import rmtree, copyfile | ||
1640 | 13 | from charmhelpers.core.hookenv import log | ||
1641 | 14 | # Required for unit tests... :( | ||
1642 | 15 | try: | ||
1643 | 16 | from charmhelpers.fetch import apt_install, apt_update | ||
1644 | 17 | except ImportError: | ||
1645 | 18 | pass | ||
1646 | 19 | |||
1647 | 20 | FORMAT = '%(asctime)-15s %(levelname)-8s %(message)s' | ||
1648 | 21 | logging.basicConfig(format=FORMAT, level=logging.INFO) | ||
1649 | 22 | |||
1650 | 23 | |||
1651 | 24 | # ########################################## | ||
1652 | 25 | # Globals | ||
1653 | 26 | # ########################################## | ||
1654 | 27 | JAVA_VERSION = "7" | ||
1655 | 28 | JAVA_HOME = None | ||
1656 | 29 | HADOOP_VERSION = "hadoop-2.2.0" | ||
1657 | 30 | PLATFORM_ARCH = "amd64" | ||
1658 | 31 | HOME_DIR = os.path.join(os.path.sep, "home", "ubuntu") | ||
1659 | 32 | HADOOP_DIR = os.path.join(HOME_DIR, "hadoop") | ||
1660 | 33 | HADOOP_TMP_DIR = os.path.join(HADOOP_DIR, "tmp") | ||
1661 | 34 | HADOOP_INSTALL = os.path.join(HADOOP_DIR, HADOOP_VERSION) | ||
1662 | 35 | HADOOP_CONF_DIR = os.path.join(HADOOP_INSTALL, "etc/hadoop") | ||
1663 | 36 | |||
1664 | 37 | |||
1665 | 38 | # ########################################## | ||
1666 | 39 | # Utility Methods | ||
1667 | 40 | # ########################################## | ||
1668 | 41 | |||
1669 | 42 | def install_dev_packages(): | ||
1670 | 43 | packages = ['maven', | ||
1671 | 44 | 'build-essential', | ||
1672 | 45 | 'autoconf', | ||
1673 | 46 | 'automake', | ||
1674 | 47 | 'libtool', | ||
1675 | 48 | 'cmake', | ||
1676 | 49 | 'zlib1g-dev', | ||
1677 | 50 | 'pkg-config', | ||
1678 | 51 | 'libssl-dev', | ||
1679 | 52 | 'snappy', | ||
1680 | 53 | 'libsnappy-dev', | ||
1681 | 54 | 'openjdk-7-jdk', | ||
1682 | 55 | 'python-pip', | ||
1683 | 56 | 'python-jinja2', | ||
1684 | 57 | ] | ||
1685 | 58 | apt_update() | ||
1686 | 59 | apt_install(packages) | ||
1687 | 60 | |||
1688 | 61 | |||
1689 | 62 | def ssh_wizardry(): | ||
1690 | 63 | # Set NonStrict Hostkey Checking to .ssh config | ||
1691 | 64 | # this both confuses and angers me! | ||
1692 | 65 | log("Setting NonStrict HostKey Checking for SSH", "INFO") | ||
1693 | 66 | |||
1694 | 67 | nonstrict = "Host *\n\tStrictHostKeyChecking no" | ||
1695 | 68 | with open("{dir}/.ssh/config".format(dir=HOME_DIR), 'w+') as f: | ||
1696 | 69 | f.write(nonstrict) | ||
1697 | 70 | |||
1698 | 71 | keyfile = os.path.join(os.path.sep, 'home', 'ubuntu', '.ssh', 'id_rsa') | ||
1699 | 72 | cmd = 'yes | ssh-keygen -t rsa -N "" -f {d}'.format(d=keyfile) | ||
1700 | 73 | ps = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, | ||
1701 | 74 | stderr=subprocess.STDOUT) | ||
1702 | 75 | output = ps.communicate()[0] | ||
1703 | 76 | log("Output of ssh keygen: {o}".format(o=output), "INFO") | ||
1704 | 77 | with open("{dir}/.ssh/id_rsa.pub".format(dir=HOME_DIR), 'r') as f: | ||
1705 | 78 | hostkey = f.read() | ||
1706 | 79 | |||
1707 | 80 | auth_keys = "{dir}/.ssh/authorized_keys".format(dir=HOME_DIR) | ||
1708 | 81 | with open(auth_keys, 'a') as f: | ||
1709 | 82 | f.write(hostkey) | ||
1710 | 83 | subprocess.call(['chown', '-R', 'ubuntu.ubuntu', | ||
1711 | 84 | "{dir}/.ssh".format(dir=HOME_DIR)]) | ||
1712 | 85 | |||
1713 | 86 | |||
1714 | 87 | def set_java_home(): | ||
1715 | 88 | jbin = 'java-{v}-openjdk-{a}'.format(v=JAVA_VERSION, a=PLATFORM_ARCH) | ||
1716 | 89 | jpath = subprocess.check_output(['find', '/usr/', '-name', jbin]) | ||
1717 | 90 | if not jpath: | ||
1718 | 91 | raise ValueError("Unable to locate Java Path for %s" % jbin) | ||
1719 | 92 | script = '{d}/etc/hadoop/hadoop-env.sh'.format(d=HADOOP_INSTALL) | ||
1720 | 93 | |||
1721 | 94 | with open(script) as f: | ||
1722 | 95 | contents = f.readlines() | ||
1723 | 96 | for l in range(0, len(contents)): | ||
1724 | 97 | if contents[l].startswith('export JAVA_HOME='): | ||
1725 | 98 | contents[l] = "export JAVA_HOME='{jp}'".format( | ||
1726 | 99 | jp=jpath.strip()) | ||
1727 | 100 | |||
1728 | 101 | with open(script, 'w') as f: | ||
1729 | 102 | f.writelines(contents) | ||
1730 | 103 | global JAVA_HOME  # update the module-level value consumed later by add_shim() | ||
1731 | 104 | JAVA_HOME = jpath.strip() | ||
1732 | 105 | log('Set JAVA_HOME in {p} to {v}'.format(p=script, v=JAVA_HOME), 'INFO') | ||
1734 | 107 | |||
1735 | 108 | |||
1736 | 109 | # TODO: this should be configurable at some point and reach out | ||
1737 | 110 | # over the wire to install w/ Sha1 Sig Validation | ||
1738 | 111 | def extract_hadoop(): | ||
1739 | 112 | pa = os.path.join("{d}", "files", "archives", "{hdpver}.tar.gz") | ||
1740 | 113 | tarballpath = os.path.join(pa.format( | ||
1741 | 114 | d=os.environ['CHARM_DIR'], | ||
1742 | 115 | hdpver=HADOOP_VERSION)) | ||
1743 | 116 | |||
1744 | 117 | if tarfile.is_tarfile(tarballpath): | ||
1745 | 118 | tball = tarfile.open(tarballpath) | ||
1746 | 119 | tball.extractall(HADOOP_DIR) | ||
1747 | 120 | else: | ||
1748 | 121 | log("Unable to extract Hadoop Tarball {tb}".format(tb=tarballpath), | ||
1749 | 122 | "Warning") | ||
1750 | 123 | |||
1751 | 124 | |||
1752 | 125 | def add_shim(): | ||
1753 | 126 | if os.path.exists("{d}/.hadoop_shim".format(d=HOME_DIR)): | ||
1754 | 127 | log("Found .hadoop_shim, bailing", "INFO") | ||
1755 | 128 | log("To re-execute the template, re-run this hook after removing" | ||
1756 | 129 | "{d}/.hadoop_shim".format(d=HOME_DIR), 'INFO') | ||
1757 | 130 | return | ||
1758 | 131 | |||
1759 | 132 | # TODO: Refactor this nasty shell callout with some implicit python | ||
1760 | 133 | # by requiring the templatewiz class, and passing args. | ||
1761 | 134 | templ_bin_path = os.path.join('files', 'template.py') | ||
1762 | 135 | template_path = os.path.join(os.environ['CHARM_DIR'], 'files', | ||
1763 | 136 | 'templates', 'defaults.j2') | ||
1764 | 137 | user_path = os.path.join(os.path.sep, 'home', 'ubuntu', '.hadoop_shim') | ||
1765 | 138 | subprocess.call(['python', templ_bin_path, '-t', template_path, '-o', | ||
1766 | 139 | user_path, 'java_home={j}'.format(j=JAVA_HOME), | ||
1767 | 140 | 'hadoop_home={h}'.format(h=HADOOP_INSTALL), | ||
1768 | 141 | 'conf={c}'.format(c=HADOOP_CONF_DIR)]) | ||
1769 | 142 | |||
1770 | 143 | with open('{d}/.profile'.format(d=HOME_DIR), 'a') as f: | ||
1771 | 144 | f.write('[ -f "$HOME/.hadoop_shim" ] && . "$HOME/.hadoop_shim"') | ||
1772 | 145 | |||
1773 | 146 | |||
1774 | 147 | def template(src, dst, owner=0, group=0, mod=0644): | ||
1775 | 148 | copyfile(src, dst) | ||
1776 | 149 | os.chown(dst, owner, group) | ||
1777 | 150 | os.chmod(dst, mod) | ||
1778 | 151 | |||
1779 | 152 | |||
1780 | 153 | def copy_templates(): | ||
1781 | 154 | # Get UID's | ||
1782 | 155 | hadoop_uid = pwd.getpwnam('ubuntu').pw_uid | ||
1783 | 156 | hadoop_gid = grp.getgrnam('ubuntu').gr_gid | ||
1784 | 157 | |||
1785 | 158 | srcp = os.path.join('files', 'hadoop') | ||
1786 | 159 | dstp = os.path.join(HADOOP_INSTALL, 'etc', 'hadoop') | ||
1787 | 160 | template(os.path.join(srcp, 'core-site.xml'), | ||
1788 | 161 | os.path.join(dstp, 'core-site.xml'), hadoop_uid, | ||
1789 | 162 | hadoop_gid) | ||
1790 | 163 | template(os.path.join(srcp, 'hdfs-site.xml'), | ||
1791 | 164 | os.path.join(dstp, 'hdfs-site.xml'), hadoop_uid, | ||
1792 | 165 | hadoop_gid) | ||
1793 | 166 | template(os.path.join(srcp, 'yarn-site.xml'), | ||
1794 | 167 | os.path.join(dstp, 'yarn-site.xml'), hadoop_uid, | ||
1795 | 168 | hadoop_gid) | ||
1796 | 169 | template(os.path.join(srcp, 'mapreduce-site.xml'), | ||
1797 | 170 | os.path.join(dstp, 'mapred-site.xml'), hadoop_uid, | ||
1798 | 171 | hadoop_gid) | ||
1799 | 172 | template(os.path.join(srcp, 'hdfs_format.sh'), | ||
1800 | 173 | os.path.join(HOME_DIR, 'hdfs_format.sh'), hadoop_uid, | ||
1801 | 174 | hadoop_gid, 0777) | ||
1802 | 175 | |||
1803 | 176 | |||
1804 | 177 | # ########################################## | ||
1805 | 178 | # Charm Hooks | ||
1806 | 179 | # ########################################## | ||
1807 | 180 | |||
1808 | 181 | def install(): | ||
1809 | 182 | # Remove the hadoop directory if it exists | ||
1810 | 183 | if os.path.exists(HADOOP_DIR): | ||
1811 | 184 | rmtree(HADOOP_DIR) | ||
1812 | 185 | ssh_wizardry() | ||
1813 | 186 | |||
1814 | 187 | os.makedirs(HADOOP_DIR) | ||
1815 | 188 | os.makedirs(HADOOP_TMP_DIR) | ||
1816 | 189 | install_dev_packages() | ||
1817 | 190 | extract_hadoop() | ||
1818 | 191 | set_java_home() | ||
1819 | 192 | |||
1820 | 193 | # Create HDFS Dir Tree | ||
1821 | 194 | nndir = os.path.join(HADOOP_DIR, "data", "hdfs", "namenode") | ||
1822 | 195 | if not os.path.exists(nndir): | ||
1823 | 196 | os.makedirs(nndir) | ||
1824 | 197 | dndir = os.path.join(HADOOP_DIR, "data", "hdfs", "datanode") | ||
1825 | 198 | if not os.path.exists(dndir): | ||
1826 | 199 | os.makedirs(dndir) | ||
1828 | 201 | |||
1829 | 202 | copy_templates() | ||
1830 | 203 | add_shim() | ||
1831 | 204 | |||
1832 | 205 | # Call the HDFS Format script post installation | ||
1833 | 206 | cmd = "{d}{s}hdfs_format.sh".format(d=HOME_DIR, s=os.path.sep) | ||
1834 | 207 | log(cmd, "INFO") | ||
1835 | 208 | subprocess.call(cmd) | ||
1836 | 209 | |||
1837 | 210 | # Ensure the Ubuntu user owns everything in the hadoop tree | ||
1838 | 211 | # Stick a fork in it, we're done. | ||
1839 | 212 | subprocess.call(['chown', '-R', 'ubuntu.ubuntu', HADOOP_DIR]) | ||
1840 | 213 | log("Install complete.", "INFO") | ||
1841 | 214 | |||
1842 | 215 | |||
1843 | 216 | if __name__ == "__main__": | ||
1844 | 217 | parser = argparse.ArgumentParser() | ||
1845 | 218 | parser.add_argument('-H', '--hook_name', dest='hook_name', | ||
1846 | 219 | help='hook to call') | ||
1847 | 220 | |||
1848 | 221 | args = parser.parse_args() | ||
1849 | 222 | |||
1850 | 223 | if args.hook_name is not None: | ||
1851 | 224 | hook_name = args.hook_name | ||
1852 | 225 | else: | ||
1853 | 226 | hook_name = os.path.basename(sys.argv[0]) | ||
1854 | 227 | |||
1855 | 228 | if hook_name == "install": | ||
1856 | 229 | install() | ||
1857 | 230 | else: | ||
1858 | 231 | pass | ||
1859 | 0 | 232 | ||
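The __main__ block above is what lets a single file back every hook: Juju runs the hook symlink (hooks/install now targets common.py, per the diff below), the basename of argv[0] picks the code path, and -H/--hook_name overrides it for manual runs (python hooks/common.py -H install). If more hooks are added later, the if/else chain could become a small dispatch table; a hypothetical sketch, not part of this branch:

    HOOKS = {
        'install': install,
        # future hooks, e.g. 'config-changed', would register here
    }

    def dispatch(hook_name):
        handler = HOOKS.get(hook_name)
        if handler is None:
            log("No handler for hook {h}, skipping".format(h=hook_name), "INFO")
            return
        return handler()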
1860 | === modified file 'hooks/install' | |||
1861 | --- hooks/install 2014-05-22 17:00:22 +0000 | |||
1862 | +++ hooks/install 1970-01-01 00:00:00 +0000 | |||
1863 | @@ -1,58 +0,0 @@ | |||
1864 | 1 | #!/bin/bash | ||
1865 | 2 | set -ex | ||
1866 | 3 | |||
1867 | 4 | source files/upstart/hadoop_env | ||
1868 | 5 | |||
1869 | 6 | if [ -d $HADOOP_DIR ]; then | ||
1870 | 7 | rm -rf $HADOOP_DIR | ||
1871 | 8 | fi | ||
1872 | 9 | |||
1873 | 10 | echo -e "Host *\n\tStrictHostKeyChecking no" > /home/ubuntu/.ssh/config | ||
1874 | 11 | |||
1875 | 12 | yes | ssh-keygen -t rsa -N "" -f /home/ubuntu/.ssh/id_rsa | ||
1876 | 13 | cat /home/ubuntu/.ssh/id_rsa.pub >> /home/ubuntu/.ssh/authorized_keys | ||
1877 | 14 | chown -R ubuntu.ubuntu /home/ubuntu/.ssh | ||
1878 | 15 | # ssh localhost just to make sure | ||
1879 | 16 | # ssh -o StrictHostKeyChecking=no localhost | ||
1880 | 17 | |||
1881 | 18 | juju-log "Installing JAVA!" | ||
1882 | 19 | |||
1883 | 20 | mkdir -p $HADOOP_DIR | ||
1884 | 21 | apt-get update -qqy | ||
1885 | 22 | apt-get install -y openjdk-7-jdk | ||
1886 | 23 | JAVA_HOME_PATH=$(find /usr/ -name java-$JAVA_VERSION-openjdk-$PLATFORM_ARCH) | ||
1887 | 24 | |||
1888 | 25 | juju-log "installing other development packages" | ||
1889 | 26 | apt-get -qqy install maven build-essential autoconf automake libtool cmake zlib1g-dev pkg-config libssl-dev snappy libsnappy-dev | ||
1890 | 27 | juju-log "Now for Hadoop!" | ||
1891 | 28 | |||
1892 | 29 | mkdir -p $HADOOP_DIR | ||
1893 | 30 | mkdir -p $HADOOP_TMP_DIR | ||
1894 | 31 | # cd $HADOOP_DIR | ||
1895 | 32 | tar -xvzf files/archives/$HADOOP_VERSION.tar.gz -C $HADOOP_DIR | ||
1896 | 33 | cd $HADOOP_DIR | ||
1897 | 34 | sed -ir 's|export JAVA_HOME=.*|export JAVA_HOME='$JAVA_HOME_PATH'|' $(find -name hadoop-env.sh) | ||
1898 | 35 | |||
1899 | 36 | mkdir -p $HADOOP_DIR/data/hdfs/namenode | ||
1900 | 37 | mkdir -p $HADOOP_DIR/data/hdfs/datanode | ||
1901 | 38 | cd $CHARM_DIR | ||
1902 | 39 | install -o root -g root -m 0644 files/upstart/defaults /etc/default/hadoop | ||
1903 | 40 | |||
1904 | 41 | # Do a bunch of fun stuff in XML | ||
1905 | 42 | |||
1906 | 43 | install -o ubuntu -g ubuntu -m 0644 files/hadoop/core-site.xml $HADOOP_DIR/$HADOOP_VERSION/etc/hadoop/core-site.xml | ||
1907 | 44 | install -o ubuntu -g ubuntu -m 0644 files/hadoop/hdfs-site.xml $HADOOP_DIR/$HADOOP_VERSION/etc/hadoop/hdfs-site.xml | ||
1908 | 45 | install -o ubuntu -g ubuntu -m 0644 files/hadoop/yarn-site.xml $HADOOP_DIR/$HADOOP_VERSION/etc/hadoop/yarn-site.xml | ||
1909 | 46 | install -o ubuntu -g ubuntu -m 0644 files/hadoop/mapreduce-site.xml $HADOOP_DIR/$HADOOP_VERSION/etc/hadoop/mapred-site.xml | ||
1910 | 47 | |||
1911 | 48 | # Upstart | ||
1912 | 49 | |||
1913 | 50 | . /etc/default/hadoop $JAVA_HOME_PATH $HADOOP_INSTALL $HOME_DIR | ||
1914 | 51 | source $HOME_DIR/.profile | ||
1915 | 52 | hdfs namenode -format | ||
1916 | 53 | |||
1917 | 54 | chown -R ubuntu.ubuntu ~ubuntu/hadoop | ||
1918 | 55 | |||
1919 | 56 | juju-log "Install complete." | ||
1920 | 57 | |||
1921 | 58 | |||
1922 | 59 | 0 | ||
1923 | === target is u'common.py' | |||
1924 | === removed file 'hooks/relation-name-relation-broken' | |||
1925 | --- hooks/relation-name-relation-broken 2014-05-22 17:00:22 +0000 | |||
1926 | +++ hooks/relation-name-relation-broken 1970-01-01 00:00:00 +0000 | |||
1927 | @@ -1,2 +0,0 @@ | |||
1928 | 1 | #!/bin/sh | ||
1929 | 2 | # This hook runs when the full relation is removed (not just a single member) | ||
1930 | 3 | 0 | ||
1931 | === removed file 'hooks/relation-name-relation-changed' | |||
1932 | --- hooks/relation-name-relation-changed 2014-05-22 17:00:22 +0000 | |||
1933 | +++ hooks/relation-name-relation-changed 1970-01-01 00:00:00 +0000 | |||
1934 | @@ -1,9 +0,0 @@ | |||
1935 | 1 | #!/bin/bash | ||
1936 | 2 | # This must be renamed to the name of the relation. The goal here is to | ||
1937 | 3 | # affect any change needed by relationships being formed, modified, or broken | ||
1938 | 4 | # This script should be idempotent. | ||
1939 | 5 | juju-log $JUJU_REMOTE_UNIT modified its settings | ||
1940 | 6 | juju-log Relation settings: | ||
1941 | 7 | relation-get | ||
1942 | 8 | juju-log Relation members: | ||
1943 | 9 | relation-list | ||
1944 | 10 | 0 | ||
1945 | === removed file 'hooks/relation-name-relation-departed' | |||
1946 | --- hooks/relation-name-relation-departed 2014-05-22 17:00:22 +0000 | |||
1947 | +++ hooks/relation-name-relation-departed 1970-01-01 00:00:00 +0000 | |||
1948 | @@ -1,5 +0,0 @@ | |||
1949 | 1 | #!/bin/sh | ||
1950 | 2 | # This must be renamed to the name of the relation. The goal here is to | ||
1951 | 3 | # affect any change needed by the remote unit leaving the relationship. | ||
1952 | 4 | # This script should be idempotent. | ||
1953 | 5 | juju-log $JUJU_REMOTE_UNIT departed | ||
1954 | 6 | 0 | ||
1955 | === removed file 'hooks/relation-name-relation-joined' | |||
1956 | --- hooks/relation-name-relation-joined 2014-05-22 17:00:22 +0000 | |||
1957 | +++ hooks/relation-name-relation-joined 1970-01-01 00:00:00 +0000 | |||
1958 | @@ -1,5 +0,0 @@ | |||
1959 | 1 | #!/bin/sh | ||
1960 | 2 | # This must be renamed to the name of the relation. The goal here is to | ||
1961 | 3 | # affect any change needed by relationships being formed | ||
1962 | 4 | # This script should be idempotent. | ||
1963 | 5 | juju-log $JUJU_REMOTE_UNIT joined | ||
1964 | 6 | 0 | ||
1965 | === added directory 'tests' | |||
1966 | === added file 'tests/01_test_install_hook.py' | |||
1967 | --- tests/01_test_install_hook.py 1970-01-01 00:00:00 +0000 | |||
1968 | +++ tests/01_test_install_hook.py 2014-06-26 20:05:25 +0000 | |||
1969 | @@ -0,0 +1,22 @@ | |||
1970 | 1 | import sys | ||
1971 | 2 | import os | ||
1972 | 3 | import unittest | ||
1973 | 4 | |||
1974 | 5 | from mock import patch, Mock | ||
1975 | 6 | sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'hooks')) | ||
1976 | 7 | |||
1977 | 8 | import common | ||
1978 | 9 | |||
1979 | 10 | |||
1980 | 11 | class TestCommon(unittest.TestCase): | ||
1981 | 12 | |||
1982 | 13 | def test_constants(self): | ||
1983 | 14 | # The rewrite drops the old HadoopDevel class; the same values now | ||
1984 | 15 | # live as module-level constants in hooks/common.py | ||
1985 | 16 | self.assertEqual(common.JAVA_VERSION, "7") | ||
1986 | 17 | self.assertEqual(common.HADOOP_VERSION, "hadoop-2.2.0") | ||
1987 | 18 | self.assertEqual(common.PLATFORM_ARCH, "amd64") | ||
1988 | 19 | self.assertEqual(common.HOME_DIR, "/home/ubuntu") | ||
1989 | 20 | self.assertEqual(common.HADOOP_TMP_DIR, "/home/ubuntu/hadoop/tmp") | ||
1990 | 21 | self.assertEqual(common.HADOOP_INSTALL, "/home/ubuntu/hadoop/hadoop-2.2.0") | ||
1992 | 0 | 23 | ||
1993 | === added file 'tests/tests.yaml' | |||
1994 | --- tests/tests.yaml 1970-01-01 00:00:00 +0000 | |||
1995 | +++ tests/tests.yaml 2014-06-26 20:05:25 +0000 | |||
1996 | @@ -0,0 +1,10 @@ | |||
1997 | 1 | bootstrap: false | ||
1998 | 2 | reset: false | ||
1999 | 3 | setup: script | ||
2000 | 4 | teardown: script | ||
2001 | 5 | tests: "[0-9]*" | ||
2002 | 6 | virtualenv: false | ||
2003 | 7 | #sources: | ||
2004 | 8 | packages: | ||
2005 | 9 | - amulet | ||
2006 | 10 | - python-requests |
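Since tests.yaml pulls in amulet, a deployment-level test can sit alongside the unit test above. A rough sketch of what one could look like; the service name and the path being checked are assumptions drawn from the install hook in common.py:

    #!/usr/bin/env python
    import amulet

    d = amulet.Deployment(series='trusty')
    # amulet deploys the local charm when the name matches the charm under test
    d.add('hadoop2-devel')
    try:
        d.setup(timeout=900)
    except amulet.helpers.TimeoutError:
        amulet.raise_status(amulet.SKIP, msg='Environment was not stood up in time')

    unit = d.sentry.unit['hadoop2-devel/0']
    # The install hook unpacks hadoop under /home/ubuntu/hadoop
    output, code = unit.run('test -d /home/ubuntu/hadoop')
    amulet.raise_status(amulet.PASS if code == 0 else amulet.FAIL,
                        msg='hadoop tree missing on the deployed unit')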
Please remove the leftover file called old_install.