Merge lp:~lazypower/charms/trusty/hadoop2-devel/python_rewrite into lp:~asanjar/charms/trusty/hadoop2-devel/trunk
- Trusty Tahr (14.04)
- python_rewrite
- Merge into trunk
Proposed by
Charles Butler
Status: | Merged |
---|---|
Merged at revision: | 2 |
Proposed branch: | lp:~lazypower/charms/trusty/hadoop2-devel/python_rewrite |
Merge into: | lp:~asanjar/charms/trusty/hadoop2-devel/trunk |
Diff against target: |
2006 lines (+1779/-101) 22 files modified
Makefile (+19/-0) charm-helpers.yaml (+5/-0) config.yaml (+0/-9) files/hadoop/hdfs_format.sh (+15/-0) files/template.py (+56/-0) files/templates/defaults.j2 (+10/-0) files/upstart/setenv.sh (+0/-13) hooks/charmhelpers/core/fstab.py (+114/-0) hooks/charmhelpers/core/hookenv.py (+498/-0) hooks/charmhelpers/core/host.py (+325/-0) hooks/charmhelpers/fetch/__init__.py (+349/-0) hooks/charmhelpers/fetch/archiveurl.py (+63/-0) hooks/charmhelpers/fetch/bzrurl.py (+50/-0) hooks/charmhelpers/setup.py (+12/-0) hooks/common.py (+231/-0) hooks/install (+0/-58) hooks/relation-name-relation-broken (+0/-2) hooks/relation-name-relation-changed (+0/-9) hooks/relation-name-relation-departed (+0/-5) hooks/relation-name-relation-joined (+0/-5) tests/01_test_install_hook.py (+22/-0) tests/tests.yaml (+10/-0) |
To merge this branch: | bzr merge lp:~lazypower/charms/trusty/hadoop2-devel/python_rewrite |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
amir sanjar | Approve | ||
Review via email: mp+224647@code.launchpad.net |
Commit message
Description of the change
Removes the class component of the code, and leaves it as a straight shot encapsulated procedural python script.
Adds argparse for future modifications of the charm, and defines the callstack using argparse.
To post a comment you must log in.
- 10. By Charles Butler
-
Removed the old_install archived hook
Revision history for this message
amir sanjar (asanjar) : | # |
review:
Approve
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === added file 'Makefile' |
2 | --- Makefile 1970-01-01 00:00:00 +0000 |
3 | +++ Makefile 2014-06-26 20:05:25 +0000 |
4 | @@ -0,0 +1,19 @@ |
5 | +#!/usr/bin/make |
6 | +PYTHON := /usr/bin/env python |
7 | + |
8 | +sync-charm-helpers: bin/charm_helpers_sync.py |
9 | + @$(PYTHON) bin/charm_helpers_sync.py -c charm-helpers.yaml |
10 | + |
11 | +bin/charm_helpers_sync.py: |
12 | + @mkdir -p bin |
13 | + @bzr cat lp:charm-helpers/tools/charm_helpers_sync/charm_helpers_sync.py > bin/charm_helpers_sync.py |
14 | + |
15 | +autotest: |
16 | + watchmedo shell-command --patterns="*.py" --recursive --command="make test" |
17 | + |
18 | +test: |
19 | + @nosetests tests/*.py |
20 | + |
21 | +clean: |
22 | + @find -name *.pyc | xargs rm |
23 | + @rm .coverage |
24 | |
25 | === added file 'charm-helpers.yaml' |
26 | --- charm-helpers.yaml 1970-01-01 00:00:00 +0000 |
27 | +++ charm-helpers.yaml 2014-06-26 20:05:25 +0000 |
28 | @@ -0,0 +1,5 @@ |
29 | +branch: lp:charm-helpers |
30 | +destination: hooks/charmhelpers |
31 | +include: |
32 | + - core |
33 | + - fetch |
34 | |
35 | === modified file 'config.yaml' |
36 | --- config.yaml 2014-05-22 17:00:22 +0000 |
37 | +++ config.yaml 2014-06-26 20:05:25 +0000 |
38 | @@ -1,14 +1,5 @@ |
39 | options: |
40 | - string-option: |
41 | - type: string |
42 | - default: "Default Value" |
43 | - description: "A short description of the configuration option" |
44 | boolean-option: |
45 | type: boolean |
46 | default: False |
47 | description: "A short description of the configuration option" |
48 | - int-option: |
49 | - type: int |
50 | - default: 9001 |
51 | - description: "A short description of the configuration option" |
52 | - |
53 | |
54 | === added file 'files/__init__.py' |
55 | === added file 'files/hadoop/hdfs_format.sh' |
56 | --- files/hadoop/hdfs_format.sh 1970-01-01 00:00:00 +0000 |
57 | +++ files/hadoop/hdfs_format.sh 2014-06-26 20:05:25 +0000 |
58 | @@ -0,0 +1,15 @@ |
59 | +#!/bin/sh |
60 | +HADOOP_VERSION="hadoop-2.2.0" |
61 | +HOME_DIR="/home/ubuntu" |
62 | +HADOOP_DIR="/home/ubuntu/hadoop" |
63 | +HADOOP_TMP_DIR=$HADOOP_DIR/tmp |
64 | +export HADOOP_INSTALL=$HADOOP_DIR/$HADOOP_VERSION |
65 | +export HADOOP_HOME=$HADOOP_INSTALL |
66 | +export HADOOP_COMMON_HOME=$HADOOP_INSTALL |
67 | +export HADOOP_HDFS_HOME=$HADOOP_INSTALL |
68 | +export HADOOP_MAPRED_HOME=$HADOOP_INSTALL |
69 | +export HADOOP_YARN_HOME=$HADOOP_INSTALL |
70 | +export PATH=$PATH:$HADOOP_INSTALL/bin:$HADOOP_INSTALL/sbin |
71 | +export YARN_HOME=$HADOOP_INSTALL |
72 | +export HADOOP_CONF_DIR=$HADOOP_INSTALL/etc/hadoop |
73 | +hdfs namenode -format |
74 | |
75 | === added symlink 'files/template' |
76 | === target is u'template.py' |
77 | === added file 'files/template.py' |
78 | --- files/template.py 1970-01-01 00:00:00 +0000 |
79 | +++ files/template.py 2014-06-26 20:05:25 +0000 |
80 | @@ -0,0 +1,56 @@ |
81 | +#!/usr/bin/env python |
82 | + |
83 | +import argparse |
84 | +import logging |
85 | +import os |
86 | +import shutil |
87 | +import subprocess |
88 | + |
89 | +# Provide failout cases to install jinja2 templating and argparse. |
90 | +# This will not work in offline environments. |
91 | +try: |
92 | + from jinja2 import Template |
93 | +except: |
94 | + subprocess.call(['pip', 'install', 'jinja2']) |
95 | + raise("Attempted to install missing dependencies. Try re-running") |
96 | + |
97 | +logging.basicConfig(level=logging.INFO) |
98 | + |
99 | + |
100 | +class TemplateWiz: |
101 | + |
102 | + def __init__(self, args=None): |
103 | + self.values = {} |
104 | + parser = argparse.ArgumentParser() |
105 | + parser.add_argument("-t", "--template", help="Template Path") |
106 | + parser.add_argument("-o", "--output", help="Fullpath to output file") |
107 | + self.paths, data = parser.parse_known_args(args) |
108 | + if data: |
109 | + # Process key = value keys |
110 | + for item in data: |
111 | + kv = item.split('=') |
112 | + self.values[kv[0]] = kv[-1] |
113 | + |
114 | + def read_template(self): |
115 | + if not os.path.exists(self.paths.template): |
116 | + raise IOError("Error loading template: %s" % self.paths.template) |
117 | + else: |
118 | + with open(self.paths.template, 'r') as f: |
119 | + self.template = Template(f.read()) |
120 | + |
121 | + def write_template(self): |
122 | + if os.path.exists(self.paths.output): |
123 | + shutil.copy2(self.paths.output, "%s.bak" % self.paths.output) |
124 | + logging.info("Saving {f} as {f}.bak".format(f=self.paths.output)) |
125 | + with open(self.paths.output, 'w') as f: |
126 | + f.write(self.template.render(self.values)) |
127 | + logging.info('Rendered %s' % self.paths.output) |
128 | + |
129 | + def run(self): |
130 | + self.read_template() |
131 | + self.write_template() |
132 | + |
133 | + |
134 | +if __name__ == "__main__": |
135 | + tw = TemplateWiz() |
136 | + tw.run() |
137 | |
138 | === added directory 'files/templates' |
139 | === added file 'files/templates/defaults.j2' |
140 | --- files/templates/defaults.j2 1970-01-01 00:00:00 +0000 |
141 | +++ files/templates/defaults.j2 2014-06-26 20:05:25 +0000 |
142 | @@ -0,0 +1,10 @@ |
143 | +export JAVA_HOME={{java_home}} |
144 | +export HADOOP_INSTALL={{hadoop_home}} |
145 | +export HADOOP_HOME=$HADOOP_INSTALL |
146 | +export HADOOP_COMMON_HOME=$HADOOP_INSTALL |
147 | +export HADOOP_HDFS_HOME=$HADOOP_INSTALL |
148 | +export HADOOP_MAPRED_HOME=$HADOOP_INSTALL |
149 | +export HADOOP_YARN_HOME=$HADOOP_INSTALL |
150 | +export PATH=$JAVA_HOME/bin:$PATH:$HADOOP_INSTALL/bin:$HADOOP_INSTALL/sbin |
151 | +export YARN_HOME=$HADOOP_INSTALL |
152 | +export HADOOP_CONF_DIR={{conf}} |
153 | |
154 | === removed file 'files/upstart/setenv.sh' |
155 | --- files/upstart/setenv.sh 2014-05-22 17:00:22 +0000 |
156 | +++ files/upstart/setenv.sh 1970-01-01 00:00:00 +0000 |
157 | @@ -1,13 +0,0 @@ |
158 | -HADOOP_VERSION="hadoop-2.2.0" |
159 | -HOME_DIR="/home/ubuntu" |
160 | -HADOOP_DIR="/home/ubuntu/hadoop" |
161 | -HADOOP_TMP_DIR=$HADOOP_DIR/tmp |
162 | -export HADOOP_INSTALL=$HADOOP_DIR/$HADOOP_VERSION |
163 | -export HADOOP_HOME=$HADOOP_INSTALL |
164 | -export HADOOP_COMMON_HOME=$HADOOP_INSTALL |
165 | -export HADOOP_HDFS_HOME=$HADOOP_INSTALL |
166 | -export HADOOP_MAPRED_HOME=$HADOOP_INSTALL |
167 | -export HADOOP_YARN_HOME=$HADOOP_INSTALL |
168 | -export PATH=$JAVA_HOME/bin:$PATH:$HADOOP_INSTALL/bin:$HADOOP_INSTALL/sbin |
169 | -export YARN_HOME=$HADOOP_INSTALL |
170 | -export HADOOP_CONF_DIR=/etc/hadoop/conf.juju |
171 | |
172 | === added file 'hooks/__init__.py' |
173 | === added directory 'hooks/charmhelpers' |
174 | === added file 'hooks/charmhelpers/__init__.py' |
175 | === added directory 'hooks/charmhelpers/core' |
176 | === added file 'hooks/charmhelpers/core/__init__.py' |
177 | === added file 'hooks/charmhelpers/core/fstab.py' |
178 | --- hooks/charmhelpers/core/fstab.py 1970-01-01 00:00:00 +0000 |
179 | +++ hooks/charmhelpers/core/fstab.py 2014-06-26 20:05:25 +0000 |
180 | @@ -0,0 +1,114 @@ |
181 | +#!/usr/bin/env python |
182 | +# -*- coding: utf-8 -*- |
183 | + |
184 | +__author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>' |
185 | + |
186 | +import os |
187 | + |
188 | + |
189 | +class Fstab(file): |
190 | + """This class extends file in order to implement a file reader/writer |
191 | + for file `/etc/fstab` |
192 | + """ |
193 | + |
194 | + class Entry(object): |
195 | + """Entry class represents a non-comment line on the `/etc/fstab` file |
196 | + """ |
197 | + def __init__(self, device, mountpoint, filesystem, |
198 | + options, d=0, p=0): |
199 | + self.device = device |
200 | + self.mountpoint = mountpoint |
201 | + self.filesystem = filesystem |
202 | + |
203 | + if not options: |
204 | + options = "defaults" |
205 | + |
206 | + self.options = options |
207 | + self.d = d |
208 | + self.p = p |
209 | + |
210 | + def __eq__(self, o): |
211 | + return str(self) == str(o) |
212 | + |
213 | + def __str__(self): |
214 | + return "{} {} {} {} {} {}".format(self.device, |
215 | + self.mountpoint, |
216 | + self.filesystem, |
217 | + self.options, |
218 | + self.d, |
219 | + self.p) |
220 | + |
221 | + DEFAULT_PATH = os.path.join(os.path.sep, 'etc', 'fstab') |
222 | + |
223 | + def __init__(self, path=None): |
224 | + if path: |
225 | + self._path = path |
226 | + else: |
227 | + self._path = self.DEFAULT_PATH |
228 | + file.__init__(self, self._path, 'r+') |
229 | + |
230 | + def _hydrate_entry(self, line): |
231 | + return Fstab.Entry(*filter( |
232 | + lambda x: x not in ('', None), |
233 | + line.strip("\n").split(" "))) |
234 | + |
235 | + @property |
236 | + def entries(self): |
237 | + self.seek(0) |
238 | + for line in self.readlines(): |
239 | + try: |
240 | + if not line.startswith("#"): |
241 | + yield self._hydrate_entry(line) |
242 | + except ValueError: |
243 | + pass |
244 | + |
245 | + def get_entry_by_attr(self, attr, value): |
246 | + for entry in self.entries: |
247 | + e_attr = getattr(entry, attr) |
248 | + if e_attr == value: |
249 | + return entry |
250 | + return None |
251 | + |
252 | + def add_entry(self, entry): |
253 | + if self.get_entry_by_attr('device', entry.device): |
254 | + return False |
255 | + |
256 | + self.write(str(entry) + '\n') |
257 | + self.truncate() |
258 | + return entry |
259 | + |
260 | + def remove_entry(self, entry): |
261 | + self.seek(0) |
262 | + |
263 | + lines = self.readlines() |
264 | + |
265 | + found = False |
266 | + for index, line in enumerate(lines): |
267 | + if not line.startswith("#"): |
268 | + if self._hydrate_entry(line) == entry: |
269 | + found = True |
270 | + break |
271 | + |
272 | + if not found: |
273 | + return False |
274 | + |
275 | + lines.remove(line) |
276 | + |
277 | + self.seek(0) |
278 | + self.write(''.join(lines)) |
279 | + self.truncate() |
280 | + return True |
281 | + |
282 | + @classmethod |
283 | + def remove_by_mountpoint(cls, mountpoint, path=None): |
284 | + fstab = cls(path=path) |
285 | + entry = fstab.get_entry_by_attr('mountpoint', mountpoint) |
286 | + if entry: |
287 | + return fstab.remove_entry(entry) |
288 | + return False |
289 | + |
290 | + @classmethod |
291 | + def add(cls, device, mountpoint, filesystem, options=None, path=None): |
292 | + return cls(path=path).add_entry(Fstab.Entry(device, |
293 | + mountpoint, filesystem, |
294 | + options=options)) |
295 | |
296 | === added file 'hooks/charmhelpers/core/hookenv.py' |
297 | --- hooks/charmhelpers/core/hookenv.py 1970-01-01 00:00:00 +0000 |
298 | +++ hooks/charmhelpers/core/hookenv.py 2014-06-26 20:05:25 +0000 |
299 | @@ -0,0 +1,498 @@ |
300 | +"Interactions with the Juju environment" |
301 | +# Copyright 2013 Canonical Ltd. |
302 | +# |
303 | +# Authors: |
304 | +# Charm Helpers Developers <juju@lists.ubuntu.com> |
305 | + |
306 | +import os |
307 | +import json |
308 | +import yaml |
309 | +import subprocess |
310 | +import sys |
311 | +import UserDict |
312 | +from subprocess import CalledProcessError |
313 | + |
314 | +CRITICAL = "CRITICAL" |
315 | +ERROR = "ERROR" |
316 | +WARNING = "WARNING" |
317 | +INFO = "INFO" |
318 | +DEBUG = "DEBUG" |
319 | +MARKER = object() |
320 | + |
321 | +cache = {} |
322 | + |
323 | + |
324 | +def cached(func): |
325 | + """Cache return values for multiple executions of func + args |
326 | + |
327 | + For example: |
328 | + |
329 | + @cached |
330 | + def unit_get(attribute): |
331 | + pass |
332 | + |
333 | + unit_get('test') |
334 | + |
335 | + will cache the result of unit_get + 'test' for future calls. |
336 | + """ |
337 | + def wrapper(*args, **kwargs): |
338 | + global cache |
339 | + key = str((func, args, kwargs)) |
340 | + try: |
341 | + return cache[key] |
342 | + except KeyError: |
343 | + res = func(*args, **kwargs) |
344 | + cache[key] = res |
345 | + return res |
346 | + return wrapper |
347 | + |
348 | + |
349 | +def flush(key): |
350 | + """Flushes any entries from function cache where the |
351 | + key is found in the function+args """ |
352 | + flush_list = [] |
353 | + for item in cache: |
354 | + if key in item: |
355 | + flush_list.append(item) |
356 | + for item in flush_list: |
357 | + del cache[item] |
358 | + |
359 | + |
360 | +def log(message, level=None): |
361 | + """Write a message to the juju log""" |
362 | + command = ['juju-log'] |
363 | + if level: |
364 | + command += ['-l', level] |
365 | + command += [message] |
366 | + subprocess.call(command) |
367 | + |
368 | + |
369 | +class Serializable(UserDict.IterableUserDict): |
370 | + """Wrapper, an object that can be serialized to yaml or json""" |
371 | + |
372 | + def __init__(self, obj): |
373 | + # wrap the object |
374 | + UserDict.IterableUserDict.__init__(self) |
375 | + self.data = obj |
376 | + |
377 | + def __getattr__(self, attr): |
378 | + # See if this object has attribute. |
379 | + if attr in ("json", "yaml", "data"): |
380 | + return self.__dict__[attr] |
381 | + # Check for attribute in wrapped object. |
382 | + got = getattr(self.data, attr, MARKER) |
383 | + if got is not MARKER: |
384 | + return got |
385 | + # Proxy to the wrapped object via dict interface. |
386 | + try: |
387 | + return self.data[attr] |
388 | + except KeyError: |
389 | + raise AttributeError(attr) |
390 | + |
391 | + def __getstate__(self): |
392 | + # Pickle as a standard dictionary. |
393 | + return self.data |
394 | + |
395 | + def __setstate__(self, state): |
396 | + # Unpickle into our wrapper. |
397 | + self.data = state |
398 | + |
399 | + def json(self): |
400 | + """Serialize the object to json""" |
401 | + return json.dumps(self.data) |
402 | + |
403 | + def yaml(self): |
404 | + """Serialize the object to yaml""" |
405 | + return yaml.dump(self.data) |
406 | + |
407 | + |
408 | +def execution_environment(): |
409 | + """A convenient bundling of the current execution context""" |
410 | + context = {} |
411 | + context['conf'] = config() |
412 | + if relation_id(): |
413 | + context['reltype'] = relation_type() |
414 | + context['relid'] = relation_id() |
415 | + context['rel'] = relation_get() |
416 | + context['unit'] = local_unit() |
417 | + context['rels'] = relations() |
418 | + context['env'] = os.environ |
419 | + return context |
420 | + |
421 | + |
422 | +def in_relation_hook(): |
423 | + """Determine whether we're running in a relation hook""" |
424 | + return 'JUJU_RELATION' in os.environ |
425 | + |
426 | + |
427 | +def relation_type(): |
428 | + """The scope for the current relation hook""" |
429 | + return os.environ.get('JUJU_RELATION', None) |
430 | + |
431 | + |
432 | +def relation_id(): |
433 | + """The relation ID for the current relation hook""" |
434 | + return os.environ.get('JUJU_RELATION_ID', None) |
435 | + |
436 | + |
437 | +def local_unit(): |
438 | + """Local unit ID""" |
439 | + return os.environ['JUJU_UNIT_NAME'] |
440 | + |
441 | + |
442 | +def remote_unit(): |
443 | + """The remote unit for the current relation hook""" |
444 | + return os.environ['JUJU_REMOTE_UNIT'] |
445 | + |
446 | + |
447 | +def service_name(): |
448 | + """The name of the service group this unit belongs to""" |
449 | + return local_unit().split('/')[0] |
450 | + |
451 | + |
452 | +def hook_name(): |
453 | + """The name of the currently executing hook""" |
454 | + return os.path.basename(sys.argv[0]) |
455 | + |
456 | + |
457 | +class Config(dict): |
458 | + """A Juju charm config dictionary that can write itself to |
459 | + disk (as json) and track which values have changed since |
460 | + the previous hook invocation. |
461 | + |
462 | + Do not instantiate this object directly - instead call |
463 | + ``hookenv.config()`` |
464 | + |
465 | + Example usage:: |
466 | + |
467 | + >>> # inside a hook |
468 | + >>> from charmhelpers.core import hookenv |
469 | + >>> config = hookenv.config() |
470 | + >>> config['foo'] |
471 | + 'bar' |
472 | + >>> config['mykey'] = 'myval' |
473 | + >>> config.save() |
474 | + |
475 | + |
476 | + >>> # user runs `juju set mycharm foo=baz` |
477 | + >>> # now we're inside subsequent config-changed hook |
478 | + >>> config = hookenv.config() |
479 | + >>> config['foo'] |
480 | + 'baz' |
481 | + >>> # test to see if this val has changed since last hook |
482 | + >>> config.changed('foo') |
483 | + True |
484 | + >>> # what was the previous value? |
485 | + >>> config.previous('foo') |
486 | + 'bar' |
487 | + >>> # keys/values that we add are preserved across hooks |
488 | + >>> config['mykey'] |
489 | + 'myval' |
490 | + >>> # don't forget to save at the end of hook! |
491 | + >>> config.save() |
492 | + |
493 | + """ |
494 | + CONFIG_FILE_NAME = '.juju-persistent-config' |
495 | + |
496 | + def __init__(self, *args, **kw): |
497 | + super(Config, self).__init__(*args, **kw) |
498 | + self._prev_dict = None |
499 | + self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME) |
500 | + if os.path.exists(self.path): |
501 | + self.load_previous() |
502 | + |
503 | + def load_previous(self, path=None): |
504 | + """Load previous copy of config from disk so that current values |
505 | + can be compared to previous values. |
506 | + |
507 | + :param path: |
508 | + |
509 | + File path from which to load the previous config. If `None`, |
510 | + config is loaded from the default location. If `path` is |
511 | + specified, subsequent `save()` calls will write to the same |
512 | + path. |
513 | + |
514 | + """ |
515 | + self.path = path or self.path |
516 | + with open(self.path) as f: |
517 | + self._prev_dict = json.load(f) |
518 | + |
519 | + def changed(self, key): |
520 | + """Return true if the value for this key has changed since |
521 | + the last save. |
522 | + |
523 | + """ |
524 | + if self._prev_dict is None: |
525 | + return True |
526 | + return self.previous(key) != self.get(key) |
527 | + |
528 | + def previous(self, key): |
529 | + """Return previous value for this key, or None if there |
530 | + is no "previous" value. |
531 | + |
532 | + """ |
533 | + if self._prev_dict: |
534 | + return self._prev_dict.get(key) |
535 | + return None |
536 | + |
537 | + def save(self): |
538 | + """Save this config to disk. |
539 | + |
540 | + Preserves items in _prev_dict that do not exist in self. |
541 | + |
542 | + """ |
543 | + if self._prev_dict: |
544 | + for k, v in self._prev_dict.iteritems(): |
545 | + if k not in self: |
546 | + self[k] = v |
547 | + with open(self.path, 'w') as f: |
548 | + json.dump(self, f) |
549 | + |
550 | + |
551 | +@cached |
552 | +def config(scope=None): |
553 | + """Juju charm configuration""" |
554 | + config_cmd_line = ['config-get'] |
555 | + if scope is not None: |
556 | + config_cmd_line.append(scope) |
557 | + config_cmd_line.append('--format=json') |
558 | + try: |
559 | + config_data = json.loads(subprocess.check_output(config_cmd_line)) |
560 | + if scope is not None: |
561 | + return config_data |
562 | + return Config(config_data) |
563 | + except ValueError: |
564 | + return None |
565 | + |
566 | + |
567 | +@cached |
568 | +def relation_get(attribute=None, unit=None, rid=None): |
569 | + """Get relation information""" |
570 | + _args = ['relation-get', '--format=json'] |
571 | + if rid: |
572 | + _args.append('-r') |
573 | + _args.append(rid) |
574 | + _args.append(attribute or '-') |
575 | + if unit: |
576 | + _args.append(unit) |
577 | + try: |
578 | + return json.loads(subprocess.check_output(_args)) |
579 | + except ValueError: |
580 | + return None |
581 | + except CalledProcessError, e: |
582 | + if e.returncode == 2: |
583 | + return None |
584 | + raise |
585 | + |
586 | + |
587 | +def relation_set(relation_id=None, relation_settings={}, **kwargs): |
588 | + """Set relation information for the current unit""" |
589 | + relation_cmd_line = ['relation-set'] |
590 | + if relation_id is not None: |
591 | + relation_cmd_line.extend(('-r', relation_id)) |
592 | + for k, v in (relation_settings.items() + kwargs.items()): |
593 | + if v is None: |
594 | + relation_cmd_line.append('{}='.format(k)) |
595 | + else: |
596 | + relation_cmd_line.append('{}={}'.format(k, v)) |
597 | + subprocess.check_call(relation_cmd_line) |
598 | + # Flush cache of any relation-gets for local unit |
599 | + flush(local_unit()) |
600 | + |
601 | + |
602 | +@cached |
603 | +def relation_ids(reltype=None): |
604 | + """A list of relation_ids""" |
605 | + reltype = reltype or relation_type() |
606 | + relid_cmd_line = ['relation-ids', '--format=json'] |
607 | + if reltype is not None: |
608 | + relid_cmd_line.append(reltype) |
609 | + return json.loads(subprocess.check_output(relid_cmd_line)) or [] |
610 | + return [] |
611 | + |
612 | + |
613 | +@cached |
614 | +def related_units(relid=None): |
615 | + """A list of related units""" |
616 | + relid = relid or relation_id() |
617 | + units_cmd_line = ['relation-list', '--format=json'] |
618 | + if relid is not None: |
619 | + units_cmd_line.extend(('-r', relid)) |
620 | + return json.loads(subprocess.check_output(units_cmd_line)) or [] |
621 | + |
622 | + |
623 | +@cached |
624 | +def relation_for_unit(unit=None, rid=None): |
625 | + """Get the json representation of a unit's relation""" |
626 | + unit = unit or remote_unit() |
627 | + relation = relation_get(unit=unit, rid=rid) |
628 | + for key in relation: |
629 | + if key.endswith('-list'): |
630 | + relation[key] = relation[key].split() |
631 | + relation['__unit__'] = unit |
632 | + return relation |
633 | + |
634 | + |
635 | +@cached |
636 | +def relations_for_id(relid=None): |
637 | + """Get relations of a specific relation ID""" |
638 | + relation_data = [] |
639 | + relid = relid or relation_ids() |
640 | + for unit in related_units(relid): |
641 | + unit_data = relation_for_unit(unit, relid) |
642 | + unit_data['__relid__'] = relid |
643 | + relation_data.append(unit_data) |
644 | + return relation_data |
645 | + |
646 | + |
647 | +@cached |
648 | +def relations_of_type(reltype=None): |
649 | + """Get relations of a specific type""" |
650 | + relation_data = [] |
651 | + reltype = reltype or relation_type() |
652 | + for relid in relation_ids(reltype): |
653 | + for relation in relations_for_id(relid): |
654 | + relation['__relid__'] = relid |
655 | + relation_data.append(relation) |
656 | + return relation_data |
657 | + |
658 | + |
659 | +@cached |
660 | +def relation_types(): |
661 | + """Get a list of relation types supported by this charm""" |
662 | + charmdir = os.environ.get('CHARM_DIR', '') |
663 | + mdf = open(os.path.join(charmdir, 'metadata.yaml')) |
664 | + md = yaml.safe_load(mdf) |
665 | + rel_types = [] |
666 | + for key in ('provides', 'requires', 'peers'): |
667 | + section = md.get(key) |
668 | + if section: |
669 | + rel_types.extend(section.keys()) |
670 | + mdf.close() |
671 | + return rel_types |
672 | + |
673 | + |
674 | +@cached |
675 | +def relations(): |
676 | + """Get a nested dictionary of relation data for all related units""" |
677 | + rels = {} |
678 | + for reltype in relation_types(): |
679 | + relids = {} |
680 | + for relid in relation_ids(reltype): |
681 | + units = {local_unit(): relation_get(unit=local_unit(), rid=relid)} |
682 | + for unit in related_units(relid): |
683 | + reldata = relation_get(unit=unit, rid=relid) |
684 | + units[unit] = reldata |
685 | + relids[relid] = units |
686 | + rels[reltype] = relids |
687 | + return rels |
688 | + |
689 | + |
690 | +@cached |
691 | +def is_relation_made(relation, keys='private-address'): |
692 | + ''' |
693 | + Determine whether a relation is established by checking for |
694 | + presence of key(s). If a list of keys is provided, they |
695 | + must all be present for the relation to be identified as made |
696 | + ''' |
697 | + if isinstance(keys, str): |
698 | + keys = [keys] |
699 | + for r_id in relation_ids(relation): |
700 | + for unit in related_units(r_id): |
701 | + context = {} |
702 | + for k in keys: |
703 | + context[k] = relation_get(k, rid=r_id, |
704 | + unit=unit) |
705 | + if None not in context.values(): |
706 | + return True |
707 | + return False |
708 | + |
709 | + |
710 | +def open_port(port, protocol="TCP"): |
711 | + """Open a service network port""" |
712 | + _args = ['open-port'] |
713 | + _args.append('{}/{}'.format(port, protocol)) |
714 | + subprocess.check_call(_args) |
715 | + |
716 | + |
717 | +def close_port(port, protocol="TCP"): |
718 | + """Close a service network port""" |
719 | + _args = ['close-port'] |
720 | + _args.append('{}/{}'.format(port, protocol)) |
721 | + subprocess.check_call(_args) |
722 | + |
723 | + |
724 | +@cached |
725 | +def unit_get(attribute): |
726 | + """Get the unit ID for the remote unit""" |
727 | + _args = ['unit-get', '--format=json', attribute] |
728 | + try: |
729 | + return json.loads(subprocess.check_output(_args)) |
730 | + except ValueError: |
731 | + return None |
732 | + |
733 | + |
734 | +def unit_private_ip(): |
735 | + """Get this unit's private IP address""" |
736 | + return unit_get('private-address') |
737 | + |
738 | + |
739 | +class UnregisteredHookError(Exception): |
740 | + """Raised when an undefined hook is called""" |
741 | + pass |
742 | + |
743 | + |
744 | +class Hooks(object): |
745 | + """A convenient handler for hook functions. |
746 | + |
747 | + Example: |
748 | + hooks = Hooks() |
749 | + |
750 | + # register a hook, taking its name from the function name |
751 | + @hooks.hook() |
752 | + def install(): |
753 | + ... |
754 | + |
755 | + # register a hook, providing a custom hook name |
756 | + @hooks.hook("config-changed") |
757 | + def config_changed(): |
758 | + ... |
759 | + |
760 | + if __name__ == "__main__": |
761 | + # execute a hook based on the name the program is called by |
762 | + hooks.execute(sys.argv) |
763 | + """ |
764 | + |
765 | + def __init__(self): |
766 | + super(Hooks, self).__init__() |
767 | + self._hooks = {} |
768 | + |
769 | + def register(self, name, function): |
770 | + """Register a hook""" |
771 | + self._hooks[name] = function |
772 | + |
773 | + def execute(self, args): |
774 | + """Execute a registered hook based on args[0]""" |
775 | + hook_name = os.path.basename(args[0]) |
776 | + if hook_name in self._hooks: |
777 | + self._hooks[hook_name]() |
778 | + else: |
779 | + raise UnregisteredHookError(hook_name) |
780 | + |
781 | + def hook(self, *hook_names): |
782 | + """Decorator, registering them as hooks""" |
783 | + def wrapper(decorated): |
784 | + for hook_name in hook_names: |
785 | + self.register(hook_name, decorated) |
786 | + else: |
787 | + self.register(decorated.__name__, decorated) |
788 | + if '_' in decorated.__name__: |
789 | + self.register( |
790 | + decorated.__name__.replace('_', '-'), decorated) |
791 | + return decorated |
792 | + return wrapper |
793 | + |
794 | + |
795 | +def charm_dir(): |
796 | + """Return the root directory of the current charm""" |
797 | + return os.environ.get('CHARM_DIR') |
798 | |
799 | === added file 'hooks/charmhelpers/core/host.py' |
800 | --- hooks/charmhelpers/core/host.py 1970-01-01 00:00:00 +0000 |
801 | +++ hooks/charmhelpers/core/host.py 2014-06-26 20:05:25 +0000 |
802 | @@ -0,0 +1,325 @@ |
803 | +"""Tools for working with the host system""" |
804 | +# Copyright 2012 Canonical Ltd. |
805 | +# |
806 | +# Authors: |
807 | +# Nick Moffitt <nick.moffitt@canonical.com> |
808 | +# Matthew Wedgwood <matthew.wedgwood@canonical.com> |
809 | + |
810 | +import os |
811 | +import pwd |
812 | +import grp |
813 | +import random |
814 | +import string |
815 | +import subprocess |
816 | +import hashlib |
817 | +import apt_pkg |
818 | + |
819 | +from collections import OrderedDict |
820 | + |
821 | +from hookenv import log |
822 | +from fstab import Fstab |
823 | + |
824 | + |
825 | +def service_start(service_name): |
826 | + """Start a system service""" |
827 | + return service('start', service_name) |
828 | + |
829 | + |
830 | +def service_stop(service_name): |
831 | + """Stop a system service""" |
832 | + return service('stop', service_name) |
833 | + |
834 | + |
835 | +def service_restart(service_name): |
836 | + """Restart a system service""" |
837 | + return service('restart', service_name) |
838 | + |
839 | + |
840 | +def service_reload(service_name, restart_on_failure=False): |
841 | + """Reload a system service, optionally falling back to restart if |
842 | + reload fails""" |
843 | + service_result = service('reload', service_name) |
844 | + if not service_result and restart_on_failure: |
845 | + service_result = service('restart', service_name) |
846 | + return service_result |
847 | + |
848 | + |
849 | +def service(action, service_name): |
850 | + """Control a system service""" |
851 | + cmd = ['service', service_name, action] |
852 | + return subprocess.call(cmd) == 0 |
853 | + |
854 | + |
855 | +def service_running(service): |
856 | + """Determine whether a system service is running""" |
857 | + try: |
858 | + output = subprocess.check_output(['service', service, 'status']) |
859 | + except subprocess.CalledProcessError: |
860 | + return False |
861 | + else: |
862 | + if ("start/running" in output or "is running" in output): |
863 | + return True |
864 | + else: |
865 | + return False |
866 | + |
867 | + |
868 | +def adduser(username, password=None, shell='/bin/bash', system_user=False): |
869 | + """Add a user to the system""" |
870 | + try: |
871 | + user_info = pwd.getpwnam(username) |
872 | + log('user {0} already exists!'.format(username)) |
873 | + except KeyError: |
874 | + log('creating user {0}'.format(username)) |
875 | + cmd = ['useradd'] |
876 | + if system_user or password is None: |
877 | + cmd.append('--system') |
878 | + else: |
879 | + cmd.extend([ |
880 | + '--create-home', |
881 | + '--shell', shell, |
882 | + '--password', password, |
883 | + ]) |
884 | + cmd.append(username) |
885 | + subprocess.check_call(cmd) |
886 | + user_info = pwd.getpwnam(username) |
887 | + return user_info |
888 | + |
889 | + |
890 | +def add_user_to_group(username, group): |
891 | + """Add a user to a group""" |
892 | + cmd = [ |
893 | + 'gpasswd', '-a', |
894 | + username, |
895 | + group |
896 | + ] |
897 | + log("Adding user {} to group {}".format(username, group)) |
898 | + subprocess.check_call(cmd) |
899 | + |
900 | + |
def rsync(from_path, to_path, flags='-r', options=None):
    """Replicate *from_path* to *to_path* with rsync; returns its output."""
    # Any falsy options value (None or empty list) gets the defaults.
    options = options or ['--delete', '--executability']
    cmd = ['/usr/bin/rsync', flags] + list(options) + [from_path, to_path]
    log(" ".join(cmd))
    return subprocess.check_output(cmd).strip()
910 | + |
911 | + |
def symlink(source, destination):
    """Force-create a symbolic link (replaces an existing link)."""
    log("Symlinking {} as {}".format(source, destination))
    subprocess.check_call(['ln', '-sf', source, destination])
922 | + |
923 | + |
def mkdir(path, owner='root', group='root', perms=0o555, force=False):
    """Create a directory owned by owner:group with mode *perms*.

    With ``force=True``, an existing non-directory file at *path* is removed
    and replaced by a directory.
    """
    log("Making dir {} {}:{} {:o}".format(path, owner, group,
                                          perms))
    uid = pwd.getpwnam(owner).pw_uid
    gid = grp.getgrnam(group).gr_gid
    realpath = os.path.abspath(path)
    if os.path.exists(realpath):
        if force and not os.path.isdir(realpath):
            log("Removing non-directory file {} prior to mkdir()".format(path))
            os.unlink(realpath)
            # Bug fix: the directory was previously never created after
            # removing the conflicting file, so the chown() below failed.
            os.makedirs(realpath, perms)
    else:
        os.makedirs(realpath, perms)
    os.chown(realpath, uid, gid)
938 | + |
939 | + |
def write_file(path, content, owner='root', group='root', perms=0o444):
    """Create or overwrite *path* with *content*, setting owner and mode."""
    log("Writing file {} {}:{} {:o}".format(path, owner, group, perms))
    owner_uid = pwd.getpwnam(owner).pw_uid
    group_gid = grp.getgrnam(group).gr_gid
    with open(path, 'w') as target:
        # Set ownership/mode on the open descriptor before writing.
        os.fchown(target.fileno(), owner_uid, group_gid)
        os.fchmod(target.fileno(), perms)
        target.write(content)
949 | + |
950 | + |
def fstab_remove(mp):
    """Drop the /etc/fstab entry mounted at *mp* (delegates to Fstab)."""
    return Fstab.remove_by_mountpoint(mp)
955 | + |
956 | + |
def fstab_add(dev, mp, fs, options=None):
    """Append an /etc/fstab entry for *dev* at *mp* (delegates to Fstab)."""
    return Fstab.add(dev, mp, fs, options=options)
961 | + |
962 | + |
def mount(device, mountpoint, options=None, persist=False, filesystem="ext3"):
    """Mount a filesystem at a particular mountpoint.

    Returns True on success, False if the mount command failed. With
    ``persist=True`` the entry is also recorded in /etc/fstab.
    """
    cmd_args = ['mount']
    if options is not None:
        cmd_args.extend(['-o', options])
    cmd_args.extend([device, mountpoint])
    try:
        subprocess.check_output(cmd_args)
    except subprocess.CalledProcessError as e:
        # Fixed Python-2-only ``except X, e`` syntax (invalid on Python 3).
        log('Error mounting {} at {}\n{}'.format(device, mountpoint, e.output))
        return False

    if persist:
        return fstab_add(device, mountpoint, filesystem, options=options)
    return True
978 | + |
979 | + |
def umount(mountpoint, persist=False):
    """Unmount a filesystem.

    Returns True on success, False if umount failed. With ``persist=True``
    the /etc/fstab entry is removed as well.
    """
    cmd_args = ['umount', mountpoint]
    try:
        subprocess.check_output(cmd_args)
    except subprocess.CalledProcessError as e:
        # Fixed Python-2-only ``except X, e`` syntax (invalid on Python 3).
        log('Error unmounting {}\n{}'.format(mountpoint, e.output))
        return False

    if persist:
        return fstab_remove(mountpoint)
    return True
992 | + |
993 | + |
def mounts():
    """Get a list of all mounted volumes as [[mountpoint,device],[...]]"""
    system_mounts = []
    with open('/proc/mounts') as f:
        for line in f:
            fields = line.strip().split()
            # /proc/mounts lists "device mountpoint ..."; we swap the order.
            system_mounts.append([fields[1], fields[0]])
    return system_mounts
1001 | + |
1002 | + |
def file_hash(path):
    """Return the md5 hex digest of the contents of *path*, or None if the
    file does not exist."""
    if not os.path.exists(path):
        return None
    h = hashlib.md5()
    # Read in binary mode: hashing must see the raw bytes; text mode breaks
    # on Python 3 and can mangle newlines on some platforms.
    with open(path, 'rb') as source:
        h.update(source.read())
    return h.hexdigest()
1012 | + |
1013 | + |
def restart_on_change(restart_map, stopstart=False):
    """Decorator: restart services whose config files change.

    Example:

        @restart_on_change({
            '/etc/ceph/ceph.conf': [ 'cinder-api', 'cinder-volume' ]
        })
        def ceph_client_changed():
            ...

    After the wrapped function runs, any file in *restart_map* whose hash
    changed triggers a restart (or stop/start when *stopstart* is True) of
    its associated services, each at most once.
    """
    def wrap(f):
        def wrapped_f(*args):
            before = dict((path, file_hash(path)) for path in restart_map)
            f(*args)
            pending = []
            for path in restart_map:
                if before[path] != file_hash(path):
                    pending += restart_map[path]
            # De-duplicate while preserving first-seen order.
            unique_services = list(OrderedDict.fromkeys(pending))
            actions = ['stop', 'start'] if stopstart else ['restart']
            for action in actions:
                for service_name in unique_services:
                    service(action, service_name)
        return wrapped_f
    return wrap
1049 | + |
1050 | + |
def lsb_release():
    """Return the key=value pairs from /etc/lsb-release as a dict."""
    release = {}
    with open('/etc/lsb-release', 'r') as lsb:
        for entry in lsb:
            key, value = entry.split('=')
            release[key.strip()] = value.strip()
    return release
1059 | + |
1060 | + |
def pwgen(length=None):
    """Generate a random password.

    Easily-confused characters (l/1, 0/O, vowels that form words) are
    excluded. NOTE: this uses the non-cryptographic ``random`` module; do
    not rely on it for high-security secrets.
    """
    if length is None:
        length = random.choice(range(35, 45))
    # ascii_letters replaces the Python-2-only, locale-dependent
    # string.letters, giving identical results in the default C locale.
    alphanumeric_chars = [
        l for l in (string.ascii_letters + string.digits)
        if l not in 'l0QD1vAEIOUaeiou']
    random_chars = [
        random.choice(alphanumeric_chars) for _ in range(length)]
    return(''.join(random_chars))
1071 | + |
1072 | + |
def list_nics(nic_type):
    '''Return a list of nics of given type(s)'''
    if isinstance(nic_type, basestring):
        nic_types = [nic_type]
    else:
        nic_types = nic_type
    found = []
    for prefix in nic_types:
        cmd = ['ip', 'addr', 'show', 'label', prefix + '*']
        for line in subprocess.check_output(cmd).split('\n'):
            if not line:
                continue
            name = line.split()[1]
            if name.startswith(prefix):
                found.append(name.replace(":", ""))
    return found
1088 | + |
1089 | + |
def set_nic_mtu(nic, mtu):
    '''Set MTU on a network interface'''
    subprocess.check_call(['ip', 'link', 'set', nic, 'mtu', mtu])
1094 | + |
1095 | + |
def get_nic_mtu(nic):
    """Return the MTU reported by ``ip addr show`` for *nic* ('' if absent)."""
    output = subprocess.check_output(['ip', 'addr', 'show', nic]).split('\n')
    mtu = ""
    for line in output:
        fields = line.split()
        if 'mtu' in fields:
            mtu = fields[fields.index("mtu") + 1]
    return mtu
1105 | + |
1106 | + |
def get_nic_hwaddr(nic):
    """Return the link-layer (MAC) address ``ip`` reports for *nic*,
    or '' when none is listed."""
    output = subprocess.check_output(['ip', '-o', '-0', 'addr', 'show', nic])
    fields = output.split()
    if 'link/ether' not in fields:
        return ""
    return fields[fields.index('link/ether') + 1]
1115 | + |
1116 | + |
def cmp_pkgrevno(package, revno, pkgcache=None):
    '''Compare *revno* against the installed version of *package*.

    Returns 1 when the installed revno is greater than the supplied arg,
    0 when equal, and -1 when less. A fresh apt cache is built unless
    *pkgcache* is provided.
    '''
    if not pkgcache:
        apt_pkg.init()
        pkgcache = apt_pkg.Cache()
    installed = pkgcache[package].current_ver.ver_str
    return apt_pkg.version_compare(installed, revno)
1128 | |
1129 | === added directory 'hooks/charmhelpers/fetch' |
1130 | === added file 'hooks/charmhelpers/fetch/__init__.py' |
1131 | --- hooks/charmhelpers/fetch/__init__.py 1970-01-01 00:00:00 +0000 |
1132 | +++ hooks/charmhelpers/fetch/__init__.py 2014-06-26 20:05:25 +0000 |
1133 | @@ -0,0 +1,349 @@ |
1134 | +import importlib |
1135 | +import time |
1136 | +from yaml import safe_load |
1137 | +from charmhelpers.core.host import ( |
1138 | + lsb_release |
1139 | +) |
1140 | +from urlparse import ( |
1141 | + urlparse, |
1142 | + urlunparse, |
1143 | +) |
1144 | +import subprocess |
1145 | +from charmhelpers.core.hookenv import ( |
1146 | + config, |
1147 | + log, |
1148 | +) |
1149 | +import apt_pkg |
1150 | +import os |
1151 | + |
1152 | + |
1153 | +CLOUD_ARCHIVE = """# Ubuntu Cloud Archive |
1154 | +deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main |
1155 | +""" |
1156 | +PROPOSED_POCKET = """# Proposed |
1157 | +deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted |
1158 | +""" |
1159 | +CLOUD_ARCHIVE_POCKETS = { |
1160 | + # Folsom |
1161 | + 'folsom': 'precise-updates/folsom', |
1162 | + 'precise-folsom': 'precise-updates/folsom', |
1163 | + 'precise-folsom/updates': 'precise-updates/folsom', |
1164 | + 'precise-updates/folsom': 'precise-updates/folsom', |
1165 | + 'folsom/proposed': 'precise-proposed/folsom', |
1166 | + 'precise-folsom/proposed': 'precise-proposed/folsom', |
1167 | + 'precise-proposed/folsom': 'precise-proposed/folsom', |
1168 | + # Grizzly |
1169 | + 'grizzly': 'precise-updates/grizzly', |
1170 | + 'precise-grizzly': 'precise-updates/grizzly', |
1171 | + 'precise-grizzly/updates': 'precise-updates/grizzly', |
1172 | + 'precise-updates/grizzly': 'precise-updates/grizzly', |
1173 | + 'grizzly/proposed': 'precise-proposed/grizzly', |
1174 | + 'precise-grizzly/proposed': 'precise-proposed/grizzly', |
1175 | + 'precise-proposed/grizzly': 'precise-proposed/grizzly', |
1176 | + # Havana |
1177 | + 'havana': 'precise-updates/havana', |
1178 | + 'precise-havana': 'precise-updates/havana', |
1179 | + 'precise-havana/updates': 'precise-updates/havana', |
1180 | + 'precise-updates/havana': 'precise-updates/havana', |
1181 | + 'havana/proposed': 'precise-proposed/havana', |
1182 | + 'precise-havana/proposed': 'precise-proposed/havana', |
1183 | + 'precise-proposed/havana': 'precise-proposed/havana', |
1184 | + # Icehouse |
1185 | + 'icehouse': 'precise-updates/icehouse', |
1186 | + 'precise-icehouse': 'precise-updates/icehouse', |
1187 | + 'precise-icehouse/updates': 'precise-updates/icehouse', |
1188 | + 'precise-updates/icehouse': 'precise-updates/icehouse', |
1189 | + 'icehouse/proposed': 'precise-proposed/icehouse', |
1190 | + 'precise-icehouse/proposed': 'precise-proposed/icehouse', |
1191 | + 'precise-proposed/icehouse': 'precise-proposed/icehouse', |
1192 | + # Juno |
1193 | + 'juno': 'trusty-updates/juno', |
1194 | + 'trusty-juno': 'trusty-updates/juno', |
1195 | + 'trusty-juno/updates': 'trusty-updates/juno', |
1196 | + 'trusty-updates/juno': 'trusty-updates/juno', |
1197 | + 'juno/proposed': 'trusty-proposed/juno', |
1198 | + 'juno/proposed': 'trusty-proposed/juno', |
1199 | + 'trusty-juno/proposed': 'trusty-proposed/juno', |
1200 | + 'trusty-proposed/juno': 'trusty-proposed/juno', |
1201 | +} |
1202 | + |
1203 | +# The order of this list is very important. Handlers should be listed in from |
1204 | +# least- to most-specific URL matching. |
1205 | +FETCH_HANDLERS = ( |
1206 | + 'charmhelpers.fetch.archiveurl.ArchiveUrlFetchHandler', |
1207 | + 'charmhelpers.fetch.bzrurl.BzrUrlFetchHandler', |
1208 | +) |
1209 | + |
1210 | +APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT. |
1211 | +APT_NO_LOCK_RETRY_DELAY = 10 # Wait 10 seconds between apt lock checks. |
1212 | +APT_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times. |
1213 | + |
1214 | + |
class SourceConfigError(Exception):
    """Raised for invalid or inconsistent charm install-source config."""
    pass


class UnhandledSource(Exception):
    """Raised when no fetch handler can process a given source URL."""
    pass


class AptLockError(Exception):
    # NOTE(review): defined for callers; not raised anywhere in this module.
    pass
1225 | + |
1226 | + |
class BaseFetchHandler(object):

    """Base class for FetchHandler implementations in fetch plugins"""

    def can_handle(self, source):
        """Return True when *source* is supported; otherwise a string
        explaining why it is not."""
        return "Wrong source type"

    def install(self, source):
        """Download and unpack *source*, returning the path to the unpacked
        files; raises UnhandledSource when the source is unsupported."""
        raise UnhandledSource("Wrong source type {}".format(source))

    def parse_url(self, url):
        """Split *url* into its urlparse components."""
        return urlparse(url)

    def base_url(self, url):
        """Return url without querystring or fragment"""
        scheme, netloc, path, params = tuple(self.parse_url(url))[:4]
        # Blank out the query and fragment components before reassembly.
        return urlunparse((scheme, netloc, path, params, '', ''))
1249 | + |
1250 | + |
def filter_installed_packages(packages):
    """Return the subset of *packages* that still require installation."""
    apt_pkg.init()

    # Tell apt to build an in-memory cache to prevent race conditions (if
    # another process is already building the cache).
    apt_pkg.config.set("Dir::Cache::pkgcache", "")

    cache = apt_pkg.Cache()
    _pkgs = []
    for package in packages:
        try:
            p = cache[package]
        except KeyError:
            log('Package {} has no installation candidate.'.format(package),
                level='WARNING')
            _pkgs.append(package)
        else:
            # Replaced the obscure ``p.current_ver or _pkgs.append(...)``
            # side-effect expression with an explicit check: a package with
            # no current version is not installed.
            if not p.current_ver:
                _pkgs.append(package)
    return _pkgs
1270 | + |
1271 | + |
def apt_install(packages, options=None, fatal=False):
    """Install one or more packages"""
    if options is None:
        options = ['--option=Dpkg::Options::=--force-confold']

    # Accept either a single package name or an iterable of names.
    if isinstance(packages, basestring):
        pkg_args = [packages]
    else:
        pkg_args = list(packages)
    cmd = ['apt-get', '--assume-yes'] + list(options) + ['install'] + pkg_args
    log("Installing {} with options: {}".format(packages,
                                                options))
    _run_apt_command(cmd, fatal)
1287 | + |
1288 | + |
def apt_upgrade(options=None, fatal=False, dist=False):
    """Upgrade all packages (``dist-upgrade`` when *dist* is True)."""
    if options is None:
        options = ['--option=Dpkg::Options::=--force-confold']

    cmd = ['apt-get', '--assume-yes']
    cmd.extend(options)
    cmd.append('dist-upgrade' if dist else 'upgrade')
    log("Upgrading with options: {}".format(options))
    _run_apt_command(cmd, fatal)
1302 | + |
1303 | + |
def apt_update(fatal=False):
    """Update local apt cache"""
    _run_apt_command(['apt-get', 'update'], fatal)
1308 | + |
1309 | + |
def apt_purge(packages, fatal=False):
    """Purge one or more packages"""
    if isinstance(packages, basestring):
        pkg_args = [packages]
    else:
        pkg_args = list(packages)
    cmd = ['apt-get', '--assume-yes', 'purge'] + pkg_args
    log("Purging {}".format(packages))
    _run_apt_command(cmd, fatal)
1319 | + |
1320 | + |
def apt_hold(packages, fatal=False):
    """Hold one or more packages"""
    if isinstance(packages, basestring):
        pkg_args = [packages]
    else:
        pkg_args = list(packages)
    cmd = ['apt-mark', 'hold'] + pkg_args
    log("Holding {}".format(packages))

    # Unlike the apt-get wrappers, holds run directly; *fatal* decides
    # whether a non-zero exit raises.
    if fatal:
        subprocess.check_call(cmd)
    else:
        subprocess.call(cmd)
1334 | + |
1335 | + |
def add_source(source, key=None):
    """Register an apt source and optionally import its signing key.

    Supported forms: ppa:/http/deb/cloud-archive (via add-apt-repository),
    'cloud:<pocket>' (Ubuntu Cloud Archive), and 'proposed'. A *key* is
    fetched from the Ubuntu keyserver when given.
    """
    if source is None:
        log('Source is not present. Skipping')
        return

    if (source.startswith('ppa:') or
        source.startswith('http') or
        source.startswith('deb ') or
        source.startswith('cloud-archive:')):
        subprocess.check_call(['add-apt-repository', '--yes', source])
    elif source.startswith('cloud:'):
        # Cloud archive needs its keyring installed before the pocket is
        # written to sources.list.d.
        apt_install(filter_installed_packages(['ubuntu-cloud-keyring']),
                    fatal=True)
        pocket = source.split(':')[-1]
        if pocket not in CLOUD_ARCHIVE_POCKETS:
            raise SourceConfigError(
                'Unsupported cloud: source option %s' %
                pocket)
        actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket]
        with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt:
            apt.write(CLOUD_ARCHIVE.format(actual_pocket))
    elif source == 'proposed':
        # Enable the -proposed pocket for the running release.
        release = lsb_release()['DISTRIB_CODENAME']
        with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt:
            apt.write(PROPOSED_POCKET.format(release))
    if key:
        subprocess.check_call(['apt-key', 'adv', '--keyserver',
                               'hkp://keyserver.ubuntu.com:80', '--recv',
                               key])
1365 | + |
1366 | + |
def configure_sources(update=False,
                      sources_var='install_sources',
                      keys_var='install_keys'):
    """
    Configure multiple sources from charm configuration

    Example config:
        install_sources:
        - "ppa:foo"
        - "http://example.com/repo precise main"
    install_keys:
        - null
        - "a1b2c3d4"

    Note that 'null' (a.k.a. None) should not be quoted.
    """
    sources = safe_load(config(sources_var))
    keys = config(keys_var)
    if keys is not None:
        keys = safe_load(keys)
    if isinstance(sources, basestring) and (
            keys is None or isinstance(keys, basestring)):
        add_source(sources, keys)
    else:
        if keys is None:
            # Bug fix: a list of sources with no configured keys previously
            # crashed on len(None); treat it as one unset key per source.
            keys = [None] * len(sources)
        if not len(sources) == len(keys):
            msg = 'Install sources and keys lists are different lengths'
            raise SourceConfigError(msg)
        for source, key in zip(sources, keys):
            add_source(source, key)
    if update:
        apt_update(fatal=True)
1398 | + |
1399 | + |
def install_remote(source):
    """
    Install a file tree from a remote source

    The specified source should be a url of the form:
        scheme://[host]/path[#[option=value][&...]]

    Schemes supported are based on this modules submodules
    Options supported are submodule-specific"""
    # We ONLY compare to True because can_handle may return a string
    # explaining why it cannot handle a given source.
    capable = [h for h in plugins() if h.can_handle(source) is True]
    installed_to = None
    for handler in capable:
        try:
            installed_to = handler.install(source)
        except UnhandledSource:
            pass
    if not installed_to:
        raise UnhandledSource("No handler found for source {}".format(source))
    return installed_to
1421 | + |
1422 | + |
def install_from_config(config_var_name):
    """Install the remote source named by *config_var_name* in charm config."""
    source = config()[config_var_name]
    return install_remote(source)
1427 | + |
1428 | + |
def plugins(fetch_handlers=None):
    """Instantiate each importable handler class named in *fetch_handlers*.

    Defaults to FETCH_HANDLERS. Handlers that fail to import are skipped so
    optional plugins can be omitted from an installation.
    """
    if not fetch_handlers:
        fetch_handlers = FETCH_HANDLERS
    plugin_list = []
    for handler_name in fetch_handlers:
        package, classname = handler_name.rsplit('.', 1)
        try:
            module = importlib.import_module(package)
            plugin_list.append(getattr(module, classname)())
        except (ImportError, AttributeError):
            log("FetchHandler {} not found, skipping plugin".format(
                handler_name))
    return plugin_list
1446 | + |
1447 | + |
1448 | +def _run_apt_command(cmd, fatal=False): |
1449 | + """ |
1450 | + Run an APT command, checking output and retrying if the fatal flag is set |
1451 | + to True. |
1452 | + |
1453 | + :param: cmd: str: The apt command to run. |
1454 | + :param: fatal: bool: Whether the command's output should be checked and |
1455 | + retried. |
1456 | + """ |
1457 | + env = os.environ.copy() |
1458 | + |
1459 | + if 'DEBIAN_FRONTEND' not in env: |
1460 | + env['DEBIAN_FRONTEND'] = 'noninteractive' |
1461 | + |
1462 | + if fatal: |
1463 | + retry_count = 0 |
1464 | + result = None |
1465 | + |
1466 | + # If the command is considered "fatal", we need to retry if the apt |
1467 | + # lock was not acquired. |
1468 | + |
1469 | + while result is None or result == APT_NO_LOCK: |
1470 | + try: |
1471 | + result = subprocess.check_call(cmd, env=env) |
1472 | + except subprocess.CalledProcessError, e: |
1473 | + retry_count = retry_count + 1 |
1474 | + if retry_count > APT_NO_LOCK_RETRY_COUNT: |
1475 | + raise |
1476 | + result = e.returncode |
1477 | + log("Couldn't acquire DPKG lock. Will retry in {} seconds." |
1478 | + "".format(APT_NO_LOCK_RETRY_DELAY)) |
1479 | + time.sleep(APT_NO_LOCK_RETRY_DELAY) |
1480 | + |
1481 | + else: |
1482 | + subprocess.call(cmd, env=env) |
1483 | |
1484 | === added file 'hooks/charmhelpers/fetch/archiveurl.py' |
1485 | --- hooks/charmhelpers/fetch/archiveurl.py 1970-01-01 00:00:00 +0000 |
1486 | +++ hooks/charmhelpers/fetch/archiveurl.py 2014-06-26 20:05:25 +0000 |
1487 | @@ -0,0 +1,63 @@ |
1488 | +import os |
1489 | +import urllib2 |
1490 | +import urlparse |
1491 | + |
1492 | +from charmhelpers.fetch import ( |
1493 | + BaseFetchHandler, |
1494 | + UnhandledSource |
1495 | +) |
1496 | +from charmhelpers.payload.archive import ( |
1497 | + get_archive_handler, |
1498 | + extract, |
1499 | +) |
1500 | +from charmhelpers.core.host import mkdir |
1501 | + |
1502 | + |
class ArchiveUrlFetchHandler(BaseFetchHandler):
    """Handler for archives via generic URLs"""
    def can_handle(self, source):
        # Accept only fetchable URL schemes whose base URL matches a known
        # archive format (per get_archive_handler); otherwise explain why.
        url_parts = self.parse_url(source)
        if url_parts.scheme not in ('http', 'https', 'ftp', 'file'):
            return "Wrong source type"
        if get_archive_handler(self.base_url(source)):
            return True
        return False

    def download(self, source, dest):
        """Download *source* to local path *dest*, supporting HTTP basic
        auth embedded as user:password@host in the URL."""
        # propogate all exceptions
        # URLError, OSError, etc
        proto, netloc, path, params, query, fragment = urlparse.urlparse(source)
        if proto in ('http', 'https'):
            auth, barehost = urllib2.splituser(netloc)
            if auth is not None:
                # Strip the credentials out of the URL and install an
                # opener that presents them via basic auth instead.
                source = urlparse.urlunparse((proto, barehost, path, params, query, fragment))
                username, password = urllib2.splitpasswd(auth)
                passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
                # Realm is set to None in add_password to force the username and password
                # to be used whatever the realm
                passman.add_password(None, source, username, password)
                authhandler = urllib2.HTTPBasicAuthHandler(passman)
                opener = urllib2.build_opener(authhandler)
                urllib2.install_opener(opener)
        response = urllib2.urlopen(source)
        try:
            # NOTE(review): mode 'w' is text mode; binary archives really
            # want 'wb' -- this only works because POSIX Python 2 does not
            # translate newlines. Confirm before porting.
            with open(dest, 'w') as dest_file:
                dest_file.write(response.read())
        except Exception as e:
            # Do not leave a partial download behind.
            if os.path.isfile(dest):
                os.unlink(dest)
            raise e

    def install(self, source):
        """Download *source* into $CHARM_DIR/fetched and extract it,
        returning the extraction path."""
        url_parts = self.parse_url(source)
        dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched')
        if not os.path.exists(dest_dir):
            mkdir(dest_dir, perms=0755)
        dld_file = os.path.join(dest_dir, os.path.basename(url_parts.path))
        try:
            self.download(source, dld_file)
        except urllib2.URLError as e:
            raise UnhandledSource(e.reason)
        except OSError as e:
            raise UnhandledSource(e.strerror)
        return extract(dld_file)
1551 | |
1552 | === added file 'hooks/charmhelpers/fetch/bzrurl.py' |
1553 | --- hooks/charmhelpers/fetch/bzrurl.py 1970-01-01 00:00:00 +0000 |
1554 | +++ hooks/charmhelpers/fetch/bzrurl.py 2014-06-26 20:05:25 +0000 |
1555 | @@ -0,0 +1,50 @@ |
1556 | +import os |
1557 | +from charmhelpers.fetch import ( |
1558 | + BaseFetchHandler, |
1559 | + UnhandledSource |
1560 | +) |
1561 | +from charmhelpers.core.host import mkdir |
1562 | + |
1563 | +try: |
1564 | + from bzrlib.branch import Branch |
1565 | +except ImportError: |
1566 | + from charmhelpers.fetch import apt_install |
1567 | + apt_install("python-bzrlib") |
1568 | + from bzrlib.branch import Branch |
1569 | + |
1570 | + |
class BzrUrlFetchHandler(BaseFetchHandler):
    """Handler for bazaar branches via generic and lp URLs"""

    def can_handle(self, source):
        """Only bzr+ssh:// and lp: URLs are supported."""
        return self.parse_url(source).scheme in ('bzr+ssh', 'lp')

    def branch(self, source, dest):
        """Branch *source* into *dest*.

        Raises UnhandledSource for unsupported URLs; bzr errors propagate.
        """
        url_parts = self.parse_url(source)
        if not self.can_handle(source):
            raise UnhandledSource("Cannot handle {}".format(source))
        # If we use lp:branchname scheme we need to load plugins
        if url_parts.scheme == "lp":
            from bzrlib.plugin import load_plugins
            load_plugins()
        # The previous ``try/except Exception as e: raise e`` wrapper was a
        # no-op (it re-raised everything unchanged) and has been removed.
        remote_branch = Branch.open(source)
        remote_branch.bzrdir.sprout(dest).open_branch()

    def install(self, source):
        """Branch *source* under $CHARM_DIR/fetched and return the path."""
        url_parts = self.parse_url(source)
        branch_name = url_parts.path.strip("/").split("/")[-1]
        dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched",
                                branch_name)
        if not os.path.exists(dest_dir):
            mkdir(dest_dir, perms=0o755)
        try:
            self.branch(source, dest_dir)
        except OSError as e:
            raise UnhandledSource(e.strerror)
        return dest_dir
1606 | |
1607 | === added file 'hooks/charmhelpers/setup.py' |
1608 | --- hooks/charmhelpers/setup.py 1970-01-01 00:00:00 +0000 |
1609 | +++ hooks/charmhelpers/setup.py 2014-06-26 20:05:25 +0000 |
1610 | @@ -0,0 +1,12 @@ |
1611 | +#!/usr/bin/env python |
1612 | + |
1613 | +from distutils.core import setup |
1614 | + |
# Minimal stub metadata: this setup.py exists only so the vendored
# charmhelpers tree can be installed into test environments (e.g. amulet);
# the placeholder values are never published anywhere.
setup(name='charmhelpers',
      version='1.0',
      description='this is dumb',
      author='nobody',
      author_email='dummy@amulet',
      url='http://google.com',
      packages=[],
)
1623 | |
1624 | === added file 'hooks/common.py' |
1625 | --- hooks/common.py 1970-01-01 00:00:00 +0000 |
1626 | +++ hooks/common.py 2014-06-26 20:05:25 +0000 |
1627 | @@ -0,0 +1,231 @@ |
1628 | +#!/usr/bin/env python |
1629 | + |
1630 | +import grp |
1631 | +import os |
1632 | +import pwd |
1633 | +import subprocess |
1634 | +import sys |
1635 | +import tarfile |
1636 | +import argparse |
1637 | +import logging |
1638 | + |
1639 | +from shutil import rmtree, copyfile |
1640 | +from charmhelpers.core.hookenv import log |
1641 | +# Required for unit tests... :( |
1642 | +try: |
1643 | + from charmhelpers.fetch import apt_install, apt_update |
1644 | +except: |
1645 | + pass |
1646 | + |
# NOTE(review): FORMAT is never passed to basicConfig below, and
# '%(user)s' is not a standard LogRecord attribute (it would need an
# extra= dict) -- currently this constant is unused.
FORMAT = '%(asctime)-15s %(user)-8s %(message)s'
logging.basicConfig(level=logging.INFO)


# ##########################################
# Globals
# ##########################################
JAVA_VERSION = "7"  # OpenJDK major version to locate on disk
JAVA_HOME = None  # resolved at runtime by set_java_home()
HADOOP_VERSION = "hadoop-2.2.0"  # also names the bundled tarball/extract dir
PLATFORM_ARCH = "amd64"
HOME_DIR = os.path.join(os.path.sep, "home", "ubuntu")
HADOOP_DIR = os.path.join(HOME_DIR, "hadoop")
HADOOP_TMP_DIR = os.path.join(HADOOP_DIR, "tmp")
HADOOP_INSTALL = os.path.join(HADOOP_DIR, HADOOP_VERSION)
HADOOP_CONF_DIR = os.path.join(HADOOP_INSTALL, "etc/hadoop")
1663 | + |
1664 | + |
1665 | +# ########################################## |
1666 | +# Utility Methods |
1667 | +# ########################################## |
1668 | + |
def install_dev_packages():
    """Refresh the apt cache and install the Hadoop build/runtime deps."""
    packages = [
        'maven',
        'build-essential',
        'autoconf',
        'automake',
        'libtool',
        'cmake',
        'zlib1g-dev',
        'pkg-config',
        'libssl-dev',
        'snappy',
        'libsnappy-dev',
        'openjdk-7-jdk',
        'python-pip',
        'python-jinja2',
    ]
    apt_update()
    apt_install(packages)
1687 | + |
1688 | + |
def ssh_wizardry():
    """Set up password-less, non-strict SSH for the ubuntu user: disable
    host key checking, generate an RSA keypair, and self-authorize it.

    NOTE(review): assumes /home/ubuntu/.ssh already exists -- verify the
    install hook guarantees that before this runs.
    """
    # Set NonStrict Hostkey Checking to .ssh config
    # this both confuses and angers me!
    log("Setting NonStrict HostKey Checking for SSH", "INFO")

    nonstrict = "Host *\n\tStrictHostKeyChecking no"
    with open("{dir}/.ssh/config".format(dir=HOME_DIR), 'w+') as f:
        f.write(nonstrict)

    keyfile = os.path.join(os.path.sep, 'home', 'ubuntu', '.ssh', 'id_rsa')
    # 'yes |' auto-answers the overwrite prompt if a key already exists;
    # shell=True is required for the pipeline.
    cmd = 'yes | ssh-keygen -t rsa -N "" -f {d}'.format(d=keyfile)
    ps = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                          stderr=subprocess.STDOUT)
    output = ps.communicate()[0]
    log("Output of ssh keygen: {o}".format(o=output), "INFO")
    with open("{dir}/.ssh/id_rsa.pub".format(dir=HOME_DIR), 'r') as f:
        hostkey = f.read()

    # Append (not overwrite) so existing authorized keys are preserved.
    auth_keys = "{dir}/.ssh/authorized_keys".format(dir=HOME_DIR)
    with open(auth_keys, 'a') as f:
        f.write(hostkey)
    subprocess.call(['chown', '-R', 'ubuntu.ubuntu',
                     "{dir}/.ssh".format(dir=HOME_DIR)])
1712 | + |
1713 | + |
def set_java_home():
    """Locate the OpenJDK install and point hadoop-env.sh's JAVA_HOME at it.

    Also records the path in the module-level JAVA_HOME global, which
    add_shim() later templates into the user's environment.

    Raises ValueError when no matching JDK directory is found.
    """
    # Bug fix: JAVA_HOME was previously assigned without a ``global``
    # statement, so the module-level value stayed None for add_shim().
    global JAVA_HOME

    jbin = 'java-{v}-openjdk-{a}'.format(v=JAVA_VERSION, a=PLATFORM_ARCH)
    jpath = subprocess.check_output(['find', '/usr/', '-name', jbin])
    if not jpath:
        raise ValueError("Unable to locate Java Path for %s" % jbin)
    script = '{d}/etc/hadoop/hadoop-env.sh'.format(d=HADOOP_INSTALL)

    # Rewrite the 'export JAVA_HOME=' line in hadoop-env.sh in place.
    with open(script) as f:
        contents = f.readlines()
    for i, line in enumerate(contents):
        if line.startswith('export JAVA_HOME='):
            contents[i] = "export JAVA_HOME='{jp}'".format(jp=jpath.strip())

    with open(script, 'w') as f:
        f.writelines(contents)

    # (The original duplicated this assignment and the log call; deduped.)
    JAVA_HOME = jpath.strip()
    log('Set JAVA_HOME in {p} to {v}'.format(p=script, v=jpath), 'INFO')
    log("JAVA_HOME set to: {j}".format(j=JAVA_HOME), "INFO")
1734 | + |
1735 | + |
1736 | +# TODO: this should be configureable at some point and reach out |
1737 | +# over the wire to install w/ Sha1 Sig Validation |
def extract_hadoop():
    """Unpack the Hadoop tarball bundled with the charm into HADOOP_DIR."""
    tarball_name = "{hdpver}.tar.gz".format(hdpver=HADOOP_VERSION)
    tarballpath = os.path.join(os.environ['CHARM_DIR'], "files", "archives",
                               tarball_name)

    if tarfile.is_tarfile(tarballpath):
        tarfile.open(tarballpath).extractall(HADOOP_DIR)
    else:
        log("Unable to extract Hadoop Tarball {tb}".format(tb=tarballpath),
            "Warning")
1750 | + |
1751 | + |
1752 | +def add_shim(): |
1753 | + if os.path.exists("{d}/.hadoop_shim".format(d=HOME_DIR)): |
1754 | + log("Found .hadoop_shim, bailing", "INFO") |
1755 | + log("To re-execute the template, re-run this hook after removing" |
1756 | + "{d}/.hadoop_shim".format(d=HOME_DIR), 'INFO') |
1757 | + return |
1758 | + |
1759 | + # TODO: Refactor this nasty shell callout with some implicit python |
1760 | + # by requiring the templatewiz class, and passing args. |
1761 | + templ_bin_path = os.path.join('files', 'template') |
1762 | + template_path = os.path.join(os.environ['CHARM_DIR'], 'files', |
1763 | + 'templates', 'defaults.j2') |
1764 | + user_path = os.path.join(os.path.sep, 'home', 'ubuntu', '.hadoop_shim') |
1765 | + subprocess.call(['python', templ_bin_path, '-t', template_path, '-o', |
1766 | + user_path, 'java_home={j}'.format(j=JAVA_HOME), |
1767 | + 'hadoop_home={h}'.format(h=HADOOP_INSTALL), |
1768 | + 'conf={c}'.format(c=HADOOP_CONF_DIR)]) |
1769 | + |
1770 | + with open('{d}/.profile'.format(d=HOME_DIR), 'a') as f: |
1771 | + f.write('[ -f "$HOME/.hadoop_shim" ] && . "$HOME/.hadoop_shim"') |
1772 | + |
1773 | + |
1774 | +def template(src, dst, own='root', grp='root', mod=0644): |
1775 | + copyfile(src, dst) |
1776 | + os.chown(dst, own, grp) |
1777 | + os.chmod(dst, mod) |
1778 | + |
1779 | + |
1780 | +def copy_templates(): |
1781 | + # Get UID's |
1782 | + hadoop_uid = pwd.getpwnam('ubuntu').pw_uid |
1783 | + hadoop_gid = grp.getgrnam('ubuntu').gr_gid |
1784 | + |
1785 | + srcp = os.path.join('files', 'hadoop') |
1786 | + dstp = os.path.join(HADOOP_INSTALL, 'etc', 'hadoop') |
1787 | + template(os.path.join(srcp, 'core-site.xml'), |
1788 | + os.path.join(dstp, 'core-site.xml'), hadoop_uid, |
1789 | + hadoop_gid) |
1790 | + template(os.path.join(srcp, 'hdfs-site.xml'), |
1791 | + os.path.join(dstp, 'hdfs-site.xml'), hadoop_uid, |
1792 | + hadoop_gid) |
1793 | + template(os.path.join(srcp, 'yarn-site.xml'), |
1794 | + os.path.join(dstp, 'yarn-site.xml'), hadoop_uid, |
1795 | + hadoop_gid) |
1796 | + template(os.path.join(srcp, 'mapreduce-site.xml'), |
1797 | + os.path.join(dstp, 'mapreduce-site.xml'), hadoop_uid, |
1798 | + hadoop_gid) |
1799 | + template(os.path.join(srcp, 'hdfs_format.sh'), |
1800 | + os.path.join(HOME_DIR, 'hdfs_format.sh'), hadoop_uid, |
1801 | + hadoop_gid, 0777) |
1802 | + |
1803 | + |
1804 | +# ########################################## |
1805 | +# Charm Hooks |
1806 | +# ########################################## |
1807 | + |
1808 | +def install(): |
1809 | + # Remove the hadoop directory if it exists |
1810 | + if os.path.exists(HADOOP_DIR): |
1811 | + rmtree(HADOOP_DIR) |
1812 | + ssh_wizardry() |
1813 | + |
1814 | + os.makedirs(HADOOP_DIR) |
1815 | + os.makedirs(HADOOP_TMP_DIR) |
1816 | + install_dev_packages() |
1817 | + extract_hadoop() |
1818 | + set_java_home() |
1819 | + |
1820 | + # Create HDFS Dir Tree |
1821 | + nndir = os.path.join("{d}".format(d=HADOOP_DIR), "data", |
1822 | + "hdfs", "namenode") |
1823 | + if not os.path.exists(nndir): |
1824 | + os.makedirs(nndir) |
1825 | + dndir = "{d}/data/hdfs/datanode".format(d=HADOOP_DIR) |
1826 | + if not os.path.exists(nndir): |
1827 | + os.makedirs(dndir) |
1828 | + |
1829 | + copy_templates() |
1830 | + add_shim() |
1831 | + |
1832 | + # Call the HDFS Format script post installation |
1833 | + cmd = "{d}{s}hdfs_format.sh".format(d=HOME_DIR, s=os.path.sep) |
1834 | + log(cmd, "INFO") |
1835 | + subprocess.call(cmd) |
1836 | + |
1837 | + # Ensure the Ubuntu user owns everything in the hadoop tree |
1838 | + # Stick a fork in it, we're done. |
1839 | + subprocess.call(['chown', '-R', 'ubuntu.ubuntu', HADOOP_DIR]) |
1840 | + log("Install complete.", "INFO") |
1841 | + |
1842 | + |
1843 | +if __name__ == "__main__": |
1844 | + parser = argparse.ArgumentParser() |
1845 | + parser.add_argument('-H', '--hook_name', dest='hook_name', |
1846 | + help='hook to call') |
1847 | + |
1848 | + args = parser.parse_args() |
1849 | + |
1850 | + if args.hook_name is not None: |
1851 | + hook_name = args.hook_name |
1852 | + else: |
1853 | + hook_name = os.path.basename(sys.argv[0]) |
1854 | + |
1855 | + if hook_name == "install": |
1856 | + retVal = install() |
1857 | + else: |
1858 | + pass |
1859 | |
1860 | === modified file 'hooks/install' |
1861 | --- hooks/install 2014-05-22 17:00:22 +0000 |
1862 | +++ hooks/install 1970-01-01 00:00:00 +0000 |
1863 | @@ -1,58 +0,0 @@ |
1864 | -#!/bin/bash |
1865 | -set -ex |
1866 | - |
1867 | -source files/upstart/hadoop_env |
1868 | - |
1869 | -if [ -d $HADOOP_DIR ]; then |
1870 | - rm -rf $HADOOP_DIR |
1871 | -fi |
1872 | - |
1873 | -echo -e "Host *\n\tStrictHostKeyChecking no" > /home/ubuntu/.ssh/config |
1874 | - |
1875 | -yes | ssh-keygen -t rsa -N "" -f /home/ubuntu/.ssh/id_rsa |
1876 | -cat /home/ubuntu/.ssh/id_rsa.pub >> /home/ubuntu/.ssh/authorized_keys |
1877 | -chown -R ubuntu.ubuntu /home/ubuntu/.ssh |
1878 | -# ssh localhost just to make sure |
1879 | -# ssh -o StrictHostKeyChecking=no localhost |
1880 | - |
1881 | -juju-log "Installing JAVA!" |
1882 | - |
1883 | -mkdir -p $HADOOP_DIR |
1884 | -apt-get update -qqy |
1885 | -apt-get install -y openjdk-7-jdk |
1886 | -JAVA_HOME_PATH=$(find /usr/ -name java-$JAVA_VERSION-openjdk-$PLATFORM_ARCH) |
1887 | - |
1888 | -juju-log "installing other development packages" |
1889 | -apt-get -qqy install maven build-essential autoconf automake libtool cmake zlib1g-dev pkg-config libssl-dev snappy libsnappy-dev |
1890 | -juju-log "Now for Hadoop!" |
1891 | - |
1892 | -mkdir -p $HADOOP_DIR |
1893 | -mkdir -p $HADOOP_TMP_DIR |
1894 | -# cd $HADOOP_DIR |
1895 | -tar -xvzf files/archives/$HADOOP_VERSION.tar.gz -C $HADOOP_DIR |
1896 | -cd $HADOOP_DIR |
1897 | -sed -ir 's|export JAVA_HOME=.*|export JAVA_HOME='$JAVA_HOME_PATH'|' $(find -name hadoop-env.sh) |
1898 | - |
1899 | -mkdir -p $HADOOP_DIR/data/hdfs/namenode |
1900 | -mkdir -p $HADOOP_DIR/data/hdfs/datanode |
1901 | -cd $CHARM_DIR |
1902 | -install -o root -g root -m 0644 files/upstart/defaults /etc/default/hadoop |
1903 | - |
1904 | -# Do a bunch of fun stuff in XML |
1905 | - |
1906 | -install -o ubuntu -g ubuntu -m 0644 files/hadoop/core-site.xml $HADOOP_DIR/$HADOOP_VERSION/etc/hadoop/core-site.xml |
1907 | -install -o ubuntu -g ubuntu -m 0644 files/hadoop/hdfs-site.xml $HADOOP_DIR/$HADOOP_VERSION/etc/hadoop/hdfs-site.xml |
1908 | -install -o ubuntu -g ubuntu -m 0644 files/hadoop/yarn-site.xml $HADOOP_DIR/$HADOOP_VERSION/etc/hadoop/yarn-site.xml |
1909 | -install -o ubuntu -g ubuntu -m 0644 files/hadoop/mapreduce-site.xml $HADOOP_DIR/$HADOOP_VERSION/etc/hadoop/mapred-site.xml |
1910 | - |
1911 | -# Upstart |
1912 | - |
1913 | -. /etc/default/hadoop $JAVA_HOME_PATH $HADOOP_INSTALL $HOME_DIR |
1914 | -source $HOME_DIR/.profile |
1915 | -hdfs namenode -format |
1916 | - |
1917 | -chown -R ubuntu.ubuntu ~ubuntu/hadoop |
1918 | - |
1919 | -juju-log "Install complete." |
1920 | - |
1921 | - |
1922 | |
1923 | === target is u'common.py' |
1924 | === removed file 'hooks/relation-name-relation-broken' |
1925 | --- hooks/relation-name-relation-broken 2014-05-22 17:00:22 +0000 |
1926 | +++ hooks/relation-name-relation-broken 1970-01-01 00:00:00 +0000 |
1927 | @@ -1,2 +0,0 @@ |
1928 | -#!/bin/sh |
1929 | -# This hook runs when the full relation is removed (not just a single member) |
1930 | |
1931 | === removed file 'hooks/relation-name-relation-changed' |
1932 | --- hooks/relation-name-relation-changed 2014-05-22 17:00:22 +0000 |
1933 | +++ hooks/relation-name-relation-changed 1970-01-01 00:00:00 +0000 |
1934 | @@ -1,9 +0,0 @@ |
1935 | -#!/bin/bash |
1936 | -# This must be renamed to the name of the relation. The goal here is to |
1937 | -# affect any change needed by relationships being formed, modified, or broken |
1938 | -# This script should be idempotent. |
1939 | -juju-log $JUJU_REMOTE_UNIT modified its settings |
1940 | -juju-log Relation settings: |
1941 | -relation-get |
1942 | -juju-log Relation members: |
1943 | -relation-list |
1944 | |
1945 | === removed file 'hooks/relation-name-relation-departed' |
1946 | --- hooks/relation-name-relation-departed 2014-05-22 17:00:22 +0000 |
1947 | +++ hooks/relation-name-relation-departed 1970-01-01 00:00:00 +0000 |
1948 | @@ -1,5 +0,0 @@ |
1949 | -#!/bin/sh |
1950 | -# This must be renamed to the name of the relation. The goal here is to |
1951 | -# affect any change needed by the remote unit leaving the relationship. |
1952 | -# This script should be idempotent. |
1953 | -juju-log $JUJU_REMOTE_UNIT departed |
1954 | |
1955 | === removed file 'hooks/relation-name-relation-joined' |
1956 | --- hooks/relation-name-relation-joined 2014-05-22 17:00:22 +0000 |
1957 | +++ hooks/relation-name-relation-joined 1970-01-01 00:00:00 +0000 |
1958 | @@ -1,5 +0,0 @@ |
1959 | -#!/bin/sh |
1960 | -# This must be renamed to the name of the relation. The goal here is to |
1961 | -# affect any change needed by relationships being formed |
1962 | -# This script should be idempotent. |
1963 | -juju-log $JUJU_REMOTE_UNIT joined |
1964 | |
1965 | === added directory 'tests' |
1966 | === added file 'tests/01_test_install_hook.py' |
1967 | --- tests/01_test_install_hook.py 1970-01-01 00:00:00 +0000 |
1968 | +++ tests/01_test_install_hook.py 2014-06-26 20:05:25 +0000 |
1969 | @@ -0,0 +1,22 @@ |
1970 | +import sys |
1971 | +import os |
1972 | +import unittest |
1973 | + |
1974 | +from mock import patch, Mock |
1975 | +sys.path.insert(0, os.path.abspath(os.path.join('..', 'hooks'))) |
1976 | + |
1977 | +from hooks.common import HadoopDevel |
1978 | + |
1979 | + |
1980 | +class TestCommon(unittest.TestCase): |
1981 | + |
1982 | + @patch('grp.getgrnam') |
1983 | + @patch('pwd.getpwnam') |
1984 | + def test_init(self, pwm, grpm): |
1985 | + hd = HadoopDevel() |
1986 | + self.assertEqual(hd.JAVA_VERSION, "7") |
1987 | + self.assertEqual(hd.HADOOP_VERSION, "hadoop-2.2.0") |
1988 | + self.assertEqual(hd.PLATFORM_ARCH, "amd64") |
1989 | + self.assertEqual(hd.HOME_DIR, "/home/ubuntu") |
1990 | + self.assertEqual(hd.HADOOP_TMP_DIR, "/home/ubuntu/hadoop/tmp") |
1991 | + self.assertEqual(hd.HADOOP_INSTALL, "/home/ubuntu/hadoop/hadoop-2.2.0") |
1992 | |
1993 | === added file 'tests/tests.yaml' |
1994 | --- tests/tests.yaml 1970-01-01 00:00:00 +0000 |
1995 | +++ tests/tests.yaml 2014-06-26 20:05:25 +0000 |
1996 | @@ -0,0 +1,10 @@ |
1997 | +bootstrap: false |
1998 | +reset: false |
1999 | +setup: script |
2000 | +teardown: script |
2001 | +tests: "[0-9]*" |
2002 | +virtualenv: false |
2003 | +#sources: |
2004 | +packages: |
2005 | +- amulet |
2006 | +- python-requests |
Please remove the file called old_install.