Merge lp:~lazypower/charms/trusty/hadoop2-devel/python_rewrite into lp:~asanjar/charms/trusty/hadoop2-devel/trunk

Proposed by Charles Butler
Status: Merged
Merged at revision: 2
Proposed branch: lp:~lazypower/charms/trusty/hadoop2-devel/python_rewrite
Merge into: lp:~asanjar/charms/trusty/hadoop2-devel/trunk
Diff against target: 2006 lines (+1779/-101)
22 files modified
Makefile (+19/-0)
charm-helpers.yaml (+5/-0)
config.yaml (+0/-9)
files/hadoop/hdfs_format.sh (+15/-0)
files/template.py (+56/-0)
files/templates/defaults.j2 (+10/-0)
files/upstart/setenv.sh (+0/-13)
hooks/charmhelpers/core/fstab.py (+114/-0)
hooks/charmhelpers/core/hookenv.py (+498/-0)
hooks/charmhelpers/core/host.py (+325/-0)
hooks/charmhelpers/fetch/__init__.py (+349/-0)
hooks/charmhelpers/fetch/archiveurl.py (+63/-0)
hooks/charmhelpers/fetch/bzrurl.py (+50/-0)
hooks/charmhelpers/setup.py (+12/-0)
hooks/common.py (+231/-0)
hooks/install (+0/-58)
hooks/relation-name-relation-broken (+0/-2)
hooks/relation-name-relation-changed (+0/-9)
hooks/relation-name-relation-departed (+0/-5)
hooks/relation-name-relation-joined (+0/-5)
tests/01_test_install_hook.py (+22/-0)
tests/tests.yaml (+10/-0)
To merge this branch: bzr merge lp:~lazypower/charms/trusty/hadoop2-devel/python_rewrite
Reviewer: amir sanjar
Review status: Approve
Review via email: mp+224647@code.launchpad.net

Description of the change

Removes the class component of the code and leaves it as a straightforward, encapsulated, procedural Python script.

Adds argparse to support future modifications of the charm, and defines the hook call stack using it.
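
For reference, the dispatch reads roughly like the sketch below, condensed from hooks/common.py in the diff: the hook name comes from an explicit -H/--hook_name flag when given, otherwise from the name the script was invoked as (e.g. through a hook symlink).

    import argparse
    import os
    import sys


    def install():
        pass  # the full implementation lives in hooks/common.py below


    if __name__ == "__main__":
        parser = argparse.ArgumentParser()
        parser.add_argument('-H', '--hook_name', dest='hook_name',
                            help='hook to call')
        args = parser.parse_args()
        # Fall back to the invoked name, so hooks/install can point at
        # this script.
        hook_name = args.hook_name or os.path.basename(sys.argv[0])
        if hook_name == "install":
            install()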

Revision history for this message
amir sanjar (asanjar) wrote :

Please remove a file called old_install.

review: Needs Fixing
10. By Charles Butler

Removed the old_install archived hook

Revision history for this message
amir sanjar (asanjar) :
review: Approve

Preview Diff

=== added file 'Makefile'
--- Makefile 1970-01-01 00:00:00 +0000
+++ Makefile 2014-06-26 20:05:25 +0000
@@ -0,0 +1,19 @@
1#!/usr/bin/make
2PYTHON := /usr/bin/env python
3
4sync-charm-helpers: bin/charm_helpers_sync.py
5 @$(PYTHON) bin/charm_helpers_sync.py -c charm-helpers.yaml
6
7bin/charm_helpers_sync.py:
8 @mkdir -p bin
9 @bzr cat lp:charm-helpers/tools/charm_helpers_sync/charm_helpers_sync.py > bin/charm_helpers_sync.py
10
11autotest:
12 watchmedo shell-command --patterns="*.py" --recursive --command="make test"
13
14test:
15 @nosetests tests/*.py
16
17clean:
18 @find . -name '*.pyc' -delete
19 @rm -f .coverage
=== added file 'charm-helpers.yaml'
--- charm-helpers.yaml 1970-01-01 00:00:00 +0000
+++ charm-helpers.yaml 2014-06-26 20:05:25 +0000
@@ -0,0 +1,5 @@
1branch: lp:charm-helpers
2destination: hooks/charmhelpers
3include:
4 - core
5 - fetch
=== modified file 'config.yaml'
--- config.yaml 2014-05-22 17:00:22 +0000
+++ config.yaml 2014-06-26 20:05:25 +0000
@@ -1,14 +1,5 @@
 options:
-  string-option:
-    type: string
-    default: "Default Value"
-    description: "A short description of the configuration option"
   boolean-option:
     type: boolean
     default: False
     description: "A short description of the configuration option"
-  int-option:
-    type: int
-    default: 9001
-    description: "A short description of the configuration option"
-
=== added file 'files/__init__.py'
=== added file 'files/hadoop/hdfs_format.sh'
--- files/hadoop/hdfs_format.sh 1970-01-01 00:00:00 +0000
+++ files/hadoop/hdfs_format.sh 2014-06-26 20:05:25 +0000
@@ -0,0 +1,15 @@
1#!/bin/sh
2HADOOP_VERSION="hadoop-2.2.0"
3HOME_DIR="/home/ubuntu"
4HADOOP_DIR="/home/ubuntu/hadoop"
5HADOOP_TMP_DIR=$HADOOP_DIR/tmp
6export HADOOP_INSTALL=$HADOOP_DIR/$HADOOP_VERSION
7export HADOOP_HOME=$HADOOP_INSTALL
8export HADOOP_COMMON_HOME=$HADOOP_INSTALL
9export HADOOP_HDFS_HOME=$HADOOP_INSTALL
10export HADOOP_MAPRED_HOME=$HADOOP_INSTALL
11export HADOOP_YARN_HOME=$HADOOP_INSTALL
12export PATH=$PATH:$HADOOP_INSTALL/bin:$HADOOP_INSTALL/sbin
13export YARN_HOME=$HADOOP_INSTALL
14export HADOOP_CONF_DIR=$HADOOP_INSTALL/etc/hadoop
15hdfs namenode -format
=== added symlink 'files/template'
=== target is u'template.py'
=== added file 'files/template.py'
--- files/template.py 1970-01-01 00:00:00 +0000
+++ files/template.py 2014-06-26 20:05:25 +0000
@@ -0,0 +1,56 @@
1#!/usr/bin/env python
2
3import argparse
4import logging
5import os
6import shutil
7import subprocess
8
9# Fall back to installing the jinja2 templating dependency via pip.
10# This will not work in offline environments.
11try:
12 from jinja2 import Template
13except ImportError:
14 subprocess.call(['pip', 'install', 'jinja2'])
15 raise ImportError("Attempted to install missing dependencies. Try re-running")
16
17logging.basicConfig(level=logging.INFO)
18
19
20class TemplateWiz:
21
22 def __init__(self, args=None):
23 self.values = {}
24 parser = argparse.ArgumentParser()
25 parser.add_argument("-t", "--template", help="Template Path")
26 parser.add_argument("-o", "--output", help="Fullpath to output file")
27 self.paths, data = parser.parse_known_args(args)
28 if data:
29 # Process key = value keys
30 for item in data:
31 kv = item.split('=')
32 self.values[kv[0]] = kv[-1]
33
34 def read_template(self):
35 if not os.path.exists(self.paths.template):
36 raise IOError("Error loading template: %s" % self.paths.template)
37 else:
38 with open(self.paths.template, 'r') as f:
39 self.template = Template(f.read())
40
41 def write_template(self):
42 if os.path.exists(self.paths.output):
43 shutil.copy2(self.paths.output, "%s.bak" % self.paths.output)
44 logging.info("Saving {f} as {f}.bak".format(f=self.paths.output))
45 with open(self.paths.output, 'w') as f:
46 f.write(self.template.render(self.values))
47 logging.info('Rendered %s' % self.paths.output)
48
49 def run(self):
50 self.read_template()
51 self.write_template()
52
53
54if __name__ == "__main__":
55 tw = TemplateWiz()
56 tw.run()
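
A quick way to exercise the tool above, run from the charm root: the trailing key=value pairs become template variables. The java_home value here is illustrative; add_shim() in hooks/common.py later in this diff makes an equivalent call.

    import subprocess

    subprocess.call(['python', 'files/template',
                     '-t', 'files/templates/defaults.j2',
                     '-o', '/tmp/hadoop_shim.test',
                     'java_home=/usr/lib/jvm/java-7-openjdk-amd64',
                     'hadoop_home=/home/ubuntu/hadoop/hadoop-2.2.0',
                     'conf=/home/ubuntu/hadoop/hadoop-2.2.0/etc/hadoop'])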
=== added directory 'files/templates'
=== added file 'files/templates/defaults.j2'
--- files/templates/defaults.j2 1970-01-01 00:00:00 +0000
+++ files/templates/defaults.j2 2014-06-26 20:05:25 +0000
@@ -0,0 +1,10 @@
1export JAVA_HOME={{java_home}}
2export HADOOP_INSTALL={{hadoop_home}}
3export HADOOP_HOME=$HADOOP_INSTALL
4export HADOOP_COMMON_HOME=$HADOOP_INSTALL
5export HADOOP_HDFS_HOME=$HADOOP_INSTALL
6export HADOOP_MAPRED_HOME=$HADOOP_INSTALL
7export HADOOP_YARN_HOME=$HADOOP_INSTALL
8export PATH=$JAVA_HOME/bin:$PATH:$HADOOP_INSTALL/bin:$HADOOP_INSTALL/sbin
9export YARN_HOME=$HADOOP_INSTALL
10export HADOOP_CONF_DIR={{conf}}
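
As a sanity check, the template above renders with plain Jinja2 as follows; the values passed are illustrative.

    from jinja2 import Template

    with open('files/templates/defaults.j2') as f:
        tmpl = Template(f.read())
    print(tmpl.render(java_home='/usr/lib/jvm/java-7-openjdk-amd64',
                      hadoop_home='/home/ubuntu/hadoop/hadoop-2.2.0',
                      conf='/home/ubuntu/hadoop/hadoop-2.2.0/etc/hadoop'))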
=== removed file 'files/upstart/setenv.sh'
--- files/upstart/setenv.sh 2014-05-22 17:00:22 +0000
+++ files/upstart/setenv.sh 1970-01-01 00:00:00 +0000
@@ -1,13 +0,0 @@
1HADOOP_VERSION="hadoop-2.2.0"
2HOME_DIR="/home/ubuntu"
3HADOOP_DIR="/home/ubuntu/hadoop"
4HADOOP_TMP_DIR=$HADOOP_DIR/tmp
5export HADOOP_INSTALL=$HADOOP_DIR/$HADOOP_VERSION
6export HADOOP_HOME=$HADOOP_INSTALL
7export HADOOP_COMMON_HOME=$HADOOP_INSTALL
8export HADOOP_HDFS_HOME=$HADOOP_INSTALL
9export HADOOP_MAPRED_HOME=$HADOOP_INSTALL
10export HADOOP_YARN_HOME=$HADOOP_INSTALL
11export PATH=$JAVA_HOME/bin:$PATH:$HADOOP_INSTALL/bin:$HADOOP_INSTALL/sbin
12export YARN_HOME=$HADOOP_INSTALL
13export HADOOP_CONF_DIR=/etc/hadoop/conf.juju
=== added file 'hooks/__init__.py'
=== added directory 'hooks/charmhelpers'
=== added file 'hooks/charmhelpers/__init__.py'
=== added directory 'hooks/charmhelpers/core'
=== added file 'hooks/charmhelpers/core/__init__.py'
=== added file 'hooks/charmhelpers/core/fstab.py'
--- hooks/charmhelpers/core/fstab.py 1970-01-01 00:00:00 +0000
+++ hooks/charmhelpers/core/fstab.py 2014-06-26 20:05:25 +0000
@@ -0,0 +1,114 @@
1#!/usr/bin/env python
2# -*- coding: utf-8 -*-
3
4__author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>'
5
6import os
7
8
9class Fstab(file):
10 """This class extends file in order to implement a file reader/writer
11 for file `/etc/fstab`
12 """
13
14 class Entry(object):
15 """Entry class represents a non-comment line on the `/etc/fstab` file
16 """
17 def __init__(self, device, mountpoint, filesystem,
18 options, d=0, p=0):
19 self.device = device
20 self.mountpoint = mountpoint
21 self.filesystem = filesystem
22
23 if not options:
24 options = "defaults"
25
26 self.options = options
27 self.d = d
28 self.p = p
29
30 def __eq__(self, o):
31 return str(self) == str(o)
32
33 def __str__(self):
34 return "{} {} {} {} {} {}".format(self.device,
35 self.mountpoint,
36 self.filesystem,
37 self.options,
38 self.d,
39 self.p)
40
41 DEFAULT_PATH = os.path.join(os.path.sep, 'etc', 'fstab')
42
43 def __init__(self, path=None):
44 if path:
45 self._path = path
46 else:
47 self._path = self.DEFAULT_PATH
48 file.__init__(self, self._path, 'r+')
49
50 def _hydrate_entry(self, line):
51 return Fstab.Entry(*filter(
52 lambda x: x not in ('', None),
53 line.strip("\n").split(" ")))
54
55 @property
56 def entries(self):
57 self.seek(0)
58 for line in self.readlines():
59 try:
60 if not line.startswith("#"):
61 yield self._hydrate_entry(line)
62 except ValueError:
63 pass
64
65 def get_entry_by_attr(self, attr, value):
66 for entry in self.entries:
67 e_attr = getattr(entry, attr)
68 if e_attr == value:
69 return entry
70 return None
71
72 def add_entry(self, entry):
73 if self.get_entry_by_attr('device', entry.device):
74 return False
75
76 self.write(str(entry) + '\n')
77 self.truncate()
78 return entry
79
80 def remove_entry(self, entry):
81 self.seek(0)
82
83 lines = self.readlines()
84
85 found = False
86 for index, line in enumerate(lines):
87 if not line.startswith("#"):
88 if self._hydrate_entry(line) == entry:
89 found = True
90 break
91
92 if not found:
93 return False
94
95 lines.remove(line)
96
97 self.seek(0)
98 self.write(''.join(lines))
99 self.truncate()
100 return True
101
102 @classmethod
103 def remove_by_mountpoint(cls, mountpoint, path=None):
104 fstab = cls(path=path)
105 entry = fstab.get_entry_by_attr('mountpoint', mountpoint)
106 if entry:
107 return fstab.remove_entry(entry)
108 return False
109
110 @classmethod
111 def add(cls, device, mountpoint, filesystem, options=None, path=None):
112 return cls(path=path).add_entry(Fstab.Entry(device,
113 mountpoint, filesystem,
114 options=options))
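
Hypothetical usage of the Fstab helper above, pointed at a scratch copy rather than the live /etc/fstab (the path must already exist, since the class opens it 'r+'):

    from charmhelpers.core.fstab import Fstab

    # Add an entry, then remove it by mountpoint; the device and paths
    # are made up for illustration.
    Fstab.add('/dev/sdb1', '/mnt/data', 'ext4', options='noatime',
              path='/tmp/fstab.copy')
    Fstab.remove_by_mountpoint('/mnt/data', path='/tmp/fstab.copy')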
=== added file 'hooks/charmhelpers/core/hookenv.py'
--- hooks/charmhelpers/core/hookenv.py 1970-01-01 00:00:00 +0000
+++ hooks/charmhelpers/core/hookenv.py 2014-06-26 20:05:25 +0000
@@ -0,0 +1,498 @@
1"Interactions with the Juju environment"
2# Copyright 2013 Canonical Ltd.
3#
4# Authors:
5# Charm Helpers Developers <juju@lists.ubuntu.com>
6
7import os
8import json
9import yaml
10import subprocess
11import sys
12import UserDict
13from subprocess import CalledProcessError
14
15CRITICAL = "CRITICAL"
16ERROR = "ERROR"
17WARNING = "WARNING"
18INFO = "INFO"
19DEBUG = "DEBUG"
20MARKER = object()
21
22cache = {}
23
24
25def cached(func):
26 """Cache return values for multiple executions of func + args
27
28 For example:
29
30 @cached
31 def unit_get(attribute):
32 pass
33
34 unit_get('test')
35
36 will cache the result of unit_get + 'test' for future calls.
37 """
38 def wrapper(*args, **kwargs):
39 global cache
40 key = str((func, args, kwargs))
41 try:
42 return cache[key]
43 except KeyError:
44 res = func(*args, **kwargs)
45 cache[key] = res
46 return res
47 return wrapper
48
49
50def flush(key):
51 """Flushes any entries from function cache where the
52 key is found in the function+args """
53 flush_list = []
54 for item in cache:
55 if key in item:
56 flush_list.append(item)
57 for item in flush_list:
58 del cache[item]
59
60
61def log(message, level=None):
62 """Write a message to the juju log"""
63 command = ['juju-log']
64 if level:
65 command += ['-l', level]
66 command += [message]
67 subprocess.call(command)
68
69
70class Serializable(UserDict.IterableUserDict):
71 """Wrapper, an object that can be serialized to yaml or json"""
72
73 def __init__(self, obj):
74 # wrap the object
75 UserDict.IterableUserDict.__init__(self)
76 self.data = obj
77
78 def __getattr__(self, attr):
79 # See if this object has attribute.
80 if attr in ("json", "yaml", "data"):
81 return self.__dict__[attr]
82 # Check for attribute in wrapped object.
83 got = getattr(self.data, attr, MARKER)
84 if got is not MARKER:
85 return got
86 # Proxy to the wrapped object via dict interface.
87 try:
88 return self.data[attr]
89 except KeyError:
90 raise AttributeError(attr)
91
92 def __getstate__(self):
93 # Pickle as a standard dictionary.
94 return self.data
95
96 def __setstate__(self, state):
97 # Unpickle into our wrapper.
98 self.data = state
99
100 def json(self):
101 """Serialize the object to json"""
102 return json.dumps(self.data)
103
104 def yaml(self):
105 """Serialize the object to yaml"""
106 return yaml.dump(self.data)
107
108
109def execution_environment():
110 """A convenient bundling of the current execution context"""
111 context = {}
112 context['conf'] = config()
113 if relation_id():
114 context['reltype'] = relation_type()
115 context['relid'] = relation_id()
116 context['rel'] = relation_get()
117 context['unit'] = local_unit()
118 context['rels'] = relations()
119 context['env'] = os.environ
120 return context
121
122
123def in_relation_hook():
124 """Determine whether we're running in a relation hook"""
125 return 'JUJU_RELATION' in os.environ
126
127
128def relation_type():
129 """The scope for the current relation hook"""
130 return os.environ.get('JUJU_RELATION', None)
131
132
133def relation_id():
134 """The relation ID for the current relation hook"""
135 return os.environ.get('JUJU_RELATION_ID', None)
136
137
138def local_unit():
139 """Local unit ID"""
140 return os.environ['JUJU_UNIT_NAME']
141
142
143def remote_unit():
144 """The remote unit for the current relation hook"""
145 return os.environ['JUJU_REMOTE_UNIT']
146
147
148def service_name():
149 """The name service group this unit belongs to"""
150 return local_unit().split('/')[0]
151
152
153def hook_name():
154 """The name of the currently executing hook"""
155 return os.path.basename(sys.argv[0])
156
157
158class Config(dict):
159 """A Juju charm config dictionary that can write itself to
160 disk (as json) and track which values have changed since
161 the previous hook invocation.
162
163 Do not instantiate this object directly - instead call
164 ``hookenv.config()``
165
166 Example usage::
167
168 >>> # inside a hook
169 >>> from charmhelpers.core import hookenv
170 >>> config = hookenv.config()
171 >>> config['foo']
172 'bar'
173 >>> config['mykey'] = 'myval'
174 >>> config.save()
175
176
177 >>> # user runs `juju set mycharm foo=baz`
178 >>> # now we're inside subsequent config-changed hook
179 >>> config = hookenv.config()
180 >>> config['foo']
181 'baz'
182 >>> # test to see if this val has changed since last hook
183 >>> config.changed('foo')
184 True
185 >>> # what was the previous value?
186 >>> config.previous('foo')
187 'bar'
188 >>> # keys/values that we add are preserved across hooks
189 >>> config['mykey']
190 'myval'
191 >>> # don't forget to save at the end of hook!
192 >>> config.save()
193
194 """
195 CONFIG_FILE_NAME = '.juju-persistent-config'
196
197 def __init__(self, *args, **kw):
198 super(Config, self).__init__(*args, **kw)
199 self._prev_dict = None
200 self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME)
201 if os.path.exists(self.path):
202 self.load_previous()
203
204 def load_previous(self, path=None):
205 """Load previous copy of config from disk so that current values
206 can be compared to previous values.
207
208 :param path:
209
210 File path from which to load the previous config. If `None`,
211 config is loaded from the default location. If `path` is
212 specified, subsequent `save()` calls will write to the same
213 path.
214
215 """
216 self.path = path or self.path
217 with open(self.path) as f:
218 self._prev_dict = json.load(f)
219
220 def changed(self, key):
221 """Return true if the value for this key has changed since
222 the last save.
223
224 """
225 if self._prev_dict is None:
226 return True
227 return self.previous(key) != self.get(key)
228
229 def previous(self, key):
230 """Return previous value for this key, or None if there
231 is no "previous" value.
232
233 """
234 if self._prev_dict:
235 return self._prev_dict.get(key)
236 return None
237
238 def save(self):
239 """Save this config to disk.
240
241 Preserves items in _prev_dict that do not exist in self.
242
243 """
244 if self._prev_dict:
245 for k, v in self._prev_dict.iteritems():
246 if k not in self:
247 self[k] = v
248 with open(self.path, 'w') as f:
249 json.dump(self, f)
250
251
252@cached
253def config(scope=None):
254 """Juju charm configuration"""
255 config_cmd_line = ['config-get']
256 if scope is not None:
257 config_cmd_line.append(scope)
258 config_cmd_line.append('--format=json')
259 try:
260 config_data = json.loads(subprocess.check_output(config_cmd_line))
261 if scope is not None:
262 return config_data
263 return Config(config_data)
264 except ValueError:
265 return None
266
267
268@cached
269def relation_get(attribute=None, unit=None, rid=None):
270 """Get relation information"""
271 _args = ['relation-get', '--format=json']
272 if rid:
273 _args.append('-r')
274 _args.append(rid)
275 _args.append(attribute or '-')
276 if unit:
277 _args.append(unit)
278 try:
279 return json.loads(subprocess.check_output(_args))
280 except ValueError:
281 return None
282 except CalledProcessError, e:
283 if e.returncode == 2:
284 return None
285 raise
286
287
288def relation_set(relation_id=None, relation_settings={}, **kwargs):
289 """Set relation information for the current unit"""
290 relation_cmd_line = ['relation-set']
291 if relation_id is not None:
292 relation_cmd_line.extend(('-r', relation_id))
293 for k, v in (relation_settings.items() + kwargs.items()):
294 if v is None:
295 relation_cmd_line.append('{}='.format(k))
296 else:
297 relation_cmd_line.append('{}={}'.format(k, v))
298 subprocess.check_call(relation_cmd_line)
299 # Flush cache of any relation-gets for local unit
300 flush(local_unit())
301
302
303@cached
304def relation_ids(reltype=None):
305 """A list of relation_ids"""
306 reltype = reltype or relation_type()
307 relid_cmd_line = ['relation-ids', '--format=json']
308 if reltype is not None:
309 relid_cmd_line.append(reltype)
310 return json.loads(subprocess.check_output(relid_cmd_line)) or []
311 return []
312
313
314@cached
315def related_units(relid=None):
316 """A list of related units"""
317 relid = relid or relation_id()
318 units_cmd_line = ['relation-list', '--format=json']
319 if relid is not None:
320 units_cmd_line.extend(('-r', relid))
321 return json.loads(subprocess.check_output(units_cmd_line)) or []
322
323
324@cached
325def relation_for_unit(unit=None, rid=None):
326 """Get the json represenation of a unit's relation"""
327 unit = unit or remote_unit()
328 relation = relation_get(unit=unit, rid=rid)
329 for key in relation:
330 if key.endswith('-list'):
331 relation[key] = relation[key].split()
332 relation['__unit__'] = unit
333 return relation
334
335
336@cached
337def relations_for_id(relid=None):
338 """Get relations of a specific relation ID"""
339 relation_data = []
340 relid = relid or relation_ids()
341 for unit in related_units(relid):
342 unit_data = relation_for_unit(unit, relid)
343 unit_data['__relid__'] = relid
344 relation_data.append(unit_data)
345 return relation_data
346
347
348@cached
349def relations_of_type(reltype=None):
350 """Get relations of a specific type"""
351 relation_data = []
352 reltype = reltype or relation_type()
353 for relid in relation_ids(reltype):
354 for relation in relations_for_id(relid):
355 relation['__relid__'] = relid
356 relation_data.append(relation)
357 return relation_data
358
359
360@cached
361def relation_types():
362 """Get a list of relation types supported by this charm"""
363 charmdir = os.environ.get('CHARM_DIR', '')
364 mdf = open(os.path.join(charmdir, 'metadata.yaml'))
365 md = yaml.safe_load(mdf)
366 rel_types = []
367 for key in ('provides', 'requires', 'peers'):
368 section = md.get(key)
369 if section:
370 rel_types.extend(section.keys())
371 mdf.close()
372 return rel_types
373
374
375@cached
376def relations():
377 """Get a nested dictionary of relation data for all related units"""
378 rels = {}
379 for reltype in relation_types():
380 relids = {}
381 for relid in relation_ids(reltype):
382 units = {local_unit(): relation_get(unit=local_unit(), rid=relid)}
383 for unit in related_units(relid):
384 reldata = relation_get(unit=unit, rid=relid)
385 units[unit] = reldata
386 relids[relid] = units
387 rels[reltype] = relids
388 return rels
389
390
391@cached
392def is_relation_made(relation, keys='private-address'):
393 '''
394 Determine whether a relation is established by checking for
395 presence of key(s). If a list of keys is provided, they
396 must all be present for the relation to be identified as made
397 '''
398 if isinstance(keys, str):
399 keys = [keys]
400 for r_id in relation_ids(relation):
401 for unit in related_units(r_id):
402 context = {}
403 for k in keys:
404 context[k] = relation_get(k, rid=r_id,
405 unit=unit)
406 if None not in context.values():
407 return True
408 return False
409
410
411def open_port(port, protocol="TCP"):
412 """Open a service network port"""
413 _args = ['open-port']
414 _args.append('{}/{}'.format(port, protocol))
415 subprocess.check_call(_args)
416
417
418def close_port(port, protocol="TCP"):
419 """Close a service network port"""
420 _args = ['close-port']
421 _args.append('{}/{}'.format(port, protocol))
422 subprocess.check_call(_args)
423
424
425@cached
426def unit_get(attribute):
427 """Get the unit ID for the remote unit"""
428 _args = ['unit-get', '--format=json', attribute]
429 try:
430 return json.loads(subprocess.check_output(_args))
431 except ValueError:
432 return None
433
434
435def unit_private_ip():
436 """Get this unit's private IP address"""
437 return unit_get('private-address')
438
439
440class UnregisteredHookError(Exception):
441 """Raised when an undefined hook is called"""
442 pass
443
444
445class Hooks(object):
446 """A convenient handler for hook functions.
447
448 Example:
449 hooks = Hooks()
450
451 # register a hook, taking its name from the function name
452 @hooks.hook()
453 def install():
454 ...
455
456 # register a hook, providing a custom hook name
457 @hooks.hook("config-changed")
458 def config_changed():
459 ...
460
461 if __name__ == "__main__":
462 # execute a hook based on the name the program is called by
463 hooks.execute(sys.argv)
464 """
465
466 def __init__(self):
467 super(Hooks, self).__init__()
468 self._hooks = {}
469
470 def register(self, name, function):
471 """Register a hook"""
472 self._hooks[name] = function
473
474 def execute(self, args):
475 """Execute a registered hook based on args[0]"""
476 hook_name = os.path.basename(args[0])
477 if hook_name in self._hooks:
478 self._hooks[hook_name]()
479 else:
480 raise UnregisteredHookError(hook_name)
481
482 def hook(self, *hook_names):
483 """Decorator, registering them as hooks"""
484 def wrapper(decorated):
485 for hook_name in hook_names:
486 self.register(hook_name, decorated)
487 else:
488 self.register(decorated.__name__, decorated)
489 if '_' in decorated.__name__:
490 self.register(
491 decorated.__name__.replace('_', '-'), decorated)
492 return decorated
493 return wrapper
494
495
496def charm_dir():
497 """Return the root directory of the current charm"""
498 return os.environ.get('CHARM_DIR')
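
This charm dispatches hooks via argparse in hooks/common.py, but for reference the Hooks helper above supports the decorator style from its docstring, roughly:

    import sys

    from charmhelpers.core.hookenv import Hooks, log

    hooks = Hooks()


    @hooks.hook('config-changed')
    def config_changed():
        log('config changed')


    if __name__ == '__main__':
        # Execute a hook based on the name the program is called by.
        hooks.execute(sys.argv)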
=== added file 'hooks/charmhelpers/core/host.py'
--- hooks/charmhelpers/core/host.py 1970-01-01 00:00:00 +0000
+++ hooks/charmhelpers/core/host.py 2014-06-26 20:05:25 +0000
@@ -0,0 +1,325 @@
1"""Tools for working with the host system"""
2# Copyright 2012 Canonical Ltd.
3#
4# Authors:
5# Nick Moffitt <nick.moffitt@canonical.com>
6# Matthew Wedgwood <matthew.wedgwood@canonical.com>
7
8import os
9import pwd
10import grp
11import random
12import string
13import subprocess
14import hashlib
15import apt_pkg
16
17from collections import OrderedDict
18
19from hookenv import log
20from fstab import Fstab
21
22
23def service_start(service_name):
24 """Start a system service"""
25 return service('start', service_name)
26
27
28def service_stop(service_name):
29 """Stop a system service"""
30 return service('stop', service_name)
31
32
33def service_restart(service_name):
34 """Restart a system service"""
35 return service('restart', service_name)
36
37
38def service_reload(service_name, restart_on_failure=False):
39 """Reload a system service, optionally falling back to restart if
40 reload fails"""
41 service_result = service('reload', service_name)
42 if not service_result and restart_on_failure:
43 service_result = service('restart', service_name)
44 return service_result
45
46
47def service(action, service_name):
48 """Control a system service"""
49 cmd = ['service', service_name, action]
50 return subprocess.call(cmd) == 0
51
52
53def service_running(service):
54 """Determine whether a system service is running"""
55 try:
56 output = subprocess.check_output(['service', service, 'status'])
57 except subprocess.CalledProcessError:
58 return False
59 else:
60 if ("start/running" in output or "is running" in output):
61 return True
62 else:
63 return False
64
65
66def adduser(username, password=None, shell='/bin/bash', system_user=False):
67 """Add a user to the system"""
68 try:
69 user_info = pwd.getpwnam(username)
70 log('user {0} already exists!'.format(username))
71 except KeyError:
72 log('creating user {0}'.format(username))
73 cmd = ['useradd']
74 if system_user or password is None:
75 cmd.append('--system')
76 else:
77 cmd.extend([
78 '--create-home',
79 '--shell', shell,
80 '--password', password,
81 ])
82 cmd.append(username)
83 subprocess.check_call(cmd)
84 user_info = pwd.getpwnam(username)
85 return user_info
86
87
88def add_user_to_group(username, group):
89 """Add a user to a group"""
90 cmd = [
91 'gpasswd', '-a',
92 username,
93 group
94 ]
95 log("Adding user {} to group {}".format(username, group))
96 subprocess.check_call(cmd)
97
98
99def rsync(from_path, to_path, flags='-r', options=None):
100 """Replicate the contents of a path"""
101 options = options or ['--delete', '--executability']
102 cmd = ['/usr/bin/rsync', flags]
103 cmd.extend(options)
104 cmd.append(from_path)
105 cmd.append(to_path)
106 log(" ".join(cmd))
107 return subprocess.check_output(cmd).strip()
108
109
110def symlink(source, destination):
111 """Create a symbolic link"""
112 log("Symlinking {} as {}".format(source, destination))
113 cmd = [
114 'ln',
115 '-sf',
116 source,
117 destination,
118 ]
119 subprocess.check_call(cmd)
120
121
122def mkdir(path, owner='root', group='root', perms=0555, force=False):
123 """Create a directory"""
124 log("Making dir {} {}:{} {:o}".format(path, owner, group,
125 perms))
126 uid = pwd.getpwnam(owner).pw_uid
127 gid = grp.getgrnam(group).gr_gid
128 realpath = os.path.abspath(path)
129 if os.path.exists(realpath):
130 if force and not os.path.isdir(realpath):
131 log("Removing non-directory file {} prior to mkdir()".format(path))
132 os.unlink(realpath)
133 else:
134 os.makedirs(realpath, perms)
135 os.chown(realpath, uid, gid)
136
137
138def write_file(path, content, owner='root', group='root', perms=0444):
139 """Create or overwrite a file with the contents of a string"""
140 log("Writing file {} {}:{} {:o}".format(path, owner, group, perms))
141 uid = pwd.getpwnam(owner).pw_uid
142 gid = grp.getgrnam(group).gr_gid
143 with open(path, 'w') as target:
144 os.fchown(target.fileno(), uid, gid)
145 os.fchmod(target.fileno(), perms)
146 target.write(content)
147
148
149def fstab_remove(mp):
150 """Remove the given mountpoint entry from /etc/fstab
151 """
152 return Fstab.remove_by_mountpoint(mp)
153
154
155def fstab_add(dev, mp, fs, options=None):
156 """Adds the given device entry to the /etc/fstab file
157 """
158 return Fstab.add(dev, mp, fs, options=options)
159
160
161def mount(device, mountpoint, options=None, persist=False, filesystem="ext3"):
162 """Mount a filesystem at a particular mountpoint"""
163 cmd_args = ['mount']
164 if options is not None:
165 cmd_args.extend(['-o', options])
166 cmd_args.extend([device, mountpoint])
167 try:
168 subprocess.check_output(cmd_args)
169 except subprocess.CalledProcessError, e:
170 log('Error mounting {} at {}\n{}'.format(device, mountpoint, e.output))
171 return False
172
173 if persist:
174 return fstab_add(device, mountpoint, filesystem, options=options)
175 return True
176
177
178def umount(mountpoint, persist=False):
179 """Unmount a filesystem"""
180 cmd_args = ['umount', mountpoint]
181 try:
182 subprocess.check_output(cmd_args)
183 except subprocess.CalledProcessError, e:
184 log('Error unmounting {}\n{}'.format(mountpoint, e.output))
185 return False
186
187 if persist:
188 return fstab_remove(mountpoint)
189 return True
190
191
192def mounts():
193 """Get a list of all mounted volumes as [[mountpoint,device],[...]]"""
194 with open('/proc/mounts') as f:
195 # [['/mount/point','/dev/path'],[...]]
196 system_mounts = [m[1::-1] for m in [l.strip().split()
197 for l in f.readlines()]]
198 return system_mounts
199
200
201def file_hash(path):
202 """Generate a md5 hash of the contents of 'path' or None if not found """
203 if os.path.exists(path):
204 h = hashlib.md5()
205 with open(path, 'r') as source:
206 h.update(source.read()) # IGNORE:E1101 - it does have update
207 return h.hexdigest()
208 else:
209 return None
210
211
212def restart_on_change(restart_map, stopstart=False):
213 """Restart services based on configuration files changing
214
215 This function is used a decorator, for example
216
217 @restart_on_change({
218 '/etc/ceph/ceph.conf': [ 'cinder-api', 'cinder-volume' ]
219 })
220 def ceph_client_changed():
221 ...
222
223 In this example, the cinder-api and cinder-volume services
224 would be restarted if /etc/ceph/ceph.conf is changed by the
225 ceph_client_changed function.
226 """
227 def wrap(f):
228 def wrapped_f(*args):
229 checksums = {}
230 for path in restart_map:
231 checksums[path] = file_hash(path)
232 f(*args)
233 restarts = []
234 for path in restart_map:
235 if checksums[path] != file_hash(path):
236 restarts += restart_map[path]
237 services_list = list(OrderedDict.fromkeys(restarts))
238 if not stopstart:
239 for service_name in services_list:
240 service('restart', service_name)
241 else:
242 for action in ['stop', 'start']:
243 for service_name in services_list:
244 service(action, service_name)
245 return wrapped_f
246 return wrap
247
248
249def lsb_release():
250 """Return /etc/lsb-release in a dict"""
251 d = {}
252 with open('/etc/lsb-release', 'r') as lsb:
253 for l in lsb:
254 k, v = l.split('=')
255 d[k.strip()] = v.strip()
256 return d
257
258
259def pwgen(length=None):
260 """Generate a random pasword."""
261 if length is None:
262 length = random.choice(range(35, 45))
263 alphanumeric_chars = [
264 l for l in (string.letters + string.digits)
265 if l not in 'l0QD1vAEIOUaeiou']
266 random_chars = [
267 random.choice(alphanumeric_chars) for _ in range(length)]
268 return(''.join(random_chars))
269
270
271def list_nics(nic_type):
272 '''Return a list of nics of given type(s)'''
273 if isinstance(nic_type, basestring):
274 int_types = [nic_type]
275 else:
276 int_types = nic_type
277 interfaces = []
278 for int_type in int_types:
279 cmd = ['ip', 'addr', 'show', 'label', int_type + '*']
280 ip_output = subprocess.check_output(cmd).split('\n')
281 ip_output = (line for line in ip_output if line)
282 for line in ip_output:
283 if line.split()[1].startswith(int_type):
284 interfaces.append(line.split()[1].replace(":", ""))
285 return interfaces
286
287
288def set_nic_mtu(nic, mtu):
289 '''Set MTU on a network interface'''
290 cmd = ['ip', 'link', 'set', nic, 'mtu', mtu]
291 subprocess.check_call(cmd)
292
293
294def get_nic_mtu(nic):
295 cmd = ['ip', 'addr', 'show', nic]
296 ip_output = subprocess.check_output(cmd).split('\n')
297 mtu = ""
298 for line in ip_output:
299 words = line.split()
300 if 'mtu' in words:
301 mtu = words[words.index("mtu") + 1]
302 return mtu
303
304
305def get_nic_hwaddr(nic):
306 cmd = ['ip', '-o', '-0', 'addr', 'show', nic]
307 ip_output = subprocess.check_output(cmd)
308 hwaddr = ""
309 words = ip_output.split()
310 if 'link/ether' in words:
311 hwaddr = words[words.index('link/ether') + 1]
312 return hwaddr
313
314
315def cmp_pkgrevno(package, revno, pkgcache=None):
316 '''Compare supplied revno with the revno of the installed package
317 1 => Installed revno is greater than supplied arg
318 0 => Installed revno is the same as supplied arg
319 -1 => Installed revno is less than supplied arg
320 '''
321 if not pkgcache:
322 apt_pkg.init()
323 pkgcache = apt_pkg.Cache()
324 pkg = pkgcache[package]
325 return apt_pkg.version_compare(pkg.current_ver.ver_str, revno)
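
Illustrative use of restart_on_change() from above; the config path and service name are hypothetical, and the decorated function must be the one performing the writes so the before/after checksums differ.

    from charmhelpers.core.host import restart_on_change, write_file


    @restart_on_change({'/etc/hadoop/conf/core-site.xml': ['hadoop-namenode']})
    def update_config():
        # Any change to the watched file triggers a restart of the
        # mapped services after this function returns.
        write_file('/etc/hadoop/conf/core-site.xml', '<configuration/>')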
=== added directory 'hooks/charmhelpers/fetch'
=== added file 'hooks/charmhelpers/fetch/__init__.py'
--- hooks/charmhelpers/fetch/__init__.py 1970-01-01 00:00:00 +0000
+++ hooks/charmhelpers/fetch/__init__.py 2014-06-26 20:05:25 +0000
@@ -0,0 +1,349 @@
1import importlib
2import time
3from yaml import safe_load
4from charmhelpers.core.host import (
5 lsb_release
6)
7from urlparse import (
8 urlparse,
9 urlunparse,
10)
11import subprocess
12from charmhelpers.core.hookenv import (
13 config,
14 log,
15)
16import apt_pkg
17import os
18
19
20CLOUD_ARCHIVE = """# Ubuntu Cloud Archive
21deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main
22"""
23PROPOSED_POCKET = """# Proposed
24deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted
25"""
26CLOUD_ARCHIVE_POCKETS = {
27 # Folsom
28 'folsom': 'precise-updates/folsom',
29 'precise-folsom': 'precise-updates/folsom',
30 'precise-folsom/updates': 'precise-updates/folsom',
31 'precise-updates/folsom': 'precise-updates/folsom',
32 'folsom/proposed': 'precise-proposed/folsom',
33 'precise-folsom/proposed': 'precise-proposed/folsom',
34 'precise-proposed/folsom': 'precise-proposed/folsom',
35 # Grizzly
36 'grizzly': 'precise-updates/grizzly',
37 'precise-grizzly': 'precise-updates/grizzly',
38 'precise-grizzly/updates': 'precise-updates/grizzly',
39 'precise-updates/grizzly': 'precise-updates/grizzly',
40 'grizzly/proposed': 'precise-proposed/grizzly',
41 'precise-grizzly/proposed': 'precise-proposed/grizzly',
42 'precise-proposed/grizzly': 'precise-proposed/grizzly',
43 # Havana
44 'havana': 'precise-updates/havana',
45 'precise-havana': 'precise-updates/havana',
46 'precise-havana/updates': 'precise-updates/havana',
47 'precise-updates/havana': 'precise-updates/havana',
48 'havana/proposed': 'precise-proposed/havana',
49 'precise-havana/proposed': 'precise-proposed/havana',
50 'precise-proposed/havana': 'precise-proposed/havana',
51 # Icehouse
52 'icehouse': 'precise-updates/icehouse',
53 'precise-icehouse': 'precise-updates/icehouse',
54 'precise-icehouse/updates': 'precise-updates/icehouse',
55 'precise-updates/icehouse': 'precise-updates/icehouse',
56 'icehouse/proposed': 'precise-proposed/icehouse',
57 'precise-icehouse/proposed': 'precise-proposed/icehouse',
58 'precise-proposed/icehouse': 'precise-proposed/icehouse',
59 # Juno
60 'juno': 'trusty-updates/juno',
61 'trusty-juno': 'trusty-updates/juno',
62 'trusty-juno/updates': 'trusty-updates/juno',
63 'trusty-updates/juno': 'trusty-updates/juno',
64 'juno/proposed': 'trusty-proposed/juno',
65 'juno/proposed': 'trusty-proposed/juno',
66 'trusty-juno/proposed': 'trusty-proposed/juno',
67 'trusty-proposed/juno': 'trusty-proposed/juno',
68}
69
70# The order of this list is very important. Handlers should be listed
71# from least- to most-specific URL matching.
72FETCH_HANDLERS = (
73 'charmhelpers.fetch.archiveurl.ArchiveUrlFetchHandler',
74 'charmhelpers.fetch.bzrurl.BzrUrlFetchHandler',
75)
76
77APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT.
78APT_NO_LOCK_RETRY_DELAY = 10 # Wait 10 seconds between apt lock checks.
79APT_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times.
80
81
82class SourceConfigError(Exception):
83 pass
84
85
86class UnhandledSource(Exception):
87 pass
88
89
90class AptLockError(Exception):
91 pass
92
93
94class BaseFetchHandler(object):
95
96 """Base class for FetchHandler implementations in fetch plugins"""
97
98 def can_handle(self, source):
99 """Returns True if the source can be handled. Otherwise returns
100 a string explaining why it cannot"""
101 return "Wrong source type"
102
103 def install(self, source):
104 """Try to download and unpack the source. Return the path to the
105 unpacked files or raise UnhandledSource."""
106 raise UnhandledSource("Wrong source type {}".format(source))
107
108 def parse_url(self, url):
109 return urlparse(url)
110
111 def base_url(self, url):
112 """Return url without querystring or fragment"""
113 parts = list(self.parse_url(url))
114 parts[4:] = ['' for i in parts[4:]]
115 return urlunparse(parts)
116
117
118def filter_installed_packages(packages):
119 """Returns a list of packages that require installation"""
120 apt_pkg.init()
121
122 # Tell apt to build an in-memory cache to prevent race conditions (if
123 # another process is already building the cache).
124 apt_pkg.config.set("Dir::Cache::pkgcache", "")
125
126 cache = apt_pkg.Cache()
127 _pkgs = []
128 for package in packages:
129 try:
130 p = cache[package]
131 p.current_ver or _pkgs.append(package)
132 except KeyError:
133 log('Package {} has no installation candidate.'.format(package),
134 level='WARNING')
135 _pkgs.append(package)
136 return _pkgs
137
138
139def apt_install(packages, options=None, fatal=False):
140 """Install one or more packages"""
141 if options is None:
142 options = ['--option=Dpkg::Options::=--force-confold']
143
144 cmd = ['apt-get', '--assume-yes']
145 cmd.extend(options)
146 cmd.append('install')
147 if isinstance(packages, basestring):
148 cmd.append(packages)
149 else:
150 cmd.extend(packages)
151 log("Installing {} with options: {}".format(packages,
152 options))
153 _run_apt_command(cmd, fatal)
154
155
156def apt_upgrade(options=None, fatal=False, dist=False):
157 """Upgrade all packages"""
158 if options is None:
159 options = ['--option=Dpkg::Options::=--force-confold']
160
161 cmd = ['apt-get', '--assume-yes']
162 cmd.extend(options)
163 if dist:
164 cmd.append('dist-upgrade')
165 else:
166 cmd.append('upgrade')
167 log("Upgrading with options: {}".format(options))
168 _run_apt_command(cmd, fatal)
169
170
171def apt_update(fatal=False):
172 """Update local apt cache"""
173 cmd = ['apt-get', 'update']
174 _run_apt_command(cmd, fatal)
175
176
177def apt_purge(packages, fatal=False):
178 """Purge one or more packages"""
179 cmd = ['apt-get', '--assume-yes', 'purge']
180 if isinstance(packages, basestring):
181 cmd.append(packages)
182 else:
183 cmd.extend(packages)
184 log("Purging {}".format(packages))
185 _run_apt_command(cmd, fatal)
186
187
188def apt_hold(packages, fatal=False):
189 """Hold one or more packages"""
190 cmd = ['apt-mark', 'hold']
191 if isinstance(packages, basestring):
192 cmd.append(packages)
193 else:
194 cmd.extend(packages)
195 log("Holding {}".format(packages))
196
197 if fatal:
198 subprocess.check_call(cmd)
199 else:
200 subprocess.call(cmd)
201
202
203def add_source(source, key=None):
204 if source is None:
205 log('Source is not present. Skipping')
206 return
207
208 if (source.startswith('ppa:') or
209 source.startswith('http') or
210 source.startswith('deb ') or
211 source.startswith('cloud-archive:')):
212 subprocess.check_call(['add-apt-repository', '--yes', source])
213 elif source.startswith('cloud:'):
214 apt_install(filter_installed_packages(['ubuntu-cloud-keyring']),
215 fatal=True)
216 pocket = source.split(':')[-1]
217 if pocket not in CLOUD_ARCHIVE_POCKETS:
218 raise SourceConfigError(
219 'Unsupported cloud: source option %s' %
220 pocket)
221 actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket]
222 with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt:
223 apt.write(CLOUD_ARCHIVE.format(actual_pocket))
224 elif source == 'proposed':
225 release = lsb_release()['DISTRIB_CODENAME']
226 with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt:
227 apt.write(PROPOSED_POCKET.format(release))
228 if key:
229 subprocess.check_call(['apt-key', 'adv', '--keyserver',
230 'hkp://keyserver.ubuntu.com:80', '--recv',
231 key])
232
233
234def configure_sources(update=False,
235 sources_var='install_sources',
236 keys_var='install_keys'):
237 """
238 Configure multiple sources from charm configuration
239
240 Example config:
241 install_sources:
242 - "ppa:foo"
243 - "http://example.com/repo precise main"
244 install_keys:
245 - null
246 - "a1b2c3d4"
247
248 Note that 'null' (a.k.a. None) should not be quoted.
249 """
250 sources = safe_load(config(sources_var))
251 keys = config(keys_var)
252 if keys is not None:
253 keys = safe_load(keys)
254 if isinstance(sources, basestring) and (
255 keys is None or isinstance(keys, basestring)):
256 add_source(sources, keys)
257 else:
258 if not len(sources) == len(keys):
259 msg = 'Install sources and keys lists are different lengths'
260 raise SourceConfigError(msg)
261 for src_num in range(len(sources)):
262 add_source(sources[src_num], keys[src_num])
263 if update:
264 apt_update(fatal=True)
265
266
267def install_remote(source):
268 """
269 Install a file tree from a remote source
270
271 The specified source should be a url of the form:
272 scheme://[host]/path[#[option=value][&...]]
273
274 Schemes supported are based on this modules submodules
275 Options supported are submodule-specific"""
276 # We ONLY check for True here because can_handle may return a string
277 # explaining why it can't handle a given source.
278 handlers = [h for h in plugins() if h.can_handle(source) is True]
279 installed_to = None
280 for handler in handlers:
281 try:
282 installed_to = handler.install(source)
283 except UnhandledSource:
284 pass
285 if not installed_to:
286 raise UnhandledSource("No handler found for source {}".format(source))
287 return installed_to
288
289
290def install_from_config(config_var_name):
291 charm_config = config()
292 source = charm_config[config_var_name]
293 return install_remote(source)
294
295
296def plugins(fetch_handlers=None):
297 if not fetch_handlers:
298 fetch_handlers = FETCH_HANDLERS
299 plugin_list = []
300 for handler_name in fetch_handlers:
301 package, classname = handler_name.rsplit('.', 1)
302 try:
303 handler_class = getattr(
304 importlib.import_module(package),
305 classname)
306 plugin_list.append(handler_class())
307 except (ImportError, AttributeError):
308 # Skip missing plugins so that they can be omitted from
309 # installation if desired
310 log("FetchHandler {} not found, skipping plugin".format(
311 handler_name))
312 return plugin_list
313
314
315def _run_apt_command(cmd, fatal=False):
316 """
317 Run an APT command, checking output and retrying if the fatal flag is set
318 to True.
319
320 :param: cmd: str: The apt command to run.
321 :param: fatal: bool: Whether the command's output should be checked and
322 retried.
323 """
324 env = os.environ.copy()
325
326 if 'DEBIAN_FRONTEND' not in env:
327 env['DEBIAN_FRONTEND'] = 'noninteractive'
328
329 if fatal:
330 retry_count = 0
331 result = None
332
333 # If the command is considered "fatal", we need to retry if the apt
334 # lock was not acquired.
335
336 while result is None or result == APT_NO_LOCK:
337 try:
338 result = subprocess.check_call(cmd, env=env)
339 except subprocess.CalledProcessError, e:
340 retry_count = retry_count + 1
341 if retry_count > APT_NO_LOCK_RETRY_COUNT:
342 raise
343 result = e.returncode
344 log("Couldn't acquire DPKG lock. Will retry in {} seconds."
345 "".format(APT_NO_LOCK_RETRY_DELAY))
346 time.sleep(APT_NO_LOCK_RETRY_DELAY)
347
348 else:
349 subprocess.call(cmd, env=env)
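
A minimal sketch of the apt helpers above, as a hook might use them; the package names match those installed by hooks/common.py later in this diff.

    from charmhelpers.fetch import (apt_install, apt_update,
                                    filter_installed_packages)

    apt_update(fatal=True)
    # Only install what is missing; fatal=True retries around the apt lock.
    apt_install(filter_installed_packages(['openjdk-7-jdk', 'maven']),
                fatal=True)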
=== added file 'hooks/charmhelpers/fetch/archiveurl.py'
--- hooks/charmhelpers/fetch/archiveurl.py 1970-01-01 00:00:00 +0000
+++ hooks/charmhelpers/fetch/archiveurl.py 2014-06-26 20:05:25 +0000
@@ -0,0 +1,63 @@
1import os
2import urllib2
3import urlparse
4
5from charmhelpers.fetch import (
6 BaseFetchHandler,
7 UnhandledSource
8)
9from charmhelpers.payload.archive import (
10 get_archive_handler,
11 extract,
12)
13from charmhelpers.core.host import mkdir
14
15
16class ArchiveUrlFetchHandler(BaseFetchHandler):
17 """Handler for archives via generic URLs"""
18 def can_handle(self, source):
19 url_parts = self.parse_url(source)
20 if url_parts.scheme not in ('http', 'https', 'ftp', 'file'):
21 return "Wrong source type"
22 if get_archive_handler(self.base_url(source)):
23 return True
24 return False
25
26 def download(self, source, dest):
27 # propagate all exceptions
28 # URLError, OSError, etc
29 proto, netloc, path, params, query, fragment = urlparse.urlparse(source)
30 if proto in ('http', 'https'):
31 auth, barehost = urllib2.splituser(netloc)
32 if auth is not None:
33 source = urlparse.urlunparse((proto, barehost, path, params, query, fragment))
34 username, password = urllib2.splitpasswd(auth)
35 passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
36 # Realm is set to None in add_password to force the username and password
37 # to be used whatever the realm
38 passman.add_password(None, source, username, password)
39 authhandler = urllib2.HTTPBasicAuthHandler(passman)
40 opener = urllib2.build_opener(authhandler)
41 urllib2.install_opener(opener)
42 response = urllib2.urlopen(source)
43 try:
44 with open(dest, 'w') as dest_file:
45 dest_file.write(response.read())
46 except Exception as e:
47 if os.path.isfile(dest):
48 os.unlink(dest)
49 raise e
50
51 def install(self, source):
52 url_parts = self.parse_url(source)
53 dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched')
54 if not os.path.exists(dest_dir):
55 mkdir(dest_dir, perms=0755)
56 dld_file = os.path.join(dest_dir, os.path.basename(url_parts.path))
57 try:
58 self.download(source, dld_file)
59 except urllib2.URLError as e:
60 raise UnhandledSource(e.reason)
61 except OSError as e:
62 raise UnhandledSource(e.strerror)
63 return extract(dld_file)
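
Note that this handler imports charmhelpers.payload.archive, which is not in the include list in charm-helpers.yaml above, so plugins() will skip it in this charm. With payload synced as well, usage would look roughly like this (the URL is hypothetical):

    from charmhelpers.fetch import install_remote

    # Downloads into $CHARM_DIR/fetched and returns the extracted path.
    dest = install_remote('http://example.com/hadoop-2.2.0.tar.gz')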
=== added file 'hooks/charmhelpers/fetch/bzrurl.py'
--- hooks/charmhelpers/fetch/bzrurl.py 1970-01-01 00:00:00 +0000
+++ hooks/charmhelpers/fetch/bzrurl.py 2014-06-26 20:05:25 +0000
@@ -0,0 +1,50 @@
1import os
2from charmhelpers.fetch import (
3 BaseFetchHandler,
4 UnhandledSource
5)
6from charmhelpers.core.host import mkdir
7
8try:
9 from bzrlib.branch import Branch
10except ImportError:
11 from charmhelpers.fetch import apt_install
12 apt_install("python-bzrlib")
13 from bzrlib.branch import Branch
14
15
16class BzrUrlFetchHandler(BaseFetchHandler):
17 """Handler for bazaar branches via generic and lp URLs"""
18 def can_handle(self, source):
19 url_parts = self.parse_url(source)
20 if url_parts.scheme not in ('bzr+ssh', 'lp'):
21 return False
22 else:
23 return True
24
25 def branch(self, source, dest):
26 url_parts = self.parse_url(source)
27 # If we use lp:branchname scheme we need to load plugins
28 if not self.can_handle(source):
29 raise UnhandledSource("Cannot handle {}".format(source))
30 if url_parts.scheme == "lp":
31 from bzrlib.plugin import load_plugins
32 load_plugins()
33 try:
34 remote_branch = Branch.open(source)
35 remote_branch.bzrdir.sprout(dest).open_branch()
36 except Exception as e:
37 raise e
38
39 def install(self, source):
40 url_parts = self.parse_url(source)
41 branch_name = url_parts.path.strip("/").split("/")[-1]
42 dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched",
43 branch_name)
44 if not os.path.exists(dest_dir):
45 mkdir(dest_dir, perms=0755)
46 try:
47 self.branch(source, dest_dir)
48 except OSError as e:
49 raise UnhandledSource(e.strerror)
50 return dest_dir
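
Hypothetical direct use of the bzr handler above; it accepts lp: and bzr+ssh: URLs and sprouts the branch into $CHARM_DIR/fetched/<branch_name>, so CHARM_DIR must be set in the environment.

    from charmhelpers.fetch.bzrurl import BzrUrlFetchHandler

    handler = BzrUrlFetchHandler()
    if handler.can_handle('lp:charm-helpers') is True:
        dest = handler.install('lp:charm-helpers')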
=== added file 'hooks/charmhelpers/setup.py'
--- hooks/charmhelpers/setup.py 1970-01-01 00:00:00 +0000
+++ hooks/charmhelpers/setup.py 2014-06-26 20:05:25 +0000
@@ -0,0 +1,12 @@
1#!/usr/bin/env python
2
3from distutils.core import setup
4
5setup(name='charmhelpers',
6 version='1.0',
7 description='this is dumb',
8 author='nobody',
9 author_email='dummy@amulet',
10 url='http://google.com',
11 packages=[],
12)
=== added file 'hooks/common.py'
--- hooks/common.py 1970-01-01 00:00:00 +0000
+++ hooks/common.py 2014-06-26 20:05:25 +0000
@@ -0,0 +1,231 @@
1#!/usr/bin/env python
2
3import grp
4import os
5import pwd
6import subprocess
7import sys
8import tarfile
9import argparse
10import logging
11
12from shutil import rmtree, copyfile
13from charmhelpers.core.hookenv import log
14# Required for unit tests... :(
15try:
16 from charmhelpers.fetch import apt_install, apt_update
17except ImportError:
18 pass
19
20FORMAT = '%(asctime)-15s %(user)-8s %(message)s'
21logging.basicConfig(level=logging.INFO)
22
23
24# ##########################################
25# Globals
26# ##########################################
27JAVA_VERSION = "7"
28JAVA_HOME = None
29HADOOP_VERSION = "hadoop-2.2.0"
30PLATFORM_ARCH = "amd64"
31HOME_DIR = os.path.join(os.path.sep, "home", "ubuntu")
32HADOOP_DIR = os.path.join(HOME_DIR, "hadoop")
33HADOOP_TMP_DIR = os.path.join(HADOOP_DIR, "tmp")
34HADOOP_INSTALL = os.path.join(HADOOP_DIR, HADOOP_VERSION)
35HADOOP_CONF_DIR = os.path.join(HADOOP_INSTALL, "etc/hadoop")
36
37
38# ##########################################
39# Utility Methods
40# ##########################################
41
42def install_dev_packages():
43 packages = ['maven',
44 'build-essential',
45 'autoconf',
46 'automake',
47 'libtool',
48 'cmake',
49 'zlib1g-dev',
50 'pkg-config',
51 'libssl-dev',
52 'snappy',
53 'libsnappy-dev',
54 'openjdk-7-jdk',
55 'python-pip',
56 'python-jinja2',
57 ]
58 apt_update()
59 apt_install(packages)
60
61
62def ssh_wizardry():
63 # Set NonStrict Hostkey Checking to .ssh config
64 # this both confuses and angers me!
65 log("Setting NonStrict HostKey Checking for SSH", "INFO")
66
67 nonstrict = "Host *\n\tStrictHostKeyChecking no"
68 with open("{dir}/.ssh/config".format(dir=HOME_DIR), 'w+') as f:
69 f.write(nonstrict)
70
71 keyfile = os.path.join(os.path.sep, 'home', 'ubuntu', '.ssh', 'id_rsa')
72 cmd = 'yes | ssh-keygen -t rsa -N "" -f {d}'.format(d=keyfile)
73 ps = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
74 stderr=subprocess.STDOUT)
75 output = ps.communicate()[0]
76 log("Output of ssh keygen: {o}".format(o=output), "INFO")
77 with open("{dir}/.ssh/id_rsa.pub".format(dir=HOME_DIR), 'r') as f:
78 hostkey = f.read()
79
80 auth_keys = "{dir}/.ssh/authorized_keys".format(dir=HOME_DIR)
81 with open(auth_keys, 'a') as f:
82 f.write(hostkey)
83 subprocess.call(['chown', '-R', 'ubuntu.ubuntu',
84 "{dir}/.ssh".format(dir=HOME_DIR)])
85
86
87def set_java_home():
88 jbin = 'java-{v}-openjdk-{a}'.format(v=JAVA_VERSION, a=PLATFORM_ARCH)
89 jpath = subprocess.check_output(['find', '/usr/', '-name', jbin])
90 if not jpath:
91 raise ValueError("Unable to locate Java Path for %s" % jbin)
92 script = '{d}/etc/hadoop/hadoop-env.sh'.format(d=HADOOP_INSTALL)
93
94 with open(script) as f:
95 contents = f.readlines()
96 for l in range(0, len(contents)):
97 if contents[l].startswith('export JAVA_HOME='):
98 contents[l] = "export JAVA_HOME='{jp}'".format(
99 jp=jpath.strip())
100
101 with open(script, 'w') as f:
102 f.writelines(contents)
103 global JAVA_HOME
104 JAVA_HOME = jpath.strip()
105 log('Set JAVA_HOME in {p} to {v}'.format(p=script, v=JAVA_HOME), 'INFO')
106 log("JAVA_HOME set to: {j}".format(j=JAVA_HOME), "INFO")
107
108
109# TODO: this should be configureable at some point and reach out
110# over the wire to install w/ Sha1 Sig Validation
111def extract_hadoop():
112 pa = os.path.join("{d}", "files", "archives", "{hdpver}.tar.gz")
113 tarballpath = os.path.join(pa.format(
114 d=os.environ['CHARM_DIR'],
115 hdpver=HADOOP_VERSION))
116
117 if tarfile.is_tarfile(tarballpath):
118 tball = tarfile.open(tarballpath)
119 tball.extractall(HADOOP_DIR)
120 else:
121 log("Unable to extract Hadoop Tarball {tb}".format(tb=tarballpath),
122 "Warning")
123
124
125def add_shim():
126 if os.path.exists("{d}/.hadoop_shim".format(d=HOME_DIR)):
127 log("Found .hadoop_shim, bailing", "INFO")
128 log("To re-execute the template, re-run this hook after removing"
129 "{d}/.hadoop_shim".format(d=HOME_DIR), 'INFO')
130 return
131
132 # TODO: Refactor this nasty shell callout with some implicit python
133 # by requiring the templatewiz class, and passing args.
134 templ_bin_path = os.path.join('files', 'template')
135 template_path = os.path.join(os.environ['CHARM_DIR'], 'files',
136 'templates', 'defaults.j2')
137 user_path = os.path.join(os.path.sep, 'home', 'ubuntu', '.hadoop_shim')
138 subprocess.call(['python', templ_bin_path, '-t', template_path, '-o',
139 user_path, 'java_home={j}'.format(j=JAVA_HOME),
140 'hadoop_home={h}'.format(h=HADOOP_INSTALL),
141 'conf={c}'.format(c=HADOOP_CONF_DIR)])
142
143 with open('{d}/.profile'.format(d=HOME_DIR), 'a') as f:
144 f.write('[ -f "$HOME/.hadoop_shim" ] && . "$HOME/.hadoop_shim"')
145
146
147def template(src, dst, own=0, grp=0, mod=0644):  # own/grp are numeric ids
148 copyfile(src, dst)
149 os.chown(dst, own, grp)
150 os.chmod(dst, mod)
151
152
153def copy_templates():
154 # Get UID's
155 hadoop_uid = pwd.getpwnam('ubuntu').pw_uid
156 hadoop_gid = grp.getgrnam('ubuntu').gr_gid
157
158 srcp = os.path.join('files', 'hadoop')
159 dstp = os.path.join(HADOOP_INSTALL, 'etc', 'hadoop')
160 template(os.path.join(srcp, 'core-site.xml'),
161 os.path.join(dstp, 'core-site.xml'), hadoop_uid,
162 hadoop_gid)
163 template(os.path.join(srcp, 'hdfs-site.xml'),
164 os.path.join(dstp, 'hdfs-site.xml'), hadoop_uid,
165 hadoop_gid)
166 template(os.path.join(srcp, 'yarn-site.xml'),
167 os.path.join(dstp, 'yarn-site.xml'), hadoop_uid,
168 hadoop_gid)
169 template(os.path.join(srcp, 'mapreduce-site.xml'),
170 os.path.join(dstp, 'mapred-site.xml'), hadoop_uid,
171 hadoop_gid)
172 template(os.path.join(srcp, 'hdfs_format.sh'),
173 os.path.join(HOME_DIR, 'hdfs_format.sh'), hadoop_uid,
174 hadoop_gid, 0777)
175
176
177# ##########################################
178# Charm Hooks
179# ##########################################
180
181def install():
182 # Remove the hadoop directory if it exists
183 if os.path.exists(HADOOP_DIR):
184 rmtree(HADOOP_DIR)
185 ssh_wizardry()
186
187 os.makedirs(HADOOP_DIR)
188 os.makedirs(HADOOP_TMP_DIR)
189 install_dev_packages()
190 extract_hadoop()
191 set_java_home()
192
193 # Create HDFS Dir Tree
194 nndir = os.path.join("{d}".format(d=HADOOP_DIR), "data",
195 "hdfs", "namenode")
196 if not os.path.exists(nndir):
197 os.makedirs(nndir)
198 dndir = "{d}/data/hdfs/datanode".format(d=HADOOP_DIR)
199 if not os.path.exists(dndir):
200 os.makedirs(dndir)
201
202 copy_templates()
203 add_shim()
204
205 # Call the HDFS Format script post installation
206 cmd = "{d}{s}hdfs_format.sh".format(d=HOME_DIR, s=os.path.sep)
207 log(cmd, "INFO")
208 subprocess.call(cmd)
209
210 # Ensure the Ubuntu user owns everything in the hadoop tree
211 # Stick a fork in it, we're done.
212 subprocess.call(['chown', '-R', 'ubuntu.ubuntu', HADOOP_DIR])
213 log("Install complete.", "INFO")
214
215
216if __name__ == "__main__":
217 parser = argparse.ArgumentParser()
218 parser.add_argument('-H', '--hook_name', dest='hook_name',
219 help='hook to call')
220
221 args = parser.parse_args()
222
223 if args.hook_name is not None:
224 hook_name = args.hook_name
225 else:
226 hook_name = os.path.basename(sys.argv[0])
227
228 if hook_name == "install":
229 retVal = install()
230 else:
231 pass
=== modified file 'hooks/install'
--- hooks/install 2014-05-22 17:00:22 +0000
+++ hooks/install 1970-01-01 00:00:00 +0000
@@ -1,58 +0,0 @@
1#!/bin/bash
2set -ex
3
4source files/upstart/hadoop_env
5
6if [ -d $HADOOP_DIR ]; then
7 rm -rf $HADOOP_DIR
8fi
9
10echo -e "Host *\n\tStrictHostKeyChecking no" > /home/ubuntu/.ssh/config
11
12yes | ssh-keygen -t rsa -N "" -f /home/ubuntu/.ssh/id_rsa
13cat /home/ubuntu/.ssh/id_rsa.pub >> /home/ubuntu/.ssh/authorized_keys
14chown -R ubuntu.ubuntu /home/ubuntu/.ssh
15# ssh localhost just to make sure
16# ssh -o StrictHostKeyChecking=no localhost
17
18juju-log "Installing JAVA!"
19
20mkdir -p $HADOOP_DIR
21apt-get update -qqy
22apt-get install -y openjdk-7-jdk
23JAVA_HOME_PATH=$(find /usr/ -name java-$JAVA_VERSION-openjdk-$PLATFORM_ARCH)
24
25juju-log "installing other development packages"
26apt-get -qqy install maven build-essential autoconf automake libtool cmake zlib1g-dev pkg-config libssl-dev snappy libsnappy-dev
27juju-log "Now for Hadoop!"
28
29mkdir -p $HADOOP_DIR
30mkdir -p $HADOOP_TMP_DIR
31# cd $HADOOP_DIR
32tar -xvzf files/archives/$HADOOP_VERSION.tar.gz -C $HADOOP_DIR
33cd $HADOOP_DIR
34sed -ir 's|export JAVA_HOME=.*|export JAVA_HOME='$JAVA_HOME_PATH'|' $(find -name hadoop-env.sh)
35
36mkdir -p $HADOOP_DIR/data/hdfs/namenode
37mkdir -p $HADOOP_DIR/data/hdfs/datanode
38cd $CHARM_DIR
39install -o root -g root -m 0644 files/upstart/defaults /etc/default/hadoop
40
41# Do a bunch of fun stuff in XML
42
43install -o ubuntu -g ubuntu -m 0644 files/hadoop/core-site.xml $HADOOP_DIR/$HADOOP_VERSION/etc/hadoop/core-site.xml
44install -o ubuntu -g ubuntu -m 0644 files/hadoop/hdfs-site.xml $HADOOP_DIR/$HADOOP_VERSION/etc/hadoop/hdfs-site.xml
45install -o ubuntu -g ubuntu -m 0644 files/hadoop/yarn-site.xml $HADOOP_DIR/$HADOOP_VERSION/etc/hadoop/yarn-site.xml
46install -o ubuntu -g ubuntu -m 0644 files/hadoop/mapreduce-site.xml $HADOOP_DIR/$HADOOP_VERSION/etc/hadoop/mapred-site.xml
47
48# Upstart
49
50. /etc/default/hadoop $JAVA_HOME_PATH $HADOOP_INSTALL $HOME_DIR
51source $HOME_DIR/.profile
52hdfs namenode -format
53
54chown -R ubuntu.ubuntu ~ubuntu/hadoop
55
56juju-log "Install complete."
57
58
=== target is u'common.py'
=== removed file 'hooks/relation-name-relation-broken'
--- hooks/relation-name-relation-broken 2014-05-22 17:00:22 +0000
+++ hooks/relation-name-relation-broken 1970-01-01 00:00:00 +0000
@@ -1,2 +0,0 @@
1#!/bin/sh
2# This hook runs when the full relation is removed (not just a single member)
=== removed file 'hooks/relation-name-relation-changed'
--- hooks/relation-name-relation-changed 2014-05-22 17:00:22 +0000
+++ hooks/relation-name-relation-changed 1970-01-01 00:00:00 +0000
@@ -1,9 +0,0 @@
1#!/bin/bash
2# This must be renamed to the name of the relation. The goal here is to
3# affect any change needed by relationships being formed, modified, or broken
4# This script should be idempotent.
5juju-log $JUJU_REMOTE_UNIT modified its settings
6juju-log Relation settings:
7relation-get
8juju-log Relation members:
9relation-list
=== removed file 'hooks/relation-name-relation-departed'
--- hooks/relation-name-relation-departed 2014-05-22 17:00:22 +0000
+++ hooks/relation-name-relation-departed 1970-01-01 00:00:00 +0000
@@ -1,5 +0,0 @@
1#!/bin/sh
2# This must be renamed to the name of the relation. The goal here is to
3# affect any change needed by the remote unit leaving the relationship.
4# This script should be idempotent.
5juju-log $JUJU_REMOTE_UNIT departed
=== removed file 'hooks/relation-name-relation-joined'
--- hooks/relation-name-relation-joined 2014-05-22 17:00:22 +0000
+++ hooks/relation-name-relation-joined 1970-01-01 00:00:00 +0000
@@ -1,5 +0,0 @@
1#!/bin/sh
2# This must be renamed to the name of the relation. The goal here is to
3# affect any change needed by relationships being formed
4# This script should be idempotent.
5juju-log $JUJU_REMOTE_UNIT joined
=== added directory 'tests'
=== added file 'tests/01_test_install_hook.py'
--- tests/01_test_install_hook.py 1970-01-01 00:00:00 +0000
+++ tests/01_test_install_hook.py 2014-06-26 20:05:25 +0000
@@ -0,0 +1,22 @@
1import sys
2import os
3import unittest
4
5from mock import patch, Mock
6sys.path.insert(0, os.path.abspath(os.path.join('..', 'hooks')))
7
8import hooks.common as common
9
10
11class TestCommon(unittest.TestCase):
12
13 @patch('grp.getgrnam')
14 @patch('pwd.getpwnam')
15 def test_init(self, pwm, grpm):
16 # The rewrite is procedural; constants now live at module level.
17 self.assertEqual(common.JAVA_VERSION, "7")
18 self.assertEqual(common.HADOOP_VERSION, "hadoop-2.2.0")
19 self.assertEqual(common.PLATFORM_ARCH, "amd64")
20 self.assertEqual(common.HOME_DIR, "/home/ubuntu")
21 self.assertEqual(common.HADOOP_TMP_DIR, "/home/ubuntu/hadoop/tmp")
22 self.assertEqual(common.HADOOP_INSTALL, "/home/ubuntu/hadoop/hadoop-2.2.0")
=== added file 'tests/tests.yaml'
--- tests/tests.yaml 1970-01-01 00:00:00 +0000
+++ tests/tests.yaml 2014-06-26 20:05:25 +0000
@@ -0,0 +1,10 @@
1bootstrap: false
2reset: false
3setup: script
4teardown: script
5tests: "[0-9]*"
6virtualenv: false
7#sources:
8packages:
9- amulet
10- python-requests
