Merge lp:~thedac/charms/precise/rabbitmq-server/enable-nrpe-external-master into lp:charms/rabbitmq-server
- Precise Pangolin (12.04)
- enable-nrpe-external-master
- Merge into trunk
Proposed by
David Ames
Status: | Merged |
---|---|
Merged at revision: | 45 |
Proposed branch: | lp:~thedac/charms/precise/rabbitmq-server/enable-nrpe-external-master |
Merge into: | lp:charms/rabbitmq-server |
Diff against target: |
3480 lines (+3280/-4) 26 files modified
hooks/_pythonpath.py (+14/-0) hooks/rabbit_utils.py (+1/-1) hooks/rabbitmq_server_relations.py (+40/-1) lib/charmhelpers-0.1.2.egg-info (+18/-0) lib/charmhelpers/cli/__init__.py (+147/-0) lib/charmhelpers/cli/commands.py (+2/-0) lib/charmhelpers/cli/host.py (+15/-0) lib/charmhelpers/contrib/ansible/__init__.py (+101/-0) lib/charmhelpers/contrib/charmhelpers/__init__.py (+184/-0) lib/charmhelpers/contrib/charmsupport/nrpe.py (+219/-0) lib/charmhelpers/contrib/charmsupport/volumes.py (+156/-0) lib/charmhelpers/contrib/hahelpers/apache.py (+58/-0) lib/charmhelpers/contrib/hahelpers/cluster.py (+183/-0) lib/charmhelpers/contrib/jujugui/utils.py (+602/-0) lib/charmhelpers/contrib/saltstack/__init__.py (+149/-0) lib/charmhelpers/core/hookenv.py (+395/-0) lib/charmhelpers/core/host.py (+281/-0) lib/charmhelpers/fetch/__init__.py (+271/-0) lib/charmhelpers/fetch/archiveurl.py (+48/-0) lib/charmhelpers/fetch/bzrurl.py (+49/-0) lib/charmhelpers/payload/__init__.py (+1/-0) lib/charmhelpers/payload/archive.py (+57/-0) lib/charmhelpers/payload/execd.py (+50/-0) metadata.yaml (+1/-1) revision (+1/-1) scripts/check_rabbitmq.py (+237/-0) |
To merge this branch: | bzr merge lp:~thedac/charms/precise/rabbitmq-server/enable-nrpe-external-master |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Tom Haddon | Approve | ||
Review via email: mp+196030@code.launchpad.net |
Commit message
Description of the change
Enable nrpe-external-master relation support
Use charmhelpers (embedded for now)
To post a comment you must log in.
- 46. By David Ames
-
Use /usr/local/lib/nagios/plugins for non-packaged checks
Revision history for this message
David Ames (thedac) wrote : | # |
Use /usr/local/lib/nagios/plugins for non-packaged checks
Updated charm-helpers
Mojo tested: https:/
Revision history for this message
Tom Haddon (mthaddon) wrote : | # |
Looks good, approved. Very nice to be able to see it in Mojo as well...
review:
Approve
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === added file 'hooks/_pythonpath.py' | |||
2 | --- hooks/_pythonpath.py 1970-01-01 00:00:00 +0000 | |||
3 | +++ hooks/_pythonpath.py 2013-11-21 22:43:22 +0000 | |||
4 | @@ -0,0 +1,14 @@ | |||
5 | 1 | import sys | ||
6 | 2 | import os | ||
7 | 3 | import os.path | ||
8 | 4 | |||
9 | 5 | # Make sure that charmhelpers is importable, or bail out. | ||
10 | 6 | local_copy = os.path.join( | ||
11 | 7 | os.path.dirname(os.path.dirname(__file__)), "lib") | ||
12 | 8 | if os.path.exists(local_copy) and os.path.isdir(local_copy): | ||
13 | 9 | sys.path.insert(0, local_copy) | ||
14 | 10 | try: | ||
15 | 11 | import charmhelpers | ||
16 | 12 | _ = charmhelpers | ||
17 | 13 | except ImportError: | ||
18 | 14 | sys.exit("Could not find required 'charmhelpers' library.") | ||
19 | 0 | 15 | ||
20 | === added symlink 'hooks/nrpe-external-master-relation-changed' | |||
21 | === target is u'rabbitmq_server_relations.py' | |||
22 | === modified file 'hooks/rabbit_utils.py' | |||
23 | --- hooks/rabbit_utils.py 2013-05-20 17:00:03 +0000 | |||
24 | +++ hooks/rabbit_utils.py 2013-11-21 22:43:22 +0000 | |||
25 | @@ -7,7 +7,7 @@ | |||
26 | 7 | import lib.utils as utils | 7 | import lib.utils as utils |
27 | 8 | import apt_pkg as apt | 8 | import apt_pkg as apt |
28 | 9 | 9 | ||
30 | 10 | PACKAGES = ['pwgen', 'rabbitmq-server'] | 10 | PACKAGES = ['pwgen', 'rabbitmq-server', 'python-amqplib'] |
31 | 11 | 11 | ||
32 | 12 | RABBITMQ_CTL = '/usr/sbin/rabbitmqctl' | 12 | RABBITMQ_CTL = '/usr/sbin/rabbitmqctl' |
33 | 13 | COOKIE_PATH = '/var/lib/rabbitmq/.erlang.cookie' | 13 | COOKIE_PATH = '/var/lib/rabbitmq/.erlang.cookie' |
34 | 14 | 14 | ||
35 | === modified file 'hooks/rabbitmq_server_relations.py' | |||
36 | --- hooks/rabbitmq_server_relations.py 2013-10-28 14:12:08 +0000 | |||
37 | +++ hooks/rabbitmq_server_relations.py 2013-11-21 22:43:22 +0000 | |||
38 | @@ -13,10 +13,17 @@ | |||
39 | 13 | import lib.ceph_utils as ceph | 13 | import lib.ceph_utils as ceph |
40 | 14 | import lib.openstack_common as openstack | 14 | import lib.openstack_common as openstack |
41 | 15 | 15 | ||
42 | 16 | import _pythonpath | ||
43 | 17 | _ = _pythonpath | ||
44 | 18 | |||
45 | 19 | from charmhelpers.core.host import rsync | ||
46 | 20 | from charmhelpers.contrib.charmsupport.nrpe import NRPE | ||
47 | 21 | |||
48 | 16 | 22 | ||
49 | 17 | SERVICE_NAME = os.getenv('JUJU_UNIT_NAME').split('/')[0] | 23 | SERVICE_NAME = os.getenv('JUJU_UNIT_NAME').split('/')[0] |
50 | 18 | POOL_NAME = SERVICE_NAME | 24 | POOL_NAME = SERVICE_NAME |
51 | 19 | RABBIT_DIR = '/var/lib/rabbitmq' | 25 | RABBIT_DIR = '/var/lib/rabbitmq' |
52 | 26 | NAGIOS_PLUGINS='/usr/local/lib/nagios/plugins' | ||
53 | 20 | 27 | ||
54 | 21 | 28 | ||
55 | 22 | def install(): | 29 | def install(): |
56 | @@ -237,6 +244,35 @@ | |||
57 | 237 | utils.juju_log('INFO', 'Finish Ceph Relation Changed') | 244 | utils.juju_log('INFO', 'Finish Ceph Relation Changed') |
58 | 238 | 245 | ||
59 | 239 | 246 | ||
60 | 247 | def update_nrpe_checks(): | ||
61 | 248 | if os.path.isdir(NAGIOS_PLUGINS): | ||
62 | 249 | rsync(os.path.join(os.getenv('CHARM_DIR'), 'scripts', 'check_rabbitmq.py'), | ||
63 | 250 | os.path.join(NAGIOS_PLUGINS, 'check_rabbitmq.py')) | ||
64 | 251 | user = 'nagios'  # NOTE(review): original diff had typo 'naigos'; the monitoring user must match the 'nagios' vhost below | ||
65 | 252 | vhost = 'nagios' | ||
66 | 253 | password_file = os.path.join(RABBIT_DIR, '%s.passwd' % user) | ||
67 | 254 | if os.path.exists(password_file): | ||
68 | 255 | password = open(password_file).read().strip() | ||
69 | 256 | else: | ||
70 | 257 | cmd = ['pwgen', '64', '1'] | ||
71 | 258 | password = subprocess.check_output(cmd).strip() | ||
72 | 259 | with open(password_file, 'wb') as out: | ||
73 | 260 | out.write(password) | ||
74 | 261 | |||
75 | 262 | rabbit.create_vhost(vhost) | ||
76 | 263 | rabbit.create_user(user, password) | ||
77 | 264 | rabbit.grant_permissions(user, vhost) | ||
78 | 265 | |||
79 | 266 | nrpe_compat = NRPE() | ||
80 | 267 | nrpe_compat.add_check( | ||
81 | 268 | shortname='rabbitmq', | ||
82 | 269 | description='Check RabbitMQ', | ||
83 | 270 | check_cmd='{}/check_rabbitmq.py --user {} --password {} --vhost {}' | ||
84 | 271 | ''.format(NAGIOS_PLUGINS, user, password, vhost) | ||
85 | 272 | ) | ||
86 | 273 | nrpe_compat.write() | ||
87 | 274 | |||
88 | 275 | |||
89 | 240 | def upgrade_charm(): | 276 | def upgrade_charm(): |
90 | 241 | pre_install_hooks() | 277 | pre_install_hooks() |
91 | 242 | # Ensure older passwd files in /var/lib/juju are moved to | 278 | # Ensure older passwd files in /var/lib/juju are moved to |
92 | @@ -281,6 +317,8 @@ | |||
93 | 281 | if cluster.eligible_leader('res_rabbitmq_vip'): | 317 | if cluster.eligible_leader('res_rabbitmq_vip'): |
94 | 282 | utils.restart('rabbitmq-server') | 318 | utils.restart('rabbitmq-server') |
95 | 283 | 319 | ||
96 | 320 | update_nrpe_checks() | ||
97 | 321 | |||
98 | 284 | 322 | ||
99 | 285 | def pre_install_hooks(): | 323 | def pre_install_hooks(): |
100 | 286 | for f in glob.glob('exec.d/*/charm-pre-install'): | 324 | for f in glob.glob('exec.d/*/charm-pre-install'): |
101 | @@ -297,7 +335,8 @@ | |||
102 | 297 | 'ceph-relation-joined': ceph_joined, | 335 | 'ceph-relation-joined': ceph_joined, |
103 | 298 | 'ceph-relation-changed': ceph_changed, | 336 | 'ceph-relation-changed': ceph_changed, |
104 | 299 | 'upgrade-charm': upgrade_charm, | 337 | 'upgrade-charm': upgrade_charm, |
106 | 300 | 'config-changed': config_changed | 338 | 'config-changed': config_changed, |
107 | 339 | 'nrpe-external-master-relation-changed': update_nrpe_checks | ||
108 | 301 | } | 340 | } |
109 | 302 | 341 | ||
110 | 303 | utils.do_hooks(hooks) | 342 | utils.do_hooks(hooks) |
111 | 304 | 343 | ||
112 | === added directory 'lib' | |||
113 | === added directory 'lib/charmhelpers' | |||
114 | === added file 'lib/charmhelpers-0.1.2.egg-info' | |||
115 | --- lib/charmhelpers-0.1.2.egg-info 1970-01-01 00:00:00 +0000 | |||
116 | +++ lib/charmhelpers-0.1.2.egg-info 2013-11-21 22:43:22 +0000 | |||
117 | @@ -0,0 +1,18 @@ | |||
118 | 1 | Metadata-Version: 1.0 | ||
119 | 2 | Name: charmhelpers | ||
120 | 3 | Version: 0.1.2 | ||
121 | 4 | Summary: UNKNOWN | ||
122 | 5 | Home-page: https://code.launchpad.net/charm-helpers | ||
123 | 6 | Author: Ubuntu Developers | ||
124 | 7 | Author-email: ubuntu-devel-discuss@lists.ubuntu.com | ||
125 | 8 | License: Affero GNU Public License v3 | ||
126 | 9 | Description: ============ | ||
127 | 10 | CharmHelpers | ||
128 | 11 | ============ | ||
129 | 12 | |||
130 | 13 | CharmHelpers provides an opinionated set of tools for building Juju | ||
131 | 14 | charms that work together. In addition to basic tasks like interact- | ||
132 | 15 | ing with the charm environment and the machine it runs on, it also | ||
133 | 16 | helps keep you build hooks and establish relations effortlessly. | ||
134 | 17 | |||
135 | 18 | Platform: UNKNOWN | ||
136 | 0 | 19 | ||
137 | === added file 'lib/charmhelpers/__init__.py' | |||
138 | === added directory 'lib/charmhelpers/cli' | |||
139 | === added file 'lib/charmhelpers/cli/__init__.py' | |||
140 | --- lib/charmhelpers/cli/__init__.py 1970-01-01 00:00:00 +0000 | |||
141 | +++ lib/charmhelpers/cli/__init__.py 2013-11-21 22:43:22 +0000 | |||
142 | @@ -0,0 +1,147 @@ | |||
143 | 1 | import inspect | ||
144 | 2 | import itertools | ||
145 | 3 | import argparse | ||
146 | 4 | import sys | ||
147 | 5 | |||
148 | 6 | |||
149 | 7 | class OutputFormatter(object): | ||
150 | 8 | def __init__(self, outfile=sys.stdout): | ||
151 | 9 | self.formats = ( | ||
152 | 10 | "raw", | ||
153 | 11 | "json", | ||
154 | 12 | "py", | ||
155 | 13 | "yaml", | ||
156 | 14 | "csv", | ||
157 | 15 | "tab", | ||
158 | 16 | ) | ||
159 | 17 | self.outfile = outfile | ||
160 | 18 | |||
161 | 19 | def add_arguments(self, argument_parser): | ||
162 | 20 | formatgroup = argument_parser.add_mutually_exclusive_group() | ||
163 | 21 | choices = self.supported_formats | ||
164 | 22 | formatgroup.add_argument("--format", metavar='FMT', | ||
165 | 23 | help="Select output format for returned data, " | ||
166 | 24 | "where FMT is one of: {}".format(choices), | ||
167 | 25 | choices=choices, default='raw') | ||
168 | 26 | for fmt in self.formats: | ||
169 | 27 | fmtfunc = getattr(self, fmt) | ||
170 | 28 | formatgroup.add_argument("-{}".format(fmt[0]), | ||
171 | 29 | "--{}".format(fmt), action='store_const', | ||
172 | 30 | const=fmt, dest='format', | ||
173 | 31 | help=fmtfunc.__doc__) | ||
174 | 32 | |||
175 | 33 | @property | ||
176 | 34 | def supported_formats(self): | ||
177 | 35 | return self.formats | ||
178 | 36 | |||
179 | 37 | def raw(self, output): | ||
180 | 38 | """Output data as raw string (default)""" | ||
181 | 39 | self.outfile.write(str(output)) | ||
182 | 40 | |||
183 | 41 | def py(self, output): | ||
184 | 42 | """Output data as a nicely-formatted python data structure""" | ||
185 | 43 | import pprint | ||
186 | 44 | pprint.pprint(output, stream=self.outfile) | ||
187 | 45 | |||
188 | 46 | def json(self, output): | ||
189 | 47 | """Output data in JSON format""" | ||
190 | 48 | import json | ||
191 | 49 | json.dump(output, self.outfile) | ||
192 | 50 | |||
193 | 51 | def yaml(self, output): | ||
194 | 52 | """Output data in YAML format""" | ||
195 | 53 | import yaml | ||
196 | 54 | yaml.safe_dump(output, self.outfile) | ||
197 | 55 | |||
198 | 56 | def csv(self, output): | ||
199 | 57 | """Output data as excel-compatible CSV""" | ||
200 | 58 | import csv | ||
201 | 59 | csvwriter = csv.writer(self.outfile) | ||
202 | 60 | csvwriter.writerows(output) | ||
203 | 61 | |||
204 | 62 | def tab(self, output): | ||
205 | 63 | """Output data in excel-compatible tab-delimited format""" | ||
206 | 64 | import csv | ||
207 | 65 | csvwriter = csv.writer(self.outfile, dialect=csv.excel_tab) | ||
208 | 66 | csvwriter.writerows(output) | ||
209 | 67 | |||
210 | 68 | def format_output(self, output, fmt='raw'): | ||
211 | 69 | fmtfunc = getattr(self, fmt) | ||
212 | 70 | fmtfunc(output) | ||
213 | 71 | |||
214 | 72 | |||
215 | 73 | class CommandLine(object): | ||
216 | 74 | argument_parser = None | ||
217 | 75 | subparsers = None | ||
218 | 76 | formatter = None | ||
219 | 77 | |||
220 | 78 | def __init__(self): | ||
221 | 79 | if not self.argument_parser: | ||
222 | 80 | self.argument_parser = argparse.ArgumentParser(description='Perform common charm tasks') | ||
223 | 81 | if not self.formatter: | ||
224 | 82 | self.formatter = OutputFormatter() | ||
225 | 83 | self.formatter.add_arguments(self.argument_parser) | ||
226 | 84 | if not self.subparsers: | ||
227 | 85 | self.subparsers = self.argument_parser.add_subparsers(help='Commands') | ||
228 | 86 | |||
229 | 87 | def subcommand(self, command_name=None): | ||
230 | 88 | """ | ||
231 | 89 | Decorate a function as a subcommand. Use its arguments as the | ||
232 | 90 | command-line arguments""" | ||
233 | 91 | def wrapper(decorated): | ||
234 | 92 | cmd_name = command_name or decorated.__name__ | ||
235 | 93 | subparser = self.subparsers.add_parser(cmd_name, | ||
236 | 94 | description=decorated.__doc__) | ||
237 | 95 | for args, kwargs in describe_arguments(decorated): | ||
238 | 96 | subparser.add_argument(*args, **kwargs) | ||
239 | 97 | subparser.set_defaults(func=decorated) | ||
240 | 98 | return decorated | ||
241 | 99 | return wrapper | ||
242 | 100 | |||
243 | 101 | def subcommand_builder(self, command_name, description=None): | ||
244 | 102 | """ | ||
245 | 103 | Decorate a function that builds a subcommand. Builders should accept a | ||
246 | 104 | single argument (the subparser instance) and return the function to be | ||
247 | 105 | run as the command.""" | ||
248 | 106 | def wrapper(decorated): | ||
249 | 107 | subparser = self.subparsers.add_parser(command_name) | ||
250 | 108 | func = decorated(subparser) | ||
251 | 109 | subparser.set_defaults(func=func) | ||
252 | 110 | subparser.description = description or func.__doc__ | ||
253 | 111 | return wrapper | ||
254 | 112 | |||
255 | 113 | def run(self): | ||
256 | 114 | "Run cli, processing arguments and executing subcommands." | ||
257 | 115 | arguments = self.argument_parser.parse_args() | ||
258 | 116 | argspec = inspect.getargspec(arguments.func) | ||
259 | 117 | vargs = [] | ||
260 | 118 | kwargs = {} | ||
261 | 119 | if argspec.varargs: | ||
262 | 120 | vargs = getattr(arguments, argspec.varargs) | ||
263 | 121 | for arg in argspec.args: | ||
264 | 122 | kwargs[arg] = getattr(arguments, arg) | ||
265 | 123 | self.formatter.format_output(arguments.func(*vargs, **kwargs), arguments.format) | ||
266 | 124 | |||
267 | 125 | |||
268 | 126 | cmdline = CommandLine() | ||
269 | 127 | |||
270 | 128 | |||
271 | 129 | def describe_arguments(func): | ||
272 | 130 | """ | ||
273 | 131 | Analyze a function's signature and return a data structure suitable for | ||
274 | 132 | passing in as arguments to an argparse parser's add_argument() method.""" | ||
275 | 133 | |||
276 | 134 | argspec = inspect.getargspec(func) | ||
277 | 135 | # we should probably raise an exception somewhere if func includes **kwargs | ||
278 | 136 | if argspec.defaults: | ||
279 | 137 | positional_args = argspec.args[:-len(argspec.defaults)] | ||
280 | 138 | keyword_names = argspec.args[-len(argspec.defaults):] | ||
281 | 139 | for arg, default in itertools.izip(keyword_names, argspec.defaults): | ||
282 | 140 | yield ('--{}'.format(arg),), {'default': default} | ||
283 | 141 | else: | ||
284 | 142 | positional_args = argspec.args | ||
285 | 143 | |||
286 | 144 | for arg in positional_args: | ||
287 | 145 | yield (arg,), {} | ||
288 | 146 | if argspec.varargs: | ||
289 | 147 | yield (argspec.varargs,), {'nargs': '*'} | ||
290 | 0 | 148 | ||
291 | === added file 'lib/charmhelpers/cli/commands.py' | |||
292 | --- lib/charmhelpers/cli/commands.py 1970-01-01 00:00:00 +0000 | |||
293 | +++ lib/charmhelpers/cli/commands.py 2013-11-21 22:43:22 +0000 | |||
294 | @@ -0,0 +1,2 @@ | |||
295 | 1 | from . import CommandLine | ||
296 | 2 | import host | ||
297 | 0 | 3 | ||
298 | === added file 'lib/charmhelpers/cli/host.py' | |||
299 | --- lib/charmhelpers/cli/host.py 1970-01-01 00:00:00 +0000 | |||
300 | +++ lib/charmhelpers/cli/host.py 2013-11-21 22:43:22 +0000 | |||
301 | @@ -0,0 +1,15 @@ | |||
302 | 1 | from . import cmdline | ||
303 | 2 | from charmhelpers.core import host | ||
304 | 3 | |||
305 | 4 | |||
306 | 5 | @cmdline.subcommand() | ||
307 | 6 | def mounts(): | ||
308 | 7 | "List mounts" | ||
309 | 8 | return host.mounts() | ||
310 | 9 | |||
311 | 10 | |||
312 | 11 | @cmdline.subcommand_builder('service', description="Control system services") | ||
313 | 12 | def service(subparser): | ||
314 | 13 | subparser.add_argument("action", help="The action to perform (start, stop, etc...)") | ||
315 | 14 | subparser.add_argument("service_name", help="Name of the service to control") | ||
316 | 15 | return host.service | ||
317 | 0 | 16 | ||
318 | === added directory 'lib/charmhelpers/contrib' | |||
319 | === added file 'lib/charmhelpers/contrib/__init__.py' | |||
320 | === added directory 'lib/charmhelpers/contrib/ansible' | |||
321 | === added file 'lib/charmhelpers/contrib/ansible/__init__.py' | |||
322 | --- lib/charmhelpers/contrib/ansible/__init__.py 1970-01-01 00:00:00 +0000 | |||
323 | +++ lib/charmhelpers/contrib/ansible/__init__.py 2013-11-21 22:43:22 +0000 | |||
324 | @@ -0,0 +1,101 @@ | |||
325 | 1 | # Copyright 2013 Canonical Ltd. | ||
326 | 2 | # | ||
327 | 3 | # Authors: | ||
328 | 4 | # Charm Helpers Developers <juju@lists.ubuntu.com> | ||
329 | 5 | """Charm Helpers ansible - declare the state of your machines. | ||
330 | 6 | |||
331 | 7 | This helper enables you to declare your machine state, rather than | ||
332 | 8 | program it procedurally (and have to test each change to your procedures). | ||
333 | 9 | Your install hook can be as simple as: | ||
334 | 10 | |||
335 | 11 | {{{ | ||
336 | 12 | import charmhelpers.contrib.ansible | ||
337 | 13 | |||
338 | 14 | |||
339 | 15 | def install(): | ||
340 | 16 | charmhelpers.contrib.ansible.install_ansible_support() | ||
341 | 17 | charmhelpers.contrib.ansible.apply_playbook('playbooks/install.yaml') | ||
342 | 18 | }}} | ||
343 | 19 | |||
344 | 20 | and won't need to change (nor will its tests) when you change the machine | ||
345 | 21 | state. | ||
346 | 22 | |||
347 | 23 | All of your juju config and relation-data are available as template | ||
348 | 24 | variables within your playbooks and templates. An install playbook looks | ||
349 | 25 | something like: | ||
350 | 26 | |||
351 | 27 | {{{ | ||
352 | 28 | --- | ||
353 | 29 | - hosts: localhost | ||
354 | 30 | user: root | ||
355 | 31 | |||
356 | 32 | tasks: | ||
357 | 33 | - name: Add private repositories. | ||
358 | 34 | template: | ||
359 | 35 | src: ../templates/private-repositories.list.jinja2 | ||
360 | 36 | dest: /etc/apt/sources.list.d/private.list | ||
361 | 37 | |||
362 | 38 | - name: Update the cache. | ||
363 | 39 | apt: update_cache=yes | ||
364 | 40 | |||
365 | 41 | - name: Install dependencies. | ||
366 | 42 | apt: pkg={{ item }} | ||
367 | 43 | with_items: | ||
368 | 44 | - python-mimeparse | ||
369 | 45 | - python-webob | ||
370 | 46 | - sunburnt | ||
371 | 47 | |||
372 | 48 | - name: Setup groups. | ||
373 | 49 | group: name={{ item.name }} gid={{ item.gid }} | ||
374 | 50 | with_items: | ||
375 | 51 | - { name: 'deploy_user', gid: 1800 } | ||
376 | 52 | - { name: 'service_user', gid: 1500 } | ||
377 | 53 | |||
378 | 54 | ... | ||
379 | 55 | }}} | ||
380 | 56 | |||
381 | 57 | Read more online about playbooks[1] and standard ansible modules[2]. | ||
382 | 58 | |||
383 | 59 | [1] http://www.ansibleworks.com/docs/playbooks.html | ||
384 | 60 | [2] http://www.ansibleworks.com/docs/modules.html | ||
385 | 61 | """ | ||
386 | 62 | import os | ||
387 | 63 | import subprocess | ||
388 | 64 | |||
389 | 65 | import charmhelpers.contrib.saltstack | ||
390 | 66 | import charmhelpers.core.host | ||
391 | 67 | import charmhelpers.core.hookenv | ||
392 | 68 | import charmhelpers.fetch | ||
393 | 69 | |||
394 | 70 | |||
395 | 71 | charm_dir = os.environ.get('CHARM_DIR', '') | ||
396 | 72 | ansible_hosts_path = '/etc/ansible/hosts' | ||
397 | 73 | # Ansible will automatically include any vars in the following | ||
398 | 74 | # file in its inventory when run locally. | ||
399 | 75 | ansible_vars_path = '/etc/ansible/host_vars/localhost' | ||
400 | 76 | |||
401 | 77 | |||
402 | 78 | def install_ansible_support(from_ppa=True): | ||
403 | 79 | """Installs the ansible package. | ||
404 | 80 | |||
405 | 81 | By default it is installed from the PPA [1] linked from | ||
406 | 82 | the ansible website [2]. | ||
407 | 83 | |||
408 | 84 | [1] https://launchpad.net/~rquillo/+archive/ansible | ||
409 | 85 | [2] http://www.ansibleworks.com/docs/gettingstarted.html#ubuntu-and-debian | ||
410 | 86 | |||
411 | 87 | If from_ppa is false, you must ensure that the package is available | ||
412 | 88 | from a configured repository. | ||
413 | 89 | """ | ||
414 | 90 | if from_ppa: | ||
415 | 91 | charmhelpers.fetch.add_source('ppa:rquillo/ansible') | ||
416 | 92 | charmhelpers.fetch.apt_update(fatal=True) | ||
417 | 93 | charmhelpers.fetch.apt_install('ansible') | ||
418 | 94 | with open(ansible_hosts_path, 'w+') as hosts_file: | ||
419 | 95 | hosts_file.write('localhost ansible_connection=local') | ||
420 | 96 | |||
421 | 97 | |||
422 | 98 | def apply_playbook(playbook): | ||
423 | 99 | charmhelpers.contrib.saltstack.juju_state_to_yaml( | ||
424 | 100 | ansible_vars_path, namespace_separator='__') | ||
425 | 101 | subprocess.check_call(['ansible-playbook', '-c', 'local', playbook]) | ||
426 | 0 | 102 | ||
427 | === added directory 'lib/charmhelpers/contrib/charmhelpers' | |||
428 | === added file 'lib/charmhelpers/contrib/charmhelpers/__init__.py' | |||
429 | --- lib/charmhelpers/contrib/charmhelpers/__init__.py 1970-01-01 00:00:00 +0000 | |||
430 | +++ lib/charmhelpers/contrib/charmhelpers/__init__.py 2013-11-21 22:43:22 +0000 | |||
431 | @@ -0,0 +1,184 @@ | |||
432 | 1 | # Copyright 2012 Canonical Ltd. This software is licensed under the | ||
433 | 2 | # GNU Affero General Public License version 3 (see the file LICENSE). | ||
434 | 3 | |||
435 | 4 | import warnings | ||
436 | 5 | warnings.warn("contrib.charmhelpers is deprecated", DeprecationWarning) | ||
437 | 6 | |||
438 | 7 | """Helper functions for writing Juju charms in Python.""" | ||
439 | 8 | |||
440 | 9 | __metaclass__ = type | ||
441 | 10 | __all__ = [ | ||
442 | 11 | #'get_config', # core.hookenv.config() | ||
443 | 12 | #'log', # core.hookenv.log() | ||
444 | 13 | #'log_entry', # core.hookenv.log() | ||
445 | 14 | #'log_exit', # core.hookenv.log() | ||
446 | 15 | #'relation_get', # core.hookenv.relation_get() | ||
447 | 16 | #'relation_set', # core.hookenv.relation_set() | ||
448 | 17 | #'relation_ids', # core.hookenv.relation_ids() | ||
449 | 18 | #'relation_list', # core.hookenv.relation_units() | ||
450 | 19 | #'config_get', # core.hookenv.config() | ||
451 | 20 | #'unit_get', # core.hookenv.unit_get() | ||
452 | 21 | #'open_port', # core.hookenv.open_port() | ||
453 | 22 | #'close_port', # core.hookenv.close_port() | ||
454 | 23 | #'service_control', # core.host.service() | ||
455 | 24 | 'unit_info', # client-side, NOT IMPLEMENTED | ||
456 | 25 | 'wait_for_machine', # client-side, NOT IMPLEMENTED | ||
457 | 26 | 'wait_for_page_contents', # client-side, NOT IMPLEMENTED | ||
458 | 27 | 'wait_for_relation', # client-side, NOT IMPLEMENTED | ||
459 | 28 | 'wait_for_unit', # client-side, NOT IMPLEMENTED | ||
460 | 29 | ] | ||
461 | 30 | |||
462 | 31 | import operator | ||
463 | 32 | from shelltoolbox import ( | ||
464 | 33 | command, | ||
465 | 34 | ) | ||
466 | 35 | import tempfile | ||
467 | 36 | import time | ||
468 | 37 | import urllib2 | ||
469 | 38 | import yaml | ||
470 | 39 | |||
471 | 40 | SLEEP_AMOUNT = 0.1 | ||
472 | 41 | # We create a juju_status Command here because it makes testing much, | ||
473 | 42 | # much easier. | ||
474 | 43 | juju_status = lambda: command('juju')('status') | ||
475 | 44 | |||
476 | 45 | # re-implemented as charmhelpers.fetch.configure_sources() | ||
477 | 46 | #def configure_source(update=False): | ||
478 | 47 | # source = config_get('source') | ||
479 | 48 | # if ((source.startswith('ppa:') or | ||
480 | 49 | # source.startswith('cloud:') or | ||
481 | 50 | # source.startswith('http:'))): | ||
482 | 51 | # run('add-apt-repository', source) | ||
483 | 52 | # if source.startswith("http:"): | ||
484 | 53 | # run('apt-key', 'import', config_get('key')) | ||
485 | 54 | # if update: | ||
486 | 55 | # run('apt-get', 'update') | ||
487 | 56 | |||
488 | 57 | |||
489 | 58 | # DEPRECATED: client-side only | ||
490 | 59 | def make_charm_config_file(charm_config): | ||
491 | 60 | charm_config_file = tempfile.NamedTemporaryFile() | ||
492 | 61 | charm_config_file.write(yaml.dump(charm_config)) | ||
493 | 62 | charm_config_file.flush() | ||
494 | 63 | # The NamedTemporaryFile instance is returned instead of just the name | ||
495 | 64 | # because we want to take advantage of garbage collection-triggered | ||
496 | 65 | # deletion of the temp file when it goes out of scope in the caller. | ||
497 | 66 | return charm_config_file | ||
498 | 67 | |||
499 | 68 | |||
500 | 69 | # DEPRECATED: client-side only | ||
501 | 70 | def unit_info(service_name, item_name, data=None, unit=None): | ||
502 | 71 | if data is None: | ||
503 | 72 | data = yaml.safe_load(juju_status()) | ||
504 | 73 | service = data['services'].get(service_name) | ||
505 | 74 | if service is None: | ||
506 | 75 | # XXX 2012-02-08 gmb: | ||
507 | 76 | # This allows us to cope with the race condition that we | ||
508 | 77 | # have between deploying a service and having it come up in | ||
509 | 78 | # `juju status`. We could probably do with cleaning it up so | ||
510 | 79 | # that it fails a bit more noisily after a while. | ||
511 | 80 | return '' | ||
512 | 81 | units = service['units'] | ||
513 | 82 | if unit is not None: | ||
514 | 83 | item = units[unit][item_name] | ||
515 | 84 | else: | ||
516 | 85 | # It might seem odd to sort the units here, but we do it to | ||
517 | 86 | # ensure that when no unit is specified, the first unit for the | ||
518 | 87 | # service (or at least the one with the lowest number) is the | ||
519 | 88 | # one whose data gets returned. | ||
520 | 89 | sorted_unit_names = sorted(units.keys()) | ||
521 | 90 | item = units[sorted_unit_names[0]][item_name] | ||
522 | 91 | return item | ||
523 | 92 | |||
524 | 93 | |||
525 | 94 | # DEPRECATED: client-side only | ||
526 | 95 | def get_machine_data(): | ||
527 | 96 | return yaml.safe_load(juju_status())['machines'] | ||
528 | 97 | |||
529 | 98 | |||
530 | 99 | # DEPRECATED: client-side only | ||
531 | 100 | def wait_for_machine(num_machines=1, timeout=300): | ||
532 | 101 | """Wait `timeout` seconds for `num_machines` machines to come up. | ||
533 | 102 | |||
534 | 103 | This wait_for... function can be called by other wait_for functions | ||
535 | 104 | whose timeouts might be too short in situations where only a bare | ||
536 | 105 | Juju setup has been bootstrapped. | ||
537 | 106 | |||
538 | 107 | :return: A tuple of (num_machines, time_taken). This is used for | ||
539 | 108 | testing. | ||
540 | 109 | """ | ||
541 | 110 | # You may think this is a hack, and you'd be right. The easiest way | ||
542 | 111 | # to tell what environment we're working in (LXC vs EC2) is to check | ||
543 | 112 | # the dns-name of the first machine. If it's localhost we're in LXC | ||
544 | 113 | # and we can just return here. | ||
545 | 114 | if get_machine_data()[0]['dns-name'] == 'localhost': | ||
546 | 115 | return 1, 0 | ||
547 | 116 | start_time = time.time() | ||
548 | 117 | while True: | ||
549 | 118 | # Drop the first machine, since it's the Zookeeper and that's | ||
550 | 119 | # not a machine that we need to wait for. This will only work | ||
551 | 120 | # for EC2 environments, which is why we return early above if | ||
552 | 121 | # we're in LXC. | ||
553 | 122 | machine_data = get_machine_data() | ||
554 | 123 | non_zookeeper_machines = [ | ||
555 | 124 | machine_data[key] for key in machine_data.keys()[1:]] | ||
556 | 125 | if len(non_zookeeper_machines) >= num_machines: | ||
557 | 126 | all_machines_running = True | ||
558 | 127 | for machine in non_zookeeper_machines: | ||
559 | 128 | if machine.get('instance-state') != 'running': | ||
560 | 129 | all_machines_running = False | ||
561 | 130 | break | ||
562 | 131 | if all_machines_running: | ||
563 | 132 | break | ||
564 | 133 | if time.time() - start_time >= timeout: | ||
565 | 134 | raise RuntimeError('timeout waiting for service to start') | ||
566 | 135 | time.sleep(SLEEP_AMOUNT) | ||
567 | 136 | return num_machines, time.time() - start_time | ||
568 | 137 | |||
569 | 138 | |||
570 | 139 | # DEPRECATED: client-side only | ||
571 | 140 | def wait_for_unit(service_name, timeout=480): | ||
572 | 141 | """Wait `timeout` seconds for a given service name to come up.""" | ||
573 | 142 | wait_for_machine(num_machines=1) | ||
574 | 143 | start_time = time.time() | ||
575 | 144 | while True: | ||
576 | 145 | state = unit_info(service_name, 'agent-state') | ||
577 | 146 | if 'error' in state or state == 'started': | ||
578 | 147 | break | ||
579 | 148 | if time.time() - start_time >= timeout: | ||
580 | 149 | raise RuntimeError('timeout waiting for service to start') | ||
581 | 150 | time.sleep(SLEEP_AMOUNT) | ||
582 | 151 | if state != 'started': | ||
583 | 152 | raise RuntimeError('unit did not start, agent-state: ' + state) | ||
584 | 153 | |||
585 | 154 | |||
586 | 155 | # DEPRECATED: client-side only | ||
587 | 156 | def wait_for_relation(service_name, relation_name, timeout=120): | ||
588 | 157 | """Wait `timeout` seconds for a given relation to come up.""" | ||
589 | 158 | start_time = time.time() | ||
590 | 159 | while True: | ||
591 | 160 | relation = unit_info(service_name, 'relations').get(relation_name) | ||
592 | 161 | if relation is not None and relation['state'] == 'up': | ||
593 | 162 | break | ||
594 | 163 | if time.time() - start_time >= timeout: | ||
595 | 164 | raise RuntimeError('timeout waiting for relation to be up') | ||
596 | 165 | time.sleep(SLEEP_AMOUNT) | ||
597 | 166 | |||
598 | 167 | |||
599 | 168 | # DEPRECATED: client-side only | ||
600 | 169 | def wait_for_page_contents(url, contents, timeout=120, validate=None): | ||
601 | 170 | if validate is None: | ||
602 | 171 | validate = operator.contains | ||
603 | 172 | start_time = time.time() | ||
604 | 173 | while True: | ||
605 | 174 | try: | ||
606 | 175 | stream = urllib2.urlopen(url) | ||
607 | 176 | except (urllib2.HTTPError, urllib2.URLError): | ||
608 | 177 | pass | ||
609 | 178 | else: | ||
610 | 179 | page = stream.read() | ||
611 | 180 | if validate(page, contents): | ||
612 | 181 | return page | ||
613 | 182 | if time.time() - start_time >= timeout: | ||
614 | 183 | raise RuntimeError('timeout waiting for contents of ' + url) | ||
615 | 184 | time.sleep(SLEEP_AMOUNT) | ||
616 | 0 | 185 | ||
617 | === added directory 'lib/charmhelpers/contrib/charmsupport' | |||
618 | === added file 'lib/charmhelpers/contrib/charmsupport/__init__.py' | |||
619 | === added file 'lib/charmhelpers/contrib/charmsupport/nrpe.py' | |||
620 | --- lib/charmhelpers/contrib/charmsupport/nrpe.py 1970-01-01 00:00:00 +0000 | |||
621 | +++ lib/charmhelpers/contrib/charmsupport/nrpe.py 2013-11-21 22:43:22 +0000 | |||
622 | @@ -0,0 +1,219 @@ | |||
623 | 1 | """Compatibility with the nrpe-external-master charm""" | ||
624 | 2 | # Copyright 2012 Canonical Ltd. | ||
625 | 3 | # | ||
626 | 4 | # Authors: | ||
627 | 5 | # Matthew Wedgwood <matthew.wedgwood@canonical.com> | ||
628 | 6 | |||
629 | 7 | import subprocess | ||
630 | 8 | import pwd | ||
631 | 9 | import grp | ||
632 | 10 | import os | ||
633 | 11 | import re | ||
634 | 12 | import shlex | ||
635 | 13 | import yaml | ||
636 | 14 | |||
637 | 15 | from charmhelpers.core.hookenv import ( | ||
638 | 16 | config, | ||
639 | 17 | local_unit, | ||
640 | 18 | log, | ||
641 | 19 | relation_ids, | ||
642 | 20 | relation_set, | ||
643 | 21 | ) | ||
644 | 22 | |||
645 | 23 | from charmhelpers.core.host import service | ||
646 | 24 | |||
647 | 25 | # This module adds compatibility with the nrpe-external-master and plain nrpe | ||
648 | 26 | # subordinate charms. To use it in your charm: | ||
649 | 27 | # | ||
650 | 28 | # 1. Update metadata.yaml | ||
651 | 29 | # | ||
652 | 30 | # provides: | ||
653 | 31 | # (...) | ||
654 | 32 | # nrpe-external-master: | ||
655 | 33 | # interface: nrpe-external-master | ||
656 | 34 | # scope: container | ||
657 | 35 | # | ||
658 | 36 | # and/or | ||
659 | 37 | # | ||
660 | 38 | # provides: | ||
661 | 39 | # (...) | ||
662 | 40 | # local-monitors: | ||
663 | 41 | # interface: local-monitors | ||
664 | 42 | # scope: container | ||
665 | 43 | |||
666 | 44 | # | ||
667 | 45 | # 2. Add the following to config.yaml | ||
668 | 46 | # | ||
669 | 47 | # nagios_context: | ||
670 | 48 | # default: "juju" | ||
671 | 49 | # type: string | ||
672 | 50 | # description: | | ||
673 | 51 | # Used by the nrpe subordinate charms. | ||
674 | 52 | # A string that will be prepended to instance name to set the host name | ||
675 | 53 | # in nagios. So for instance the hostname would be something like: | ||
676 | 54 | # juju-myservice-0 | ||
677 | 55 | # If you're running multiple environments with the same services in them | ||
678 | 56 | # this allows you to differentiate between them. | ||
679 | 57 | # | ||
680 | 58 | # 3. Add custom checks (Nagios plugins) to files/nrpe-external-master | ||
681 | 59 | # | ||
682 | 60 | # 4. Update your hooks.py with something like this: | ||
683 | 61 | # | ||
684 | 62 | # from charmsupport.nrpe import NRPE | ||
685 | 63 | # (...) | ||
686 | 64 | # def update_nrpe_config(): | ||
687 | 65 | # nrpe_compat = NRPE() | ||
688 | 66 | # nrpe_compat.add_check( | ||
689 | 67 | # shortname = "myservice", | ||
690 | 68 | # description = "Check MyService", | ||
691 | 69 | # check_cmd = "check_http -w 2 -c 10 http://localhost" | ||
692 | 70 | # ) | ||
693 | 71 | # nrpe_compat.add_check( | ||
694 | 72 | # "myservice_other", | ||
695 | 73 | # "Check for widget failures", | ||
696 | 74 | # check_cmd = "/srv/myapp/scripts/widget_check" | ||
697 | 75 | # ) | ||
698 | 76 | # nrpe_compat.write() | ||
699 | 77 | # | ||
700 | 78 | # def config_changed(): | ||
701 | 79 | # (...) | ||
702 | 80 | # update_nrpe_config() | ||
703 | 81 | # | ||
704 | 82 | # def nrpe_external_master_relation_changed(): | ||
705 | 83 | # update_nrpe_config() | ||
706 | 84 | # | ||
707 | 85 | # def local_monitors_relation_changed(): | ||
708 | 86 | # update_nrpe_config() | ||
709 | 87 | # | ||
710 | 88 | # 5. ln -s hooks.py nrpe-external-master-relation-changed | ||
711 | 89 | # ln -s hooks.py local-monitors-relation-changed | ||
712 | 90 | |||
713 | 91 | |||
class CheckException(Exception):
    """Raised when an NRPE check is misconfigured (e.g. a bad shortname)."""
716 | 94 | |||
717 | 95 | |||
class Check(object):
    """One NRPE check: the nrpe.d command stanza plus (when the export
    directory exists) the matching exported Nagios service definition."""

    # Shortnames are restricted to characters safe in Nagios object
    # names and in filenames.
    shortname_re = '[A-Za-z0-9-_]+$'
    service_template = ("""
#---------------------------------------------------
# This file is Juju managed
#---------------------------------------------------
define service {{
    use                             active-service
    host_name                       {nagios_hostname}
    service_description             {nagios_hostname}[{shortname}] """
                        """{description}
    check_command                   check_nrpe!{command}
    servicegroups                   {nagios_servicegroup}
}}
""")

    def __init__(self, shortname, description, check_cmd):
        """Validate `shortname`, derive the NRPE command name and resolve
        `check_cmd` to an absolute path.

        Raises CheckException if `shortname` contains characters outside
        [A-Za-z0-9-_].
        """
        super(Check, self).__init__()
        # XXX: could be better to calculate this from the service name
        if not re.match(self.shortname_re, shortname):
            raise CheckException("shortname must match {}".format(
                Check.shortname_re))
        self.shortname = shortname
        self.command = "check_{}".format(shortname)
        # Note: a set of invalid characters is defined by the
        # Nagios server config
        # The default is: illegal_object_name_chars=`~!$%^&*"|'<>?,()=
        self.description = description
        self.check_cmd = self._locate_cmd(check_cmd)

    def _locate_cmd(self, check_cmd):
        """Resolve the first word of `check_cmd` against the known plugin
        directories; return the absolute command line, or '' if not found."""
        search_path = (
            '/',
            os.path.join(os.environ['CHARM_DIR'],
                         'files/nrpe-external-master'),
            '/usr/lib/nagios/plugins',
            '/usr/local/lib/nagios/plugins',
        )
        parts = shlex.split(check_cmd)
        for path in search_path:
            if os.path.exists(os.path.join(path, parts[0])):
                command = os.path.join(path, parts[0])
                if len(parts) > 1:
                    # Re-attach the original arguments.
                    command += " " + " ".join(parts[1:])
                return command
        log('Check command not found: {}'.format(parts[0]))
        return ''

    def write(self, nagios_context, hostname):
        """Write the nrpe.d command definition for this check and, when
        the export directory is accessible, the Nagios service config."""
        nrpe_check_file = '/etc/nagios/nrpe.d/{}.cfg'.format(
            self.command)
        with open(nrpe_check_file, 'w') as nrpe_check_config:
            nrpe_check_config.write("# check {}\n".format(self.shortname))
            nrpe_check_config.write("command[{}]={}\n".format(
                self.command, self.check_cmd))

        if not os.path.exists(NRPE.nagios_exportdir):
            log('Not writing service config as {} is not accessible'.format(
                NRPE.nagios_exportdir))
        else:
            self.write_service_config(nagios_context, hostname)

    def write_service_config(self, nagios_context, hostname):
        """Replace any stale exported service definitions for this check
        with a freshly rendered one."""
        # Remove existing definitions for this command.  The '.' before
        # 'cfg' is escaped so the pattern only matches literal '.cfg'
        # (previously it matched any character).
        for f in os.listdir(NRPE.nagios_exportdir):
            if re.search('.*{}\\.cfg'.format(self.command), f):
                os.remove(os.path.join(NRPE.nagios_exportdir, f))

        templ_vars = {
            'nagios_hostname': hostname,
            'nagios_servicegroup': nagios_context,
            'description': self.description,
            'shortname': self.shortname,
            'command': self.command,
        }
        nrpe_service_text = Check.service_template.format(**templ_vars)
        nrpe_service_file = '{}/service__{}_{}.cfg'.format(
            NRPE.nagios_exportdir, hostname, self.command)
        with open(nrpe_service_file, 'w') as nrpe_service_config:
            nrpe_service_config.write(str(nrpe_service_text))

    def run(self):
        """Execute the resolved check command once (for local testing)."""
        # shlex.split so commands with arguments run correctly:
        # subprocess.call() on a plain string with shell=False treats
        # the entire string as the executable name and fails.
        subprocess.call(shlex.split(self.check_cmd))
800 | 178 | |||
801 | 179 | |||
class NRPE(object):
    """Collects Check objects and writes all NRPE/Nagios configuration.

    The hostname exported to Nagios is "<nagios_context>-<unit-name>",
    with '/' in the unit name replaced by '-'.
    """

    nagios_logdir = '/var/log/nagios'
    nagios_exportdir = '/var/lib/nagios/export'
    nrpe_confdir = '/etc/nagios/nrpe.d'

    def __init__(self):
        super(NRPE, self).__init__()
        self.config = config()
        self.nagios_context = self.config['nagios_context']
        # '/' is not safe in filenames or Nagios object names.
        self.unit_name = local_unit().replace('/', '-')
        self.hostname = "{}-{}".format(self.nagios_context, self.unit_name)
        self.checks = []

    def add_check(self, *args, **kwargs):
        """Register a check; accepts the same arguments as Check()."""
        self.checks.append(Check(*args, **kwargs))

    def write(self):
        """Write every registered check, restart the NRPE daemon and
        publish the monitors to any local-monitors relations."""
        try:
            nagios_uid = pwd.getpwnam('nagios').pw_uid
            nagios_gid = grp.getgrnam('nagios').gr_gid
        except KeyError:
            # getpwnam/getgrnam raise KeyError when the account is
            # missing; the previous bare `except:` also swallowed
            # KeyboardInterrupt/SystemExit.
            log("Nagios user not set up, nrpe checks not updated")
            return

        if not os.path.exists(NRPE.nagios_logdir):
            os.mkdir(NRPE.nagios_logdir)
            os.chown(NRPE.nagios_logdir, nagios_uid, nagios_gid)

        nrpe_monitors = {}
        monitors = {"monitors": {"remote": {"nrpe": nrpe_monitors}}}
        for nrpecheck in self.checks:
            nrpecheck.write(self.nagios_context, self.hostname)
            nrpe_monitors[nrpecheck.shortname] = {
                "command": nrpecheck.command,
            }

        service('restart', 'nagios-nrpe-server')

        for rid in relation_ids("local-monitors"):
            relation_set(relation_id=rid, monitors=yaml.dump(monitors))
842 | 0 | 220 | ||
843 | === added file 'lib/charmhelpers/contrib/charmsupport/volumes.py' | |||
844 | --- lib/charmhelpers/contrib/charmsupport/volumes.py 1970-01-01 00:00:00 +0000 | |||
845 | +++ lib/charmhelpers/contrib/charmsupport/volumes.py 2013-11-21 22:43:22 +0000 | |||
846 | @@ -0,0 +1,156 @@ | |||
847 | 1 | ''' | ||
848 | 2 | Functions for managing volumes in juju units. One volume is supported per unit. | ||
849 | 3 | Subordinates may have their own storage, provided it is on its own partition. | ||
850 | 4 | |||
851 | 5 | Configuration stanzas: | ||
852 | 6 | volume-ephemeral: | ||
853 | 7 | type: boolean | ||
854 | 8 | default: true | ||
855 | 9 | description: > | ||
856 | 10 | If false, a volume is mounted as specified in "volume-map"
857 | 11 | If true, ephemeral storage will be used, meaning that log data | ||
858 | 12 | will only exist as long as the machine. YOU HAVE BEEN WARNED. | ||
859 | 13 | volume-map: | ||
860 | 14 | type: string | ||
861 | 15 | default: {} | ||
862 | 16 | description: > | ||
863 | 17 | YAML map of units to device names, e.g: | ||
864 | 18 | "{ rsyslog/0: /dev/vdb, rsyslog/1: /dev/vdb }" | ||
865 | 19 | Service units will raise a configure-error if volume-ephemeral | ||
866 | 20 | is 'true' and no volume-map value is set. Use 'juju set' to set a | ||
867 | 21 | value and 'juju resolved' to complete configuration. | ||
868 | 22 | |||
869 | 23 | Usage: | ||
870 | 24 | from charmsupport.volumes import configure_volume, VolumeConfigurationError | ||
871 | 25 | from charmsupport.hookenv import log, ERROR | ||
872 | 26 | def post_mount_hook(): | ||
873 | 27 | stop_service('myservice') | ||
874 | 28 | def post_mount_hook(): | ||
875 | 29 | start_service('myservice') | ||
876 | 30 | |||
877 | 31 | if __name__ == '__main__': | ||
878 | 32 | try: | ||
879 | 33 | configure_volume(before_change=pre_mount_hook, | ||
880 | 34 | after_change=post_mount_hook) | ||
881 | 35 | except VolumeConfigurationError: | ||
882 | 36 | log('Storage could not be configured', ERROR) | ||
883 | 37 | ''' | ||
884 | 38 | |||
885 | 39 | # XXX: Known limitations | ||
886 | 40 | # - fstab is neither consulted nor updated | ||
887 | 41 | |||
888 | 42 | import os | ||
889 | 43 | from charmhelpers.core import hookenv | ||
890 | 44 | from charmhelpers.core import host | ||
891 | 45 | import yaml | ||
892 | 46 | |||
893 | 47 | |||
894 | 48 | MOUNT_BASE = '/srv/juju/volumes' | ||
895 | 49 | |||
896 | 50 | |||
class VolumeConfigurationError(Exception):
    '''Raised when volume configuration data is missing or invalid.'''
900 | 54 | |||
901 | 55 | |||
def get_config():
    '''Gather and sanity-check volume configuration data.

    Returns a dict with keys 'ephemeral', 'device' and 'mountpoint',
    or None when the configuration is invalid.
    '''
    volume_config = {}
    config = hookenv.config()

    errors = False

    if config.get('volume-ephemeral') in (True, 'True', 'true', 'Yes', 'yes'):
        volume_config['ephemeral'] = True
    else:
        volume_config['ephemeral'] = False

    # Pre-assign so a YAML parse failure leaves a usable (empty) map;
    # previously volume_map was unbound after the except clause and the
    # following `if volume_map is None` raised NameError.
    volume_map = {}
    try:
        volume_map = yaml.safe_load(config.get('volume-map', '{}'))
    except yaml.YAMLError as e:
        hookenv.log("Error parsing YAML volume-map: {}".format(e),
                    hookenv.ERROR)
        errors = True
    if volume_map is None:
        # probably an empty string
        volume_map = {}
    elif not isinstance(volume_map, dict):
        hookenv.log("Volume-map should be a dictionary, not {}".format(
            type(volume_map)))
        errors = True
        # Reset so the .get() lookup below cannot raise AttributeError
        # on a non-dict value; `errors` already forces a None return.
        volume_map = {}

    volume_config['device'] = volume_map.get(os.environ['JUJU_UNIT_NAME'])
    if volume_config['device'] and volume_config['ephemeral']:
        # asked for ephemeral storage but also defined a volume ID
        hookenv.log('A volume is defined for this unit, but ephemeral '
                    'storage was requested', hookenv.ERROR)
        errors = True
    elif not volume_config['device'] and not volume_config['ephemeral']:
        # asked for permanent storage but did not define volume ID
        hookenv.log('Ephemeral storage was requested, but there is no volume '
                    'defined for this unit.', hookenv.ERROR)
        errors = True

    unit_mount_name = hookenv.local_unit().replace('/', '-')
    volume_config['mountpoint'] = os.path.join(MOUNT_BASE, unit_mount_name)

    if errors:
        return None
    return volume_config
946 | 100 | |||
947 | 101 | |||
def mount_volume(config):
    '''Mount the configured device at the configured mountpoint.

    Creates the mountpoint if needed and remounts if something else is
    already mounted there.  Raises VolumeConfigurationError when the
    mountpoint is unusable or the mount fails.
    '''
    mountpoint = config['mountpoint']
    if not os.path.exists(mountpoint):
        host.mkdir(mountpoint)
    elif not os.path.isdir(mountpoint):
        hookenv.log('Not a directory: {}'.format(mountpoint))
        raise VolumeConfigurationError()
    if os.path.ismount(mountpoint):
        unmount_volume(config)
    if not host.mount(config['device'], mountpoint, persist=True):
        raise VolumeConfigurationError()
959 | 113 | |||
960 | 114 | |||
def unmount_volume(config):
    '''Unmount the volume at config['mountpoint'] if it is mounted.'''
    mountpoint = config['mountpoint']
    if not os.path.ismount(mountpoint):
        return
    if not host.umount(mountpoint, persist=True):
        raise VolumeConfigurationError()
965 | 119 | |||
966 | 120 | |||
def managed_mounts():
    '''Return the mounted volumes that live under MOUNT_BASE.'''
    return [mount for mount in host.mounts()
            if mount[0].startswith(MOUNT_BASE)]
970 | 124 | |||
971 | 125 | |||
def configure_volume(before_change=lambda: None, after_change=lambda: None):
    '''Set up storage (or don't) according to the charm's volume configuration.
    Returns the mount point or "ephemeral". before_change and after_change
    are optional functions to be called if the volume configuration changes.
    '''

    config = get_config()
    if not config:
        hookenv.log('Failed to read volume configuration', hookenv.CRITICAL)
        raise VolumeConfigurationError()

    if config['ephemeral']:
        # Ephemeral storage requested: ensure nothing managed is
        # mounted; the hooks run only when a change actually occurs.
        if os.path.ismount(config['mountpoint']):
            before_change()
            unmount_volume(config)
            after_change()
        return 'ephemeral'
    else:
        # persistent storage
        if os.path.ismount(config['mountpoint']):
            mounts = dict(managed_mounts())
            if mounts.get(config['mountpoint']) != config['device']:
                # A different device is mounted at the managed
                # mountpoint: swap it for the configured one.
                before_change()
                unmount_volume(config)
                mount_volume(config)
                after_change()
        else:
            # Nothing mounted yet: mount the configured device.
            before_change()
            mount_volume(config)
            after_change()
        return config['mountpoint']
1003 | 0 | 157 | ||
1004 | === added directory 'lib/charmhelpers/contrib/hahelpers' | |||
1005 | === added file 'lib/charmhelpers/contrib/hahelpers/__init__.py' | |||
1006 | === added file 'lib/charmhelpers/contrib/hahelpers/apache.py' | |||
1007 | --- lib/charmhelpers/contrib/hahelpers/apache.py 1970-01-01 00:00:00 +0000 | |||
1008 | +++ lib/charmhelpers/contrib/hahelpers/apache.py 2013-11-21 22:43:22 +0000 | |||
1009 | @@ -0,0 +1,58 @@ | |||
1010 | 1 | # | ||
1011 | 2 | # Copyright 2012 Canonical Ltd. | ||
1012 | 3 | # | ||
1013 | 4 | # This file is sourced from lp:openstack-charm-helpers | ||
1014 | 5 | # | ||
1015 | 6 | # Authors: | ||
1016 | 7 | # James Page <james.page@ubuntu.com> | ||
1017 | 8 | # Adam Gandelman <adamg@ubuntu.com> | ||
1018 | 9 | # | ||
1019 | 10 | |||
1020 | 11 | import subprocess | ||
1021 | 12 | |||
1022 | 13 | from charmhelpers.core.hookenv import ( | ||
1023 | 14 | config as config_get, | ||
1024 | 15 | relation_get, | ||
1025 | 16 | relation_ids, | ||
1026 | 17 | related_units as relation_list, | ||
1027 | 18 | log, | ||
1028 | 19 | INFO, | ||
1029 | 20 | ) | ||
1030 | 21 | |||
1031 | 22 | |||
def get_cert():
    """Return an (ssl_cert, ssl_key) pair from charm config, falling
    back to the identity-service relation when either is unset."""
    cert = config_get('ssl_cert')
    key = config_get('ssl_key')
    if cert and key:
        return (cert, key)
    log("Inspecting identity-service relations for SSL certificate.",
        level=INFO)
    cert = key = None
    for r_id in relation_ids('identity-service'):
        for unit in relation_list(r_id):
            # Keep the first non-empty value found for each setting.
            cert = cert or relation_get('ssl_cert', rid=r_id, unit=unit)
            key = key or relation_get('ssl_key', rid=r_id, unit=unit)
    return (cert, key)
1048 | 39 | |||
1049 | 40 | |||
def get_ca_cert():
    """Return the first non-empty ca_cert published on any
    identity-service relation, or None."""
    log("Inspecting identity-service relations for CA SSL certificate.",
        level=INFO)
    ca_cert = None
    for r_id in relation_ids('identity-service'):
        for unit in relation_list(r_id):
            if not ca_cert:
                ca_cert = relation_get('ca_cert', rid=r_id, unit=unit)
    return ca_cert
1060 | 51 | |||
1061 | 52 | |||
def install_ca_cert(ca_cert):
    """Install `ca_cert` as the Keystone Juju CA certificate and refresh
    the system certificate store.  A falsy ca_cert is a no-op."""
    if not ca_cert:
        return
    cert_path = '/usr/local/share/ca-certificates/keystone_juju_ca_cert.crt'
    with open(cert_path, 'w') as crt:
        crt.write(ca_cert)
    subprocess.check_call(['update-ca-certificates', '--fresh'])
1068 | 0 | 59 | ||
1069 | === added file 'lib/charmhelpers/contrib/hahelpers/cluster.py' | |||
1070 | --- lib/charmhelpers/contrib/hahelpers/cluster.py 1970-01-01 00:00:00 +0000 | |||
1071 | +++ lib/charmhelpers/contrib/hahelpers/cluster.py 2013-11-21 22:43:22 +0000 | |||
1072 | @@ -0,0 +1,183 @@ | |||
1073 | 1 | # | ||
1074 | 2 | # Copyright 2012 Canonical Ltd. | ||
1075 | 3 | # | ||
1076 | 4 | # Authors: | ||
1077 | 5 | # James Page <james.page@ubuntu.com> | ||
1078 | 6 | # Adam Gandelman <adamg@ubuntu.com> | ||
1079 | 7 | # | ||
1080 | 8 | |||
1081 | 9 | import subprocess | ||
1082 | 10 | import os | ||
1083 | 11 | |||
1084 | 12 | from socket import gethostname as get_unit_hostname | ||
1085 | 13 | |||
1086 | 14 | from charmhelpers.core.hookenv import ( | ||
1087 | 15 | log, | ||
1088 | 16 | relation_ids, | ||
1089 | 17 | related_units as relation_list, | ||
1090 | 18 | relation_get, | ||
1091 | 19 | config as config_get, | ||
1092 | 20 | INFO, | ||
1093 | 21 | ERROR, | ||
1094 | 22 | unit_get, | ||
1095 | 23 | ) | ||
1096 | 24 | |||
1097 | 25 | |||
class HAIncompleteConfig(Exception):
    """Raised when required hacluster configuration settings are missing."""
1100 | 28 | |||
1101 | 29 | |||
def is_clustered():
    """Return True if any unit on an 'ha' relation reports 'clustered'."""
    for rid in (relation_ids('ha') or []):
        for unit in (relation_list(rid) or []):
            if relation_get('clustered', rid=rid, unit=unit):
                return True
    return False
1111 | 39 | |||
1112 | 40 | |||
def is_leader(resource):
    """Return True when this unit's hostname appears in the CRM status
    for `resource` (i.e. the cluster resource is running here)."""
    try:
        status = subprocess.check_output(
            ["crm", "resource", "show", resource])
    except subprocess.CalledProcessError:
        return False
    return get_unit_hostname() in status
1127 | 55 | |||
1128 | 56 | |||
def peer_units():
    """Return every peer unit across all 'cluster' relations."""
    return [unit
            for r_id in (relation_ids('cluster') or [])
            for unit in (relation_list(r_id) or [])]
1135 | 63 | |||
1136 | 64 | |||
def oldest_peer(peers):
    """Return True when the local unit has the lowest unit number.

    Unit names have the form 'service/N'; the local unit name is read
    from the JUJU_UNIT_NAME environment variable.
    """
    local_no = int(os.getenv('JUJU_UNIT_NAME').split('/')[1])
    return all(int(peer.split('/')[1]) >= local_no for peer in peers)
1144 | 72 | |||
1145 | 73 | |||
def eligible_leader(resource):
    """Decide whether this unit should perform leader-only actions.

    When clustered, defer to the CRM resource owner; otherwise defer to
    the oldest peer unit.
    """
    if is_clustered():
        if is_leader(resource):
            return True
        log('Deferring action to CRM leader.', level=INFO)
        return False
    peers = peer_units()
    if peers and not oldest_peer(peers):
        log('Deferring action to oldest service unit.', level=INFO)
        return False
    return True
1157 | 85 | |||
1158 | 86 | |||
def https():
    '''Return True when enough data exists — in charm configuration or
    on the identity-service relation — to configure HTTPS.'''
    if config_get('use-https') == "yes":
        return True
    if config_get('ssl_cert') and config_get('ssl_key'):
        return True
    for r_id in relation_ids('identity-service'):
        for unit in relation_list(r_id):
            rel_state = [
                relation_get(setting, rid=r_id, unit=unit)
                for setting in ('https_keystone', 'ssl_cert',
                                'ssl_key', 'ca_cert')
            ]
            # NOTE: works around (LP: #1203241) — reject both missing
            # (None) and empty-string values.
            if (None not in rel_state) and ('' not in rel_state):
                return True
    return False
1182 | 110 | |||
1183 | 111 | |||
def determine_api_port(public_port):
    '''Return the port the API service should listen on.

    The public port is offset by 10 for each proxy layer in front of
    the API: haproxy when peers or a cluster exist, and the HTTPS
    reverse proxy when HTTPS is configured.

    public_port: int: standard public port for given service
    returns: int: the correct listening port for the API service
    '''
    offset = 0
    if peer_units() or is_clustered():
        offset += 10
    if https():
        offset += 10
    return public_port - offset
1199 | 127 | |||
1200 | 128 | |||
def determine_haproxy_port(public_port):
    '''Return the port haproxy should listen on: the public port, less
    10 when an HTTPS reverse proxy sits in front of it.

    public_port: int: standard public port for given service
    returns: int: the correct listening port for the HAProxy service
    '''
    return public_port - (10 if https() else 0)
1214 | 142 | |||
1215 | 143 | |||
def get_hacluster_config():
    '''
    Obtains all relevant configuration from charm configuration required
    for initiating a relation to hacluster:

        ha-bindiface, ha-mcastport, vip, vip_iface, vip_cidr

    returns: dict: A dict containing settings keyed by setting name.
    raises: HAIncompleteConfig if settings are missing.
    '''
    settings = ['ha-bindiface', 'ha-mcastport', 'vip', 'vip_iface', 'vip_cidr']
    conf = {}
    for setting in settings:
        conf[setting] = config_get(setting)
    # Build the list of unset settings directly; the original used a
    # list comprehension purely for its .append() side effect.
    missing = [s for s, v in conf.iteritems() if v is None]
    if missing:
        log('Insufficient config data to configure hacluster.', level=ERROR)
        raise HAIncompleteConfig
    return conf
1236 | 164 | |||
1237 | 165 | |||
def canonical_url(configs, vip_setting='vip'):
    '''
    Returns the correct HTTP URL to this host given the state of HTTPS
    configuration and hacluster.

    :configs : OSTemplateRenderer: A config templating object to inspect
                                   for a complete https context.
    :vip_setting: str: Setting in charm config that specifies
                       VIP address.
    '''
    scheme = 'https' if 'https' in configs.complete_contexts() else 'http'
    if is_clustered():
        addr = config_get(vip_setting)
    else:
        addr = unit_get('private-address')
    return '%s://%s' % (scheme, addr)
1256 | 0 | 184 | ||
1257 | === added directory 'lib/charmhelpers/contrib/jujugui' | |||
1258 | === added file 'lib/charmhelpers/contrib/jujugui/__init__.py' | |||
1259 | === added file 'lib/charmhelpers/contrib/jujugui/utils.py' | |||
1260 | --- lib/charmhelpers/contrib/jujugui/utils.py 1970-01-01 00:00:00 +0000 | |||
1261 | +++ lib/charmhelpers/contrib/jujugui/utils.py 2013-11-21 22:43:22 +0000 | |||
1262 | @@ -0,0 +1,602 @@ | |||
1263 | 1 | """Juju GUI charm utilities.""" | ||
1264 | 2 | |||
1265 | 3 | __all__ = [ | ||
1266 | 4 | 'AGENT', | ||
1267 | 5 | 'APACHE', | ||
1268 | 6 | 'API_PORT', | ||
1269 | 7 | 'CURRENT_DIR', | ||
1270 | 8 | 'HAPROXY', | ||
1271 | 9 | 'IMPROV', | ||
1272 | 10 | 'JUJU_DIR', | ||
1273 | 11 | 'JUJU_GUI_DIR', | ||
1274 | 12 | 'JUJU_GUI_SITE', | ||
1275 | 13 | 'JUJU_PEM', | ||
1276 | 14 | 'WEB_PORT', | ||
1277 | 15 | 'bzr_checkout', | ||
1278 | 16 | 'chain', | ||
1279 | 17 | 'cmd_log', | ||
1280 | 18 | 'fetch_api', | ||
1281 | 19 | 'fetch_gui', | ||
1282 | 20 | 'find_missing_packages', | ||
1283 | 21 | 'first_path_in_dir', | ||
1284 | 22 | 'get_api_address', | ||
1285 | 23 | 'get_npm_cache_archive_url', | ||
1286 | 24 | 'get_release_file_url', | ||
1287 | 25 | 'get_staging_dependencies', | ||
1288 | 26 | 'get_zookeeper_address', | ||
1289 | 27 | 'legacy_juju', | ||
1290 | 28 | 'log_hook', | ||
1291 | 29 | 'merge', | ||
1292 | 30 | 'parse_source', | ||
1293 | 31 | 'prime_npm_cache', | ||
1294 | 32 | 'render_to_file', | ||
1295 | 33 | 'save_or_create_certificates', | ||
1296 | 34 | 'setup_apache', | ||
1297 | 35 | 'setup_gui', | ||
1298 | 36 | 'start_agent', | ||
1299 | 37 | 'start_gui', | ||
1300 | 38 | 'start_improv', | ||
1301 | 39 | 'write_apache_config', | ||
1302 | 40 | ] | ||
1303 | 41 | |||
1304 | 42 | from contextlib import contextmanager | ||
1305 | 43 | import errno | ||
1306 | 44 | import json | ||
1307 | 45 | import os | ||
1308 | 46 | import logging | ||
1309 | 47 | import shutil | ||
1310 | 48 | from subprocess import CalledProcessError | ||
1311 | 49 | import tempfile | ||
1312 | 50 | from urlparse import urlparse | ||
1313 | 51 | |||
1314 | 52 | import apt | ||
1315 | 53 | import tempita | ||
1316 | 54 | |||
1317 | 55 | from launchpadlib.launchpad import Launchpad | ||
1318 | 56 | from shelltoolbox import ( | ||
1319 | 57 | Serializer, | ||
1320 | 58 | apt_get_install, | ||
1321 | 59 | command, | ||
1322 | 60 | environ, | ||
1323 | 61 | install_extra_repositories, | ||
1324 | 62 | run, | ||
1325 | 63 | script_name, | ||
1326 | 64 | search_file, | ||
1327 | 65 | su, | ||
1328 | 66 | ) | ||
1329 | 67 | from charmhelpers.core.host import ( | ||
1330 | 68 | service_start, | ||
1331 | 69 | ) | ||
1332 | 70 | from charmhelpers.core.hookenv import ( | ||
1333 | 71 | log, | ||
1334 | 72 | config, | ||
1335 | 73 | unit_get, | ||
1336 | 74 | ) | ||
1337 | 75 | |||
1338 | 76 | |||
# Upstart job names for the services this charm manages.
AGENT = 'juju-api-agent'
APACHE = 'apache2'
IMPROV = 'juju-api-improv'
HAPROXY = 'haproxy'

# Listening ports: the Juju API server and the GUI web server.
API_PORT = 8080
WEB_PORT = 8000

# Filesystem layout, relative to the charm's working directory.
CURRENT_DIR = os.getcwd()
JUJU_DIR = os.path.join(CURRENT_DIR, 'juju')
JUJU_GUI_DIR = os.path.join(CURRENT_DIR, 'juju-gui')
JUJU_GUI_SITE = '/etc/apache2/sites-available/juju-gui'
JUJU_GUI_PORTS = '/etc/apache2/ports.conf'
JUJU_PEM = 'juju.includes-private-key.pem'
# PPAs and deb packages required to build the GUI from source.
BUILD_REPOSITORIES = ('ppa:chris-lea/node.js-legacy',)
DEB_BUILD_DEPENDENCIES = (
    'bzr', 'imagemagick', 'make', 'nodejs', 'npm',
)
# Deb packages required by the staging (improv) environment.
DEB_STAGE_DEPENDENCIES = (
    'zookeeper',
)


# Store the configuration from one invocation to the next.
config_json = Serializer('/tmp/config.json')
# Bazaar checkout command.
bzr_checkout = command('bzr', 'co', '--lightweight')
# Whether or not the charm is deployed using juju-core.
# If juju-core has been used to deploy the charm, an agent.conf file must
# be present in the charm parent directory.
legacy_juju = lambda: not os.path.exists(
    os.path.join(CURRENT_DIR, '..', 'agent.conf'))
1371 | 109 | |||
1372 | 110 | |||
def _get_build_dependencies():
    """Add the build PPAs and install the deb packages needed to build
    the GUI."""
    log('Installing build dependencies.')
    cmd_log(install_extra_repositories(*BUILD_REPOSITORIES))
    cmd_log(apt_get_install(*DEB_BUILD_DEPENDENCIES))
1378 | 116 | |||
1379 | 117 | |||
def get_api_address(unit_dir):
    """Return the Juju API address stored in the uniter agent.conf file.

    unit_dir: path of this unit's agent directory; machine agent
    directories are expected to be siblings of it.
    Raises IOError if no machine agent configuration is found.
    """
    import yaml  # python-yaml is only installed if juju-core is used.
    # XXX 2013-03-27 frankban bug=1161443:
    # currently the uniter agent.conf file does not include the API
    # address. For now retrieve it from the machine agent file.
    base_dir = os.path.abspath(os.path.join(unit_dir, '..'))
    for dirname in os.listdir(base_dir):
        if dirname.startswith('machine-'):
            agent_conf = os.path.join(base_dir, dirname, 'agent.conf')
            break
    else:
        raise IOError('Juju agent configuration file not found.')
    # safe_load + context manager: yaml.load(open(...)) both leaked the
    # file descriptor and allowed arbitrary Python object construction.
    with open(agent_conf) as conf_file:
        contents = yaml.safe_load(conf_file)
    return contents['apiinfo']['addrs'][0]
1395 | 133 | |||
1396 | 134 | |||
def get_staging_dependencies():
    """Install the deb packages needed by the staging (improv) backend."""
    log('Installing stage dependencies.')
    output = apt_get_install(*DEB_STAGE_DEPENDENCIES)
    cmd_log(output)
1401 | 139 | |||
1402 | 140 | |||
def first_path_in_dir(directory):
    """Return the full path of the first entry found in *directory*.

    Note: os.listdir returns entries in arbitrary order, so the result is
    only deterministic when the directory contains a single entry.
    """
    entries = os.listdir(directory)
    return os.path.join(directory, entries[0])
1406 | 144 | |||
1407 | 145 | |||
1408 | 146 | def _get_by_attr(collection, attr, value): | ||
1409 | 147 | """Return the first item in collection having attr == value. | ||
1410 | 148 | |||
1411 | 149 | Return None if the item is not found. | ||
1412 | 150 | """ | ||
1413 | 151 | for item in collection: | ||
1414 | 152 | if getattr(item, attr) == value: | ||
1415 | 153 | return item | ||
1416 | 154 | |||
1417 | 155 | |||
def get_release_file_url(project, series_name, release_version):
    """Return the URL of the release file hosted in Launchpad.

    The returned URL points to a release file for the given project,
    series name and release version. *project* is a project object as
    returned by launchpadlib; *series_name* and *release_version* are
    strings. When *release_version* is None the latest release is used.

    Raise ValueError if the series, release or file cannot be found.
    """
    series = _get_by_attr(project.series, 'name', series_name)
    if series is None:
        raise ValueError('%r: series not found' % series_name)
    # Launchpad returns releases in reverse date order.
    releases = list(series.releases)
    if not releases:
        raise ValueError('%r: series does not contain releases' % series_name)
    if release_version is not None:
        # Narrow the candidates down to the requested version only.
        release = _get_by_attr(releases, 'version', release_version)
        if release is None:
            raise ValueError('%r: release not found' % release_version)
        releases = [release]
    for candidate in releases:
        for file_ in candidate.files:
            if str(file_).endswith('.tgz'):
                return file_.file_link
    raise ValueError('%r: file not found' % release_version)
1444 | 182 | |||
1445 | 183 | |||
def get_zookeeper_address(agent_file_path):
    """Retrieve the Zookeeper address contained in *agent_file_path*.

    The agent file is expected to contain a line similar to::

        env JUJU_ZOOKEEPER="address"
    """
    key_value = search_file('JUJU_ZOOKEEPER', agent_file_path).strip()
    address = key_value.split('=')[1]
    return address.strip('"')
1456 | 194 | |||
1457 | 195 | |||
@contextmanager
def log_hook():
    """Log when a hook starts and stops its execution.

    Any CalledProcessError raised while the hook runs is logged to
    stdout and then re-raised; the exit message is always logged.
    """
    hook_name = script_name()
    log(">>> Entering {}".format(hook_name))
    try:
        yield
    except CalledProcessError as err:
        log('Exception caught:')
        log(err.output)
        raise
    finally:
        log("<<< Exiting {}".format(hook_name))
1475 | 213 | |||
1476 | 214 | |||
def parse_source(source):
    """Parse the ``juju-gui-source`` option.

    Return a tuple of two elements representing info on how to deploy
    Juju GUI. Examples:
    - ('stable', None): latest stable release;
    - ('stable', '0.1.0'): stable release v0.1.0;
    - ('trunk', None): latest trunk release;
    - ('trunk', '0.1.0+build.1'): trunk release v0.1.0 bzr revision 1;
    - ('branch', 'lp:juju-gui'): release is made from a branch;
    - ('url', 'http://example.com/gui'): release from a downloaded file.
    """
    if source.startswith('url:'):
        url = source[4:]
        # Support file paths, including relative paths.
        if urlparse(url).scheme == '':
            if not url.startswith('/'):
                url = os.path.join(os.path.abspath(CURRENT_DIR), url)
            url = "file://%s" % url
        return 'url', url
    if source in ('stable', 'trunk'):
        return source, None
    if source.startswith(('lp:', 'http://')):
        return 'branch', source
    # Version strings containing a build number come from trunk.
    return ('trunk', source) if 'build' in source else ('stable', source)
1504 | 242 | |||
1505 | 243 | |||
def render_to_file(template_name, context, destination):
    """Render the given *template_name* into *destination* using *context*.

    The tempita template language is used to render contents
    (see http://pythonpaste.org/tempita/).
    *template_name* is the name or path of the template file: either a
    path relative to ``../config`` or an absolute path.
    *destination* is a file path. *context* is a dict-like object.
    """
    template = tempita.Template.from_filename(os.path.abspath(template_name))
    rendered = template.substitute(context)
    with open(destination, 'w') as stream:
        stream.write(rendered)
1520 | 258 | |||
1521 | 259 | |||
# Module-level logger for command output; initialised lazily.
results_log = None


def _setupLogging():
    """Set up the module-level command results logger.

    Safe to call repeatedly: only the first call configures logging.
    """
    global results_log
    if results_log is None:
        options = config()
        logging.basicConfig(
            filename=options['command-log-file'],
            level=logging.INFO,
            format="%(asctime)s: %(name)s@%(levelname)s %(message)s")
        results_log = logging.getLogger('juju-gui')
1535 | 273 | |||
1536 | 274 | |||
def cmd_log(results):
    """Append command *results* (if any) to the command log file."""
    global results_log
    if results:
        if results_log is None:
            _setupLogging()
        # 'results' may be multi-line output: start it on its own line so
        # it does not interleave with the logger timestamp prefix.
        results_log.info('\n' + results)
1546 | 284 | |||
1547 | 285 | |||
def start_improv(staging_env, ssl_cert_path,
                 config_path='/etc/init/juju-api-improv.conf'):
    """Start a simulated juju environment using ``improv.py``."""
    log('Setting up staging start up script.')
    improv_context = {
        'juju_dir': JUJU_DIR,
        'keys': ssl_cert_path,
        'port': API_PORT,
        'staging_env': staging_env,
    }
    render_to_file(
        'config/juju-api-improv.conf.template', improv_context, config_path)
    log('Starting the staging backend.')
    with su('root'):
        service_start(IMPROV)
1562 | 300 | |||
1563 | 301 | |||
def start_agent(
        ssl_cert_path, config_path='/etc/init/juju-api-agent.conf',
        read_only=False):
    """Start the Juju agent and connect to the current environment."""
    # Retrieve the Zookeeper address from the start up script.
    unit_dir = os.path.realpath(os.path.join(CURRENT_DIR, '..'))
    agent_file = '/etc/init/juju-{0}.conf'.format(os.path.basename(unit_dir))
    zookeeper = get_zookeeper_address(agent_file)
    log('Setting up API agent start up script.')
    agent_context = {
        'juju_dir': JUJU_DIR,
        'keys': ssl_cert_path,
        'port': API_PORT,
        'zookeeper': zookeeper,
        'read_only': read_only
    }
    render_to_file(
        'config/juju-api-agent.conf.template', agent_context, config_path)
    log('Starting API agent.')
    with su('root'):
        service_start(AGENT)
1584 | 322 | |||
1585 | 323 | |||
def start_gui(
        console_enabled, login_help, readonly, in_staging, ssl_cert_path,
        charmworld_url, serve_tests, haproxy_path='/etc/haproxy/haproxy.cfg',
        config_js_path=None, secure=True, sandbox=False):
    """Set up and start the Juju GUI server.

    Generate the GUI config.js, the Apache site configuration and the
    haproxy configuration from the given charm options and from the kind
    of Juju (PyJuju or juju-core) used to deploy the charm.
    """
    with su('root'):
        run('chown', '-R', 'ubuntu:', JUJU_GUI_DIR)
    # XXX 2013-02-05 frankban bug=1116320:
    # External insecure resources are still loaded when testing in the
    # debug environment. For now, switch to the production environment if
    # the charm is configured to serve tests.
    if in_staging and not serve_tests:
        build_dirname = 'build-debug'
    else:
        build_dirname = 'build-prod'
    build_dir = os.path.join(JUJU_GUI_DIR, build_dirname)
    log('Generating the Juju GUI configuration file.')
    is_legacy_juju = legacy_juju()
    # Credentials are only pre-filled when running against a simulated
    # backend (PyJuju staging or the sandbox). The original code also
    # assigned (None, None) before this branch: a dead store, removed.
    if (is_legacy_juju and in_staging) or sandbox:
        user, password = 'admin', 'admin'
    else:
        user, password = None, None

    api_backend = 'python' if is_legacy_juju else 'go'
    if secure:
        protocol = 'wss'
    else:
        log('Running in insecure mode! Port 80 will serve unencrypted.')
        protocol = 'ws'

    context = {
        'raw_protocol': protocol,
        'address': unit_get('public-address'),
        'console_enabled': json.dumps(console_enabled),
        'login_help': json.dumps(login_help),
        'password': json.dumps(password),
        'api_backend': json.dumps(api_backend),
        'readonly': json.dumps(readonly),
        'user': json.dumps(user),
        'protocol': json.dumps(protocol),
        'sandbox': json.dumps(sandbox),
        'charmworld_url': json.dumps(charmworld_url),
    }
    if config_js_path is None:
        config_js_path = os.path.join(
            build_dir, 'juju-ui', 'assets', 'config.js')
    render_to_file('config/config.js.template', context, config_js_path)

    write_apache_config(build_dir, serve_tests)

    log('Generating haproxy configuration file.')
    if is_legacy_juju:
        # The PyJuju API agent is listening on localhost.
        api_address = '127.0.0.1:{0}'.format(API_PORT)
    else:
        # Retrieve the juju-core API server address.
        api_address = get_api_address(os.path.join(CURRENT_DIR, '..'))
    context = {
        'api_address': api_address,
        'api_pem': JUJU_PEM,
        'legacy_juju': is_legacy_juju,
        'ssl_cert_path': ssl_cert_path,
        # In PyJuju environments, use the same certificate for both HTTPS and
        # WebSocket connections. In juju-core the system already has the proper
        # certificate installed.
        'web_pem': JUJU_PEM,
        'web_port': WEB_PORT,
        'secure': secure
    }
    render_to_file('config/haproxy.cfg.template', context, haproxy_path)
    log('Starting Juju GUI.')
1658 | 396 | |||
1659 | 397 | |||
def write_apache_config(build_dir, serve_tests=False):
    """Render the Apache ports and site configuration files."""
    log('Generating the apache site configuration file.')
    apache_context = {
        'port': WEB_PORT,
        'serve_tests': serve_tests,
        'server_root': build_dir,
        'tests_root': os.path.join(JUJU_GUI_DIR, 'test', ''),
    }
    for template, destination in (
            ('config/apache-ports.template', JUJU_GUI_PORTS),
            ('config/apache-site.template', JUJU_GUI_SITE)):
        render_to_file(template, apache_context, destination)
1670 | 408 | |||
1671 | 409 | |||
def get_npm_cache_archive_url(Launchpad=Launchpad):
    """Figure out the URL of the most recent NPM cache archive on Launchpad."""
    launchpad = Launchpad.login_anonymously('Juju GUI charm', 'production')
    project = launchpad.projects['juju-gui']
    # Passing None as the version selects the most recently created archive.
    return get_release_file_url(project, 'npm-cache', None)
1679 | 417 | |||
1680 | 418 | |||
def prime_npm_cache(npm_cache_url):
    """Download the NPM cache archive and prime the NPM cache with it."""
    # Download the cache archive and then uncompress it into the NPM cache.
    npm_cache_archive = os.path.join(CURRENT_DIR, 'npm-cache.tgz')
    cmd_log(run('curl', '-L', '-o', npm_cache_archive, npm_cache_url))
    npm_cache_dir = os.path.expanduser('~/.npm')
    # The NPM cache directory probably does not exist, so make it if not.
    try:
        os.mkdir(npm_cache_dir)
    except OSError as e:
        # If the directory already exists then ignore the error.
        # ('except OSError, e' is deprecated py2-only syntax; the 'as'
        # form works on python 2.6+ and python 3.)
        if e.errno != errno.EEXIST:
            raise
    uncompress = command('tar', '-x', '-z', '-C', npm_cache_dir, '-f')
    cmd_log(uncompress(npm_cache_archive))
1696 | 434 | |||
1697 | 435 | |||
def fetch_gui(juju_gui_source, logpath):
    """Retrieve the Juju GUI release/branch.

    Return the path of the release tarball to install. Depending on the
    parsed ``juju-gui-source`` option, the tarball is either built from a
    bzr branch, downloaded from a URL, or fetched from Launchpad.
    """
    # Retrieve a Juju GUI release.
    origin, version_or_branch = parse_source(juju_gui_source)
    if origin == 'branch':
        # Make sure we have the dependencies necessary for us to actually
        # make a build.
        _get_build_dependencies()
        # Create a release starting from a branch.
        juju_gui_source_dir = os.path.join(CURRENT_DIR, 'juju-gui-source')
        log('Retrieving Juju GUI source checkout from %s.' % version_or_branch)
        cmd_log(run('rm', '-rf', juju_gui_source_dir))
        cmd_log(bzr_checkout(version_or_branch, juju_gui_source_dir))
        log('Preparing a Juju GUI release.')
        logdir = os.path.dirname(logpath)
        fd, name = tempfile.mkstemp(prefix='make-distfile-', dir=logdir)
        log('Output from "make distfile" sent to %s' % name)
        try:
            with environ(NO_BZR='1'):
                run('make', '-C', juju_gui_source_dir, 'distfile',
                    stdout=fd, stderr=fd)
        finally:
            # mkstemp returns an open OS-level file descriptor: close it
            # once the build output is written (it was leaked before).
            os.close(fd)
        release_tarball = first_path_in_dir(
            os.path.join(juju_gui_source_dir, 'releases'))
    else:
        log('Retrieving Juju GUI release.')
        if origin == 'url':
            file_url = version_or_branch
        else:
            # Retrieve a release from Launchpad.
            launchpad = Launchpad.login_anonymously(
                'Juju GUI charm', 'production')
            project = launchpad.projects['juju-gui']
            file_url = get_release_file_url(project, origin, version_or_branch)
        log('Downloading release file from %s.' % file_url)
        release_tarball = os.path.join(CURRENT_DIR, 'release.tgz')
        cmd_log(run('curl', '-L', '-o', release_tarball, file_url))
    return release_tarball
1734 | 472 | |||
1735 | 473 | |||
def fetch_api(juju_api_branch):
    """Check out the Juju API source branch into JUJU_DIR."""
    log('Retrieving Juju API source checkout.')
    removal_output = run('rm', '-rf', JUJU_DIR)
    cmd_log(removal_output)
    checkout_output = bzr_checkout(juju_api_branch, JUJU_DIR)
    cmd_log(checkout_output)
1742 | 480 | |||
1743 | 481 | |||
def setup_gui(release_tarball):
    """Uncompress the release tarball and point JUJU_GUI_DIR at its contents."""
    log('Installing Juju GUI.')
    release_dir = os.path.join(CURRENT_DIR, 'release')
    cmd_log(run('rm', '-rf', release_dir))
    os.mkdir(release_dir)
    extract = command('tar', '-x', '-z', '-C', release_dir, '-f')
    cmd_log(extract(release_tarball))
    # Link the Juju GUI dir to the contents of the release tarball.
    cmd_log(run('ln', '-sf', first_path_in_dir(release_dir), JUJU_GUI_DIR))
1755 | 493 | |||
1756 | 494 | |||
def setup_apache():
    """Create and enable the Apache site used to serve the GUI."""
    log('Setting up apache.')
    # Create the (initially empty) site and ports files if missing, owned
    # by the ubuntu user so later renders do not need root.
    if not os.path.exists(JUJU_GUI_SITE):
        cmd_log(run('touch', JUJU_GUI_SITE))
        cmd_log(run('chown', 'ubuntu:', JUJU_GUI_SITE))
        cmd_log(
            run('ln', '-s', JUJU_GUI_SITE,
                '/etc/apache2/sites-enabled/juju-gui'))

    if not os.path.exists(JUJU_GUI_PORTS):
        cmd_log(run('touch', JUJU_GUI_PORTS))
        cmd_log(run('chown', 'ubuntu:', JUJU_GUI_PORTS))

    with su('root'):
        run('a2dissite', 'default')
        run('a2ensite', 'juju-gui')
1775 | 513 | |||
def save_or_create_certificates(
        ssl_cert_path, ssl_cert_contents, ssl_key_contents):
    """Generate the SSL certificates.

    If both *ssl_cert_contents* and *ssl_key_contents* are provided, use
    them as certificates; otherwise, generate them.

    Also create a pem file, suitable for use in the haproxy configuration,
    concatenating the key and the certificate files.
    """
    crt_path = os.path.join(ssl_cert_path, 'juju.crt')
    key_path = os.path.join(ssl_cert_path, 'juju.key')
    if not os.path.exists(ssl_cert_path):
        os.makedirs(ssl_cert_path)
    if ssl_cert_contents and ssl_key_contents:
        # Save the provided certificates.
        with open(crt_path, 'w') as cert_file:
            cert_file.write(ssl_cert_contents)
        with open(key_path, 'w') as key_file:
            key_file.write(ssl_key_contents)
    else:
        # Generate certificates.
        # See http://superuser.com/questions/226192/openssl-without-prompt
        cmd_log(run(
            'openssl', 'req', '-new', '-newkey', 'rsa:4096',
            '-days', '365', '-nodes', '-x509', '-subj',
            # These are arbitrary test values for the certificate.
            '/C=GB/ST=Juju/L=GUI/O=Ubuntu/CN=juju.ubuntu.com',
            '-keyout', key_path, '-out', crt_path))
    # Generate the pem file.
    pem_path = os.path.join(ssl_cert_path, JUJU_PEM)
    if os.path.exists(pem_path):
        os.remove(pem_path)
    with open(pem_path, 'w') as pem_file:
        # Close the source files deterministically: the original passed
        # anonymous open() handles, leaving them to garbage collection.
        with open(key_path) as key_file:
            shutil.copyfileobj(key_file, pem_file)
        with open(crt_path) as crt_file:
            shutil.copyfileobj(crt_file, pem_file)
1812 | 550 | |||
1813 | 551 | |||
def find_missing_packages(*packages):
    """Return the subset of *packages* that is not currently installed."""
    cache = apt.Cache()
    missing = set()
    for pkg_name in packages:
        try:
            package = cache[pkg_name]
        except KeyError:
            # Unknown to apt: definitely missing.
            missing.add(pkg_name)
        else:
            if not package.is_installed:
                missing.add(pkg_name)
    return missing
1829 | 567 | |||
1830 | 568 | |||
1831 | 569 | ## Backend support decorators | ||
1832 | 570 | |||
def chain(name):
    """Compose the *name* method of a set of mixin objects into a callable.

    Each mixin's method is called in the context of its mixin instance,
    and its argument is the Backend instance.
    """
    def method(self):
        # Invoke the named method on every mixin that implements it.
        for mixin in self.mixins:
            target = getattr(type(mixin), name, None)
            if target:
                target(mixin, self)

    method.__name__ = name
    return method
1848 | 586 | |||
1849 | 587 | |||
def merge(name):
    """Build a property merging the *name* attribute of every mixin
    into a unified set.

    Only list, tuple and set values contribute to the merged result.
    """
    @property
    def method(self):
        merged = set()
        for mixin in self.mixins:
            value = getattr(type(mixin), name, None)
            if value and isinstance(value, (list, tuple, set)):
                merged.update(value)
        return merged
    return method
1865 | 0 | 603 | ||
1866 | === added directory 'lib/charmhelpers/contrib/saltstack' | |||
1867 | === added file 'lib/charmhelpers/contrib/saltstack/__init__.py' | |||
1868 | --- lib/charmhelpers/contrib/saltstack/__init__.py 1970-01-01 00:00:00 +0000 | |||
1869 | +++ lib/charmhelpers/contrib/saltstack/__init__.py 2013-11-21 22:43:22 +0000 | |||
1870 | @@ -0,0 +1,149 @@ | |||
1871 | 1 | """Charm Helpers saltstack - declare the state of your machines. | ||
1872 | 2 | |||
1873 | 3 | This helper enables you to declare your machine state, rather than | ||
1874 | 4 | program it procedurally (and have to test each change to your procedures). | ||
1875 | 5 | Your install hook can be as simple as: | ||
1876 | 6 | |||
1877 | 7 | {{{ | ||
1878 | 8 | from charmhelpers.contrib.saltstack import ( | ||
1879 | 9 | install_salt_support, | ||
1880 | 10 | update_machine_state, | ||
1881 | 11 | ) | ||
1882 | 12 | |||
1883 | 13 | |||
1884 | 14 | def install(): | ||
1885 | 15 | install_salt_support() | ||
1886 | 16 | update_machine_state('machine_states/dependencies.yaml') | ||
1887 | 17 | update_machine_state('machine_states/installed.yaml') | ||
1888 | 18 | }}} | ||
1889 | 19 | |||
1890 | 20 | and won't need to change (nor will its tests) when you change the machine | ||
1891 | 21 | state. | ||
1892 | 22 | |||
1893 | 23 | It's using a python package called salt-minion which allows various formats for | ||
1894 | 24 | specifying resources, such as: | ||
1895 | 25 | |||
1896 | 26 | {{{ | ||
1897 | 27 | /srv/{{ basedir }}: | ||
1898 | 28 | file.directory: | ||
1899 | 29 | - group: ubunet | ||
1900 | 30 | - user: ubunet | ||
1901 | 31 | - require: | ||
1902 | 32 | - user: ubunet | ||
1903 | 33 | - recurse: | ||
1904 | 34 | - user | ||
1905 | 35 | - group | ||
1906 | 36 | |||
1907 | 37 | ubunet: | ||
1908 | 38 | group.present: | ||
1909 | 39 | - gid: 1500 | ||
1910 | 40 | user.present: | ||
1911 | 41 | - uid: 1500 | ||
1912 | 42 | - gid: 1500 | ||
1913 | 43 | - createhome: False | ||
1914 | 44 | - require: | ||
1915 | 45 | - group: ubunet | ||
1916 | 46 | }}} | ||
1917 | 47 | |||
1918 | 48 | The docs for all the different state definitions are at: | ||
1919 | 49 | http://docs.saltstack.com/ref/states/all/ | ||
1920 | 50 | |||
1921 | 51 | |||
1922 | 52 | TODO: | ||
1923 | 53 | * Add test helpers which will ensure that machine state definitions | ||
1924 | 54 | are functionally (but not necessarily logically) correct (ie. getting | ||
1925 | 55 | salt to parse all state defs).
1926 | 56 | * Add a link to a public bootstrap charm example / blogpost. | ||
1927 | 57 | * Find a way to obviate the need to use the grains['charm_dir'] syntax | ||
1928 | 58 | in templates. | ||
1929 | 59 | """ | ||
1930 | 60 | # Copyright 2013 Canonical Ltd. | ||
1931 | 61 | # | ||
1932 | 62 | # Authors: | ||
1933 | 63 | # Charm Helpers Developers <juju@lists.ubuntu.com> | ||
1934 | 64 | import os | ||
1935 | 65 | import subprocess | ||
1936 | 66 | import yaml | ||
1937 | 67 | |||
1938 | 68 | import charmhelpers.core.host | ||
1939 | 69 | import charmhelpers.core.hookenv | ||
1940 | 70 | |||
1941 | 71 | |||
# Root of the charm, provided by juju in the hook environment
# (empty string when CHARM_DIR is unset, e.g. outside a hook).
charm_dir = os.environ.get('CHARM_DIR', '')
# Path where the salt grains (key/value facts) file is written.
salt_grains_path = '/etc/salt/grains'
1944 | 74 | |||
1945 | 75 | |||
def install_salt_support(from_ppa=True):
    """Installs the salt-minion helper for machine state.

    By default the salt-minion package is installed from
    the saltstack PPA. If from_ppa is False you must ensure
    that the salt-minion package is available in the apt cache.
    """
    # Imported locally: this module only imports charmhelpers.core.*,
    # so the charmhelpers.fetch attribute referenced below was never
    # bound and the original call failed at runtime.
    import charmhelpers.fetch
    if from_ppa:
        subprocess.check_call([
            '/usr/bin/add-apt-repository',
            '--yes',
            'ppa:saltstack/salt',
        ])
        subprocess.check_call(['/usr/bin/apt-get', 'update'])
    # We install salt-common as salt-minion would run the salt-minion
    # daemon.
    charmhelpers.fetch.apt_install('salt-common')
1963 | 93 | |||
1964 | 94 | |||
def update_machine_state(state_path):
    """Update the machine state using the provided state declaration."""
    # Refresh the salt grains with the current juju config/relation data.
    juju_state_to_yaml(salt_grains_path)
    salt_command = ['salt-call', '--local', 'state.template', state_path]
    subprocess.check_call(salt_command)
1974 | 104 | |||
1975 | 105 | |||
def juju_state_to_yaml(yaml_path, namespace_separator=':'):
    """Update the juju config and state in a yaml file.

    This includes any current relation-get data, and the charm
    directory.

    :param yaml_path: destination file; merged with any existing content.
    :param namespace_separator: joins the relation type and the key in
        the generated variable names (e.g. ``website:port``).
    """
    config = charmhelpers.core.hookenv.config()

    # Add the charm_dir which we will need to refer to charm
    # file resources etc.
    config['charm_dir'] = charm_dir
    config['local_unit'] = charmhelpers.core.hookenv.local_unit()

    # Add any relation data prefixed with the relation type.
    # Dashes in the relation type are normalised to underscores so the
    # names are usable as salt/jinja identifiers.
    relation_type = charmhelpers.core.hookenv.relation_type()
    if relation_type is not None:
        relation_data = charmhelpers.core.hookenv.relation_get()
        relation_data = dict(
            ("{relation_type}{namespace_separator}{key}".format(
                relation_type=relation_type.replace('-', '_'),
                key=key,
                namespace_separator=namespace_separator), val)
            for key, val in relation_data.items())
        config.update(relation_data)

    # Don't use non-standard tags for unicode which will not
    # work when salt uses yaml.safe_load.
    # NOTE(review): 'unicode' makes this Python 2 only.
    yaml.add_representer(unicode, lambda dumper,
                         value: dumper.represent_scalar(
                             u'tag:yaml.org,2002:str', value))

    yaml_dir = os.path.dirname(yaml_path)
    if not os.path.exists(yaml_dir):
        os.makedirs(yaml_dir)

    # Merge with (rather than replace) any variables already on disk.
    # NOTE(review): yaml.load without a safe loader can construct
    # arbitrary objects; acceptable only because the file is charm-owned.
    if os.path.exists(yaml_path):
        with open(yaml_path, "r") as existing_vars_file:
            existing_vars = yaml.load(existing_vars_file.read())
    else:
        existing_vars = {}

    existing_vars.update(config)
    with open(yaml_path, "w+") as fp:
        fp.write(yaml.dump(existing_vars))
2020 | 0 | 150 | ||
2021 | === added directory 'lib/charmhelpers/core' | |||
2022 | === added file 'lib/charmhelpers/core/__init__.py' | |||
2023 | === added file 'lib/charmhelpers/core/hookenv.py' | |||
2024 | --- lib/charmhelpers/core/hookenv.py 1970-01-01 00:00:00 +0000 | |||
2025 | +++ lib/charmhelpers/core/hookenv.py 2013-11-21 22:43:22 +0000 | |||
2026 | @@ -0,0 +1,395 @@ | |||
2027 | 1 | "Interactions with the Juju environment" | ||
2028 | 2 | # Copyright 2013 Canonical Ltd. | ||
2029 | 3 | # | ||
2030 | 4 | # Authors: | ||
2031 | 5 | # Charm Helpers Developers <juju@lists.ubuntu.com> | ||
2032 | 6 | |||
2033 | 7 | import os | ||
2034 | 8 | import json | ||
2035 | 9 | import yaml | ||
2036 | 10 | import subprocess | ||
2037 | 11 | import UserDict | ||
2038 | 12 | from subprocess import CalledProcessError | ||
2039 | 13 | |||
# Log levels accepted by the juju-log command (see log() below).
CRITICAL = "CRITICAL"
ERROR = "ERROR"
WARNING = "WARNING"
INFO = "INFO"
DEBUG = "DEBUG"
# Sentinel used to distinguish "attribute absent" from a stored None.
MARKER = object()

# Module-level memoization store shared by @cached and flush().
cache = {}
2048 | 22 | |||
2049 | 23 | |||
def cached(func):
    """Cache return values for multiple executions of func + args

    For example:

        @cached
        def unit_get(attribute):
            pass

        unit_get('test')

    will cache the result of unit_get + 'test' for future calls.
    """
    from functools import wraps

    # wraps preserves func's __name__/__doc__ on the wrapper; the bare
    # closure used before clobbered the decorated function's metadata.
    @wraps(func)
    def wrapper(*args, **kwargs):
        global cache
        key = str((func, args, kwargs))
        try:
            return cache[key]
        except KeyError:
            res = func(*args, **kwargs)
            cache[key] = res
            return res
    return wrapper
2073 | 47 | |||
2074 | 48 | |||
2075 | 49 | def flush(key): | ||
2076 | 50 | """Flushes any entries from function cache where the | ||
2077 | 51 | key is found in the function+args """ | ||
2078 | 52 | flush_list = [] | ||
2079 | 53 | for item in cache: | ||
2080 | 54 | if key in item: | ||
2081 | 55 | flush_list.append(item) | ||
2082 | 56 | for item in flush_list: | ||
2083 | 57 | del cache[item] | ||
2084 | 58 | |||
2085 | 59 | |||
2086 | 60 | def log(message, level=None): | ||
2087 | 61 | """Write a message to the juju log""" | ||
2088 | 62 | command = ['juju-log'] | ||
2089 | 63 | if level: | ||
2090 | 64 | command += ['-l', level] | ||
2091 | 65 | command += [message] | ||
2092 | 66 | subprocess.call(command) | ||
2093 | 67 | |||
2094 | 68 | |||
2095 | 69 | class Serializable(UserDict.IterableUserDict): | ||
2096 | 70 | """Wrapper, an object that can be serialized to yaml or json""" | ||
2097 | 71 | |||
2098 | 72 | def __init__(self, obj): | ||
2099 | 73 | # wrap the object | ||
2100 | 74 | UserDict.IterableUserDict.__init__(self) | ||
2101 | 75 | self.data = obj | ||
2102 | 76 | |||
2103 | 77 | def __getattr__(self, attr): | ||
2104 | 78 | # See if this object has attribute. | ||
2105 | 79 | if attr in ("json", "yaml", "data"): | ||
2106 | 80 | return self.__dict__[attr] | ||
2107 | 81 | # Check for attribute in wrapped object. | ||
2108 | 82 | got = getattr(self.data, attr, MARKER) | ||
2109 | 83 | if got is not MARKER: | ||
2110 | 84 | return got | ||
2111 | 85 | # Proxy to the wrapped object via dict interface. | ||
2112 | 86 | try: | ||
2113 | 87 | return self.data[attr] | ||
2114 | 88 | except KeyError: | ||
2115 | 89 | raise AttributeError(attr) | ||
2116 | 90 | |||
2117 | 91 | def __getstate__(self): | ||
2118 | 92 | # Pickle as a standard dictionary. | ||
2119 | 93 | return self.data | ||
2120 | 94 | |||
2121 | 95 | def __setstate__(self, state): | ||
2122 | 96 | # Unpickle into our wrapper. | ||
2123 | 97 | self.data = state | ||
2124 | 98 | |||
2125 | 99 | def json(self): | ||
2126 | 100 | """Serialize the object to json""" | ||
2127 | 101 | return json.dumps(self.data) | ||
2128 | 102 | |||
2129 | 103 | def yaml(self): | ||
2130 | 104 | """Serialize the object to yaml""" | ||
2131 | 105 | return yaml.dump(self.data) | ||
2132 | 106 | |||
2133 | 107 | |||
2134 | 108 | def execution_environment(): | ||
2135 | 109 | """A convenient bundling of the current execution context""" | ||
2136 | 110 | context = {} | ||
2137 | 111 | context['conf'] = config() | ||
2138 | 112 | if relation_id(): | ||
2139 | 113 | context['reltype'] = relation_type() | ||
2140 | 114 | context['relid'] = relation_id() | ||
2141 | 115 | context['rel'] = relation_get() | ||
2142 | 116 | context['unit'] = local_unit() | ||
2143 | 117 | context['rels'] = relations() | ||
2144 | 118 | context['env'] = os.environ | ||
2145 | 119 | return context | ||
2146 | 120 | |||
2147 | 121 | |||
2148 | 122 | def in_relation_hook(): | ||
2149 | 123 | """Determine whether we're running in a relation hook""" | ||
2150 | 124 | return 'JUJU_RELATION' in os.environ | ||
2151 | 125 | |||
2152 | 126 | |||
2153 | 127 | def relation_type(): | ||
2154 | 128 | """The scope for the current relation hook""" | ||
2155 | 129 | return os.environ.get('JUJU_RELATION', None) | ||
2156 | 130 | |||
2157 | 131 | |||
2158 | 132 | def relation_id(): | ||
2159 | 133 | """The relation ID for the current relation hook""" | ||
2160 | 134 | return os.environ.get('JUJU_RELATION_ID', None) | ||
2161 | 135 | |||
2162 | 136 | |||
2163 | 137 | def local_unit(): | ||
2164 | 138 | """Local unit ID""" | ||
2165 | 139 | return os.environ['JUJU_UNIT_NAME'] | ||
2166 | 140 | |||
2167 | 141 | |||
2168 | 142 | def remote_unit(): | ||
2169 | 143 | """The remote unit for the current relation hook""" | ||
2170 | 144 | return os.environ['JUJU_REMOTE_UNIT'] | ||
2171 | 145 | |||
2172 | 146 | |||
2173 | 147 | def service_name(): | ||
2174 | 148 | """The name service group this unit belongs to""" | ||
2175 | 149 | return local_unit().split('/')[0] | ||
2176 | 150 | |||
2177 | 151 | |||
2178 | 152 | @cached | ||
2179 | 153 | def config(scope=None): | ||
2180 | 154 | """Juju charm configuration""" | ||
2181 | 155 | config_cmd_line = ['config-get'] | ||
2182 | 156 | if scope is not None: | ||
2183 | 157 | config_cmd_line.append(scope) | ||
2184 | 158 | config_cmd_line.append('--format=json') | ||
2185 | 159 | try: | ||
2186 | 160 | return json.loads(subprocess.check_output(config_cmd_line)) | ||
2187 | 161 | except ValueError: | ||
2188 | 162 | return None | ||
2189 | 163 | |||
2190 | 164 | |||
2191 | 165 | @cached | ||
2192 | 166 | def relation_get(attribute=None, unit=None, rid=None): | ||
2193 | 167 | """Get relation information""" | ||
2194 | 168 | _args = ['relation-get', '--format=json'] | ||
2195 | 169 | if rid: | ||
2196 | 170 | _args.append('-r') | ||
2197 | 171 | _args.append(rid) | ||
2198 | 172 | _args.append(attribute or '-') | ||
2199 | 173 | if unit: | ||
2200 | 174 | _args.append(unit) | ||
2201 | 175 | try: | ||
2202 | 176 | return json.loads(subprocess.check_output(_args)) | ||
2203 | 177 | except ValueError: | ||
2204 | 178 | return None | ||
2205 | 179 | except CalledProcessError, e: | ||
2206 | 180 | if e.returncode == 2: | ||
2207 | 181 | return None | ||
2208 | 182 | raise | ||
2209 | 183 | |||
2210 | 184 | |||
2211 | 185 | def relation_set(relation_id=None, relation_settings={}, **kwargs): | ||
2212 | 186 | """Set relation information for the current unit""" | ||
2213 | 187 | relation_cmd_line = ['relation-set'] | ||
2214 | 188 | if relation_id is not None: | ||
2215 | 189 | relation_cmd_line.extend(('-r', relation_id)) | ||
2216 | 190 | for k, v in (relation_settings.items() + kwargs.items()): | ||
2217 | 191 | if v is None: | ||
2218 | 192 | relation_cmd_line.append('{}='.format(k)) | ||
2219 | 193 | else: | ||
2220 | 194 | relation_cmd_line.append('{}={}'.format(k, v)) | ||
2221 | 195 | subprocess.check_call(relation_cmd_line) | ||
2222 | 196 | # Flush cache of any relation-gets for local unit | ||
2223 | 197 | flush(local_unit()) | ||
2224 | 198 | |||
2225 | 199 | |||
2226 | 200 | @cached | ||
2227 | 201 | def relation_ids(reltype=None): | ||
2228 | 202 | """A list of relation_ids""" | ||
2229 | 203 | reltype = reltype or relation_type() | ||
2230 | 204 | relid_cmd_line = ['relation-ids', '--format=json'] | ||
2231 | 205 | if reltype is not None: | ||
2232 | 206 | relid_cmd_line.append(reltype) | ||
2233 | 207 | return json.loads(subprocess.check_output(relid_cmd_line)) or [] | ||
2234 | 208 | return [] | ||
2235 | 209 | |||
2236 | 210 | |||
2237 | 211 | @cached | ||
2238 | 212 | def related_units(relid=None): | ||
2239 | 213 | """A list of related units""" | ||
2240 | 214 | relid = relid or relation_id() | ||
2241 | 215 | units_cmd_line = ['relation-list', '--format=json'] | ||
2242 | 216 | if relid is not None: | ||
2243 | 217 | units_cmd_line.extend(('-r', relid)) | ||
2244 | 218 | return json.loads(subprocess.check_output(units_cmd_line)) or [] | ||
2245 | 219 | |||
2246 | 220 | |||
2247 | 221 | @cached | ||
2248 | 222 | def relation_for_unit(unit=None, rid=None): | ||
2249 | 223 | """Get the json represenation of a unit's relation""" | ||
2250 | 224 | unit = unit or remote_unit() | ||
2251 | 225 | relation = relation_get(unit=unit, rid=rid) | ||
2252 | 226 | for key in relation: | ||
2253 | 227 | if key.endswith('-list'): | ||
2254 | 228 | relation[key] = relation[key].split() | ||
2255 | 229 | relation['__unit__'] = unit | ||
2256 | 230 | return relation | ||
2257 | 231 | |||
2258 | 232 | |||
2259 | 233 | @cached | ||
2260 | 234 | def relations_for_id(relid=None): | ||
2261 | 235 | """Get relations of a specific relation ID""" | ||
2262 | 236 | relation_data = [] | ||
2263 | 237 | relid = relid or relation_ids() | ||
2264 | 238 | for unit in related_units(relid): | ||
2265 | 239 | unit_data = relation_for_unit(unit, relid) | ||
2266 | 240 | unit_data['__relid__'] = relid | ||
2267 | 241 | relation_data.append(unit_data) | ||
2268 | 242 | return relation_data | ||
2269 | 243 | |||
2270 | 244 | |||
2271 | 245 | @cached | ||
2272 | 246 | def relations_of_type(reltype=None): | ||
2273 | 247 | """Get relations of a specific type""" | ||
2274 | 248 | relation_data = [] | ||
2275 | 249 | reltype = reltype or relation_type() | ||
2276 | 250 | for relid in relation_ids(reltype): | ||
2277 | 251 | for relation in relations_for_id(relid): | ||
2278 | 252 | relation['__relid__'] = relid | ||
2279 | 253 | relation_data.append(relation) | ||
2280 | 254 | return relation_data | ||
2281 | 255 | |||
2282 | 256 | |||
2283 | 257 | @cached | ||
2284 | 258 | def relation_types(): | ||
2285 | 259 | """Get a list of relation types supported by this charm""" | ||
2286 | 260 | charmdir = os.environ.get('CHARM_DIR', '') | ||
2287 | 261 | mdf = open(os.path.join(charmdir, 'metadata.yaml')) | ||
2288 | 262 | md = yaml.safe_load(mdf) | ||
2289 | 263 | rel_types = [] | ||
2290 | 264 | for key in ('provides', 'requires', 'peers'): | ||
2291 | 265 | section = md.get(key) | ||
2292 | 266 | if section: | ||
2293 | 267 | rel_types.extend(section.keys()) | ||
2294 | 268 | mdf.close() | ||
2295 | 269 | return rel_types | ||
2296 | 270 | |||
2297 | 271 | |||
2298 | 272 | @cached | ||
2299 | 273 | def relations(): | ||
2300 | 274 | """Get a nested dictionary of relation data for all related units""" | ||
2301 | 275 | rels = {} | ||
2302 | 276 | for reltype in relation_types(): | ||
2303 | 277 | relids = {} | ||
2304 | 278 | for relid in relation_ids(reltype): | ||
2305 | 279 | units = {local_unit(): relation_get(unit=local_unit(), rid=relid)} | ||
2306 | 280 | for unit in related_units(relid): | ||
2307 | 281 | reldata = relation_get(unit=unit, rid=relid) | ||
2308 | 282 | units[unit] = reldata | ||
2309 | 283 | relids[relid] = units | ||
2310 | 284 | rels[reltype] = relids | ||
2311 | 285 | return rels | ||
2312 | 286 | |||
2313 | 287 | |||
2314 | 288 | @cached | ||
2315 | 289 | def is_relation_made(relation, keys='private-address'): | ||
2316 | 290 | ''' | ||
2317 | 291 | Determine whether a relation is established by checking for | ||
2318 | 292 | presence of key(s). If a list of keys is provided, they | ||
2319 | 293 | must all be present for the relation to be identified as made | ||
2320 | 294 | ''' | ||
2321 | 295 | if isinstance(keys, str): | ||
2322 | 296 | keys = [keys] | ||
2323 | 297 | for r_id in relation_ids(relation): | ||
2324 | 298 | for unit in related_units(r_id): | ||
2325 | 299 | context = {} | ||
2326 | 300 | for k in keys: | ||
2327 | 301 | context[k] = relation_get(k, rid=r_id, | ||
2328 | 302 | unit=unit) | ||
2329 | 303 | if None not in context.values(): | ||
2330 | 304 | return True | ||
2331 | 305 | return False | ||
2332 | 306 | |||
2333 | 307 | |||
2334 | 308 | def open_port(port, protocol="TCP"): | ||
2335 | 309 | """Open a service network port""" | ||
2336 | 310 | _args = ['open-port'] | ||
2337 | 311 | _args.append('{}/{}'.format(port, protocol)) | ||
2338 | 312 | subprocess.check_call(_args) | ||
2339 | 313 | |||
2340 | 314 | |||
2341 | 315 | def close_port(port, protocol="TCP"): | ||
2342 | 316 | """Close a service network port""" | ||
2343 | 317 | _args = ['close-port'] | ||
2344 | 318 | _args.append('{}/{}'.format(port, protocol)) | ||
2345 | 319 | subprocess.check_call(_args) | ||
2346 | 320 | |||
2347 | 321 | |||
2348 | 322 | @cached | ||
2349 | 323 | def unit_get(attribute): | ||
2350 | 324 | """Get the unit ID for the remote unit""" | ||
2351 | 325 | _args = ['unit-get', '--format=json', attribute] | ||
2352 | 326 | try: | ||
2353 | 327 | return json.loads(subprocess.check_output(_args)) | ||
2354 | 328 | except ValueError: | ||
2355 | 329 | return None | ||
2356 | 330 | |||
2357 | 331 | |||
2358 | 332 | def unit_private_ip(): | ||
2359 | 333 | """Get this unit's private IP address""" | ||
2360 | 334 | return unit_get('private-address') | ||
2361 | 335 | |||
2362 | 336 | |||
2363 | 337 | class UnregisteredHookError(Exception): | ||
2364 | 338 | """Raised when an undefined hook is called""" | ||
2365 | 339 | pass | ||
2366 | 340 | |||
2367 | 341 | |||
2368 | 342 | class Hooks(object): | ||
2369 | 343 | """A convenient handler for hook functions. | ||
2370 | 344 | |||
2371 | 345 | Example: | ||
2372 | 346 | hooks = Hooks() | ||
2373 | 347 | |||
2374 | 348 | # register a hook, taking its name from the function name | ||
2375 | 349 | @hooks.hook() | ||
2376 | 350 | def install(): | ||
2377 | 351 | ... | ||
2378 | 352 | |||
2379 | 353 | # register a hook, providing a custom hook name | ||
2380 | 354 | @hooks.hook("config-changed") | ||
2381 | 355 | def config_changed(): | ||
2382 | 356 | ... | ||
2383 | 357 | |||
2384 | 358 | if __name__ == "__main__": | ||
2385 | 359 | # execute a hook based on the name the program is called by | ||
2386 | 360 | hooks.execute(sys.argv) | ||
2387 | 361 | """ | ||
2388 | 362 | |||
2389 | 363 | def __init__(self): | ||
2390 | 364 | super(Hooks, self).__init__() | ||
2391 | 365 | self._hooks = {} | ||
2392 | 366 | |||
2393 | 367 | def register(self, name, function): | ||
2394 | 368 | """Register a hook""" | ||
2395 | 369 | self._hooks[name] = function | ||
2396 | 370 | |||
2397 | 371 | def execute(self, args): | ||
2398 | 372 | """Execute a registered hook based on args[0]""" | ||
2399 | 373 | hook_name = os.path.basename(args[0]) | ||
2400 | 374 | if hook_name in self._hooks: | ||
2401 | 375 | self._hooks[hook_name]() | ||
2402 | 376 | else: | ||
2403 | 377 | raise UnregisteredHookError(hook_name) | ||
2404 | 378 | |||
2405 | 379 | def hook(self, *hook_names): | ||
2406 | 380 | """Decorator, registering them as hooks""" | ||
2407 | 381 | def wrapper(decorated): | ||
2408 | 382 | for hook_name in hook_names: | ||
2409 | 383 | self.register(hook_name, decorated) | ||
2410 | 384 | else: | ||
2411 | 385 | self.register(decorated.__name__, decorated) | ||
2412 | 386 | if '_' in decorated.__name__: | ||
2413 | 387 | self.register( | ||
2414 | 388 | decorated.__name__.replace('_', '-'), decorated) | ||
2415 | 389 | return decorated | ||
2416 | 390 | return wrapper | ||
2417 | 391 | |||
2418 | 392 | |||
2419 | 393 | def charm_dir(): | ||
2420 | 394 | """Return the root directory of the current charm""" | ||
2421 | 395 | return os.environ.get('CHARM_DIR') | ||
2422 | 0 | 396 | ||
2423 | === added file 'lib/charmhelpers/core/host.py' | |||
2424 | --- lib/charmhelpers/core/host.py 1970-01-01 00:00:00 +0000 | |||
2425 | +++ lib/charmhelpers/core/host.py 2013-11-21 22:43:22 +0000 | |||
2426 | @@ -0,0 +1,281 @@ | |||
2427 | 1 | """Tools for working with the host system""" | ||
2428 | 2 | # Copyright 2012 Canonical Ltd. | ||
2429 | 3 | # | ||
2430 | 4 | # Authors: | ||
2431 | 5 | # Nick Moffitt <nick.moffitt@canonical.com> | ||
2432 | 6 | # Matthew Wedgwood <matthew.wedgwood@canonical.com> | ||
2433 | 7 | |||
2434 | 8 | import os | ||
2435 | 9 | import pwd | ||
2436 | 10 | import grp | ||
2437 | 11 | import random | ||
2438 | 12 | import string | ||
2439 | 13 | import subprocess | ||
2440 | 14 | import hashlib | ||
2441 | 15 | |||
2442 | 16 | from collections import OrderedDict | ||
2443 | 17 | |||
2444 | 18 | from hookenv import log | ||
2445 | 19 | |||
2446 | 20 | |||
2447 | 21 | def service_start(service_name): | ||
2448 | 22 | """Start a system service""" | ||
2449 | 23 | return service('start', service_name) | ||
2450 | 24 | |||
2451 | 25 | |||
2452 | 26 | def service_stop(service_name): | ||
2453 | 27 | """Stop a system service""" | ||
2454 | 28 | return service('stop', service_name) | ||
2455 | 29 | |||
2456 | 30 | |||
2457 | 31 | def service_restart(service_name): | ||
2458 | 32 | """Restart a system service""" | ||
2459 | 33 | return service('restart', service_name) | ||
2460 | 34 | |||
2461 | 35 | |||
2462 | 36 | def service_reload(service_name, restart_on_failure=False): | ||
2463 | 37 | """Reload a system service, optionally falling back to restart if reload fails""" | ||
2464 | 38 | service_result = service('reload', service_name) | ||
2465 | 39 | if not service_result and restart_on_failure: | ||
2466 | 40 | service_result = service('restart', service_name) | ||
2467 | 41 | return service_result | ||
2468 | 42 | |||
2469 | 43 | |||
2470 | 44 | def service(action, service_name): | ||
2471 | 45 | """Control a system service""" | ||
2472 | 46 | cmd = ['service', service_name, action] | ||
2473 | 47 | return subprocess.call(cmd) == 0 | ||
2474 | 48 | |||
2475 | 49 | |||
2476 | 50 | def service_running(service): | ||
2477 | 51 | """Determine whether a system service is running""" | ||
2478 | 52 | try: | ||
2479 | 53 | output = subprocess.check_output(['service', service, 'status']) | ||
2480 | 54 | except subprocess.CalledProcessError: | ||
2481 | 55 | return False | ||
2482 | 56 | else: | ||
2483 | 57 | if ("start/running" in output or "is running" in output): | ||
2484 | 58 | return True | ||
2485 | 59 | else: | ||
2486 | 60 | return False | ||
2487 | 61 | |||
2488 | 62 | |||
2489 | 63 | def adduser(username, password=None, shell='/bin/bash', system_user=False): | ||
2490 | 64 | """Add a user to the system""" | ||
2491 | 65 | try: | ||
2492 | 66 | user_info = pwd.getpwnam(username) | ||
2493 | 67 | log('user {0} already exists!'.format(username)) | ||
2494 | 68 | except KeyError: | ||
2495 | 69 | log('creating user {0}'.format(username)) | ||
2496 | 70 | cmd = ['useradd'] | ||
2497 | 71 | if system_user or password is None: | ||
2498 | 72 | cmd.append('--system') | ||
2499 | 73 | else: | ||
2500 | 74 | cmd.extend([ | ||
2501 | 75 | '--create-home', | ||
2502 | 76 | '--shell', shell, | ||
2503 | 77 | '--password', password, | ||
2504 | 78 | ]) | ||
2505 | 79 | cmd.append(username) | ||
2506 | 80 | subprocess.check_call(cmd) | ||
2507 | 81 | user_info = pwd.getpwnam(username) | ||
2508 | 82 | return user_info | ||
2509 | 83 | |||
2510 | 84 | |||
2511 | 85 | def add_user_to_group(username, group): | ||
2512 | 86 | """Add a user to a group""" | ||
2513 | 87 | cmd = [ | ||
2514 | 88 | 'gpasswd', '-a', | ||
2515 | 89 | username, | ||
2516 | 90 | group | ||
2517 | 91 | ] | ||
2518 | 92 | log("Adding user {} to group {}".format(username, group)) | ||
2519 | 93 | subprocess.check_call(cmd) | ||
2520 | 94 | |||
2521 | 95 | |||
2522 | 96 | def rsync(from_path, to_path, flags='-r', options=None): | ||
2523 | 97 | """Replicate the contents of a path""" | ||
2524 | 98 | options = options or ['--delete', '--executability'] | ||
2525 | 99 | cmd = ['/usr/bin/rsync', flags] | ||
2526 | 100 | cmd.extend(options) | ||
2527 | 101 | cmd.append(from_path) | ||
2528 | 102 | cmd.append(to_path) | ||
2529 | 103 | log(" ".join(cmd)) | ||
2530 | 104 | return subprocess.check_output(cmd).strip() | ||
2531 | 105 | |||
2532 | 106 | |||
2533 | 107 | def symlink(source, destination): | ||
2534 | 108 | """Create a symbolic link""" | ||
2535 | 109 | log("Symlinking {} as {}".format(source, destination)) | ||
2536 | 110 | cmd = [ | ||
2537 | 111 | 'ln', | ||
2538 | 112 | '-sf', | ||
2539 | 113 | source, | ||
2540 | 114 | destination, | ||
2541 | 115 | ] | ||
2542 | 116 | subprocess.check_call(cmd) | ||
2543 | 117 | |||
2544 | 118 | |||
2545 | 119 | def mkdir(path, owner='root', group='root', perms=0555, force=False): | ||
2546 | 120 | """Create a directory""" | ||
2547 | 121 | log("Making dir {} {}:{} {:o}".format(path, owner, group, | ||
2548 | 122 | perms)) | ||
2549 | 123 | uid = pwd.getpwnam(owner).pw_uid | ||
2550 | 124 | gid = grp.getgrnam(group).gr_gid | ||
2551 | 125 | realpath = os.path.abspath(path) | ||
2552 | 126 | if os.path.exists(realpath): | ||
2553 | 127 | if force and not os.path.isdir(realpath): | ||
2554 | 128 | log("Removing non-directory file {} prior to mkdir()".format(path)) | ||
2555 | 129 | os.unlink(realpath) | ||
2556 | 130 | else: | ||
2557 | 131 | os.makedirs(realpath, perms) | ||
2558 | 132 | os.chown(realpath, uid, gid) | ||
2559 | 133 | |||
2560 | 134 | |||
2561 | 135 | def write_file(path, content, owner='root', group='root', perms=0444): | ||
2562 | 136 | """Create or overwrite a file with the contents of a string""" | ||
2563 | 137 | log("Writing file {} {}:{} {:o}".format(path, owner, group, perms)) | ||
2564 | 138 | uid = pwd.getpwnam(owner).pw_uid | ||
2565 | 139 | gid = grp.getgrnam(group).gr_gid | ||
2566 | 140 | with open(path, 'w') as target: | ||
2567 | 141 | os.fchown(target.fileno(), uid, gid) | ||
2568 | 142 | os.fchmod(target.fileno(), perms) | ||
2569 | 143 | target.write(content) | ||
2570 | 144 | |||
2571 | 145 | |||
2572 | 146 | def mount(device, mountpoint, options=None, persist=False): | ||
2573 | 147 | """Mount a filesystem at a particular mountpoint""" | ||
2574 | 148 | cmd_args = ['mount'] | ||
2575 | 149 | if options is not None: | ||
2576 | 150 | cmd_args.extend(['-o', options]) | ||
2577 | 151 | cmd_args.extend([device, mountpoint]) | ||
2578 | 152 | try: | ||
2579 | 153 | subprocess.check_output(cmd_args) | ||
2580 | 154 | except subprocess.CalledProcessError, e: | ||
2581 | 155 | log('Error mounting {} at {}\n{}'.format(device, mountpoint, e.output)) | ||
2582 | 156 | return False | ||
2583 | 157 | if persist: | ||
2584 | 158 | # TODO: update fstab | ||
2585 | 159 | pass | ||
2586 | 160 | return True | ||
2587 | 161 | |||
2588 | 162 | |||
2589 | 163 | def umount(mountpoint, persist=False): | ||
2590 | 164 | """Unmount a filesystem""" | ||
2591 | 165 | cmd_args = ['umount', mountpoint] | ||
2592 | 166 | try: | ||
2593 | 167 | subprocess.check_output(cmd_args) | ||
2594 | 168 | except subprocess.CalledProcessError, e: | ||
2595 | 169 | log('Error unmounting {}\n{}'.format(mountpoint, e.output)) | ||
2596 | 170 | return False | ||
2597 | 171 | if persist: | ||
2598 | 172 | # TODO: update fstab | ||
2599 | 173 | pass | ||
2600 | 174 | return True | ||
2601 | 175 | |||
2602 | 176 | |||
2603 | 177 | def mounts(): | ||
2604 | 178 | """Get a list of all mounted volumes as [[mountpoint,device],[...]]""" | ||
2605 | 179 | with open('/proc/mounts') as f: | ||
2606 | 180 | # [['/mount/point','/dev/path'],[...]] | ||
2607 | 181 | system_mounts = [m[1::-1] for m in [l.strip().split() | ||
2608 | 182 | for l in f.readlines()]] | ||
2609 | 183 | return system_mounts | ||
2610 | 184 | |||
2611 | 185 | |||
2612 | 186 | def file_hash(path): | ||
2613 | 187 | """Generate a md5 hash of the contents of 'path' or None if not found """ | ||
2614 | 188 | if os.path.exists(path): | ||
2615 | 189 | h = hashlib.md5() | ||
2616 | 190 | with open(path, 'r') as source: | ||
2617 | 191 | h.update(source.read()) # IGNORE:E1101 - it does have update | ||
2618 | 192 | return h.hexdigest() | ||
2619 | 193 | else: | ||
2620 | 194 | return None | ||
2621 | 195 | |||
2622 | 196 | |||
2623 | 197 | def restart_on_change(restart_map): | ||
2624 | 198 | """Restart services based on configuration files changing | ||
2625 | 199 | |||
2626 | 200 | This function is used a decorator, for example | ||
2627 | 201 | |||
2628 | 202 | @restart_on_change({ | ||
2629 | 203 | '/etc/ceph/ceph.conf': [ 'cinder-api', 'cinder-volume' ] | ||
2630 | 204 | }) | ||
2631 | 205 | def ceph_client_changed(): | ||
2632 | 206 | ... | ||
2633 | 207 | |||
2634 | 208 | In this example, the cinder-api and cinder-volume services | ||
2635 | 209 | would be restarted if /etc/ceph/ceph.conf is changed by the | ||
2636 | 210 | ceph_client_changed function. | ||
2637 | 211 | """ | ||
2638 | 212 | def wrap(f): | ||
2639 | 213 | def wrapped_f(*args): | ||
2640 | 214 | checksums = {} | ||
2641 | 215 | for path in restart_map: | ||
2642 | 216 | checksums[path] = file_hash(path) | ||
2643 | 217 | f(*args) | ||
2644 | 218 | restarts = [] | ||
2645 | 219 | for path in restart_map: | ||
2646 | 220 | if checksums[path] != file_hash(path): | ||
2647 | 221 | restarts += restart_map[path] | ||
2648 | 222 | for service_name in list(OrderedDict.fromkeys(restarts)): | ||
2649 | 223 | service('restart', service_name) | ||
2650 | 224 | return wrapped_f | ||
2651 | 225 | return wrap | ||
2652 | 226 | |||
2653 | 227 | |||
2654 | 228 | def lsb_release(): | ||
2655 | 229 | """Return /etc/lsb-release in a dict""" | ||
2656 | 230 | d = {} | ||
2657 | 231 | with open('/etc/lsb-release', 'r') as lsb: | ||
2658 | 232 | for l in lsb: | ||
2659 | 233 | k, v = l.split('=') | ||
2660 | 234 | d[k.strip()] = v.strip() | ||
2661 | 235 | return d | ||
2662 | 236 | |||
2663 | 237 | |||
2664 | 238 | def pwgen(length=None): | ||
2665 | 239 | """Generate a random pasword.""" | ||
2666 | 240 | if length is None: | ||
2667 | 241 | length = random.choice(range(35, 45)) | ||
2668 | 242 | alphanumeric_chars = [ | ||
2669 | 243 | l for l in (string.letters + string.digits) | ||
2670 | 244 | if l not in 'l0QD1vAEIOUaeiou'] | ||
2671 | 245 | random_chars = [ | ||
2672 | 246 | random.choice(alphanumeric_chars) for _ in range(length)] | ||
2673 | 247 | return(''.join(random_chars)) | ||
2674 | 248 | |||
2675 | 249 | |||
2676 | 250 | def list_nics(nic_type): | ||
2677 | 251 | '''Return a list of nics of given type(s)''' | ||
2678 | 252 | if isinstance(nic_type, basestring): | ||
2679 | 253 | int_types = [nic_type] | ||
2680 | 254 | else: | ||
2681 | 255 | int_types = nic_type | ||
2682 | 256 | interfaces = [] | ||
2683 | 257 | for int_type in int_types: | ||
2684 | 258 | cmd = ['ip', 'addr', 'show', 'label', int_type + '*'] | ||
2685 | 259 | ip_output = subprocess.check_output(cmd).split('\n') | ||
2686 | 260 | ip_output = (line for line in ip_output if line) | ||
2687 | 261 | for line in ip_output: | ||
2688 | 262 | if line.split()[1].startswith(int_type): | ||
2689 | 263 | interfaces.append(line.split()[1].replace(":", "")) | ||
2690 | 264 | return interfaces | ||
2691 | 265 | |||
2692 | 266 | |||
2693 | 267 | def set_nic_mtu(nic, mtu): | ||
2694 | 268 | '''Set MTU on a network interface''' | ||
2695 | 269 | cmd = ['ip', 'link', 'set', nic, 'mtu', mtu] | ||
2696 | 270 | subprocess.check_call(cmd) | ||
2697 | 271 | |||
2698 | 272 | |||
2699 | 273 | def get_nic_mtu(nic): | ||
2700 | 274 | cmd = ['ip', 'addr', 'show', nic] | ||
2701 | 275 | ip_output = subprocess.check_output(cmd).split('\n') | ||
2702 | 276 | mtu = "" | ||
2703 | 277 | for line in ip_output: | ||
2704 | 278 | words = line.split() | ||
2705 | 279 | if 'mtu' in words: | ||
2706 | 280 | mtu = words[words.index("mtu") + 1] | ||
2707 | 281 | return mtu | ||
2708 | 0 | 282 | ||
2709 | === added directory 'lib/charmhelpers/fetch' | |||
2710 | === added file 'lib/charmhelpers/fetch/__init__.py' | |||
2711 | --- lib/charmhelpers/fetch/__init__.py 1970-01-01 00:00:00 +0000 | |||
2712 | +++ lib/charmhelpers/fetch/__init__.py 2013-11-21 22:43:22 +0000 | |||
2713 | @@ -0,0 +1,271 @@ | |||
2714 | 1 | import importlib | ||
2715 | 2 | from yaml import safe_load | ||
2716 | 3 | from charmhelpers.core.host import ( | ||
2717 | 4 | lsb_release | ||
2718 | 5 | ) | ||
2719 | 6 | from urlparse import ( | ||
2720 | 7 | urlparse, | ||
2721 | 8 | urlunparse, | ||
2722 | 9 | ) | ||
2723 | 10 | import subprocess | ||
2724 | 11 | from charmhelpers.core.hookenv import ( | ||
2725 | 12 | config, | ||
2726 | 13 | log, | ||
2727 | 14 | ) | ||
2728 | 15 | import apt_pkg | ||
2729 | 16 | import os | ||
2730 | 17 | |||
2731 | 18 | CLOUD_ARCHIVE = """# Ubuntu Cloud Archive | ||
2732 | 19 | deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main | ||
2733 | 20 | """ | ||
2734 | 21 | PROPOSED_POCKET = """# Proposed | ||
2735 | 22 | deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted | ||
2736 | 23 | """ | ||
2737 | 24 | CLOUD_ARCHIVE_POCKETS = { | ||
2738 | 25 | # Folsom | ||
2739 | 26 | 'folsom': 'precise-updates/folsom', | ||
2740 | 27 | 'precise-folsom': 'precise-updates/folsom', | ||
2741 | 28 | 'precise-folsom/updates': 'precise-updates/folsom', | ||
2742 | 29 | 'precise-updates/folsom': 'precise-updates/folsom', | ||
2743 | 30 | 'folsom/proposed': 'precise-proposed/folsom', | ||
2744 | 31 | 'precise-folsom/proposed': 'precise-proposed/folsom', | ||
2745 | 32 | 'precise-proposed/folsom': 'precise-proposed/folsom', | ||
2746 | 33 | # Grizzly | ||
2747 | 34 | 'grizzly': 'precise-updates/grizzly', | ||
2748 | 35 | 'precise-grizzly': 'precise-updates/grizzly', | ||
2749 | 36 | 'precise-grizzly/updates': 'precise-updates/grizzly', | ||
2750 | 37 | 'precise-updates/grizzly': 'precise-updates/grizzly', | ||
2751 | 38 | 'grizzly/proposed': 'precise-proposed/grizzly', | ||
2752 | 39 | 'precise-grizzly/proposed': 'precise-proposed/grizzly', | ||
2753 | 40 | 'precise-proposed/grizzly': 'precise-proposed/grizzly', | ||
2754 | 41 | # Havana | ||
2755 | 42 | 'havana': 'precise-updates/havana', | ||
2756 | 43 | 'precise-havana': 'precise-updates/havana', | ||
2757 | 44 | 'precise-havana/updates': 'precise-updates/havana', | ||
2758 | 45 | 'precise-updates/havana': 'precise-updates/havana', | ||
2759 | 46 | 'havana/proposed': 'precise-proposed/havana', | ||
2760 | 47 | 'precies-havana/proposed': 'precise-proposed/havana', | ||
2761 | 48 | 'precise-proposed/havana': 'precise-proposed/havana', | ||
2762 | 49 | } | ||
2763 | 50 | |||
2764 | 51 | |||
2765 | 52 | def filter_installed_packages(packages): | ||
2766 | 53 | """Returns a list of packages that require installation""" | ||
2767 | 54 | apt_pkg.init() | ||
2768 | 55 | cache = apt_pkg.Cache() | ||
2769 | 56 | _pkgs = [] | ||
2770 | 57 | for package in packages: | ||
2771 | 58 | try: | ||
2772 | 59 | p = cache[package] | ||
2773 | 60 | p.current_ver or _pkgs.append(package) | ||
2774 | 61 | except KeyError: | ||
2775 | 62 | log('Package {} has no installation candidate.'.format(package), | ||
2776 | 63 | level='WARNING') | ||
2777 | 64 | _pkgs.append(package) | ||
2778 | 65 | return _pkgs | ||
2779 | 66 | |||
2780 | 67 | |||
2781 | 68 | def apt_install(packages, options=None, fatal=False): | ||
2782 | 69 | """Install one or more packages""" | ||
2783 | 70 | if options is None: | ||
2784 | 71 | options = ['--option=Dpkg::Options::=--force-confold'] | ||
2785 | 72 | |||
2786 | 73 | cmd = ['apt-get', '--assume-yes'] | ||
2787 | 74 | cmd.extend(options) | ||
2788 | 75 | cmd.append('install') | ||
2789 | 76 | if isinstance(packages, basestring): | ||
2790 | 77 | cmd.append(packages) | ||
2791 | 78 | else: | ||
2792 | 79 | cmd.extend(packages) | ||
2793 | 80 | log("Installing {} with options: {}".format(packages, | ||
2794 | 81 | options)) | ||
2795 | 82 | env = os.environ.copy() | ||
2796 | 83 | if 'DEBIAN_FRONTEND' not in env: | ||
2797 | 84 | env['DEBIAN_FRONTEND'] = 'noninteractive' | ||
2798 | 85 | |||
2799 | 86 | if fatal: | ||
2800 | 87 | subprocess.check_call(cmd, env=env) | ||
2801 | 88 | else: | ||
2802 | 89 | subprocess.call(cmd, env=env) | ||
2803 | 90 | |||
2804 | 91 | |||
2805 | 92 | def apt_update(fatal=False): | ||
2806 | 93 | """Update local apt cache""" | ||
2807 | 94 | cmd = ['apt-get', 'update'] | ||
2808 | 95 | if fatal: | ||
2809 | 96 | subprocess.check_call(cmd) | ||
2810 | 97 | else: | ||
2811 | 98 | subprocess.call(cmd) | ||
2812 | 99 | |||
2813 | 100 | |||
2814 | 101 | def apt_purge(packages, fatal=False): | ||
2815 | 102 | """Purge one or more packages""" | ||
2816 | 103 | cmd = ['apt-get', '--assume-yes', 'purge'] | ||
2817 | 104 | if isinstance(packages, basestring): | ||
2818 | 105 | cmd.append(packages) | ||
2819 | 106 | else: | ||
2820 | 107 | cmd.extend(packages) | ||
2821 | 108 | log("Purging {}".format(packages)) | ||
2822 | 109 | if fatal: | ||
2823 | 110 | subprocess.check_call(cmd) | ||
2824 | 111 | else: | ||
2825 | 112 | subprocess.call(cmd) | ||
2826 | 113 | |||
2827 | 114 | |||
def apt_hold(packages, fatal=False):
    """Hold one or more packages"""
    cmd = ['apt-mark', 'hold']
    # A bare string is a single package name; anything else is iterable.
    if isinstance(packages, basestring):
        cmd.append(packages)
    else:
        cmd.extend(packages)
    log("Holding {}".format(packages))
    runner = subprocess.check_call if fatal else subprocess.call
    runner(cmd)
2840 | 127 | |||
2841 | 128 | |||
def add_source(source, key=None):
    """Register an additional apt source, optionally importing a signing key.

    Supported source forms: ppa:, http:, 'deb ', cloud-archive:,
    cloud:<pocket> and the literal string 'proposed'.
    """
    if source.startswith(('ppa:', 'http:', 'deb ', 'cloud-archive:')):
        subprocess.check_call(['add-apt-repository', '--yes', source])
    elif source.startswith('cloud:'):
        # The cloud archive keyring package must be present before the
        # sources.list entry is usable.
        apt_install(filter_installed_packages(['ubuntu-cloud-keyring']),
                    fatal=True)
        pocket = source.split(':')[-1]
        if pocket not in CLOUD_ARCHIVE_POCKETS:
            raise SourceConfigError(
                'Unsupported cloud: source option %s' %
                pocket)
        actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket]
        with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt:
            apt.write(CLOUD_ARCHIVE.format(actual_pocket))
    elif source == 'proposed':
        release = lsb_release()['DISTRIB_CODENAME']
        with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt:
            apt.write(PROPOSED_POCKET.format(release))
    if key:
        # NOTE(review): 'apt-key import' reads a key *file*; confirm callers
        # pass a path rather than raw key material.
        subprocess.check_call(['apt-key', 'import', key])
2865 | 152 | |||
2866 | 153 | |||
class SourceConfigError(Exception):
    """Raised when the configured install sources/keys are invalid."""
2869 | 156 | |||
2870 | 157 | |||
def configure_sources(update=False,
                      sources_var='install_sources',
                      keys_var='install_keys'):
    """
    Configure multiple sources from charm configuration

    Example config:
        install_sources:
          - "ppa:foo"
          - "http://example.com/repo precise main"
        install_keys:
          - null
          - "a1b2c3d4"

    Note that 'null' (a.k.a. None) should not be quoted.

    :param update: when True, run ``apt-get update`` after adding sources.
    :param sources_var: config option holding the source list (YAML).
    :param keys_var: config option holding the matching key list (YAML).
    :raises SourceConfigError: when sources and keys have different lengths.
    """
    sources = safe_load(config(sources_var))
    keys = config(keys_var)
    if keys is not None:
        keys = safe_load(keys)
    if isinstance(sources, basestring) and (
            keys is None or isinstance(keys, basestring)):
        add_source(sources, keys)
    else:
        if keys is None:
            # BUG FIX: a list of sources with no keys configured used to
            # crash on len(None); pair each source with None instead.
            keys = [None] * len(sources)
        if len(sources) != len(keys):
            raise SourceConfigError(
                'Install sources and keys lists are different lengths')
        for src, key in zip(sources, keys):
            add_source(src, key)
    if update:
        apt_update(fatal=True)
2902 | 189 | |||
# Ordering matters here: handlers must run from least- to most-specific
# URL matching.
FETCH_HANDLERS = (
    'charmhelpers.fetch.archiveurl.ArchiveUrlFetchHandler',
    'charmhelpers.fetch.bzrurl.BzrUrlFetchHandler',
)
2909 | 196 | |||
2910 | 197 | |||
class UnhandledSource(Exception):
    """Raised when no fetch handler can process a given source URL."""
2913 | 200 | |||
2914 | 201 | |||
def install_remote(source):
    """
    Install a file tree from a remote source

    The specified source should be a url of the form:
        scheme://[host]/path[#[option=value][&...]]

    Schemes supported are based on this modules submodules
    Options supported are submodule-specific

    :returns: path the tree was unpacked to.
    :raises UnhandledSource: when no handler could install the source.
    """
    # We ONLY check for True here because can_handle may return a string
    # explaining why it can't handle a given source.
    handlers = [h for h in plugins() if h.can_handle(source) is True]
    installed_to = None
    for handler in handlers:
        try:
            installed_to = handler.install(source)
        except UnhandledSource:
            pass
        if installed_to:
            # BUG FIX: stop at the first handler that succeeds instead of
            # letting later handlers re-fetch over the same tree.
            break
    if not installed_to:
        raise UnhandledSource("No handler found for source {}".format(source))
    return installed_to
2936 | 223 | |||
2937 | 224 | |||
def install_from_config(config_var_name):
    """Install the remote source named by the given charm config option."""
    return install_remote(config()[config_var_name])
2942 | 229 | |||
2943 | 230 | |||
class BaseFetchHandler(object):

    """Base class for FetchHandler implementations in fetch plugins"""

    def can_handle(self, source):
        """Return True when the source can be handled; otherwise return a
        string explaining why it cannot."""
        return "Wrong source type"

    def install(self, source):
        """Download and unpack the source; return the path to the unpacked
        files or raise UnhandledSource."""
        raise UnhandledSource("Wrong source type {}".format(source))

    def parse_url(self, url):
        """Split url into its urlparse components."""
        return urlparse(url)

    def base_url(self, url):
        """Return url without querystring or fragment"""
        scheme, netloc, path, params = tuple(self.parse_url(url))[:4]
        # Blank out query (index 4) and fragment (index 5).
        return urlunparse((scheme, netloc, path, params, '', ''))
2966 | 253 | |||
2967 | 254 | |||
def plugins(fetch_handlers=None):
    """Instantiate every available fetch handler class.

    Handlers whose module or class cannot be imported are logged and
    skipped so optional backends may be omitted from an installation.
    """
    plugin_list = []
    for handler_name in (fetch_handlers or FETCH_HANDLERS):
        package, classname = handler_name.rsplit('.', 1)
        try:
            module = importlib.import_module(package)
            plugin_list.append(getattr(module, classname)())
        except (ImportError, AttributeError):
            log("FetchHandler {} not found, skipping plugin".format(
                handler_name))
    return plugin_list
2985 | 0 | 272 | ||
2986 | === added file 'lib/charmhelpers/fetch/archiveurl.py' | |||
2987 | --- lib/charmhelpers/fetch/archiveurl.py 1970-01-01 00:00:00 +0000 | |||
2988 | +++ lib/charmhelpers/fetch/archiveurl.py 2013-11-21 22:43:22 +0000 | |||
2989 | @@ -0,0 +1,48 @@ | |||
2990 | 1 | import os | ||
2991 | 2 | import urllib2 | ||
2992 | 3 | from charmhelpers.fetch import ( | ||
2993 | 4 | BaseFetchHandler, | ||
2994 | 5 | UnhandledSource | ||
2995 | 6 | ) | ||
2996 | 7 | from charmhelpers.payload.archive import ( | ||
2997 | 8 | get_archive_handler, | ||
2998 | 9 | extract, | ||
2999 | 10 | ) | ||
3000 | 11 | from charmhelpers.core.host import mkdir | ||
3001 | 12 | |||
3002 | 13 | |||
3003 | 14 | class ArchiveUrlFetchHandler(BaseFetchHandler): | ||
3004 | 15 | """Handler for archives via generic URLs""" | ||
3005 | 16 | def can_handle(self, source): | ||
3006 | 17 | url_parts = self.parse_url(source) | ||
3007 | 18 | if url_parts.scheme not in ('http', 'https', 'ftp', 'file'): | ||
3008 | 19 | return "Wrong source type" | ||
3009 | 20 | if get_archive_handler(self.base_url(source)): | ||
3010 | 21 | return True | ||
3011 | 22 | return False | ||
3012 | 23 | |||
3013 | 24 | def download(self, source, dest): | ||
3014 | 25 | # propogate all exceptions | ||
3015 | 26 | # URLError, OSError, etc | ||
3016 | 27 | response = urllib2.urlopen(source) | ||
3017 | 28 | try: | ||
3018 | 29 | with open(dest, 'w') as dest_file: | ||
3019 | 30 | dest_file.write(response.read()) | ||
3020 | 31 | except Exception as e: | ||
3021 | 32 | if os.path.isfile(dest): | ||
3022 | 33 | os.unlink(dest) | ||
3023 | 34 | raise e | ||
3024 | 35 | |||
3025 | 36 | def install(self, source): | ||
3026 | 37 | url_parts = self.parse_url(source) | ||
3027 | 38 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched') | ||
3028 | 39 | if not os.path.exists(dest_dir): | ||
3029 | 40 | mkdir(dest_dir, perms=0755) | ||
3030 | 41 | dld_file = os.path.join(dest_dir, os.path.basename(url_parts.path)) | ||
3031 | 42 | try: | ||
3032 | 43 | self.download(source, dld_file) | ||
3033 | 44 | except urllib2.URLError as e: | ||
3034 | 45 | raise UnhandledSource(e.reason) | ||
3035 | 46 | except OSError as e: | ||
3036 | 47 | raise UnhandledSource(e.strerror) | ||
3037 | 48 | return extract(dld_file) | ||
3038 | 0 | 49 | ||
3039 | === added file 'lib/charmhelpers/fetch/bzrurl.py' | |||
3040 | --- lib/charmhelpers/fetch/bzrurl.py 1970-01-01 00:00:00 +0000 | |||
3041 | +++ lib/charmhelpers/fetch/bzrurl.py 2013-11-21 22:43:22 +0000 | |||
3042 | @@ -0,0 +1,49 @@ | |||
3043 | 1 | import os | ||
3044 | 2 | from charmhelpers.fetch import ( | ||
3045 | 3 | BaseFetchHandler, | ||
3046 | 4 | UnhandledSource | ||
3047 | 5 | ) | ||
3048 | 6 | from charmhelpers.core.host import mkdir | ||
3049 | 7 | |||
3050 | 8 | try: | ||
3051 | 9 | from bzrlib.branch import Branch | ||
3052 | 10 | except ImportError: | ||
3053 | 11 | from charmhelpers.fetch import apt_install | ||
3054 | 12 | apt_install("python-bzrlib") | ||
3055 | 13 | from bzrlib.branch import Branch | ||
3056 | 14 | |||
3057 | 15 | |||
3058 | 16 | class BzrUrlFetchHandler(BaseFetchHandler): | ||
3059 | 17 | """Handler for bazaar branches via generic and lp URLs""" | ||
3060 | 18 | def can_handle(self, source): | ||
3061 | 19 | url_parts = self.parse_url(source) | ||
3062 | 20 | if url_parts.scheme not in ('bzr+ssh', 'lp'): | ||
3063 | 21 | return False | ||
3064 | 22 | else: | ||
3065 | 23 | return True | ||
3066 | 24 | |||
3067 | 25 | def branch(self, source, dest): | ||
3068 | 26 | url_parts = self.parse_url(source) | ||
3069 | 27 | # If we use lp:branchname scheme we need to load plugins | ||
3070 | 28 | if not self.can_handle(source): | ||
3071 | 29 | raise UnhandledSource("Cannot handle {}".format(source)) | ||
3072 | 30 | if url_parts.scheme == "lp": | ||
3073 | 31 | from bzrlib.plugin import load_plugins | ||
3074 | 32 | load_plugins() | ||
3075 | 33 | try: | ||
3076 | 34 | remote_branch = Branch.open(source) | ||
3077 | 35 | remote_branch.bzrdir.sprout(dest).open_branch() | ||
3078 | 36 | except Exception as e: | ||
3079 | 37 | raise e | ||
3080 | 38 | |||
3081 | 39 | def install(self, source): | ||
3082 | 40 | url_parts = self.parse_url(source) | ||
3083 | 41 | branch_name = url_parts.path.strip("/").split("/")[-1] | ||
3084 | 42 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", branch_name) | ||
3085 | 43 | if not os.path.exists(dest_dir): | ||
3086 | 44 | mkdir(dest_dir, perms=0755) | ||
3087 | 45 | try: | ||
3088 | 46 | self.branch(source, dest_dir) | ||
3089 | 47 | except OSError as e: | ||
3090 | 48 | raise UnhandledSource(e.strerror) | ||
3091 | 49 | return dest_dir | ||
3092 | 0 | 50 | ||
3093 | === added directory 'lib/charmhelpers/payload' | |||
3094 | === added file 'lib/charmhelpers/payload/__init__.py' | |||
3095 | --- lib/charmhelpers/payload/__init__.py 1970-01-01 00:00:00 +0000 | |||
3096 | +++ lib/charmhelpers/payload/__init__.py 2013-11-21 22:43:22 +0000 | |||
3097 | @@ -0,0 +1,1 @@ | |||
3098 | 1 | "Tools for working with files injected into a charm just before deployment." | ||
3099 | 0 | 2 | ||
3100 | === added file 'lib/charmhelpers/payload/archive.py' | |||
3101 | --- lib/charmhelpers/payload/archive.py 1970-01-01 00:00:00 +0000 | |||
3102 | +++ lib/charmhelpers/payload/archive.py 2013-11-21 22:43:22 +0000 | |||
3103 | @@ -0,0 +1,57 @@ | |||
3104 | 1 | import os | ||
3105 | 2 | import tarfile | ||
3106 | 3 | import zipfile | ||
3107 | 4 | from charmhelpers.core import ( | ||
3108 | 5 | host, | ||
3109 | 6 | hookenv, | ||
3110 | 7 | ) | ||
3111 | 8 | |||
3112 | 9 | |||
class ArchiveError(Exception):
    """Raised when an archive cannot be recognised or unpacked."""
3115 | 12 | |||
3116 | 13 | |||
def get_archive_handler(archive_name):
    """Return the extract function for archive_name, or None if unknown.

    For an existing file the content is sniffed; otherwise the format is
    guessed from the file name alone.
    """
    if os.path.isfile(archive_name):
        if tarfile.is_tarfile(archive_name):
            return extract_tarfile
        elif zipfile.is_zipfile(archive_name):
            return extract_zipfile
    else:
        # look at the file name
        # BUG FIX: 'tar.bz2' was missing its leading dot, so names like
        # 'mytar.bz2' were misclassified as tarballs.
        for ext in ('.tar', '.tar.gz', '.tgz', '.tar.bz2', '.tbz2', '.tbz'):
            if archive_name.endswith(ext):
                return extract_tarfile
        for ext in ('.zip', '.jar'):
            if archive_name.endswith(ext):
                return extract_zipfile
3131 | 28 | |||
3132 | 29 | |||
def archive_dest_default(archive_name):
    """Default extraction path: <charm_dir>/archives/<archive basename>."""
    return os.path.join(hookenv.charm_dir(), "archives",
                        os.path.basename(archive_name))
3136 | 33 | |||
3137 | 34 | |||
def extract(archive_name, destpath=None):
    """Unpack archive_name into destpath (created if missing) and return it.

    :raises ArchiveError: when no handler recognises the archive format.
    """
    handler = get_archive_handler(archive_name)
    if not handler:
        raise ArchiveError("No handler for archive")
    destpath = destpath or archive_dest_default(archive_name)
    if not os.path.isdir(destpath):
        host.mkdir(destpath)
    handler(archive_name, destpath)
    return destpath
3149 | 46 | |||
3150 | 47 | |||
def extract_tarfile(archive_name, destpath):
    "Unpack a tar archive, optionally compressed"
    archive = tarfile.open(archive_name)
    try:
        # NOTE(review): extractall on an untrusted archive permits path
        # traversal; confirm sources are trusted.
        archive.extractall(destpath)
    finally:
        # BUG FIX: the TarFile was previously never closed (leaked fd).
        archive.close()
3155 | 52 | |||
3156 | 53 | |||
def extract_zipfile(archive_name, destpath):
    "Unpack a zip file"
    archive = zipfile.ZipFile(archive_name)
    try:
        archive.extractall(destpath)
    finally:
        # BUG FIX: the ZipFile was previously never closed (leaked fd).
        archive.close()
3161 | 0 | 58 | ||
3162 | === added file 'lib/charmhelpers/payload/execd.py' | |||
3163 | --- lib/charmhelpers/payload/execd.py 1970-01-01 00:00:00 +0000 | |||
3164 | +++ lib/charmhelpers/payload/execd.py 2013-11-21 22:43:22 +0000 | |||
3165 | @@ -0,0 +1,50 @@ | |||
3166 | 1 | #!/usr/bin/env python | ||
3167 | 2 | |||
3168 | 3 | import os | ||
3169 | 4 | import sys | ||
3170 | 5 | import subprocess | ||
3171 | 6 | from charmhelpers.core import hookenv | ||
3172 | 7 | |||
3173 | 8 | |||
def default_execd_dir():
    """Return the charm's default exec.d directory ($CHARM_DIR/exec.d)."""
    return os.path.join(os.environ['CHARM_DIR'], 'exec.d')
3176 | 11 | |||
3177 | 12 | |||
def execd_module_paths(execd_dir=None):
    """Yield the full path of every module directory under execd_dir.

    Yields nothing when the directory does not exist.
    """
    base = execd_dir if execd_dir else default_execd_dir()
    if not os.path.exists(base):
        return
    for entry in os.listdir(base):
        candidate = os.path.join(base, entry)
        if os.path.isdir(candidate):
            yield candidate
3190 | 25 | |||
3191 | 26 | |||
def execd_submodule_paths(command, execd_dir=None):
    """Yield the full path of `command` inside each module of execd_dir."""
    for module_path in execd_module_paths(execd_dir):
        candidate = os.path.join(module_path, command)
        # Only executable regular files count.
        if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
            yield candidate
3199 | 34 | |||
3200 | 35 | |||
def execd_run(command, execd_dir=None, die_on_error=False, stderr=None):
    """Run `command` for each module within execd_dir which defines it.

    Failures are logged; when die_on_error is True the process exits with
    the failing command's return code.
    """
    for path in execd_submodule_paths(command, execd_dir):
        try:
            subprocess.check_call(path, shell=True, stderr=stderr)
        except subprocess.CalledProcessError as e:
            # check_call does not capture output, so e.output is None here.
            hookenv.log("Error ({}) running {}. Output: {}".format(
                e.returncode, e.cmd, e.output))
            if die_on_error:
                sys.exit(e.returncode)
3211 | 46 | |||
3212 | 47 | |||
def execd_preinstall(execd_dir=None):
    """Run every module's charm-pre-install hook under execd_dir."""
    execd_run('charm-pre-install', execd_dir=execd_dir)
3216 | 0 | 51 | ||
3217 | === modified file 'metadata.yaml' | |||
3218 | --- metadata.yaml 2013-05-20 13:19:13 +0000 | |||
3219 | +++ metadata.yaml 2013-11-21 22:43:22 +0000 | |||
3220 | @@ -9,10 +9,10 @@ | |||
3221 | 9 | provides: | 9 | provides: |
3222 | 10 | amqp: | 10 | amqp: |
3223 | 11 | interface: rabbitmq | 11 | interface: rabbitmq |
3224 | 12 | requires: | ||
3225 | 13 | nrpe-external-master: | 12 | nrpe-external-master: |
3226 | 14 | interface: nrpe-external-master | 13 | interface: nrpe-external-master |
3227 | 15 | scope: container | 14 | scope: container |
3228 | 15 | requires: | ||
3229 | 16 | ha: | 16 | ha: |
3230 | 17 | interface: hacluster | 17 | interface: hacluster |
3231 | 18 | scope: container | 18 | scope: container |
3232 | 19 | 19 | ||
3233 | === modified file 'revision' | |||
3234 | --- revision 2013-08-08 21:22:39 +0000 | |||
3235 | +++ revision 2013-11-21 22:43:22 +0000 | |||
3236 | @@ -1,1 +1,1 @@ | |||
3238 | 1 | 97 | 1 | 98 |
3239 | 2 | 2 | ||
3240 | === added file 'scripts/check_rabbitmq.py' | |||
3241 | --- scripts/check_rabbitmq.py 1970-01-01 00:00:00 +0000 | |||
3242 | +++ scripts/check_rabbitmq.py 2013-11-21 22:43:22 +0000 | |||
3243 | @@ -0,0 +1,237 @@ | |||
3244 | 1 | #!/usr/bin/python | ||
3245 | 2 | # | ||
3246 | 3 | # # | ||
3247 | 4 | # # # # # # # | ||
3248 | 5 | # # # # # # # | ||
3249 | 6 | # # # # # # # | ||
3250 | 7 | # # # # # # # # | ||
3251 | 8 | # # # # # # # # # | ||
3252 | 9 | # ##### #### #### #### | ||
3253 | 10 | |||
3254 | 11 | # This file is managed by juju. Do not make local changes. | ||
3255 | 12 | |||
3256 | 13 | # Copyright (C) 2009, 2012 Canonical | ||
3257 | 14 | # All Rights Reserved | ||
3258 | 15 | # | ||
3259 | 16 | # tests RabbitMQ operation | ||
3260 | 17 | |||
3261 | 18 | """ test rabbitmq functionality """ | ||
3262 | 19 | |||
3263 | 20 | import os | ||
3264 | 21 | import sys | ||
3265 | 22 | import signal | ||
3266 | 23 | import socket | ||
3267 | 24 | |||
3268 | 25 | try: | ||
3269 | 26 | from amqplib import client_0_8 as amqp | ||
3270 | 27 | except ImportError: | ||
3271 | 28 | print "CRITICAL: amqplib not found" | ||
3272 | 29 | sys.exit(2) | ||
3273 | 30 | |||
3274 | 31 | from optparse import OptionParser | ||
3275 | 32 | |||
# Routing key used for every message published by this check.
ROUTE_KEY = "test_mq"
3277 | 34 | |||
3278 | 35 | |||
3279 | 36 | def alarm_handler(signum, frame): | ||
3280 | 37 | print "TIMEOUT waiting for all queued messages to be delivered" | ||
3281 | 38 | os._exit(1) | ||
3282 | 39 | |||
3283 | 40 | |||
3284 | 41 | def get_connection(host_port, user, password, vhost): | ||
3285 | 42 | """ connect to the amqp service """ | ||
3286 | 43 | if options.verbose: | ||
3287 | 44 | print "Connection to %s requested" % host_port | ||
3288 | 45 | try: | ||
3289 | 46 | ret = amqp.Connection(host=host_port, userid=user, | ||
3290 | 47 | password=password, virtual_host=vhost, | ||
3291 | 48 | insist=False) | ||
3292 | 49 | except (socket.error, TypeError), e: | ||
3293 | 50 | print "ERROR: Could not connect to RabbitMQ server %s:%d" % ( | ||
3294 | 51 | options.host, options.port) | ||
3295 | 52 | if options.verbose: | ||
3296 | 53 | print e | ||
3297 | 54 | raise | ||
3298 | 55 | sys.exit(2) | ||
3299 | 56 | except: | ||
3300 | 57 | print "ERROR: Unknown error connecting to RabbitMQ server %s:%d" % ( | ||
3301 | 58 | options.host, options.port) | ||
3302 | 59 | if options.verbose: | ||
3303 | 60 | raise | ||
3304 | 61 | sys.exit(3) | ||
3305 | 62 | return ret | ||
3306 | 63 | |||
3307 | 64 | |||
3308 | 65 | def setup_exchange(conn, exchange_name, exchange_type): | ||
3309 | 66 | """ create an exchange """ | ||
3310 | 67 | # see if we already have the exchange | ||
3311 | 68 | must_create = False | ||
3312 | 69 | chan = conn.channel() | ||
3313 | 70 | try: | ||
3314 | 71 | chan.exchange_declare(exchange=exchange_name, type=exchange_type, | ||
3315 | 72 | passive=True) | ||
3316 | 73 | except (amqp.AMQPConnectionException, amqp.AMQPChannelException), e: | ||
3317 | 74 | if e.amqp_reply_code == 404: | ||
3318 | 75 | must_create = True | ||
3319 | 76 | # amqplib kills the channel on error.... we dispose of it too | ||
3320 | 77 | chan.close() | ||
3321 | 78 | chan = conn.channel() | ||
3322 | 79 | else: | ||
3323 | 80 | raise | ||
3324 | 81 | # now create the exchange if needed | ||
3325 | 82 | if must_create: | ||
3326 | 83 | chan.exchange_declare(exchange=exchange_name, type=exchange_type, | ||
3327 | 84 | durable=False, auto_delete=False,) | ||
3328 | 85 | if options.verbose: | ||
3329 | 86 | print "Created new exchange %s (%s)" % ( | ||
3330 | 87 | exchange_name, exchange_type) | ||
3331 | 88 | else: | ||
3332 | 89 | if options.verbose: | ||
3333 | 90 | print "Exchange %s (%s) is already declared" % ( | ||
3334 | 91 | exchange_name, exchange_type) | ||
3335 | 92 | chan.close() | ||
3336 | 93 | return must_create | ||
3337 | 94 | |||
3338 | 95 | |||
3339 | 96 | class Consumer(object): | ||
3340 | 97 | """ message consumer class """ | ||
3341 | 98 | _quit = False | ||
3342 | 99 | |||
3343 | 100 | def __init__(self, conn, exname): | ||
3344 | 101 | self.exname = exname | ||
3345 | 102 | self.connection = conn | ||
3346 | 103 | self.name = "%s_queue" % exname | ||
3347 | 104 | |||
3348 | 105 | def setup(self): | ||
3349 | 106 | """ sets up the queue and links it to the exchange """ | ||
3350 | 107 | if options.verbose: | ||
3351 | 108 | print self.name, "setup" | ||
3352 | 109 | chan = self.connection.channel() | ||
3353 | 110 | # setup the queue | ||
3354 | 111 | chan.queue_declare(queue=self.name, durable=False, | ||
3355 | 112 | exclusive=False, auto_delete=False) | ||
3356 | 113 | chan.queue_bind(queue=self.name, exchange=self.exname, | ||
3357 | 114 | routing_key=ROUTE_KEY) | ||
3358 | 115 | chan.queue_purge(self.name) | ||
3359 | 116 | chan.close() | ||
3360 | 117 | |||
3361 | 118 | def check_end(self, msg): | ||
3362 | 119 | """ checks if this is an end request """ | ||
3363 | 120 | return msg.body.startswith("QUIT") | ||
3364 | 121 | |||
3365 | 122 | def loop(self, timeout=5): | ||
3366 | 123 | """ main loop for the consumer client """ | ||
3367 | 124 | consumer_tag = "callback_%s" % self.name | ||
3368 | 125 | chan = self.connection.channel() | ||
3369 | 126 | |||
3370 | 127 | def callback(msg): | ||
3371 | 128 | """ callback for message received """ | ||
3372 | 129 | if options.verbose: | ||
3373 | 130 | print "Client %s saw this message: '%s'" % (self.name, msg.body) | ||
3374 | 131 | if self.check_end(msg): # we have been asked to quit | ||
3375 | 132 | self._quit = True | ||
3376 | 133 | chan.basic_consume(queue=self.name, no_ack=True, callback=callback, | ||
3377 | 134 | consumer_tag=consumer_tag) | ||
3378 | 135 | signal.signal(signal.SIGALRM, alarm_handler) | ||
3379 | 136 | signal.alarm(timeout) | ||
3380 | 137 | while True: | ||
3381 | 138 | chan.wait() | ||
3382 | 139 | if self._quit: | ||
3383 | 140 | break | ||
3384 | 141 | # cancel alarm for receive wait | ||
3385 | 142 | signal.alarm(0) | ||
3386 | 143 | chan.basic_cancel(consumer_tag) | ||
3387 | 144 | chan.close() | ||
3388 | 145 | return self._quit | ||
3389 | 146 | |||
3390 | 147 | |||
3391 | 148 | def send_message(chan, exname, counter=None, message=None): | ||
3392 | 149 | """ publish a message on the exchange """ | ||
3393 | 150 | if not message: | ||
3394 | 151 | message = "This is test message %d" % counter | ||
3395 | 152 | msg = amqp.Message(message) | ||
3396 | 153 | chan.basic_publish(msg, exchange=exname, routing_key=ROUTE_KEY) | ||
3397 | 154 | if options.verbose: | ||
3398 | 155 | print "Sent message: %s" % message | ||
3399 | 156 | |||
3400 | 157 | |||
def main_loop(conn, exname):
    """ demo code to send/receive a few messages """
    # The consumer's queue must exist before publishing, otherwise the
    # test messages would be dropped by the exchange.
    consumer = Consumer(conn, exname)
    consumer.setup()
    # open up our own connection and start sending messages
    chan = conn.channel()
    for counter in range(options.messages):
        send_message(chan, exname, counter)
    # "QUIT" signals the end of the test to the consumer loop.
    send_message(chan, exname, message="QUIT")
    chan.close()
    # loop around for a while waiting for messages to be picked up
    return consumer.loop(timeout=options.timeout)
3418 | 175 | |||
3419 | 176 | |||
3420 | 177 | def main(host, port, exname, extype, user, password, vhost): | ||
3421 | 178 | """ setup the connection and the communication channel """ | ||
3422 | 179 | sys.stdout = os.fdopen(os.dup(1), "w", 0) | ||
3423 | 180 | host_port = "%s:%s" % (host, port) | ||
3424 | 181 | conn = get_connection(host_port, user, password, vhost) | ||
3425 | 182 | chan = conn.channel() | ||
3426 | 183 | if setup_exchange(conn, exname, extype): | ||
3427 | 184 | if options.verbose: | ||
3428 | 185 | print "Created %s exchange of type %s" % (exname, extype) | ||
3429 | 186 | else: | ||
3430 | 187 | if options.verbose: | ||
3431 | 188 | print "Reusing existing exchange %s of type %s" % (exname, extype) | ||
3432 | 189 | ret = main_loop(conn, exname) | ||
3433 | 190 | chan.close() | ||
3434 | 191 | conn.close() | ||
3435 | 192 | return ret | ||
3436 | 193 | |||
3437 | 194 | if __name__ == '__main__': | ||
3438 | 195 | parser = OptionParser() | ||
3439 | 196 | parser.add_option("--host", dest="host", | ||
3440 | 197 | help="RabbitMQ host [default=%default]", | ||
3441 | 198 | metavar="HOST", default="localhost") | ||
3442 | 199 | parser.add_option("--port", dest="port", type="int", | ||
3443 | 200 | help="port RabbitMQ is running on [default=%default]", | ||
3444 | 201 | metavar="PORT", default=5672) | ||
3445 | 202 | parser.add_option("--exchange", dest="exchange", | ||
3446 | 203 | help="Exchange name to use [default=%default]", | ||
3447 | 204 | default="test_exchange", metavar="EXCHANGE") | ||
3448 | 205 | parser.add_option("--type", dest="type", | ||
3449 | 206 | help="EXCHANGE type [default=%default]", | ||
3450 | 207 | metavar="TYPE", default="fanout") | ||
3451 | 208 | parser.add_option("-v", "--verbose", default=False, action="store_true", | ||
3452 | 209 | help="verbose run") | ||
3453 | 210 | parser.add_option("-m", "--messages", dest="messages", type="int", | ||
3454 | 211 | help="send NUM messages for testing [default=%default]", | ||
3455 | 212 | metavar="NUM", default=10) | ||
3456 | 213 | parser.add_option("-t", "--timeout", dest="timeout", type="int", | ||
3457 | 214 | help="wait TIMEOUT sec for loop test [default=%default]", | ||
3458 | 215 | metavar="TIMEOUT", default=5) | ||
3459 | 216 | parser.add_option("-u", "--user", dest="user", default="guest", | ||
3460 | 217 | help="RabbitMQ user [default=%default]", | ||
3461 | 218 | metavar="USER") | ||
3462 | 219 | parser.add_option("-p", "--password", dest="password", default="guest", | ||
3463 | 220 | help="RabbitMQ password [default=%default]", | ||
3464 | 221 | metavar="PASSWORD") | ||
3465 | 222 | parser.add_option("--vhost", dest="vhost", default="/", | ||
3466 | 223 | help="RabbitMQ vhost [default=%default]", | ||
3467 | 224 | metavar="VHOST") | ||
3468 | 225 | |||
3469 | 226 | (options, args) = parser.parse_args() | ||
3470 | 227 | if options.verbose: | ||
3471 | 228 | print """ | ||
3472 | 229 | Using AMQP setup: host:port=%s:%d exchange_name=%s exchange_type=%s | ||
3473 | 230 | """ % (options.host, options.port, options.exchange, options.type) | ||
3474 | 231 | ret = main(options.host, options.port, options.exchange, options.type, | ||
3475 | 232 | options.user, options.password, options.vhost) | ||
3476 | 233 | if ret: | ||
3477 | 234 | print "Ok: sent and received %d test messages" % options.messages | ||
3478 | 235 | sys.exit(0) | ||
3479 | 236 | print "ERROR: Could not send/receive test messages" | ||
3480 | 237 | sys.exit(3) |
I think we should change the author of the check_rabbitmq.py script, as that email address no longer works (the person in question has left Canonical). Also I wonder if we can change NAGIOS_PLUGINS='/usr/lib/nagios/plugins' to NAGIOS_PLUGINS='/usr/local/lib/nagios/plugins' - it seems odd to install non-packaged files in /usr/lib. I think we may need to pre-create the directory, though.