Merge lp:~hloeung/jenkins-slave-charm/reactive-rewrite-remove-bundled-charmhelpers into lp:jenkins-slave-charm

Proposed by Haw Loeung on 2019-03-04
Status: Merged
Approved by: Paul Gear on 2019-03-04
Approved revision: 23
Merged at revision: 23
Proposed branch: lp:~hloeung/jenkins-slave-charm/reactive-rewrite-remove-bundled-charmhelpers
Merge into: lp:jenkins-slave-charm
Diff against target: 5326 lines (+0/-5072)
44 files modified
hooks/install.d/README.md (+0/-7)
hooks/install.d/charmhelpers/canonical_ci/cron.py (+0/-55)
hooks/install.d/charmhelpers/canonical_ci/gerrit.py (+0/-220)
hooks/install.d/charmhelpers/canonical_ci/jenkins.py (+0/-41)
hooks/install.d/charmhelpers/canonical_ci/nrpe.py (+0/-66)
hooks/install.d/charmhelpers/canonical_ci/ssh.py (+0/-37)
hooks/install.d/charmhelpers/canonical_ci/volume.py (+0/-223)
hooks/install.d/charmhelpers/cli/README.rst (+0/-57)
hooks/install.d/charmhelpers/cli/__init__.py (+0/-147)
hooks/install.d/charmhelpers/cli/commands.py (+0/-2)
hooks/install.d/charmhelpers/cli/host.py (+0/-14)
hooks/install.d/charmhelpers/contrib/ansible/__init__.py (+0/-101)
hooks/install.d/charmhelpers/contrib/charmhelpers/IMPORT (+0/-4)
hooks/install.d/charmhelpers/contrib/charmhelpers/__init__.py (+0/-184)
hooks/install.d/charmhelpers/contrib/charmsupport/IMPORT (+0/-14)
hooks/install.d/charmhelpers/contrib/charmsupport/nrpe.py (+0/-218)
hooks/install.d/charmhelpers/contrib/charmsupport/volumes.py (+0/-156)
hooks/install.d/charmhelpers/contrib/hahelpers/apache.py (+0/-58)
hooks/install.d/charmhelpers/contrib/hahelpers/ceph.py (+0/-294)
hooks/install.d/charmhelpers/contrib/hahelpers/cluster.py (+0/-181)
hooks/install.d/charmhelpers/contrib/jujugui/IMPORT (+0/-4)
hooks/install.d/charmhelpers/contrib/jujugui/utils.py (+0/-602)
hooks/install.d/charmhelpers/contrib/network/ovs/__init__.py (+0/-72)
hooks/install.d/charmhelpers/contrib/openstack/context.py (+0/-294)
hooks/install.d/charmhelpers/contrib/openstack/templates/__init__.py (+0/-2)
hooks/install.d/charmhelpers/contrib/openstack/templates/ceph.conf (+0/-11)
hooks/install.d/charmhelpers/contrib/openstack/templates/haproxy.cfg (+0/-37)
hooks/install.d/charmhelpers/contrib/openstack/templates/openstack_https_frontend (+0/-23)
hooks/install.d/charmhelpers/contrib/openstack/templating.py (+0/-261)
hooks/install.d/charmhelpers/contrib/openstack/utils.py (+0/-276)
hooks/install.d/charmhelpers/contrib/saltstack/__init__.py (+0/-149)
hooks/install.d/charmhelpers/contrib/ssl/__init__.py (+0/-79)
hooks/install.d/charmhelpers/contrib/storage/linux/loopback.py (+0/-62)
hooks/install.d/charmhelpers/contrib/storage/linux/lvm.py (+0/-88)
hooks/install.d/charmhelpers/contrib/storage/linux/utils.py (+0/-25)
hooks/install.d/charmhelpers/contrib/templating/pyformat.py (+0/-13)
hooks/install.d/charmhelpers/core/hookenv.py (+0/-340)
hooks/install.d/charmhelpers/core/host.py (+0/-241)
hooks/install.d/charmhelpers/fetch/__init__.py (+0/-209)
hooks/install.d/charmhelpers/fetch/archiveurl.py (+0/-48)
hooks/install.d/charmhelpers/fetch/bzrurl.py (+0/-49)
hooks/install.d/charmhelpers/payload/__init__.py (+0/-1)
hooks/install.d/charmhelpers/payload/archive.py (+0/-57)
hooks/install.d/charmhelpers/payload/execd.py (+0/-50)
To merge this branch: bzr merge lp:~hloeung/jenkins-slave-charm/reactive-rewrite-remove-bundled-charmhelpers
Reviewer                 Review Type    Date Requested    Status
Paul Gear (community)                   2019-03-04        Approve on 2019-03-04
Review via email: mp+363898@code.launchpad.net

Commit message

Removed hooks/install.d, which is no longer needed or used.

This merge proposal is being monitored by mergebot. Change the status to Approved to merge.

Paul Gear (paulgear) wrote :

LGTM

review: Approve

Change successfully merged at revision 23

Preview Diff

1=== removed directory 'hooks'
2=== removed directory 'hooks/install.d'
3=== removed file 'hooks/install.d/README.md'
4--- hooks/install.d/README.md 2014-05-15 16:16:04 +0000
5+++ hooks/install.d/README.md 1970-01-01 00:00:00 +0000
6@@ -1,7 +0,0 @@
7-# hooks/install.d
8-
9-This directory can be used to extend the function of the jenkins master
10-charm without changing any of the base hooks.
11-
12-Files must be executable otherwise the install hook (which is also run
13-on upgrade-charm and config-changed hooks) will not execute them.
14
15=== removed directory 'hooks/install.d/charmhelpers'
16=== removed file 'hooks/install.d/charmhelpers/__init__.py'
17=== removed directory 'hooks/install.d/charmhelpers/canonical_ci'
18=== removed file 'hooks/install.d/charmhelpers/canonical_ci/__init__.py'
19=== removed file 'hooks/install.d/charmhelpers/canonical_ci/cron.py'
20--- hooks/install.d/charmhelpers/canonical_ci/cron.py 2015-08-19 19:17:42 +0000
21+++ hooks/install.d/charmhelpers/canonical_ci/cron.py 1970-01-01 00:00:00 +0000
22@@ -1,55 +0,0 @@
23-import os
24-
25-from charmhelpers.core.hookenv import log, INFO
26-
27-
28-def write_cronjob(content, job_name=''):
29- f = os.environ["JUJU_UNIT_NAME"].replace("/", "_")
30- cron_path = os.path.join('/etc', 'cron.d', f)
31- if job_name:
32- cron_path+='_'+job_name
33-
34- f = open(cron_path, "w")
35- f.write(content)
36- f.close()
37- os.chmod(cron_path, 0755)
38- log("Wrote cronjob to %s." % cron_path, INFO)
39-
40-
41-# generic backup job creation
42-def schedule_backup(sources, ci_user, target, schedule, retention_count):
43- log("Creating backup cronjob for sources: %s." % sources, INFO)
44-
45- # if doesn't exist, create backup directory and scripts directory
46- if not os.path.exists(target):
47- os.makedirs(target)
48- os.chmod(target, 0755)
49-
50- script = os.path.join(os.environ['CHARM_DIR'],
51- "scripts/backup_job")
52- backup_string = ",".join(sources)
53-
54- # create the cronjob file that will call the script
55- content = ("%s %s %s %s %s %s\n" %
56- (schedule, ci_user, script, backup_string, target, retention_count))
57- write_cronjob(content)
58-
59-
60-def schedule_repo_updates(schedule, ci_user, ci_config_dir, jobs_config_dir):
61- log("Creating cronjob to update CI repo config.", INFO)
62-
63- #XXX: matsubara perhaps would be better to bzr pull and then
64- # trigger jjb.update_jenkins()
65- update_command = (
66- "/usr/bin/bzr update %s && "
67- "/usr/local/bin/jenkins-jobs --flush-cache update %s" % (
68- ci_config_dir, jobs_config_dir))
69-
70- content = "%s %s %s\n" % (schedule, ci_user, update_command)
71- write_cronjob(content)
72-
73-
74-def schedule_generic_job(schedule, user, name, job):
75- content = "%s %s %s\n" % (schedule, user, job)
76- write_cronjob(content, job_name=name)
77-
78
79=== removed file 'hooks/install.d/charmhelpers/canonical_ci/gerrit.py'
80--- hooks/install.d/charmhelpers/canonical_ci/gerrit.py 2015-08-19 19:17:42 +0000
81+++ hooks/install.d/charmhelpers/canonical_ci/gerrit.py 1970-01-01 00:00:00 +0000
82@@ -1,220 +0,0 @@
83-import logging
84-import os
85-import paramiko
86-import sys
87-import subprocess
88-import json
89-
90-from charmhelpers.core.hookenv import (
91- log as _log,
92- ERROR,
93-)
94-
95-_connection = None
96-GERRIT_DAEMON = "/etc/init.d/gerrit"
97-
98-logging.basicConfig(level=logging.INFO)
99-
100-
101-def log(msg, level=None):
102- # wrap log calls and distribute to correct logger
103- # depending if this code is being run by a hook
104- # or an external script.
105- if os.getenv('JUJU_AGENT_SOCKET'):
106- _log(msg, level=level)
107- else:
108- logging.info(msg)
109-
110-
111-def get_ssh(host, user, port, key_file):
112- global _connection
113- if _connection:
114- return _connection
115-
116- _connection = paramiko.SSHClient()
117- _connection.set_missing_host_key_policy(paramiko.AutoAddPolicy())
118- _connection.connect(host, username=user, port=port, key_filename=key_file)
119-
120- return _connection
121-
122-
123-# start gerrit application
124-def start_gerrit():
125- try:
126- subprocess.check_call([GERRIT_DAEMON, "start"])
127- except:
128- pass
129-
130-
131-# stop gerrit application
132-def stop_gerrit():
133- try:
134- subprocess.check_call([GERRIT_DAEMON, "stop"])
135- except:
136- pass
137-
138-
139-class GerritException(Exception):
140- def __init__(self, msg):
141- log('Failed to execute gerrit command: %s' % msg)
142- super(GerritException, self).__init__(msg)
143-
144-
145-class GerritClient(object):
146- def __init__(self, host, user, port, key_file):
147- self.ssh = get_ssh(host, user, port, key_file)
148-
149- def _run_cmd(self, cmd):
150- stdin, stdout, stderr = self.ssh.exec_command(cmd)
151- return (stdout.read(), stderr.read())
152-
153- def create_user(self, user, name, group, ssh_key):
154- log('Creating gerrit new user %s in group %s.' % (user, group))
155- cmd = ('gerrit create-account %(user)s --full-name "%(name)s" '
156- '--group "%(group)s" --ssh-key '
157- '"%(ssh_key)s"' % locals())
158- stdout, stderr = self._run_cmd(cmd)
159- if not stdout and not stderr:
160- log('Created new gerrit user %s in group %s.' % (user, group))
161-
162- if stderr.startswith('fatal'):
163- if 'already exists' not in stderr:
164- # different error
165- log('Error creating account', ERROR)
166- sys.exit(1)
167- else:
168- # retrieve user id and update keys
169- account_id = None
170- cmd = ('gerrit gsql --format json -c "SELECT account_id '
171- 'FROM account_external_ids WHERE external_id=\'username:%s\'"'
172- % (user))
173- stdout, stderr = self._run_cmd(cmd)
174- if not stderr:
175- # load and decode json, extract account id
176- lines = stdout.splitlines()
177- if len(lines)>0:
178- res = json.loads(lines[0])
179- try:
180- account_id = res['columns']['account_id']
181- except:
182- pass
183-
184- # if found, update ssh keys
185- if account_id:
186- cmd = ('gerrit gsql -c "DELETE FROM account_ssh_keys '
187- 'WHERE account_id=%s' % account_id)
188- stdout, stderr = self._run_cmd(cmd)
189-
190- # insert new key
191- cmd = ('gerrit gsql -c "INSERT INTO account_ssh_keys '
192- '(ssh_public_key, valid, account_id, seq) VALUES (\'%s\', \'Y\', '
193- '\'%s\', 0)" ' % (ssh_key, account_id))
194- stdout, stderr = self._run_cmd(cmd)
195-
196- # reboot gerrit to refresh accounts
197- stop_gerrit()
198- start_gerrit()
199-
200- def create_users_batch(self, group, users):
201- for user in users:
202- # sets container user, name, ssh, openid
203- login = user[0]
204- name = user[1]
205- email = user[2]
206- ssh = user[3]
207- openid = user[4]
208-
209- cmd = (u'gerrit create-account %s --full-name "%s" '
210- u'--group "%s" --email "%s"' %
211- (login, name, group, email))
212- stdout, stderr = self._run_cmd(cmd)
213-
214- if stderr.startswith('fatal'):
215- if 'already exists' not in stderr:
216- sys.exit(1)
217-
218- # retrieve user id
219- account_id = None
220- cmd = ('gerrit gsql --format json -c "SELECT account_id '
221- 'FROM account_external_ids WHERE external_id=\'username:%s\'"'
222- % (login))
223- stdout, stderr = self._run_cmd(cmd)
224- if not stderr:
225- # load and decode json, extract account id
226- lines = stdout.splitlines()
227- if len(lines)>0:
228- res = json.loads(lines[0])
229- try:
230- account_id = res['columns']['account_id']
231- except:
232- pass
233-
234- # if found, update ssh keys and openid
235- if account_id:
236- # remove old keys and add new
237- if len(ssh)>0:
238- cmd = ('gerrit gsql -c "DELETE FROM account_ssh_keys '
239- 'WHERE account_id=%s AND ssh_public_key NOT IN (%s)"' %
240- (account_id, (', '.join('\''+item+'\'' for item in ssh)) ))
241- else:
242- cmd = ('gerrit gsql -c "DELETE FROM account_ssh_keys '
243- 'WHERE account_id=%s' % account_id)
244-
245- stdout, stderr = self._run_cmd(cmd)
246-
247- num_key = 0
248- for ssh_key in ssh:
249- # insert new keys
250- cmd = ('gerrit gsql -c "INSERT INTO account_ssh_keys '
251- '(ssh_public_key, valid, account_id, seq) SELECT '
252- '%(ssh_key)s, %(valid)s, %(account_id)s, %(num_key)s '
253- 'WHERE NOT EXISTS (SELECT '
254- 'account_id FROM account_ssh_keys WHERE '
255- 'account_id=%(account_id)s AND ssh_public_key=%(ssh_key)s)"' %
256- {'ssh_key': '\''+ssh_key+'\'', 'valid':'\'Y\'',
257- 'account_id': '\''+account_id+'\'', 'num_key': num_key})
258- num_key+=1
259- stdout, stderr = self._run_cmd(cmd)
260-
261- # replace external id
262- if openid:
263- openid = openid.replace('login.launchpad.net', 'login.ubuntu.com')
264- cmd = ('gerrit gsql -c "DELETE FROM account_external_ids '
265- 'WHERE account_id=%s AND external_id NOT IN (%s) AND '
266- 'external_id LIKE \'http%%\'"' % (account_id, '\''+openid+'\''))
267- stdout, stderr = self._run_cmd(cmd)
268-
269- # replace launchpad for ubuntu account
270- cmd = ('gerrit gsql -c "INSERT INTO account_external_ids '
271- '(account_id, email_address, external_id) SELECT '
272- '%(account_id)s, %(email_address)s, %(external_id)s WHERE '
273- 'NOT EXISTS (SELECT account_id FROM account_external_ids '
274- 'WHERE account_id=%(account_id)s AND external_id=%(external_id)s)"' %
275- {'account_id':'\''+account_id+'\'',
276- 'email_address':'\''+str(email)+'\'',
277- 'external_id': '\''+openid+'\''})
278- stdout, stderr = self._run_cmd(cmd)
279-
280-
281- def create_project(self, project):
282- log('Creating gerrit project %s' % project)
283- cmd = ('gerrit create-project %s' % project)
284- stdout, stderr = self._run_cmd(cmd)
285- if not stdout and not stderr:
286- log('Created new project %s.' % project)
287- return True
288- else:
289- log('Error creating project %s, skipping project creation' %
290- project)
291- return False
292-
293- def create_group(self, group):
294- log('Creating gerrit group %s' % group)
295- cmd = ('gerrit create-group %s' % group)
296- stdout, stderr = self._run_cmd(cmd)
297- if not stdout and not stderr:
298- log('Created new group %s.' % group)
299-
300- def flush_cache(self):
301- cmd = ('gerrit flush-caches')
302- stdout, stderr = self._run_cmd(cmd)
303
304=== removed file 'hooks/install.d/charmhelpers/canonical_ci/jenkins.py'
305--- hooks/install.d/charmhelpers/canonical_ci/jenkins.py 2015-08-19 19:17:42 +0000
306+++ hooks/install.d/charmhelpers/canonical_ci/jenkins.py 1970-01-01 00:00:00 +0000
307@@ -1,41 +0,0 @@
308-import logging
309-import os
310-import paramiko
311-import sys
312-import subprocess
313-import json
314-
315-from charmhelpers.core.hookenv import (
316- log as _log,
317- ERROR,
318-)
319-
320-JENKINS_DAEMON = "/etc/init.d/jenkins"
321-
322-logging.basicConfig(level=logging.INFO)
323-
324-
325-def log(msg, level=None):
326- # wrap log calls and distribute to correct logger
327- # depending if this code is being run by a hook
328- # or an external script.
329- if os.getenv('JUJU_AGENT_SOCKET'):
330- _log(msg, level=level)
331- else:
332- logging.info(msg)
333-
334-
335-# start jenkins application
336-def start_jenkins():
337- try:
338- subprocess.check_call([JENKINS_DAEMON, "start"])
339- except:
340- pass
341-
342-
343-# stop jenkins application
344-def stop_jenkins():
345- try:
346- subprocess.check_call([JENKINS_DAEMON, "stop"])
347- except:
348- pass
349
350=== removed file 'hooks/install.d/charmhelpers/canonical_ci/nrpe.py'
351--- hooks/install.d/charmhelpers/canonical_ci/nrpe.py 2015-08-19 19:17:42 +0000
352+++ hooks/install.d/charmhelpers/canonical_ci/nrpe.py 1970-01-01 00:00:00 +0000
353@@ -1,66 +0,0 @@
354-from charmhelpers.core.hookenv import log
355-
356-
357-HTTP_CHECK = """
358-command[%(name)s]=/usr/lib/nagios/plugins/check_http
359---hostname=%(hostname)s --port=%(port)s
360-""".strip().replace('\n', ' ')
361-
362-
363-TCP_CHECK = """
364-command[%(name)s]=/usr/lib/nagios/plugins/check_tcp
365---hostname=%(hostname)s --port=%(port)s
366-""".strip().replace('\n', ' ')
367-
368-
369-NRPE_SERVICE_ENTRY = """
370-define service {
371- use active-service
372- host_name %(nagios_hostname)s
373- service_description %(nagios_hostname)s %(check_name)s
374- check_command check_nrpe!%(check_name)s
375- servicegroups %(nagios_servicegroup)s
376-}
377-
378-"""
379-
380-
381-NRPE_CHECKS = {
382- 'http': HTTP_CHECK,
383- 'tcp': TCP_CHECK,
384-}
385-
386-
387-CONF_HEADER = "#"*80 + "\n# This file is Juju managed\n" + "#"*80 + '\n'
388-
389-
390-def nrpe_service_config(check_name, nagios_hostname, nagios_servicegroup):
391- """
392- Generates a single snippet of nagios config for a monitored service.
393- Does not verify whether the check to use is actually configured.
394- """
395- return NRPE_SERVICE_ENTRY % locals()
396-
397-
398-def nrpe_check(check_type, name, hostname, port, **kwargs):
399- """
400- Generates a single NRPE check command for a given type.
401-
402- name, hostname and port are currently required for all.
403-
404- Any kwargs will be expanded to additional --k=v arguments,
405- or --k argument if value is True.
406- """
407- try:
408- cmd = NRPE_CHECKS[check_type]
409- except KeyError:
410- e = 'Unsupported NRPE check type: %s.' % check_type
411- log(e)
412- raise Exception(e)
413- cmd = cmd % locals()
414- for k, v in kwargs.iteritems():
415- if v is True:
416- cmd += ' --%s' % k
417- else:
418- cmd += ' --%s=%s' % (k, v)
419- return cmd
420
421=== removed file 'hooks/install.d/charmhelpers/canonical_ci/ssh.py'
422--- hooks/install.d/charmhelpers/canonical_ci/ssh.py 2015-08-19 19:17:42 +0000
423+++ hooks/install.d/charmhelpers/canonical_ci/ssh.py 1970-01-01 00:00:00 +0000
424@@ -1,37 +0,0 @@
425-import os
426-import pwd
427-
428-from subprocess import check_output
429-from charmhelpers.core.hookenv import log
430-
431-
432-def public_ssh_key(user='root', ssh_dir=None):
433- _ssh_dir = ssh_dir or os.path.join(pwd.getpwnam(user).pw_dir, '.ssh')
434- try:
435- with open(os.path.join(_ssh_dir, 'id_rsa.pub')) as key:
436- return key.read().strip()
437- except:
438- return None
439-
440-
441-def initialize_ssh_keys(user='root', ssh_dir=None):
442- home_dir = pwd.getpwnam(user).pw_dir
443- out_dir = ssh_dir or os.path.join(home_dir, '.ssh')
444- if not os.path.isdir(out_dir):
445- os.mkdir(out_dir)
446-
447- priv_key = os.path.join(out_dir, 'id_rsa')
448- if not os.path.isfile(priv_key):
449- log('Generating new ssh key for user %s.' % user)
450- cmd = ['ssh-keygen', '-q', '-N', '', '-t', 'rsa', '-b', '2048',
451- '-f', priv_key]
452- check_output(cmd)
453-
454- pub_key = '%s.pub' % priv_key
455- if not os.path.isfile(pub_key):
456- log('Generating missing ssh public key @ %s.' % pub_key)
457- cmd = ['ssh-keygen', '-y', '-f', priv_key]
458- p = check_output(cmd).strip()
459- with open(pub_key, 'wb') as out:
460- out.write(p)
461- check_output(['chown', '-R', user, out_dir])
462
463=== removed file 'hooks/install.d/charmhelpers/canonical_ci/volume.py'
464--- hooks/install.d/charmhelpers/canonical_ci/volume.py 2015-08-19 19:17:42 +0000
465+++ hooks/install.d/charmhelpers/canonical_ci/volume.py 1970-01-01 00:00:00 +0000
466@@ -1,223 +0,0 @@
467-# Helpers to facilitate initializing and moving application data
468-# to persistent volumes. The bulk of it has been lifted directly
469-# from lp:charms/postgresql, with modification to volume_apply()
470-# to remove postgres specific bits.
471-#
472-# - Adam Gandelman <adamg@canonical.com>
473-
474-import subprocess
475-import sys
476-import os
477-import time
478-import yaml
479-
480-from charmhelpers.core import hookenv
481-
482-from charmhelpers.core.hookenv import (
483- config, WARNING, INFO, ERROR, CRITICAL,
484- log as _log,
485-)
486-
487-
488-def log(level, msg):
489- msg = '[peristent storage] ' + msg
490- _log(level=level, message=msg)
491-
492-
493-def run(command, exit_on_error=True):
494- '''Run a command and return the output.'''
495- try:
496- log(INFO, command)
497- return subprocess.check_output(
498- command, stderr=subprocess.STDOUT, shell=True)
499- except subprocess.CalledProcessError, e:
500- log(ERROR, "status=%d, output=%s" % (e.returncode, e.output))
501- if exit_on_error:
502- sys.exit(e.returncode)
503- else:
504- raise
505-
506-
507-###############################################################################
508-# Volume managment
509-###############################################################################
510-#------------------------------
511-# Get volume-id from juju config "volume-map" dictionary as
512-# volume-map[JUJU_UNIT_NAME]
513-# @return volid
514-#
515-#------------------------------
516-def volume_get_volid_from_volume_map():
517- volume_map = {}
518- try:
519- volume_map = yaml.load(config('volume-map').strip())
520- if volume_map:
521- return volume_map.get(os.environ['JUJU_UNIT_NAME'])
522- except yaml.constructor.ConstructorError as e:
523- log(WARNING, "invalid YAML in 'volume-map': {}".format(e))
524- return None
525-
526-
527-# Is this volume_id permanent ?
528-# @returns True if volid set and not --ephemeral, else:
529-# False
530-def volume_is_permanent(volid):
531- if volid and volid != "--ephemeral":
532- return True
533- return False
534-
535-
536-# Do we have a valid storage state?
537-# @returns volid
538-# None config state is invalid - we should not serve
539-def volume_get_volume_id():
540- ephemeral_storage = config('volume-ephemeral-storage')
541- volid = volume_get_volid_from_volume_map()
542- juju_unit_name = hookenv.local_unit()
543- if ephemeral_storage in [True, 'yes', 'Yes', 'true', 'True']:
544- if volid:
545- log(ERROR,
546- "volume-ephemeral-storage is True, but " +
547- "volume-map[{!r}] -> {}".format(juju_unit_name, volid),)
548- return None
549- else:
550- return "--ephemeral"
551- else:
552- if not volid:
553- log(WARNING,
554- "volume-ephemeral-storage is False, but "
555- "no volid found for volume-map[{!r}]".format(
556- hookenv.local_unit()))
557- return None
558- return volid
559-
560-
561-# Initialize and/or mount permanent storage, it straightly calls
562-# shell helper
563-def volume_init_and_mount(volid):
564- command = ("scripts/volume-common.sh call " +
565- "volume_init_and_mount %s" % volid)
566- output = run(command)
567- if output.find("ERROR") >= 0:
568- return False
569- return True
570-
571-
572-def volume_mount_point_from_volid(volid):
573- if volid and volume_is_permanent(volid):
574- return "/srv/juju/%s" % volid
575- return None
576-
577-
578-def volume_apply(data_directory_path, service, user, group):
579- # assumes service stopped.
580- volid = volume_get_volume_id()
581- if volid:
582- if volume_is_permanent(volid):
583- if not volume_init_and_mount(volid):
584- log(ERROR,
585- "volume_init_and_mount failed, not applying changes")
586- return False
587-
588- if not os.path.exists(data_directory_path):
589- log(CRITICAL,
590- "postgresql data dir {} not found, "
591- "not applying changes.".format(data_directory_path))
592- return False
593-
594- mount_point = volume_mount_point_from_volid(volid)
595- # new data path consturcted as if mount_point were chroot, eg
596- # /srv/juju/vol-000010/var/lib/mysql
597- new_data_path = os.path.join(
598- mount_point, *data_directory_path.split('/'))
599-
600- if not mount_point:
601- log(ERROR,
602- "invalid mount point from volid = {}, "
603- "not applying changes.".format(mount_point))
604- return False
605-
606- if ((os.path.islink(data_directory_path) and
607- os.readlink(data_directory_path) == new_data_path)):
608- log(INFO,
609- "%s data dir '%s' already points "
610- "to %s skipping storage changes." % (
611- service, data_directory_path, new_data_path))
612- log(INFO,
613- "existing-symlink: to fix/avoid UID changes from "
614- "previous units, doing: "
615- "chown -R %s:%s %s" % (user, group, new_data_path))
616- run("chown -R %s:%s %s" % (user, group, new_data_path))
617- return True
618-
619- # Create new data directory path under mount point if required
620- # and set permissions.
621- # Create a directory structure below "new" mount_point, as e.g.:
622- # /srv/juju/vol-000012345/postgresql/9.1/main , which "mimics":
623- # /var/lib/postgresql/9.1/main
624- if not os.path.isdir(new_data_path):
625- log(INFO, "Creating new data path under mount: %s" % new_data_path)
626- os.makedirs(new_data_path)
627-
628- # Ensure directory permissions on every run.
629- log(INFO, "Ensuring %s:%s ownership on %s." %
630- (user, group, new_data_path))
631- run("chown -R %s:%s %s" % (user, group, new_data_path))
632-
633-# curr_dir_stat = os.stat(data_directory_path)
634-# os.chown(new_data_path, curr_dir_stat.st_uid, curr_dir_stat.st_gi
635-# os.chmod(new_data_path, curr_dir_stat.st_mode)
636-
637-# for new_dir in [new_pg_dir,
638-# os.path.join(new_pg_dir, config("version")),
639-# new_pg_version_cluster_dir]:
640-# if not os.path.isdir(new_dir):
641-# log("mkdir %s".format(new_dir))
642-# os.mkdir(new_dir)
643-# # copy permissions from current data_directory_path
644-# os.chown(new_dir, curr_dir_stat.st_uid, curr_dir_stat.st_gid)
645-# os.chmod(new_dir, curr_dir_stat.st_mode)
646-
647- # Carefully build this symlink, e.g.:
648- # /var/lib/postgresql/9.1/main ->
649- # /srv/juju/vol-000012345/postgresql/9.1/main
650- # but keep previous "main/" directory, by renaming it to
651- # main-$TIMESTAMP
652-
653- log(WARNING, "migrating application data {}/ -> {}/".format(
654- data_directory_path, new_data_path))
655-
656- command = "rsync -a {}/ {}/".format(data_directory_path, new_data_path)
657- log(INFO, "run: {}".format(command))
658- run(command)
659-
660-# if not os.path.exists(os.path.join(
661-# new_pg_version_cluster_dir, "PG_VERSION")):
662-# log("migrating PG data {}/ -> {}/".format(
663-# data_directory_path, new_pg_version_cluster_dir), WARNING)
664-# # void copying PID file to perm storage (shouldn't be any...)
665-# command = "rsync -a --exclude postmaster.pid {}/ {}/".format(
666-# data_directory_path, new_pg_version_cluster_dir)
667-# log("run: {}".format(command))
668-# run(command)
669-
670- try:
671- os.rename(data_directory_path, "{}-{}".format(
672- data_directory_path, int(time.time())))
673- log(INFO, "symlinking {} -> {}".format(
674- new_data_path, data_directory_path))
675- os.symlink(new_data_path, data_directory_path)
676- log(INFO,
677- "after-symlink: to fix/avoid UID changes from "
678- "previous units, doing: "
679- "chown -R %s:%s %s" % (user, group, new_data_path))
680- run("chown -R %s:%s %s" % (user, group, new_data_path))
681- return True
682- except OSError:
683- log(ERROR, "failed to symlink {} -> {}".format(
684- data_directory_path, new_data_path))
685- return False
686- else:
687- log(ERROR,
688- "Invalid volume storage configuration, not applying changes")
689- return False
690
691=== removed directory 'hooks/install.d/charmhelpers/cli'
692=== removed file 'hooks/install.d/charmhelpers/cli/README.rst'
693--- hooks/install.d/charmhelpers/cli/README.rst 2015-08-19 19:17:42 +0000
694+++ hooks/install.d/charmhelpers/cli/README.rst 1970-01-01 00:00:00 +0000
695@@ -1,57 +0,0 @@
696-==========
697-Commandant
698-==========
699-
700------------------------------------------------------
701-Automatic command-line interfaces to Python functions
702------------------------------------------------------
703-
704-One of the benefits of ``libvirt`` is the uniformity of the interface: the C API (as well as the bindings in other languages) is a set of functions that accept parameters that are nearly identical to the command-line arguments. If you run ``virsh``, you get an interactive command prompt that supports all of the same commands that your shell scripts use as ``virsh`` subcommands.
705-
706-Command execution and stdio manipulation is the greatest common factor across all development systems in the POSIX environment. By exposing your functions as commands that manipulate streams of text, you can make life easier for all the Ruby and Erlang and Go programmers in your life.
707-
708-Goals
709-=====
710-
711-* Single decorator to expose a function as a command.
712- * now two decorators - one "automatic" and one that allows authors to manipulate the arguments for fine-grained control.(MW)
713-* Automatic analysis of function signature through ``inspect.getargspec()``
714-* Command argument parser built automatically with ``argparse``
715-* Interactive interpreter loop object made with ``Cmd``
716-* Options to output structured return value data via ``pprint``, ``yaml`` or ``json`` dumps.
717-
718-Other Important Features that need writing
719-------------------------------------------
720-
721-* Help and Usage documentation can be automatically generated, but it will be important to let users override this behaviour
722-* The decorator should allow specifying further parameters to the parser's add_argument() calls, to specify types or to make arguments behave as boolean flags, etc.
723- - Filename arguments are important, as good practice is for functions to accept file objects as parameters.
724- - choices arguments help to limit bad input before the function is called
725-* Some automatic behaviour could make for better defaults, once the user can override them.
726- - We could automatically detect arguments that default to False or True, and automatically support --no-foo for foo=True.
727- - We could automatically support hyphens as alternates for underscores
728- - Arguments defaulting to sequence types could support the ``append`` action.
729-
730-
731------------------------------------------------------
732-Implementing subcommands
733------------------------------------------------------
734-
735-(WIP)
736-
737-So as to avoid dependencies on the cli module, subcommands should be defined separately from their implementations. The recommmendation would be to place definitions into separate modules near the implementations which they expose.
738-
739-Some examples::
740-
741- from charmhelpers.cli import CommandLine
742- from charmhelpers.payload import execd
743- from charmhelpers.foo import bar
744-
745- cli = CommandLine()
746-
747- cli.subcommand(execd.execd_run)
748-
749- @cli.subcommand_builder("bar", help="Bar baz qux")
750- def barcmd_builder(subparser):
751- subparser.add_argument('argument1', help="yackety")
752- return bar
753
754=== removed file 'hooks/install.d/charmhelpers/cli/__init__.py'
755--- hooks/install.d/charmhelpers/cli/__init__.py 2015-08-19 19:17:42 +0000
756+++ hooks/install.d/charmhelpers/cli/__init__.py 1970-01-01 00:00:00 +0000
757@@ -1,147 +0,0 @@
758-import inspect
759-import itertools
760-import argparse
761-import sys
762-
763-
764-class OutputFormatter(object):
765- def __init__(self, outfile=sys.stdout):
766- self.formats = (
767- "raw",
768- "json",
769- "py",
770- "yaml",
771- "csv",
772- "tab",
773- )
774- self.outfile = outfile
775-
776- def add_arguments(self, argument_parser):
777- formatgroup = argument_parser.add_mutually_exclusive_group()
778- choices = self.supported_formats
779- formatgroup.add_argument("--format", metavar='FMT',
780- help="Select output format for returned data, "
781- "where FMT is one of: {}".format(choices),
782- choices=choices, default='raw')
783- for fmt in self.formats:
784- fmtfunc = getattr(self, fmt)
785- formatgroup.add_argument("-{}".format(fmt[0]),
786- "--{}".format(fmt), action='store_const',
787- const=fmt, dest='format',
788- help=fmtfunc.__doc__)
789-
790- @property
791- def supported_formats(self):
792- return self.formats
793-
794- def raw(self, output):
795- """Output data as raw string (default)"""
796- self.outfile.write(str(output))
797-
798- def py(self, output):
799- """Output data as a nicely-formatted python data structure"""
800- import pprint
801- pprint.pprint(output, stream=self.outfile)
802-
803- def json(self, output):
804- """Output data in JSON format"""
805- import json
806- json.dump(output, self.outfile)
807-
808- def yaml(self, output):
809- """Output data in YAML format"""
810- import yaml
811- yaml.safe_dump(output, self.outfile)
812-
813- def csv(self, output):
814- """Output data as excel-compatible CSV"""
815- import csv
816- csvwriter = csv.writer(self.outfile)
817- csvwriter.writerows(output)
818-
819- def tab(self, output):
820- """Output data in excel-compatible tab-delimited format"""
821- import csv
822- csvwriter = csv.writer(self.outfile, dialect=csv.excel_tab)
823- csvwriter.writerows(output)
824-
825- def format_output(self, output, fmt='raw'):
826- fmtfunc = getattr(self, fmt)
827- fmtfunc(output)
828-
829-
830-class CommandLine(object):
831- argument_parser = None
832- subparsers = None
833- formatter = None
834-
835- def __init__(self):
836- if not self.argument_parser:
837- self.argument_parser = argparse.ArgumentParser(description='Perform common charm tasks')
838- if not self.formatter:
839- self.formatter = OutputFormatter()
840- self.formatter.add_arguments(self.argument_parser)
841- if not self.subparsers:
842- self.subparsers = self.argument_parser.add_subparsers(help='Commands')
843-
844- def subcommand(self, command_name=None):
845- """
846- Decorate a function as a subcommand. Use its arguments as the
847- command-line arguments"""
848- def wrapper(decorated):
849- cmd_name = command_name or decorated.__name__
850- subparser = self.subparsers.add_parser(cmd_name,
851- description=decorated.__doc__)
852- for args, kwargs in describe_arguments(decorated):
853- subparser.add_argument(*args, **kwargs)
854- subparser.set_defaults(func=decorated)
855- return decorated
856- return wrapper
857-
858- def subcommand_builder(self, command_name, description=None):
859- """
860- Decorate a function that builds a subcommand. Builders should accept a
861- single argument (the subparser instance) and return the function to be
862- run as the command."""
863- def wrapper(decorated):
864- subparser = self.subparsers.add_parser(command_name)
865- func = decorated(subparser)
866- subparser.set_defaults(func=func)
867- subparser.description = description or func.__doc__
868- return wrapper
869-
870- def run(self):
871- "Run cli, processing arguments and executing subcommands."
872- arguments = self.argument_parser.parse_args()
873- argspec = inspect.getargspec(arguments.func)
874- vargs = []
875- kwargs = {}
876- if argspec.varargs:
877- vargs = getattr(arguments, argspec.varargs)
878- for arg in argspec.args:
879- kwargs[arg] = getattr(arguments, arg)
880- self.formatter.format_output(arguments.func(*vargs, **kwargs), arguments.format)
881-
882-
883-cmdline = CommandLine()
884-
885-
886-def describe_arguments(func):
887- """
888- Analyze a function's signature and return a data structure suitable for
889- passing in as arguments to an argparse parser's add_argument() method."""
890-
891- argspec = inspect.getargspec(func)
892- # we should probably raise an exception somewhere if func includes **kwargs
893- if argspec.defaults:
894- positional_args = argspec.args[:-len(argspec.defaults)]
895- keyword_names = argspec.args[-len(argspec.defaults):]
896- for arg, default in itertools.izip(keyword_names, argspec.defaults):
897- yield ('--{}'.format(arg),), {'default': default}
898- else:
899- positional_args = argspec.args
900-
901- for arg in positional_args:
902- yield (arg,), {}
903- if argspec.varargs:
904- yield (argspec.varargs,), {'nargs': '*'}
905
906=== removed file 'hooks/install.d/charmhelpers/cli/commands.py'
907--- hooks/install.d/charmhelpers/cli/commands.py 2015-08-19 19:17:42 +0000
908+++ hooks/install.d/charmhelpers/cli/commands.py 1970-01-01 00:00:00 +0000
909@@ -1,2 +0,0 @@
910-from . import CommandLine
911-import host
912
913=== removed file 'hooks/install.d/charmhelpers/cli/host.py'
914--- hooks/install.d/charmhelpers/cli/host.py 2015-08-19 19:17:42 +0000
915+++ hooks/install.d/charmhelpers/cli/host.py 1970-01-01 00:00:00 +0000
916@@ -1,14 +0,0 @@
917-from . import cmdline
918-from charmhelpers.core import host
919-
920-
921-@cmdline.subcommand()
922-def mounts():
923- "List mounts"
924- return host.mounts()
925-
926-@cmdline.subcommand_builder('service', description="Control system services")
927-def service(subparser):
928- subparser.add_argument("action", help="The action to perform (start, stop, etc...)")
929- subparser.add_argument("service_name", help="Name of the service to control")
930- return host.service
931
932=== removed directory 'hooks/install.d/charmhelpers/contrib'
933=== removed file 'hooks/install.d/charmhelpers/contrib/__init__.py'
934=== removed directory 'hooks/install.d/charmhelpers/contrib/ansible'
935=== removed file 'hooks/install.d/charmhelpers/contrib/ansible/__init__.py'
936--- hooks/install.d/charmhelpers/contrib/ansible/__init__.py 2015-08-19 19:17:42 +0000
937+++ hooks/install.d/charmhelpers/contrib/ansible/__init__.py 1970-01-01 00:00:00 +0000
938@@ -1,101 +0,0 @@
939-# Copyright 2013 Canonical Ltd.
940-#
941-# Authors:
942-# Charm Helpers Developers <juju@lists.ubuntu.com>
943-"""Charm Helpers ansible - declare the state of your machines.
944-
945-This helper enables you to declare your machine state, rather than
946-program it procedurally (and have to test each change to your procedures).
947-Your install hook can be as simple as:
948-
949-{{{
950-import charmhelpers.contrib.ansible
951-
952-
953-def install():
954- charmhelpers.contrib.ansible.install_ansible_support()
955- charmhelpers.contrib.ansible.apply_playbook('playbooks/install.yaml')
956-}}}
957-
958-and won't need to change (nor will its tests) when you change the machine
959-state.
960-
961-All of your juju config and relation-data are available as template
962-variables within your playbooks and templates. An install playbook looks
963-something like:
964-
965-{{{
966----
967-- hosts: localhost
968- user: root
969-
970- tasks:
971- - name: Add private repositories.
972- template:
973- src: ../templates/private-repositories.list.jinja2
974- dest: /etc/apt/sources.list.d/private.list
975-
976- - name: Update the cache.
977- apt: update_cache=yes
978-
979- - name: Install dependencies.
980- apt: pkg={{ item }}
981- with_items:
982- - python-mimeparse
983- - python-webob
984- - sunburnt
985-
986- - name: Setup groups.
987- group: name={{ item.name }} gid={{ item.gid }}
988- with_items:
989- - { name: 'deploy_user', gid: 1800 }
990- - { name: 'service_user', gid: 1500 }
991-
992- ...
993-}}}
994-
995-Read more online about playbooks[1] and standard ansible modules[2].
996-
997-[1] http://www.ansibleworks.com/docs/playbooks.html
998-[2] http://www.ansibleworks.com/docs/modules.html
999-"""
1000-import os
1001-import subprocess
1002-
1003-import charmhelpers.contrib.saltstack
1004-import charmhelpers.core.host
1005-import charmhelpers.core.hookenv
1006-import charmhelpers.fetch
1007-
1008-
1009-charm_dir = os.environ.get('CHARM_DIR', '')
1010-ansible_hosts_path = '/etc/ansible/hosts'
1011-# Ansible will automatically include any vars in the following
1012-# file in its inventory when run locally.
1013-ansible_vars_path = '/etc/ansible/host_vars/localhost'
1014-
1015-
1016-def install_ansible_support(from_ppa=True):
1017- """Installs the ansible package.
1018-
1019- By default it is installed from the PPA [1] linked from
1020- the ansible website [2].
1021-
1022- [1] https://launchpad.net/~rquillo/+archive/ansible
1023- [2] http://www.ansibleworks.com/docs/gettingstarted.html#ubuntu-and-debian
1024-
1025- If from_ppa is false, you must ensure that the package is available
1026- from a configured repository.
1027- """
1028- if from_ppa:
1029- charmhelpers.fetch.add_source('ppa:rquillo/ansible')
1030- charmhelpers.fetch.apt_update(fatal=True)
1031- charmhelpers.fetch.apt_install('ansible')
1032- with open(ansible_hosts_path, 'w+') as hosts_file:
1033- hosts_file.write('localhost ansible_connection=local')
1034-
1035-
1036-def apply_playbook(playbook):
1037- charmhelpers.contrib.saltstack.juju_state_to_yaml(
1038- ansible_vars_path, namespace_separator='__')
1039- subprocess.check_call(['ansible-playbook', '-c', 'local', playbook])
1040
1041=== removed directory 'hooks/install.d/charmhelpers/contrib/charmhelpers'
1042=== removed file 'hooks/install.d/charmhelpers/contrib/charmhelpers/IMPORT'
1043--- hooks/install.d/charmhelpers/contrib/charmhelpers/IMPORT 2015-08-19 19:17:42 +0000
1044+++ hooks/install.d/charmhelpers/contrib/charmhelpers/IMPORT 1970-01-01 00:00:00 +0000
1045@@ -1,4 +0,0 @@
1046-Source lp:charm-tools/trunk
1047-
1048-charm-tools/helpers/python/charmhelpers/__init__.py -> charmhelpers/charmhelpers/contrib/charmhelpers/__init__.py
1049-charm-tools/helpers/python/charmhelpers/tests/test_charmhelpers.py -> charmhelpers/tests/contrib/charmhelpers/test_charmhelpers.py
1050
1051=== removed file 'hooks/install.d/charmhelpers/contrib/charmhelpers/__init__.py'
1052--- hooks/install.d/charmhelpers/contrib/charmhelpers/__init__.py 2015-08-19 19:17:42 +0000
1053+++ hooks/install.d/charmhelpers/contrib/charmhelpers/__init__.py 1970-01-01 00:00:00 +0000
1054@@ -1,184 +0,0 @@
1055-# Copyright 2012 Canonical Ltd. This software is licensed under the
1056-# GNU Affero General Public License version 3 (see the file LICENSE).
1057-
1058-import warnings
1059-warnings.warn("contrib.charmhelpers is deprecated", DeprecationWarning)
1060-
1061-"""Helper functions for writing Juju charms in Python."""
1062-
1063-__metaclass__ = type
1064-__all__ = [
1065- #'get_config', # core.hookenv.config()
1066- #'log', # core.hookenv.log()
1067- #'log_entry', # core.hookenv.log()
1068- #'log_exit', # core.hookenv.log()
1069- #'relation_get', # core.hookenv.relation_get()
1070- #'relation_set', # core.hookenv.relation_set()
1071- #'relation_ids', # core.hookenv.relation_ids()
1072- #'relation_list', # core.hookenv.relation_units()
1073- #'config_get', # core.hookenv.config()
1074- #'unit_get', # core.hookenv.unit_get()
1075- #'open_port', # core.hookenv.open_port()
1076- #'close_port', # core.hookenv.close_port()
1077- #'service_control', # core.host.service()
1078- 'unit_info', # client-side, NOT IMPLEMENTED
1079- 'wait_for_machine', # client-side, NOT IMPLEMENTED
1080- 'wait_for_page_contents', # client-side, NOT IMPLEMENTED
1081- 'wait_for_relation', # client-side, NOT IMPLEMENTED
1082- 'wait_for_unit', # client-side, NOT IMPLEMENTED
1083-]
1084-
1085-import operator
1086-from shelltoolbox import (
1087- command,
1088-)
1089-import tempfile
1090-import time
1091-import urllib2
1092-import yaml
1093-
1094-SLEEP_AMOUNT = 0.1
1095-# We create a juju_status Command here because it makes testing much,
1096-# much easier.
1097-juju_status = lambda: command('juju')('status')
1098-
1099-# re-implemented as charmhelpers.fetch.configure_sources()
1100-#def configure_source(update=False):
1101-# source = config_get('source')
1102-# if ((source.startswith('ppa:') or
1103-# source.startswith('cloud:') or
1104-# source.startswith('http:'))):
1105-# run('add-apt-repository', source)
1106-# if source.startswith("http:"):
1107-# run('apt-key', 'import', config_get('key'))
1108-# if update:
1109-# run('apt-get', 'update')
1110-
1111-
1112-# DEPRECATED: client-side only
1113-def make_charm_config_file(charm_config):
1114- charm_config_file = tempfile.NamedTemporaryFile()
1115- charm_config_file.write(yaml.dump(charm_config))
1116- charm_config_file.flush()
1117- # The NamedTemporaryFile instance is returned instead of just the name
1118- # because we want to take advantage of garbage collection-triggered
1119- # deletion of the temp file when it goes out of scope in the caller.
1120- return charm_config_file
1121-
1122-
1123-# DEPRECATED: client-side only
1124-def unit_info(service_name, item_name, data=None, unit=None):
1125- if data is None:
1126- data = yaml.safe_load(juju_status())
1127- service = data['services'].get(service_name)
1128- if service is None:
1129- # XXX 2012-02-08 gmb:
1130- # This allows us to cope with the race condition that we
1131- # have between deploying a service and having it come up in
1132- # `juju status`. We could probably do with cleaning it up so
1133- # that it fails a bit more noisily after a while.
1134- return ''
1135- units = service['units']
1136- if unit is not None:
1137- item = units[unit][item_name]
1138- else:
1139- # It might seem odd to sort the units here, but we do it to
1140- # ensure that when no unit is specified, the first unit for the
1141- # service (or at least the one with the lowest number) is the
1142- # one whose data gets returned.
1143- sorted_unit_names = sorted(units.keys())
1144- item = units[sorted_unit_names[0]][item_name]
1145- return item
1146-
1147-
1148-# DEPRECATED: client-side only
1149-def get_machine_data():
1150- return yaml.safe_load(juju_status())['machines']
1151-
1152-
1153-# DEPRECATED: client-side only
1154-def wait_for_machine(num_machines=1, timeout=300):
1155- """Wait `timeout` seconds for `num_machines` machines to come up.
1156-
1157- This wait_for... function can be called by other wait_for functions
1158- whose timeouts might be too short in situations where only a bare
1159- Juju setup has been bootstrapped.
1160-
1161- :return: A tuple of (num_machines, time_taken). This is used for
1162- testing.
1163- """
1164- # You may think this is a hack, and you'd be right. The easiest way
1165- # to tell what environment we're working in (LXC vs EC2) is to check
1166- # the dns-name of the first machine. If it's localhost we're in LXC
1167- # and we can just return here.
1168- if get_machine_data()[0]['dns-name'] == 'localhost':
1169- return 1, 0
1170- start_time = time.time()
1171- while True:
1172- # Drop the first machine, since it's the Zookeeper and that's
1173- # not a machine that we need to wait for. This will only work
1174- # for EC2 environments, which is why we return early above if
1175- # we're in LXC.
1176- machine_data = get_machine_data()
1177- non_zookeeper_machines = [
1178- machine_data[key] for key in machine_data.keys()[1:]]
1179- if len(non_zookeeper_machines) >= num_machines:
1180- all_machines_running = True
1181- for machine in non_zookeeper_machines:
1182- if machine.get('instance-state') != 'running':
1183- all_machines_running = False
1184- break
1185- if all_machines_running:
1186- break
1187- if time.time() - start_time >= timeout:
1188- raise RuntimeError('timeout waiting for service to start')
1189- time.sleep(SLEEP_AMOUNT)
1190- return num_machines, time.time() - start_time
1191-
1192-
1193-# DEPRECATED: client-side only
1194-def wait_for_unit(service_name, timeout=480):
1195- """Wait `timeout` seconds for a given service name to come up."""
1196- wait_for_machine(num_machines=1)
1197- start_time = time.time()
1198- while True:
1199- state = unit_info(service_name, 'agent-state')
1200- if 'error' in state or state == 'started':
1201- break
1202- if time.time() - start_time >= timeout:
1203- raise RuntimeError('timeout waiting for service to start')
1204- time.sleep(SLEEP_AMOUNT)
1205- if state != 'started':
1206- raise RuntimeError('unit did not start, agent-state: ' + state)
1207-
1208-
1209-# DEPRECATED: client-side only
1210-def wait_for_relation(service_name, relation_name, timeout=120):
1211- """Wait `timeout` seconds for a given relation to come up."""
1212- start_time = time.time()
1213- while True:
1214- relation = unit_info(service_name, 'relations').get(relation_name)
1215- if relation is not None and relation['state'] == 'up':
1216- break
1217- if time.time() - start_time >= timeout:
1218- raise RuntimeError('timeout waiting for relation to be up')
1219- time.sleep(SLEEP_AMOUNT)
1220-
1221-
1222-# DEPRECATED: client-side only
1223-def wait_for_page_contents(url, contents, timeout=120, validate=None):
1224- if validate is None:
1225- validate = operator.contains
1226- start_time = time.time()
1227- while True:
1228- try:
1229- stream = urllib2.urlopen(url)
1230- except (urllib2.HTTPError, urllib2.URLError):
1231- pass
1232- else:
1233- page = stream.read()
1234- if validate(page, contents):
1235- return page
1236- if time.time() - start_time >= timeout:
1237- raise RuntimeError('timeout waiting for contents of ' + url)
1238- time.sleep(SLEEP_AMOUNT)
1239
1240=== removed directory 'hooks/install.d/charmhelpers/contrib/charmsupport'
1241=== removed file 'hooks/install.d/charmhelpers/contrib/charmsupport/IMPORT'
1242--- hooks/install.d/charmhelpers/contrib/charmsupport/IMPORT 2015-08-19 19:17:42 +0000
1243+++ hooks/install.d/charmhelpers/contrib/charmsupport/IMPORT 1970-01-01 00:00:00 +0000
1244@@ -1,14 +0,0 @@
1245-Source: lp:charmsupport/trunk
1246-
1247-charmsupport/charmsupport/execd.py -> charm-helpers/charmhelpers/contrib/charmsupport/execd.py
1248-charmsupport/charmsupport/hookenv.py -> charm-helpers/charmhelpers/contrib/charmsupport/hookenv.py
1249-charmsupport/charmsupport/host.py -> charm-helpers/charmhelpers/contrib/charmsupport/host.py
1250-charmsupport/charmsupport/nrpe.py -> charm-helpers/charmhelpers/contrib/charmsupport/nrpe.py
1251-charmsupport/charmsupport/volumes.py -> charm-helpers/charmhelpers/contrib/charmsupport/volumes.py
1252-
1253-charmsupport/tests/test_execd.py -> charm-helpers/tests/contrib/charmsupport/test_execd.py
1254-charmsupport/tests/test_hookenv.py -> charm-helpers/tests/contrib/charmsupport/test_hookenv.py
1255-charmsupport/tests/test_host.py -> charm-helpers/tests/contrib/charmsupport/test_host.py
1256-charmsupport/tests/test_nrpe.py -> charm-helpers/tests/contrib/charmsupport/test_nrpe.py
1257-
1258-charmsupport/bin/charmsupport -> charm-helpers/bin/contrib/charmsupport/charmsupport
1259
1260=== removed file 'hooks/install.d/charmhelpers/contrib/charmsupport/__init__.py'
1261=== removed file 'hooks/install.d/charmhelpers/contrib/charmsupport/nrpe.py'
1262--- hooks/install.d/charmhelpers/contrib/charmsupport/nrpe.py 2015-08-19 19:17:42 +0000
1263+++ hooks/install.d/charmhelpers/contrib/charmsupport/nrpe.py 1970-01-01 00:00:00 +0000
1264@@ -1,218 +0,0 @@
1265-"""Compatibility with the nrpe-external-master charm"""
1266-# Copyright 2012 Canonical Ltd.
1267-#
1268-# Authors:
1269-# Matthew Wedgwood <matthew.wedgwood@canonical.com>
1270-
1271-import subprocess
1272-import pwd
1273-import grp
1274-import os
1275-import re
1276-import shlex
1277-import yaml
1278-
1279-from charmhelpers.core.hookenv import (
1280- config,
1281- local_unit,
1282- log,
1283- relation_ids,
1284- relation_set,
1285-)
1286-
1287-from charmhelpers.core.host import service
1288-
1289-# This module adds compatibility with the nrpe-external-master and plain nrpe
1290-# subordinate charms. To use it in your charm:
1291-#
1292-# 1. Update metadata.yaml
1293-#
1294-# provides:
1295-# (...)
1296-# nrpe-external-master:
1297-# interface: nrpe-external-master
1298-# scope: container
1299-#
1300-# and/or
1301-#
1302-# provides:
1303-# (...)
1304-# local-monitors:
1305-# interface: local-monitors
1306-# scope: container
1307-
1308-#
1309-# 2. Add the following to config.yaml
1310-#
1311-# nagios_context:
1312-# default: "juju"
1313-# type: string
1314-# description: |
1315-# Used by the nrpe subordinate charms.
1316-# A string that will be prepended to instance name to set the host name
1317-# in nagios. So for instance the hostname would be something like:
1318-# juju-myservice-0
1319-# If you're running multiple environments with the same services in them
1320-# this allows you to differentiate between them.
1321-#
1322-# 3. Add custom checks (Nagios plugins) to files/nrpe-external-master
1323-#
1324-# 4. Update your hooks.py with something like this:
1325-#
1326-# from charmsupport.nrpe import NRPE
1327-# (...)
1328-# def update_nrpe_config():
1329-# nrpe_compat = NRPE()
1330-# nrpe_compat.add_check(
1331-# shortname = "myservice",
1332-# description = "Check MyService",
1333-# check_cmd = "check_http -w 2 -c 10 http://localhost"
1334-# )
1335-# nrpe_compat.add_check(
1336-# "myservice_other",
1337-# "Check for widget failures",
1338-# check_cmd = "/srv/myapp/scripts/widget_check"
1339-# )
1340-# nrpe_compat.write()
1341-#
1342-# def config_changed():
1343-# (...)
1344-# update_nrpe_config()
1345-#
1346-# def nrpe_external_master_relation_changed():
1347-# update_nrpe_config()
1348-#
1349-# def local_monitors_relation_changed():
1350-# update_nrpe_config()
1351-#
1352-# 5. ln -s hooks.py nrpe-external-master-relation-changed
1353-# ln -s hooks.py local-monitors-relation-changed
1354-
1355-
1356-class CheckException(Exception):
1357- pass
1358-
1359-
1360-class Check(object):
1361- shortname_re = '[A-Za-z0-9-_]+$'
1362- service_template = ("""
1363-#---------------------------------------------------
1364-# This file is Juju managed
1365-#---------------------------------------------------
1366-define service {{
1367- use active-service
1368- host_name {nagios_hostname}
1369- service_description {nagios_hostname}[{shortname}] """
1370- """{description}
1371- check_command check_nrpe!{command}
1372- servicegroups {nagios_servicegroup}
1373-}}
1374-""")
1375-
1376- def __init__(self, shortname, description, check_cmd):
1377- super(Check, self).__init__()
1378- # XXX: could be better to calculate this from the service name
1379- if not re.match(self.shortname_re, shortname):
1380- raise CheckException("shortname must match {}".format(
1381- Check.shortname_re))
1382- self.shortname = shortname
1383- self.command = "check_{}".format(shortname)
1384- # Note: a set of invalid characters is defined by the
1385- # Nagios server config
1386- # The default is: illegal_object_name_chars=`~!$%^&*"|'<>?,()=
1387- self.description = description
1388- self.check_cmd = self._locate_cmd(check_cmd)
1389-
1390- def _locate_cmd(self, check_cmd):
1391- search_path = (
1392- '/',
1393- os.path.join(os.environ['CHARM_DIR'],
1394- 'files/nrpe-external-master'),
1395- '/usr/lib/nagios/plugins',
1396- )
1397- parts = shlex.split(check_cmd)
1398- for path in search_path:
1399- if os.path.exists(os.path.join(path, parts[0])):
1400- command = os.path.join(path, parts[0])
1401- if len(parts) > 1:
1402- command += " " + " ".join(parts[1:])
1403- return command
1404- log('Check command not found: {}'.format(parts[0]))
1405- return ''
1406-
1407- def write(self, nagios_context, hostname):
1408- nrpe_check_file = '/etc/nagios/nrpe.d/{}.cfg'.format(
1409- self.command)
1410- with open(nrpe_check_file, 'w') as nrpe_check_config:
1411- nrpe_check_config.write("# check {}\n".format(self.shortname))
1412- nrpe_check_config.write("command[{}]={}\n".format(
1413- self.command, self.check_cmd))
1414-
1415- if not os.path.exists(NRPE.nagios_exportdir):
1416- log('Not writing service config as {} is not accessible'.format(
1417- NRPE.nagios_exportdir))
1418- else:
1419- self.write_service_config(nagios_context, hostname)
1420-
1421- def write_service_config(self, nagios_context, hostname):
1422- for f in os.listdir(NRPE.nagios_exportdir):
1423- if re.search('.*{}.cfg'.format(self.command), f):
1424- os.remove(os.path.join(NRPE.nagios_exportdir, f))
1425-
1426- templ_vars = {
1427- 'nagios_hostname': hostname,
1428- 'nagios_servicegroup': nagios_context,
1429- 'description': self.description,
1430- 'shortname': self.shortname,
1431- 'command': self.command,
1432- }
1433- nrpe_service_text = Check.service_template.format(**templ_vars)
1434- nrpe_service_file = '{}/service__{}_{}.cfg'.format(
1435- NRPE.nagios_exportdir, hostname, self.command)
1436- with open(nrpe_service_file, 'w') as nrpe_service_config:
1437- nrpe_service_config.write(str(nrpe_service_text))
1438-
1439- def run(self):
1440- subprocess.call(self.check_cmd)
1441-
1442-
1443-class NRPE(object):
1444- nagios_logdir = '/var/log/nagios'
1445- nagios_exportdir = '/var/lib/nagios/export'
1446- nrpe_confdir = '/etc/nagios/nrpe.d'
1447-
1448- def __init__(self):
1449- super(NRPE, self).__init__()
1450- self.config = config()
1451- self.nagios_context = self.config['nagios_context']
1452- self.unit_name = local_unit().replace('/', '-')
1453- self.hostname = "{}-{}".format(self.nagios_context, self.unit_name)
1454- self.checks = []
1455-
1456- def add_check(self, *args, **kwargs):
1457- self.checks.append(Check(*args, **kwargs))
1458-
1459- def write(self):
1460- try:
1461- nagios_uid = pwd.getpwnam('nagios').pw_uid
1462- nagios_gid = grp.getgrnam('nagios').gr_gid
1463- except:
1464- log("Nagios user not set up, nrpe checks not updated")
1465- return
1466-
1467- if not os.path.exists(NRPE.nagios_logdir):
1468- os.mkdir(NRPE.nagios_logdir)
1469- os.chown(NRPE.nagios_logdir, nagios_uid, nagios_gid)
1470-
1471- nrpe_monitors = {}
1472- monitors = {"monitors": {"remote": {"nrpe": nrpe_monitors}}}
1473- for nrpecheck in self.checks:
1474- nrpecheck.write(self.nagios_context, self.hostname)
1475- nrpe_monitors[nrpecheck.shortname] = {
1476- "command": nrpecheck.command,
1477- }
1478-
1479- service('restart', 'nagios-nrpe-server')
1480-
1481- for rid in relation_ids("local-monitors"):
1482- relation_set(relation_id=rid, monitors=yaml.dump(monitors))
1483
1484=== removed file 'hooks/install.d/charmhelpers/contrib/charmsupport/volumes.py'
1485--- hooks/install.d/charmhelpers/contrib/charmsupport/volumes.py 2015-08-19 19:17:42 +0000
1486+++ hooks/install.d/charmhelpers/contrib/charmsupport/volumes.py 1970-01-01 00:00:00 +0000
1487@@ -1,156 +0,0 @@
1488-'''
1489-Functions for managing volumes in juju units. One volume is supported per unit.
1490-Subordinates may have their own storage, provided it is on its own partition.
1491-
1492-Configuration stanzas:
1493- volume-ephemeral:
1494- type: boolean
1495- default: true
1496- description: >
1497-      If false, a volume is mounted as specified in "volume-map"
1498- If true, ephemeral storage will be used, meaning that log data
1499- will only exist as long as the machine. YOU HAVE BEEN WARNED.
1500- volume-map:
1501- type: string
1502- default: {}
1503- description: >
1504-      YAML map of units to device names, e.g.:
1505- "{ rsyslog/0: /dev/vdb, rsyslog/1: /dev/vdb }"
1506- Service units will raise a configure-error if volume-ephemeral
1507-      is 'false' and no volume-map value is set. Use 'juju set' to set a
1508- value and 'juju resolved' to complete configuration.
1509-
1510-Usage:
1511- from charmsupport.volumes import configure_volume, VolumeConfigurationError
1512- from charmsupport.hookenv import log, ERROR
1513-    def pre_mount_hook():
1514- stop_service('myservice')
1515- def post_mount_hook():
1516- start_service('myservice')
1517-
1518- if __name__ == '__main__':
1519- try:
1520- configure_volume(before_change=pre_mount_hook,
1521- after_change=post_mount_hook)
1522- except VolumeConfigurationError:
1523- log('Storage could not be configured', ERROR)
1524-'''
1525-
1526-# XXX: Known limitations
1527-# - fstab is neither consulted nor updated
1528-
1529-import os
1530-from charmhelpers.core import hookenv
1531-from charmhelpers.core import host
1532-import yaml
1533-
1534-
1535-MOUNT_BASE = '/srv/juju/volumes'
1536-
1537-
1538-class VolumeConfigurationError(Exception):
1539- '''Volume configuration data is missing or invalid'''
1540- pass
1541-
1542-
1543-def get_config():
1544- '''Gather and sanity-check volume configuration data'''
1545- volume_config = {}
1546- config = hookenv.config()
1547-
1548- errors = False
1549-
1550- if config.get('volume-ephemeral') in (True, 'True', 'true', 'Yes', 'yes'):
1551- volume_config['ephemeral'] = True
1552- else:
1553- volume_config['ephemeral'] = False
1554-
1555-    volume_map = {}
1556-    try:
1557-        volume_map = yaml.safe_load(config.get('volume-map', '{}'))
1558-    except yaml.YAMLError as e:
1559-        hookenv.log("Error parsing YAML volume-map: {}".format(e), hookenv.ERROR)
1560-        errors = True
1561- if volume_map is None:
1562- # probably an empty string
1563- volume_map = {}
1564- elif not isinstance(volume_map, dict):
1565- hookenv.log("Volume-map should be a dictionary, not {}".format(
1566- type(volume_map)))
1567- errors = True
1568-
1569- volume_config['device'] = volume_map.get(os.environ['JUJU_UNIT_NAME'])
1570- if volume_config['device'] and volume_config['ephemeral']:
1571- # asked for ephemeral storage but also defined a volume ID
1572- hookenv.log('A volume is defined for this unit, but ephemeral '
1573- 'storage was requested', hookenv.ERROR)
1574- errors = True
1575- elif not volume_config['device'] and not volume_config['ephemeral']:
1576- # asked for permanent storage but did not define volume ID
1577- hookenv.log('Ephemeral storage was requested, but there is no volume '
1578- 'defined for this unit.', hookenv.ERROR)
1579- errors = True
1580-
1581- unit_mount_name = hookenv.local_unit().replace('/', '-')
1582- volume_config['mountpoint'] = os.path.join(MOUNT_BASE, unit_mount_name)
1583-
1584- if errors:
1585- return None
1586- return volume_config
1587-
1588-
1589-def mount_volume(config):
1590- if os.path.exists(config['mountpoint']):
1591- if not os.path.isdir(config['mountpoint']):
1592- hookenv.log('Not a directory: {}'.format(config['mountpoint']))
1593- raise VolumeConfigurationError()
1594- else:
1595- host.mkdir(config['mountpoint'])
1596- if os.path.ismount(config['mountpoint']):
1597- unmount_volume(config)
1598- if not host.mount(config['device'], config['mountpoint'], persist=True):
1599- raise VolumeConfigurationError()
1600-
1601-
1602-def unmount_volume(config):
1603- if os.path.ismount(config['mountpoint']):
1604- if not host.umount(config['mountpoint'], persist=True):
1605- raise VolumeConfigurationError()
1606-
1607-
1608-def managed_mounts():
1609- '''List of all mounted managed volumes'''
1610- return filter(lambda mount: mount[0].startswith(MOUNT_BASE), host.mounts())
1611-
1612-
1613-def configure_volume(before_change=lambda: None, after_change=lambda: None):
1614- '''Set up storage (or don't) according to the charm's volume configuration.
1615- Returns the mount point or "ephemeral". before_change and after_change
1616- are optional functions to be called if the volume configuration changes.
1617- '''
1618-
1619- config = get_config()
1620- if not config:
1621- hookenv.log('Failed to read volume configuration', hookenv.CRITICAL)
1622- raise VolumeConfigurationError()
1623-
1624- if config['ephemeral']:
1625- if os.path.ismount(config['mountpoint']):
1626- before_change()
1627- unmount_volume(config)
1628- after_change()
1629- return 'ephemeral'
1630- else:
1631- # persistent storage
1632- if os.path.ismount(config['mountpoint']):
1633- mounts = dict(managed_mounts())
1634- if mounts.get(config['mountpoint']) != config['device']:
1635- before_change()
1636- unmount_volume(config)
1637- mount_volume(config)
1638- after_change()
1639- else:
1640- before_change()
1641- mount_volume(config)
1642- after_change()
1643- return config['mountpoint']
1644
1645=== removed directory 'hooks/install.d/charmhelpers/contrib/hahelpers'
1646=== removed file 'hooks/install.d/charmhelpers/contrib/hahelpers/__init__.py'
1647=== removed file 'hooks/install.d/charmhelpers/contrib/hahelpers/apache.py'
1648--- hooks/install.d/charmhelpers/contrib/hahelpers/apache.py 2015-08-19 19:17:42 +0000
1649+++ hooks/install.d/charmhelpers/contrib/hahelpers/apache.py 1970-01-01 00:00:00 +0000
1650@@ -1,58 +0,0 @@
1651-#
1652-# Copyright 2012 Canonical Ltd.
1653-#
1654-# This file is sourced from lp:openstack-charm-helpers
1655-#
1656-# Authors:
1657-# James Page <james.page@ubuntu.com>
1658-# Adam Gandelman <adamg@ubuntu.com>
1659-#
1660-
1661-import subprocess
1662-
1663-from charmhelpers.core.hookenv import (
1664- config as config_get,
1665- relation_get,
1666- relation_ids,
1667- related_units as relation_list,
1668- log,
1669- INFO,
1670-)
1671-
1672-
1673-def get_cert():
1674- cert = config_get('ssl_cert')
1675- key = config_get('ssl_key')
1676- if not (cert and key):
1677- log("Inspecting identity-service relations for SSL certificate.",
1678- level=INFO)
1679- cert = key = None
1680- for r_id in relation_ids('identity-service'):
1681- for unit in relation_list(r_id):
1682- if not cert:
1683- cert = relation_get('ssl_cert',
1684- rid=r_id, unit=unit)
1685- if not key:
1686- key = relation_get('ssl_key',
1687- rid=r_id, unit=unit)
1688- return (cert, key)
1689-
1690-
1691-def get_ca_cert():
1692- ca_cert = None
1693- log("Inspecting identity-service relations for CA SSL certificate.",
1694- level=INFO)
1695- for r_id in relation_ids('identity-service'):
1696- for unit in relation_list(r_id):
1697- if not ca_cert:
1698- ca_cert = relation_get('ca_cert',
1699- rid=r_id, unit=unit)
1700- return ca_cert
1701-
1702-
1703-def install_ca_cert(ca_cert):
1704- if ca_cert:
1705- with open('/usr/local/share/ca-certificates/keystone_juju_ca_cert.crt',
1706- 'w') as crt:
1707- crt.write(ca_cert)
1708- subprocess.check_call(['update-ca-certificates', '--fresh'])
1709
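For reference, a sketch of how the certificate helpers above were typically driven from a charm hook; the output paths are assumptions, and relation data is assumed to be base64-encoded as the openstack context code later in this diff expects.

# Illustrative sketch only; paths and base64 handling are assumptions.
from base64 import b64decode

from charmhelpers.contrib.hahelpers.apache import get_ca_cert, get_cert, install_ca_cert

cert, key = get_cert()  # charm config first, then identity-service relation data
if cert and key:
    with open('/etc/apache2/ssl/cert', 'w') as out:
        out.write(b64decode(cert))
    with open('/etc/apache2/ssl/key', 'w') as out:
        out.write(b64decode(key))

ca_cert = get_ca_cert()
if ca_cert:
    install_ca_cert(b64decode(ca_cert))  # refreshes the system CA bundle
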
1710=== removed file 'hooks/install.d/charmhelpers/contrib/hahelpers/ceph.py'
1711--- hooks/install.d/charmhelpers/contrib/hahelpers/ceph.py 2015-08-19 19:17:42 +0000
1712+++ hooks/install.d/charmhelpers/contrib/hahelpers/ceph.py 1970-01-01 00:00:00 +0000
1713@@ -1,294 +0,0 @@
1714-#
1715-# Copyright 2012 Canonical Ltd.
1716-#
1717-# This file is sourced from lp:openstack-charm-helpers
1718-#
1719-# Authors:
1720-# James Page <james.page@ubuntu.com>
1721-# Adam Gandelman <adamg@ubuntu.com>
1722-#
1723-
1724-import commands
1725-import os
1726-import shutil
1727-import time
1728-
1729-from subprocess import (
1730- check_call,
1731- check_output,
1732- CalledProcessError
1733-)
1734-
1735-from charmhelpers.core.hookenv import (
1736- relation_get,
1737- relation_ids,
1738- related_units,
1739- log,
1740- INFO,
1741- ERROR
1742-)
1743-
1744-from charmhelpers.fetch import (
1745- apt_install,
1746-)
1747-
1748-from charmhelpers.core.host import (
1749- mount,
1750- mounts,
1751- service_start,
1752- service_stop,
1753- umount,
1754-)
1755-
1756-KEYRING = '/etc/ceph/ceph.client.%s.keyring'
1757-KEYFILE = '/etc/ceph/ceph.client.%s.key'
1758-
1759-CEPH_CONF = """[global]
1760- auth supported = %(auth)s
1761- keyring = %(keyring)s
1762- mon host = %(mon_hosts)s
1763-"""
1764-
1765-
1766-def running(service):
1767- # this local util can be dropped as soon the following branch lands
1768- # in lp:charm-helpers
1769- # https://code.launchpad.net/~gandelman-a/charm-helpers/service_running/
1770- try:
1771- output = check_output(['service', service, 'status'])
1772- except CalledProcessError:
1773- return False
1774- else:
1775- if ("start/running" in output or "is running" in output):
1776- return True
1777- else:
1778- return False
1779-
1780-
1781-def install():
1782- ceph_dir = "/etc/ceph"
1783- if not os.path.isdir(ceph_dir):
1784- os.mkdir(ceph_dir)
1785- apt_install('ceph-common', fatal=True)
1786-
1787-
1788-def rbd_exists(service, pool, rbd_img):
1789- (rc, out) = commands.getstatusoutput('rbd list --id %s --pool %s' %
1790- (service, pool))
1791- return rbd_img in out
1792-
1793-
1794-def create_rbd_image(service, pool, image, sizemb):
1795- cmd = [
1796- 'rbd',
1797- 'create',
1798- image,
1799- '--size',
1800- str(sizemb),
1801- '--id',
1802- service,
1803- '--pool',
1804- pool
1805- ]
1806- check_call(cmd)
1807-
1808-
1809-def pool_exists(service, name):
1810- (rc, out) = commands.getstatusoutput("rados --id %s lspools" % service)
1811- return name in out
1812-
1813-
1814-def create_pool(service, name):
1815- cmd = [
1816- 'rados',
1817- '--id',
1818- service,
1819- 'mkpool',
1820- name
1821- ]
1822- check_call(cmd)
1823-
1824-
1825-def keyfile_path(service):
1826- return KEYFILE % service
1827-
1828-
1829-def keyring_path(service):
1830- return KEYRING % service
1831-
1832-
1833-def create_keyring(service, key):
1834- keyring = keyring_path(service)
1835- if os.path.exists(keyring):
1836- log('ceph: Keyring exists at %s.' % keyring, level=INFO)
1837- cmd = [
1838- 'ceph-authtool',
1839- keyring,
1840- '--create-keyring',
1841- '--name=client.%s' % service,
1842- '--add-key=%s' % key
1843- ]
1844- check_call(cmd)
1845- log('ceph: Created new ring at %s.' % keyring, level=INFO)
1846-
1847-
1848-def create_key_file(service, key):
1849- # create a file containing the key
1850- keyfile = keyfile_path(service)
1851- if os.path.exists(keyfile):
1852- log('ceph: Keyfile exists at %s.' % keyfile, level=INFO)
1853- fd = open(keyfile, 'w')
1854- fd.write(key)
1855- fd.close()
1856- log('ceph: Created new keyfile at %s.' % keyfile, level=INFO)
1857-
1858-
1859-def get_ceph_nodes():
1860- hosts = []
1861- for r_id in relation_ids('ceph'):
1862- for unit in related_units(r_id):
1863- hosts.append(relation_get('private-address', unit=unit, rid=r_id))
1864- return hosts
1865-
1866-
1867-def configure(service, key, auth):
1868- create_keyring(service, key)
1869- create_key_file(service, key)
1870- hosts = get_ceph_nodes()
1871- mon_hosts = ",".join(map(str, hosts))
1872- keyring = keyring_path(service)
1873- with open('/etc/ceph/ceph.conf', 'w') as ceph_conf:
1874- ceph_conf.write(CEPH_CONF % locals())
1875- modprobe_kernel_module('rbd')
1876-
1877-
1878-def image_mapped(image_name):
1879- (rc, out) = commands.getstatusoutput('rbd showmapped')
1880- return image_name in out
1881-
1882-
1883-def map_block_storage(service, pool, image):
1884- cmd = [
1885- 'rbd',
1886- 'map',
1887- '%s/%s' % (pool, image),
1888- '--user',
1889- service,
1890- '--secret',
1891- keyfile_path(service),
1892- ]
1893- check_call(cmd)
1894-
1895-
1896-def filesystem_mounted(fs):
1897- return fs in [f for m, f in mounts()]
1898-
1899-
1900-def make_filesystem(blk_device, fstype='ext4', timeout=10):
1901- count = 0
1902- e_noent = os.errno.ENOENT
1903- while not os.path.exists(blk_device):
1904- if count >= timeout:
1905- log('ceph: gave up waiting on block device %s' % blk_device,
1906- level=ERROR)
1907- raise IOError(e_noent, os.strerror(e_noent), blk_device)
1908- log('ceph: waiting for block device %s to appear' % blk_device,
1909- level=INFO)
1910- count += 1
1911- time.sleep(1)
1912- else:
1913- log('ceph: Formatting block device %s as filesystem %s.' %
1914- (blk_device, fstype), level=INFO)
1915- check_call(['mkfs', '-t', fstype, blk_device])
1916-
1917-
1918-def place_data_on_ceph(service, blk_device, data_src_dst, fstype='ext4'):
1919- # mount block device into /mnt
1920- mount(blk_device, '/mnt')
1921-
1922- # copy data to /mnt
1923- try:
1924- copy_files(data_src_dst, '/mnt')
1925-    except Exception:
1926- pass
1927-
1928- # umount block device
1929- umount('/mnt')
1930-
1931- _dir = os.stat(data_src_dst)
1932- uid = _dir.st_uid
1933- gid = _dir.st_gid
1934-
1935- # re-mount where the data should originally be
1936- mount(blk_device, data_src_dst, persist=True)
1937-
1938- # ensure original ownership of new mount.
1939- cmd = ['chown', '-R', '%s:%s' % (uid, gid), data_src_dst]
1940- check_call(cmd)
1941-
1942-
1943-# TODO: re-use
1944-def modprobe_kernel_module(module):
1945- log('ceph: Loading kernel module', level=INFO)
1946- cmd = ['modprobe', module]
1947- check_call(cmd)
1948- cmd = 'echo %s >> /etc/modules' % module
1949- check_call(cmd, shell=True)
1950-
1951-
1952-def copy_files(src, dst, symlinks=False, ignore=None):
1953- for item in os.listdir(src):
1954- s = os.path.join(src, item)
1955- d = os.path.join(dst, item)
1956- if os.path.isdir(s):
1957- shutil.copytree(s, d, symlinks, ignore)
1958- else:
1959- shutil.copy2(s, d)
1960-
1961-
1962-def ensure_ceph_storage(service, pool, rbd_img, sizemb, mount_point,
1963- blk_device, fstype, system_services=[]):
1964- """
1965- To be called from the current cluster leader.
1966- Ensures given pool and RBD image exists, is mapped to a block device,
1967- and the device is formatted and mounted at the given mount_point.
1968-
1969- If formatting a device for the first time, data existing at mount_point
1970- will be migrated to the RBD device before being remounted.
1971-
1972- All services listed in system_services will be stopped prior to data
1973- migration and restarted when complete.
1974- """
1975- # Ensure pool, RBD image, RBD mappings are in place.
1976- if not pool_exists(service, pool):
1977- log('ceph: Creating new pool %s.' % pool, level=INFO)
1978- create_pool(service, pool)
1979-
1980- if not rbd_exists(service, pool, rbd_img):
1981- log('ceph: Creating RBD image (%s).' % rbd_img, level=INFO)
1982- create_rbd_image(service, pool, rbd_img, sizemb)
1983-
1984- if not image_mapped(rbd_img):
1985- log('ceph: Mapping RBD Image as a Block Device.', level=INFO)
1986- map_block_storage(service, pool, rbd_img)
1987-
1988- # make file system
1989- # TODO: What happens if for whatever reason this is run again and
1990- # the data is already in the rbd device and/or is mounted??
1991- # When it is mounted already, it will fail to make the fs
1992- # XXX: This is really sketchy! Need to at least add an fstab entry
1993- # otherwise this hook will blow away existing data if its executed
1994- # after a reboot.
1995- if not filesystem_mounted(mount_point):
1996- make_filesystem(blk_device, fstype)
1997-
1998- for svc in system_services:
1999- if running(svc):
2000- log('Stopping services %s prior to migrating data.' % svc,
2001- level=INFO)
2002- service_stop(svc)
2003-
2004- place_data_on_ceph(service, blk_device, mount_point, fstype)
2005-
2006- for svc in system_services:
2007- service_start(svc)
2008
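For reference, a sketch of a leader-only call into the ensure_ceph_storage helper removed above, following the signature shown in this diff; every value below is a placeholder.

# Illustrative sketch only; all names, sizes and paths are placeholders.
from charmhelpers.contrib.hahelpers.ceph import ensure_ceph_storage

ensure_ceph_storage(
    service='cinder',               # ceph client name used for --id/--user
    pool='cinder',
    rbd_img='cinder-data',
    sizemb=10240,
    mount_point='/var/lib/cinder',
    blk_device='/dev/rbd1',
    fstype='ext4',
    system_services=['cinder-volume'],
)
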
2009=== removed file 'hooks/install.d/charmhelpers/contrib/hahelpers/cluster.py'
2010--- hooks/install.d/charmhelpers/contrib/hahelpers/cluster.py 2015-08-19 19:17:42 +0000
2011+++ hooks/install.d/charmhelpers/contrib/hahelpers/cluster.py 1970-01-01 00:00:00 +0000
2012@@ -1,181 +0,0 @@
2013-#
2014-# Copyright 2012 Canonical Ltd.
2015-#
2016-# Authors:
2017-# James Page <james.page@ubuntu.com>
2018-# Adam Gandelman <adamg@ubuntu.com>
2019-#
2020-
2021-import subprocess
2022-import os
2023-
2024-from socket import gethostname as get_unit_hostname
2025-
2026-from charmhelpers.core.hookenv import (
2027- log,
2028- relation_ids,
2029- related_units as relation_list,
2030- relation_get,
2031- config as config_get,
2032- INFO,
2033- ERROR,
2034- unit_get,
2035-)
2036-
2037-
2038-class HAIncompleteConfig(Exception):
2039- pass
2040-
2041-
2042-def is_clustered():
2043- for r_id in (relation_ids('ha') or []):
2044- for unit in (relation_list(r_id) or []):
2045- clustered = relation_get('clustered',
2046- rid=r_id,
2047- unit=unit)
2048- if clustered:
2049- return True
2050- return False
2051-
2052-
2053-def is_leader(resource):
2054- cmd = [
2055- "crm", "resource",
2056- "show", resource
2057- ]
2058- try:
2059- status = subprocess.check_output(cmd)
2060- except subprocess.CalledProcessError:
2061- return False
2062- else:
2063- if get_unit_hostname() in status:
2064- return True
2065- else:
2066- return False
2067-
2068-
2069-def peer_units():
2070- peers = []
2071- for r_id in (relation_ids('cluster') or []):
2072- for unit in (relation_list(r_id) or []):
2073- peers.append(unit)
2074- return peers
2075-
2076-
2077-def oldest_peer(peers):
2078- local_unit_no = int(os.getenv('JUJU_UNIT_NAME').split('/')[1])
2079- for peer in peers:
2080- remote_unit_no = int(peer.split('/')[1])
2081- if remote_unit_no < local_unit_no:
2082- return False
2083- return True
2084-
2085-
2086-def eligible_leader(resource):
2087- if is_clustered():
2088- if not is_leader(resource):
2089- log('Deferring action to CRM leader.', level=INFO)
2090- return False
2091- else:
2092- peers = peer_units()
2093- if peers and not oldest_peer(peers):
2094- log('Deferring action to oldest service unit.', level=INFO)
2095- return False
2096- return True
2097-
2098-
2099-def https():
2100- '''
2101- Determines whether enough data has been provided in configuration
2102- or relation data to configure HTTPS
2103- .
2104- returns: boolean
2105- '''
2106- if config_get('use-https') == "yes":
2107- return True
2108- if config_get('ssl_cert') and config_get('ssl_key'):
2109- return True
2110- for r_id in relation_ids('identity-service'):
2111- for unit in relation_list(r_id):
2112- if None not in [
2113- relation_get('https_keystone', rid=r_id, unit=unit),
2114- relation_get('ssl_cert', rid=r_id, unit=unit),
2115- relation_get('ssl_key', rid=r_id, unit=unit),
2116- relation_get('ca_cert', rid=r_id, unit=unit),
2117- ]:
2118- return True
2119- return False
2120-
2121-
2122-def determine_api_port(public_port):
2123- '''
2124- Determine correct API server listening port based on
2125- existence of HTTPS reverse proxy and/or haproxy.
2126-
2127- public_port: int: standard public port for given service
2128-
2129- returns: int: the correct listening port for the API service
2130- '''
2131- i = 0
2132- if len(peer_units()) > 0 or is_clustered():
2133- i += 1
2134- if https():
2135- i += 1
2136- return public_port - (i * 10)
2137-
2138-
2139-def determine_haproxy_port(public_port):
2140- '''
2141- Description: Determine correct proxy listening port based on public IP +
2142- existence of HTTPS reverse proxy.
2143-
2144- public_port: int: standard public port for given service
2145-
2146- returns: int: the correct listening port for the HAProxy service
2147- '''
2148- i = 0
2149- if https():
2150- i += 1
2151- return public_port - (i * 10)
2152-
2153-
2154-def get_hacluster_config():
2155- '''
2156- Obtains all relevant configuration from charm configuration required
2157- for initiating a relation to hacluster:
2158-
2159- ha-bindiface, ha-mcastport, vip, vip_iface, vip_cidr
2160-
2161- returns: dict: A dict containing settings keyed by setting name.
2162- raises: HAIncompleteConfig if settings are missing.
2163- '''
2164- settings = ['ha-bindiface', 'ha-mcastport', 'vip', 'vip_iface', 'vip_cidr']
2165- conf = {}
2166- for setting in settings:
2167- conf[setting] = config_get(setting)
2168-    missing = [s for s, v in conf.iteritems()
2169-               if v is None]
2170- if missing:
2171- log('Insufficient config data to configure hacluster.', level=ERROR)
2172- raise HAIncompleteConfig
2173- return conf
2174-
2175-
2176-def canonical_url(configs, vip_setting='vip'):
2177- '''
2178- Returns the correct HTTP URL to this host given the state of HTTPS
2179- configuration and hacluster.
2180-
2181-    :configs : OSTemplateRenderer: A config templating object to inspect for
2182- a complete https context.
2183- :vip_setting: str: Setting in charm config that specifies
2184- VIP address.
2185- '''
2186- scheme = 'http'
2187- if 'https' in configs.complete_contexts():
2188- scheme = 'https'
2189- if is_clustered():
2190- addr = config_get(vip_setting)
2191- else:
2192- addr = unit_get('private-address')
2193- return '%s://%s' % (scheme, addr)
2194
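For reference, a sketch combining the leadership and port helpers removed above, as an OpenStack charm hook might have done; the CRM resource name and public port are assumptions.

# Illustrative sketch only; the CRM resource name and port are assumptions.
from charmhelpers.contrib.hahelpers.cluster import (
    determine_api_port,
    eligible_leader,
    https,
)

PUBLIC_PORT = 8776

if eligible_leader('res_example_vip'):
    listen_port = determine_api_port(PUBLIC_PORT)
    scheme = 'https' if https() else 'http'
    # render the service configuration using listen_port and scheme here
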
2195=== removed directory 'hooks/install.d/charmhelpers/contrib/jujugui'
2196=== removed file 'hooks/install.d/charmhelpers/contrib/jujugui/IMPORT'
2197--- hooks/install.d/charmhelpers/contrib/jujugui/IMPORT 2015-08-19 19:17:42 +0000
2198+++ hooks/install.d/charmhelpers/contrib/jujugui/IMPORT 1970-01-01 00:00:00 +0000
2199@@ -1,4 +0,0 @@
2200-Source: lp:charms/juju-gui
2201-
2202-juju-gui/hooks/utils.py -> charm-helpers/charmhelpers/contrib/jujugui/utils.py
2203-juju-gui/tests/test_utils.py -> charm-helpers/tests/contrib/jujugui/test_utils.py
2204
2205=== removed file 'hooks/install.d/charmhelpers/contrib/jujugui/__init__.py'
2206=== removed file 'hooks/install.d/charmhelpers/contrib/jujugui/utils.py'
2207--- hooks/install.d/charmhelpers/contrib/jujugui/utils.py 2015-08-19 19:17:42 +0000
2208+++ hooks/install.d/charmhelpers/contrib/jujugui/utils.py 1970-01-01 00:00:00 +0000
2209@@ -1,602 +0,0 @@
2210-"""Juju GUI charm utilities."""
2211-
2212-__all__ = [
2213- 'AGENT',
2214- 'APACHE',
2215- 'API_PORT',
2216- 'CURRENT_DIR',
2217- 'HAPROXY',
2218- 'IMPROV',
2219- 'JUJU_DIR',
2220- 'JUJU_GUI_DIR',
2221- 'JUJU_GUI_SITE',
2222- 'JUJU_PEM',
2223- 'WEB_PORT',
2224- 'bzr_checkout',
2225- 'chain',
2226- 'cmd_log',
2227- 'fetch_api',
2228- 'fetch_gui',
2229- 'find_missing_packages',
2230- 'first_path_in_dir',
2231- 'get_api_address',
2232- 'get_npm_cache_archive_url',
2233- 'get_release_file_url',
2234- 'get_staging_dependencies',
2235- 'get_zookeeper_address',
2236- 'legacy_juju',
2237- 'log_hook',
2238- 'merge',
2239- 'parse_source',
2240- 'prime_npm_cache',
2241- 'render_to_file',
2242- 'save_or_create_certificates',
2243- 'setup_apache',
2244- 'setup_gui',
2245- 'start_agent',
2246- 'start_gui',
2247- 'start_improv',
2248- 'write_apache_config',
2249-]
2250-
2251-from contextlib import contextmanager
2252-import errno
2253-import json
2254-import os
2255-import logging
2256-import shutil
2257-from subprocess import CalledProcessError
2258-import tempfile
2259-from urlparse import urlparse
2260-
2261-import apt
2262-import tempita
2263-
2264-from launchpadlib.launchpad import Launchpad
2265-from shelltoolbox import (
2266- Serializer,
2267- apt_get_install,
2268- command,
2269- environ,
2270- install_extra_repositories,
2271- run,
2272- script_name,
2273- search_file,
2274- su,
2275-)
2276-from charmhelpers.core.host import (
2277- service_start,
2278-)
2279-from charmhelpers.core.hookenv import (
2280- log,
2281- config,
2282- unit_get,
2283-)
2284-
2285-
2286-AGENT = 'juju-api-agent'
2287-APACHE = 'apache2'
2288-IMPROV = 'juju-api-improv'
2289-HAPROXY = 'haproxy'
2290-
2291-API_PORT = 8080
2292-WEB_PORT = 8000
2293-
2294-CURRENT_DIR = os.getcwd()
2295-JUJU_DIR = os.path.join(CURRENT_DIR, 'juju')
2296-JUJU_GUI_DIR = os.path.join(CURRENT_DIR, 'juju-gui')
2297-JUJU_GUI_SITE = '/etc/apache2/sites-available/juju-gui'
2298-JUJU_GUI_PORTS = '/etc/apache2/ports.conf'
2299-JUJU_PEM = 'juju.includes-private-key.pem'
2300-BUILD_REPOSITORIES = ('ppa:chris-lea/node.js-legacy',)
2301-DEB_BUILD_DEPENDENCIES = (
2302- 'bzr', 'imagemagick', 'make', 'nodejs', 'npm',
2303-)
2304-DEB_STAGE_DEPENDENCIES = (
2305- 'zookeeper',
2306-)
2307-
2308-
2309-# Store the configuration from on invocation to the next.
2310-config_json = Serializer('/tmp/config.json')
2311-# Bazaar checkout command.
2312-bzr_checkout = command('bzr', 'co', '--lightweight')
2313-# Whether or not the charm is deployed using juju-core.
2314-# If juju-core has been used to deploy the charm, an agent.conf file must
2315-# be present in the charm parent directory.
2316-legacy_juju = lambda: not os.path.exists(
2317- os.path.join(CURRENT_DIR, '..', 'agent.conf'))
2318-
2319-
2320-def _get_build_dependencies():
2321- """Install deb dependencies for building."""
2322- log('Installing build dependencies.')
2323- cmd_log(install_extra_repositories(*BUILD_REPOSITORIES))
2324- cmd_log(apt_get_install(*DEB_BUILD_DEPENDENCIES))
2325-
2326-
2327-def get_api_address(unit_dir):
2328- """Return the Juju API address stored in the uniter agent.conf file."""
2329- import yaml # python-yaml is only installed if juju-core is used.
2330- # XXX 2013-03-27 frankban bug=1161443:
2331- # currently the uniter agent.conf file does not include the API
2332- # address. For now retrieve it from the machine agent file.
2333- base_dir = os.path.abspath(os.path.join(unit_dir, '..'))
2334- for dirname in os.listdir(base_dir):
2335- if dirname.startswith('machine-'):
2336- agent_conf = os.path.join(base_dir, dirname, 'agent.conf')
2337- break
2338- else:
2339- raise IOError('Juju agent configuration file not found.')
2340- contents = yaml.load(open(agent_conf))
2341- return contents['apiinfo']['addrs'][0]
2342-
2343-
2344-def get_staging_dependencies():
2345- """Install deb dependencies for the stage (improv) environment."""
2346- log('Installing stage dependencies.')
2347- cmd_log(apt_get_install(*DEB_STAGE_DEPENDENCIES))
2348-
2349-
2350-def first_path_in_dir(directory):
2351- """Return the full path of the first file/dir in *directory*."""
2352- return os.path.join(directory, os.listdir(directory)[0])
2353-
2354-
2355-def _get_by_attr(collection, attr, value):
2356- """Return the first item in collection having attr == value.
2357-
2358- Return None if the item is not found.
2359- """
2360- for item in collection:
2361- if getattr(item, attr) == value:
2362- return item
2363-
2364-
2365-def get_release_file_url(project, series_name, release_version):
2366- """Return the URL of the release file hosted in Launchpad.
2367-
2368- The returned URL points to a release file for the given project, series
2369- name and release version.
2370- The argument *project* is a project object as returned by launchpadlib.
2371- The arguments *series_name* and *release_version* are strings. If
2372- *release_version* is None, the URL of the latest release will be returned.
2373- """
2374- series = _get_by_attr(project.series, 'name', series_name)
2375- if series is None:
2376- raise ValueError('%r: series not found' % series_name)
2377- # Releases are returned by Launchpad in reverse date order.
2378- releases = list(series.releases)
2379- if not releases:
2380- raise ValueError('%r: series does not contain releases' % series_name)
2381- if release_version is not None:
2382- release = _get_by_attr(releases, 'version', release_version)
2383- if release is None:
2384- raise ValueError('%r: release not found' % release_version)
2385- releases = [release]
2386- for release in releases:
2387- for file_ in release.files:
2388- if str(file_).endswith('.tgz'):
2389- return file_.file_link
2390- raise ValueError('%r: file not found' % release_version)
2391-
2392-
2393-def get_zookeeper_address(agent_file_path):
2394- """Retrieve the Zookeeper address contained in the given *agent_file_path*.
2395-
2396- The *agent_file_path* is a path to a file containing a line similar to the
2397- following::
2398-
2399- env JUJU_ZOOKEEPER="address"
2400- """
2401- line = search_file('JUJU_ZOOKEEPER', agent_file_path).strip()
2402- return line.split('=')[1].strip('"')
2403-
2404-
2405-@contextmanager
2406-def log_hook():
2407- """Log when a hook starts and stops its execution.
2408-
2409- Also log to stdout possible CalledProcessError exceptions raised executing
2410- the hook.
2411- """
2412- script = script_name()
2413- log(">>> Entering {}".format(script))
2414- try:
2415- yield
2416- except CalledProcessError as err:
2417- log('Exception caught:')
2418- log(err.output)
2419- raise
2420- finally:
2421- log("<<< Exiting {}".format(script))
2422-
2423-
2424-def parse_source(source):
2425- """Parse the ``juju-gui-source`` option.
2426-
2427- Return a tuple of two elements representing info on how to deploy Juju GUI.
2428- Examples:
2429- - ('stable', None): latest stable release;
2430- - ('stable', '0.1.0'): stable release v0.1.0;
2431- - ('trunk', None): latest trunk release;
2432- - ('trunk', '0.1.0+build.1'): trunk release v0.1.0 bzr revision 1;
2433- - ('branch', 'lp:juju-gui'): release is made from a branch;
2434- - ('url', 'http://example.com/gui'): release from a downloaded file.
2435- """
2436- if source.startswith('url:'):
2437- source = source[4:]
2438- # Support file paths, including relative paths.
2439- if urlparse(source).scheme == '':
2440- if not source.startswith('/'):
2441- source = os.path.join(os.path.abspath(CURRENT_DIR), source)
2442- source = "file://%s" % source
2443- return 'url', source
2444- if source in ('stable', 'trunk'):
2445- return source, None
2446- if source.startswith('lp:') or source.startswith('http://'):
2447- return 'branch', source
2448- if 'build' in source:
2449- return 'trunk', source
2450- return 'stable', source
2451-
2452-
2453-def render_to_file(template_name, context, destination):
2454- """Render the given *template_name* into *destination* using *context*.
2455-
2456- The tempita template language is used to render contents
2457- (see http://pythonpaste.org/tempita/).
2458- The argument *template_name* is the name or path of the template file:
2459- it may be either a path relative to ``../config`` or an absolute path.
2460- The argument *destination* is a file path.
2461- The argument *context* is a dict-like object.
2462- """
2463- template_path = os.path.abspath(template_name)
2464- template = tempita.Template.from_filename(template_path)
2465- with open(destination, 'w') as stream:
2466- stream.write(template.substitute(context))
2467-
2468-
2469-results_log = None
2470-
2471-
2472-def _setupLogging():
2473- global results_log
2474- if results_log is not None:
2475- return
2476- cfg = config()
2477- logging.basicConfig(
2478- filename=cfg['command-log-file'],
2479- level=logging.INFO,
2480- format="%(asctime)s: %(name)s@%(levelname)s %(message)s")
2481- results_log = logging.getLogger('juju-gui')
2482-
2483-
2484-def cmd_log(results):
2485- global results_log
2486- if not results:
2487- return
2488- if results_log is None:
2489- _setupLogging()
2490- # Since 'results' may be multi-line output, start it on a separate line
2491- # from the logger timestamp, etc.
2492- results_log.info('\n' + results)
2493-
2494-
2495-def start_improv(staging_env, ssl_cert_path,
2496- config_path='/etc/init/juju-api-improv.conf'):
2497- """Start a simulated juju environment using ``improv.py``."""
2498- log('Setting up staging start up script.')
2499- context = {
2500- 'juju_dir': JUJU_DIR,
2501- 'keys': ssl_cert_path,
2502- 'port': API_PORT,
2503- 'staging_env': staging_env,
2504- }
2505- render_to_file('config/juju-api-improv.conf.template', context, config_path)
2506- log('Starting the staging backend.')
2507- with su('root'):
2508- service_start(IMPROV)
2509-
2510-
2511-def start_agent(
2512- ssl_cert_path, config_path='/etc/init/juju-api-agent.conf',
2513- read_only=False):
2514- """Start the Juju agent and connect to the current environment."""
2515- # Retrieve the Zookeeper address from the start up script.
2516- unit_dir = os.path.realpath(os.path.join(CURRENT_DIR, '..'))
2517- agent_file = '/etc/init/juju-{0}.conf'.format(os.path.basename(unit_dir))
2518- zookeeper = get_zookeeper_address(agent_file)
2519- log('Setting up API agent start up script.')
2520- context = {
2521- 'juju_dir': JUJU_DIR,
2522- 'keys': ssl_cert_path,
2523- 'port': API_PORT,
2524- 'zookeeper': zookeeper,
2525- 'read_only': read_only
2526- }
2527- render_to_file('config/juju-api-agent.conf.template', context, config_path)
2528- log('Starting API agent.')
2529- with su('root'):
2530- service_start(AGENT)
2531-
2532-
2533-def start_gui(
2534- console_enabled, login_help, readonly, in_staging, ssl_cert_path,
2535- charmworld_url, serve_tests, haproxy_path='/etc/haproxy/haproxy.cfg',
2536- config_js_path=None, secure=True, sandbox=False):
2537- """Set up and start the Juju GUI server."""
2538- with su('root'):
2539- run('chown', '-R', 'ubuntu:', JUJU_GUI_DIR)
2540- # XXX 2013-02-05 frankban bug=1116320:
2541- # External insecure resources are still loaded when testing in the
2542- # debug environment. For now, switch to the production environment if
2543- # the charm is configured to serve tests.
2544- if in_staging and not serve_tests:
2545- build_dirname = 'build-debug'
2546- else:
2547- build_dirname = 'build-prod'
2548- build_dir = os.path.join(JUJU_GUI_DIR, build_dirname)
2549- log('Generating the Juju GUI configuration file.')
2550- is_legacy_juju = legacy_juju()
2551- user, password = None, None
2552- if (is_legacy_juju and in_staging) or sandbox:
2553- user, password = 'admin', 'admin'
2554- else:
2555- user, password = None, None
2556-
2557- api_backend = 'python' if is_legacy_juju else 'go'
2558- if secure:
2559- protocol = 'wss'
2560- else:
2561- log('Running in insecure mode! Port 80 will serve unencrypted.')
2562- protocol = 'ws'
2563-
2564- context = {
2565- 'raw_protocol': protocol,
2566- 'address': unit_get('public-address'),
2567- 'console_enabled': json.dumps(console_enabled),
2568- 'login_help': json.dumps(login_help),
2569- 'password': json.dumps(password),
2570- 'api_backend': json.dumps(api_backend),
2571- 'readonly': json.dumps(readonly),
2572- 'user': json.dumps(user),
2573- 'protocol': json.dumps(protocol),
2574- 'sandbox': json.dumps(sandbox),
2575- 'charmworld_url': json.dumps(charmworld_url),
2576- }
2577- if config_js_path is None:
2578- config_js_path = os.path.join(
2579- build_dir, 'juju-ui', 'assets', 'config.js')
2580- render_to_file('config/config.js.template', context, config_js_path)
2581-
2582- write_apache_config(build_dir, serve_tests)
2583-
2584- log('Generating haproxy configuration file.')
2585- if is_legacy_juju:
2586- # The PyJuju API agent is listening on localhost.
2587- api_address = '127.0.0.1:{0}'.format(API_PORT)
2588- else:
2589- # Retrieve the juju-core API server address.
2590- api_address = get_api_address(os.path.join(CURRENT_DIR, '..'))
2591- context = {
2592- 'api_address': api_address,
2593- 'api_pem': JUJU_PEM,
2594- 'legacy_juju': is_legacy_juju,
2595- 'ssl_cert_path': ssl_cert_path,
2596- # In PyJuju environments, use the same certificate for both HTTPS and
2597- # WebSocket connections. In juju-core the system already has the proper
2598- # certificate installed.
2599- 'web_pem': JUJU_PEM,
2600- 'web_port': WEB_PORT,
2601- 'secure': secure
2602- }
2603- render_to_file('config/haproxy.cfg.template', context, haproxy_path)
2604- log('Starting Juju GUI.')
2605-
2606-
2607-def write_apache_config(build_dir, serve_tests=False):
2608- log('Generating the apache site configuration file.')
2609- context = {
2610- 'port': WEB_PORT,
2611- 'serve_tests': serve_tests,
2612- 'server_root': build_dir,
2613- 'tests_root': os.path.join(JUJU_GUI_DIR, 'test', ''),
2614- }
2615- render_to_file('config/apache-ports.template', context, JUJU_GUI_PORTS)
2616- render_to_file('config/apache-site.template', context, JUJU_GUI_SITE)
2617-
2618-
2619-def get_npm_cache_archive_url(Launchpad=Launchpad):
2620- """Figure out the URL of the most recent NPM cache archive on Launchpad."""
2621- launchpad = Launchpad.login_anonymously('Juju GUI charm', 'production')
2622- project = launchpad.projects['juju-gui']
2623- # Find the URL of the most recently created NPM cache archive.
2624- npm_cache_url = get_release_file_url(project, 'npm-cache', None)
2625- return npm_cache_url
2626-
2627-
2628-def prime_npm_cache(npm_cache_url):
2629- """Download NPM cache archive and prime the NPM cache with it."""
2630- # Download the cache archive and then uncompress it into the NPM cache.
2631- npm_cache_archive = os.path.join(CURRENT_DIR, 'npm-cache.tgz')
2632- cmd_log(run('curl', '-L', '-o', npm_cache_archive, npm_cache_url))
2633- npm_cache_dir = os.path.expanduser('~/.npm')
2634- # The NPM cache directory probably does not exist, so make it if not.
2635- try:
2636- os.mkdir(npm_cache_dir)
2637- except OSError, e:
2638- # If the directory already exists then ignore the error.
2639- if e.errno != errno.EEXIST: # File exists.
2640- raise
2641- uncompress = command('tar', '-x', '-z', '-C', npm_cache_dir, '-f')
2642- cmd_log(uncompress(npm_cache_archive))
2643-
2644-
2645-def fetch_gui(juju_gui_source, logpath):
2646- """Retrieve the Juju GUI release/branch."""
2647- # Retrieve a Juju GUI release.
2648- origin, version_or_branch = parse_source(juju_gui_source)
2649- if origin == 'branch':
2650- # Make sure we have the dependencies necessary for us to actually make
2651- # a build.
2652- _get_build_dependencies()
2653- # Create a release starting from a branch.
2654- juju_gui_source_dir = os.path.join(CURRENT_DIR, 'juju-gui-source')
2655- log('Retrieving Juju GUI source checkout from %s.' % version_or_branch)
2656- cmd_log(run('rm', '-rf', juju_gui_source_dir))
2657- cmd_log(bzr_checkout(version_or_branch, juju_gui_source_dir))
2658- log('Preparing a Juju GUI release.')
2659- logdir = os.path.dirname(logpath)
2660- fd, name = tempfile.mkstemp(prefix='make-distfile-', dir=logdir)
2661- log('Output from "make distfile" sent to %s' % name)
2662- with environ(NO_BZR='1'):
2663- run('make', '-C', juju_gui_source_dir, 'distfile',
2664- stdout=fd, stderr=fd)
2665- release_tarball = first_path_in_dir(
2666- os.path.join(juju_gui_source_dir, 'releases'))
2667- else:
2668- log('Retrieving Juju GUI release.')
2669- if origin == 'url':
2670- file_url = version_or_branch
2671- else:
2672- # Retrieve a release from Launchpad.
2673- launchpad = Launchpad.login_anonymously(
2674- 'Juju GUI charm', 'production')
2675- project = launchpad.projects['juju-gui']
2676- file_url = get_release_file_url(project, origin, version_or_branch)
2677- log('Downloading release file from %s.' % file_url)
2678- release_tarball = os.path.join(CURRENT_DIR, 'release.tgz')
2679- cmd_log(run('curl', '-L', '-o', release_tarball, file_url))
2680- return release_tarball
2681-
2682-
2683-def fetch_api(juju_api_branch):
2684- """Retrieve the Juju branch."""
2685- # Retrieve Juju API source checkout.
2686- log('Retrieving Juju API source checkout.')
2687- cmd_log(run('rm', '-rf', JUJU_DIR))
2688- cmd_log(bzr_checkout(juju_api_branch, JUJU_DIR))
2689-
2690-
2691-def setup_gui(release_tarball):
2692- """Set up Juju GUI."""
2693- # Uncompress the release tarball.
2694- log('Installing Juju GUI.')
2695- release_dir = os.path.join(CURRENT_DIR, 'release')
2696- cmd_log(run('rm', '-rf', release_dir))
2697- os.mkdir(release_dir)
2698- uncompress = command('tar', '-x', '-z', '-C', release_dir, '-f')
2699- cmd_log(uncompress(release_tarball))
2700- # Link the Juju GUI dir to the contents of the release tarball.
2701- cmd_log(run('ln', '-sf', first_path_in_dir(release_dir), JUJU_GUI_DIR))
2702-
2703-
2704-def setup_apache():
2705- """Set up apache."""
2706- log('Setting up apache.')
2707- if not os.path.exists(JUJU_GUI_SITE):
2708- cmd_log(run('touch', JUJU_GUI_SITE))
2709- cmd_log(run('chown', 'ubuntu:', JUJU_GUI_SITE))
2710- cmd_log(
2711- run('ln', '-s', JUJU_GUI_SITE,
2712- '/etc/apache2/sites-enabled/juju-gui'))
2713-
2714- if not os.path.exists(JUJU_GUI_PORTS):
2715- cmd_log(run('touch', JUJU_GUI_PORTS))
2716- cmd_log(run('chown', 'ubuntu:', JUJU_GUI_PORTS))
2717-
2718- with su('root'):
2719- run('a2dissite', 'default')
2720- run('a2ensite', 'juju-gui')
2721-
2722-
2723-def save_or_create_certificates(
2724- ssl_cert_path, ssl_cert_contents, ssl_key_contents):
2725- """Generate the SSL certificates.
2726-
2727- If both *ssl_cert_contents* and *ssl_key_contents* are provided, use them
2728- as certificates; otherwise, generate them.
2729-
2730- Also create a pem file, suitable for use in the haproxy configuration,
2731- concatenating the key and the certificate files.
2732- """
2733- crt_path = os.path.join(ssl_cert_path, 'juju.crt')
2734- key_path = os.path.join(ssl_cert_path, 'juju.key')
2735- if not os.path.exists(ssl_cert_path):
2736- os.makedirs(ssl_cert_path)
2737- if ssl_cert_contents and ssl_key_contents:
2738- # Save the provided certificates.
2739- with open(crt_path, 'w') as cert_file:
2740- cert_file.write(ssl_cert_contents)
2741- with open(key_path, 'w') as key_file:
2742- key_file.write(ssl_key_contents)
2743- else:
2744- # Generate certificates.
2745- # See http://superuser.com/questions/226192/openssl-without-prompt
2746- cmd_log(run(
2747- 'openssl', 'req', '-new', '-newkey', 'rsa:4096',
2748- '-days', '365', '-nodes', '-x509', '-subj',
2749- # These are arbitrary test values for the certificate.
2750- '/C=GB/ST=Juju/L=GUI/O=Ubuntu/CN=juju.ubuntu.com',
2751- '-keyout', key_path, '-out', crt_path))
2752- # Generate the pem file.
2753- pem_path = os.path.join(ssl_cert_path, JUJU_PEM)
2754- if os.path.exists(pem_path):
2755- os.remove(pem_path)
2756- with open(pem_path, 'w') as pem_file:
2757- shutil.copyfileobj(open(key_path), pem_file)
2758- shutil.copyfileobj(open(crt_path), pem_file)
2759-
2760-
2761-def find_missing_packages(*packages):
2762- """Given a list of packages, return the packages which are not installed.
2763- """
2764- cache = apt.Cache()
2765- missing = set()
2766- for pkg_name in packages:
2767- try:
2768- pkg = cache[pkg_name]
2769- except KeyError:
2770- missing.add(pkg_name)
2771- continue
2772- if pkg.is_installed:
2773- continue
2774- missing.add(pkg_name)
2775- return missing
2776-
2777-
2778-## Backend support decorators
2779-
2780-def chain(name):
2781- """Helper method to compose a set of mixin objects into a callable.
2782-
2783- Each method is called in the context of its mixin instance, and its
2784- argument is the Backend instance.
2785- """
2786- # Chain method calls through all implementing mixins.
2787- def method(self):
2788- for mixin in self.mixins:
2789- a_callable = getattr(type(mixin), name, None)
2790- if a_callable:
2791- a_callable(mixin, self)
2792-
2793- method.__name__ = name
2794- return method
2795-
2796-
2797-def merge(name):
2798- """Helper to merge a property from a set of strategy objects
2799- into a unified set.
2800- """
2801- # Return merged property from every providing mixin as a set.
2802- @property
2803- def method(self):
2804- result = set()
2805- for mixin in self.mixins:
2806- segment = getattr(type(mixin), name, None)
2807- if segment and isinstance(segment, (list, tuple, set)):
2808- result |= set(segment)
2809-
2810- return result
2811- return method
2812
2813=== removed directory 'hooks/install.d/charmhelpers/contrib/network'
2814=== removed file 'hooks/install.d/charmhelpers/contrib/network/__init__.py'
2815=== removed directory 'hooks/install.d/charmhelpers/contrib/network/ovs'
2816=== removed file 'hooks/install.d/charmhelpers/contrib/network/ovs/__init__.py'
2817--- hooks/install.d/charmhelpers/contrib/network/ovs/__init__.py 2015-08-19 19:17:42 +0000
2818+++ hooks/install.d/charmhelpers/contrib/network/ovs/__init__.py 1970-01-01 00:00:00 +0000
2819@@ -1,72 +0,0 @@
2820-''' Helpers for interacting with OpenvSwitch '''
2821-import subprocess
2822-import os
2823-from charmhelpers.core.hookenv import (
2824- log, WARNING
2825-)
2826-from charmhelpers.core.host import (
2827- service
2828-)
2829-
2830-
2831-def add_bridge(name):
2832- ''' Add the named bridge to openvswitch '''
2833- log('Creating bridge {}'.format(name))
2834- subprocess.check_call(["ovs-vsctl", "--", "--may-exist", "add-br", name])
2835-
2836-
2837-def del_bridge(name):
2838- ''' Delete the named bridge from openvswitch '''
2839- log('Deleting bridge {}'.format(name))
2840- subprocess.check_call(["ovs-vsctl", "--", "--if-exists", "del-br", name])
2841-
2842-
2843-def add_bridge_port(name, port):
2844- ''' Add a port to the named openvswitch bridge '''
2845- log('Adding port {} to bridge {}'.format(port, name))
2846- subprocess.check_call(["ovs-vsctl", "--", "--may-exist", "add-port",
2847- name, port])
2848- subprocess.check_call(["ip", "link", "set", port, "up"])
2849-
2850-
2851-def del_bridge_port(name, port):
2852- ''' Delete a port from the named openvswitch bridge '''
2853- log('Deleting port {} from bridge {}'.format(port, name))
2854- subprocess.check_call(["ovs-vsctl", "--", "--if-exists", "del-port",
2855- name, port])
2856- subprocess.check_call(["ip", "link", "set", port, "down"])
2857-
2858-
2859-def set_manager(manager):
2860- ''' Set the controller for the local openvswitch '''
2861- log('Setting manager for local ovs to {}'.format(manager))
2862- subprocess.check_call(['ovs-vsctl', 'set-manager',
2863- 'ssl:{}'.format(manager)])
2864-
2865-
2866-CERT_PATH = '/etc/openvswitch/ovsclient-cert.pem'
2867-
2868-
2869-def get_certificate():
2870- ''' Read openvswitch certificate from disk '''
2871- if os.path.exists(CERT_PATH):
2872- log('Reading ovs certificate from {}'.format(CERT_PATH))
2873- with open(CERT_PATH, 'r') as cert:
2874- full_cert = cert.read()
2875- begin_marker = "-----BEGIN CERTIFICATE-----"
2876- end_marker = "-----END CERTIFICATE-----"
2877- begin_index = full_cert.find(begin_marker)
2878- end_index = full_cert.rfind(end_marker)
2879- if end_index == -1 or begin_index == -1:
2880- raise RuntimeError("Certificate does not contain valid begin"
2881- " and end markers.")
2882- full_cert = full_cert[begin_index:(end_index + len(end_marker))]
2883- return full_cert
2884- else:
2885- log('Certificate not found', level=WARNING)
2886- return None
2887-
2888-
2889-def full_restart():
2890- ''' Full restart and reload of openvswitch '''
2891- service('force-reload-kmod', 'openvswitch-switch')
2892
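For reference, a sketch of the Open vSwitch helpers removed above; the bridge and interface names are assumptions.

# Illustrative sketch only; bridge and interface names are assumptions.
from charmhelpers.contrib.network.ovs import add_bridge, add_bridge_port

add_bridge('br-int')
add_bridge_port('br-int', 'eth1')  # also brings the link up via `ip link set`
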
2893=== removed directory 'hooks/install.d/charmhelpers/contrib/openstack'
2894=== removed file 'hooks/install.d/charmhelpers/contrib/openstack/__init__.py'
2895=== removed file 'hooks/install.d/charmhelpers/contrib/openstack/context.py'
2896--- hooks/install.d/charmhelpers/contrib/openstack/context.py 2015-08-19 19:17:42 +0000
2897+++ hooks/install.d/charmhelpers/contrib/openstack/context.py 1970-01-01 00:00:00 +0000
2898@@ -1,294 +0,0 @@
2899-import os
2900-
2901-from base64 import b64decode
2902-
2903-from subprocess import (
2904- check_call
2905-)
2906-
2907-from charmhelpers.core.hookenv import (
2908- config,
2909- local_unit,
2910- log,
2911- relation_get,
2912- relation_ids,
2913- related_units,
2914- unit_get,
2915-)
2916-
2917-from charmhelpers.contrib.hahelpers.cluster import (
2918- determine_api_port,
2919- determine_haproxy_port,
2920- https,
2921- is_clustered,
2922- peer_units,
2923-)
2924-
2925-from charmhelpers.contrib.hahelpers.apache import (
2926- get_cert,
2927- get_ca_cert,
2928-)
2929-
2930-CA_CERT_PATH = '/usr/local/share/ca-certificates/keystone_juju_ca_cert.crt'
2931-
2932-
2933-class OSContextError(Exception):
2934- pass
2935-
2936-
2937-def context_complete(ctxt):
2938- _missing = []
2939- for k, v in ctxt.iteritems():
2940- if v is None or v == '':
2941- _missing.append(k)
2942- if _missing:
2943- log('Missing required data: %s' % ' '.join(_missing), level='INFO')
2944- return False
2945- return True
2946-
2947-
2948-class OSContextGenerator(object):
2949- interfaces = []
2950-
2951- def __call__(self):
2952- raise NotImplementedError
2953-
2954-
2955-class SharedDBContext(OSContextGenerator):
2956- interfaces = ['shared-db']
2957-
2958- def __call__(self):
2959- log('Generating template context for shared-db')
2960- conf = config()
2961- try:
2962- database = conf['database']
2963- username = conf['database-user']
2964- except KeyError as e:
2965- log('Could not generate shared_db context. '
2966- 'Missing required charm config options: %s.' % e)
2967- raise OSContextError
2968- ctxt = {}
2969- for rid in relation_ids('shared-db'):
2970- for unit in related_units(rid):
2971- ctxt = {
2972- 'database_host': relation_get('db_host', rid=rid,
2973- unit=unit),
2974- 'database': database,
2975- 'database_user': username,
2976- 'database_password': relation_get('password', rid=rid,
2977- unit=unit)
2978- }
2979- if not context_complete(ctxt):
2980- return {}
2981- return ctxt
2982-
2983-
2984-class IdentityServiceContext(OSContextGenerator):
2985- interfaces = ['identity-service']
2986-
2987- def __call__(self):
2988- log('Generating template context for identity-service')
2989- ctxt = {}
2990-
2991- for rid in relation_ids('identity-service'):
2992- for unit in related_units(rid):
2993- ctxt = {
2994- 'service_port': relation_get('service_port', rid=rid,
2995- unit=unit),
2996- 'service_host': relation_get('service_host', rid=rid,
2997- unit=unit),
2998- 'auth_host': relation_get('auth_host', rid=rid, unit=unit),
2999- 'auth_port': relation_get('auth_port', rid=rid, unit=unit),
3000- 'admin_tenant_name': relation_get('service_tenant',
3001- rid=rid, unit=unit),
3002- 'admin_user': relation_get('service_username', rid=rid,
3003- unit=unit),
3004- 'admin_password': relation_get('service_password', rid=rid,
3005- unit=unit),
3006- # XXX: Hard-coded http.
3007- 'service_protocol': 'http',
3008- 'auth_protocol': 'http',
3009- }
3010- if not context_complete(ctxt):
3011- return {}
3012- return ctxt
3013-
3014-
3015-class AMQPContext(OSContextGenerator):
3016- interfaces = ['amqp']
3017-
3018- def __call__(self):
3019- log('Generating template context for amqp')
3020- conf = config()
3021- try:
3022- username = conf['rabbit-user']
3023- vhost = conf['rabbit-vhost']
3024- except KeyError as e:
3025- log('Could not generate shared_db context. '
3026- 'Missing required charm config options: %s.' % e)
3027- raise OSContextError
3028-
3029- ctxt = {}
3030- for rid in relation_ids('amqp'):
3031- for unit in related_units(rid):
3032- if relation_get('clustered', rid=rid, unit=unit):
3033- rabbitmq_host = relation_get('vip', rid=rid, unit=unit)
3034- else:
3035- rabbitmq_host = relation_get('private-address',
3036- rid=rid, unit=unit)
3037- ctxt = {
3038- 'rabbitmq_host': rabbitmq_host,
3039- 'rabbitmq_user': username,
3040- 'rabbitmq_password': relation_get('password', rid=rid,
3041- unit=unit),
3042- 'rabbitmq_virtual_host': vhost,
3043- }
3044- if not context_complete(ctxt):
3045- return {}
3046- return ctxt
3047-
3048-
3049-class CephContext(OSContextGenerator):
3050- interfaces = ['ceph']
3051-
3052- def __call__(self):
3053- '''This generates context for /etc/ceph/ceph.conf templates'''
3054-        log('Generating template context for ceph')
3055- mon_hosts = []
3056- auth = None
3057- for rid in relation_ids('ceph'):
3058- for unit in related_units(rid):
3059- mon_hosts.append(relation_get('private-address', rid=rid,
3060- unit=unit))
3061- auth = relation_get('auth', rid=rid, unit=unit)
3062-
3063- ctxt = {
3064- 'mon_hosts': ' '.join(mon_hosts),
3065- 'auth': auth,
3066- }
3067- if not context_complete(ctxt):
3068- return {}
3069- return ctxt
3070-
3071-
3072-class HAProxyContext(OSContextGenerator):
3073- interfaces = ['cluster']
3074-
3075- def __call__(self):
3076- '''
3077- Builds half a context for the haproxy template, which describes
3078- all peers to be included in the cluster. Each charm needs to include
3079- its own context generator that describes the port mapping.
3080- '''
3081- if not relation_ids('cluster'):
3082- return {}
3083-
3084- cluster_hosts = {}
3085- l_unit = local_unit().replace('/', '-')
3086- cluster_hosts[l_unit] = unit_get('private-address')
3087-
3088- for rid in relation_ids('cluster'):
3089- for unit in related_units(rid):
3090- _unit = unit.replace('/', '-')
3091- addr = relation_get('private-address', rid=rid, unit=unit)
3092- cluster_hosts[_unit] = addr
3093-
3094- ctxt = {
3095- 'units': cluster_hosts,
3096- }
3097- if len(cluster_hosts.keys()) > 1:
3098- # Enable haproxy when we have enough peers.
3099- log('Ensuring haproxy enabled in /etc/default/haproxy.')
3100- with open('/etc/default/haproxy', 'w') as out:
3101- out.write('ENABLED=1\n')
3102- return ctxt
3103- log('HAProxy context is incomplete, this unit has no peers.')
3104- return {}
3105-
3106-
3107-class ImageServiceContext(OSContextGenerator):
3108-    interfaces = ['image-service']
3109-
3110- def __call__(self):
3111- '''
3112- Obtains the glance API server from the image-service relation. Useful
3113- in nova and cinder (currently).
3114- '''
3115- log('Generating template context for image-service.')
3116- rids = relation_ids('image-service')
3117- if not rids:
3118- return {}
3119- for rid in rids:
3120- for unit in related_units(rid):
3121- api_server = relation_get('glance-api-server',
3122- rid=rid, unit=unit)
3123- if api_server:
3124- return {'glance_api_servers': api_server}
3125- log('ImageService context is incomplete. '
3126- 'Missing required relation data.')
3127- return {}
3128-
3129-
3130-class ApacheSSLContext(OSContextGenerator):
3131- """
3132- Generates a context for an apache vhost configuration that configures
3133- HTTPS reverse proxying for one or many endpoints. Generated context
3134- looks something like:
3135- {
3136- 'namespace': 'cinder',
3137- 'private_address': 'iscsi.mycinderhost.com',
3138- 'endpoints': [(8776, 8766), (8777, 8767)]
3139- }
3140-
3141-    The endpoints list consists of tuples mapping external ports
3142- to internal ports.
3143- """
3144- interfaces = ['https']
3145-
3146- # charms should inherit this context and set external ports
3147- # and service namespace accordingly.
3148- external_ports = []
3149- service_namespace = None
3150-
3151- def enable_modules(self):
3152- cmd = ['a2enmod', 'ssl', 'proxy', 'proxy_http']
3153- check_call(cmd)
3154-
3155- def configure_cert(self):
3156- if not os.path.isdir('/etc/apache2/ssl'):
3157- os.mkdir('/etc/apache2/ssl')
3158- ssl_dir = os.path.join('/etc/apache2/ssl/', self.service_namespace)
3159- if not os.path.isdir(ssl_dir):
3160- os.mkdir(ssl_dir)
3161- cert, key = get_cert()
3162- with open(os.path.join(ssl_dir, 'cert'), 'w') as cert_out:
3163- cert_out.write(b64decode(cert))
3164- with open(os.path.join(ssl_dir, 'key'), 'w') as key_out:
3165- key_out.write(b64decode(key))
3166- ca_cert = get_ca_cert()
3167- if ca_cert:
3168- with open(CA_CERT_PATH, 'w') as ca_out:
3169- ca_out.write(b64decode(ca_cert))
3170-
3171- def __call__(self):
3172- if isinstance(self.external_ports, basestring):
3173- self.external_ports = [self.external_ports]
3174- if (not self.external_ports or not https()):
3175- return {}
3176-
3177- self.configure_cert()
3178- self.enable_modules()
3179-
3180- ctxt = {
3181- 'namespace': self.service_namespace,
3182- 'private_address': unit_get('private-address'),
3183- 'endpoints': []
3184- }
3185- for ext_port in self.external_ports:
3186- if peer_units() or is_clustered():
3187- int_port = determine_haproxy_port(ext_port)
3188- else:
3189- int_port = determine_api_port(ext_port)
3190- portmap = (int(ext_port), int(int_port))
3191- ctxt['endpoints'].append(portmap)
3192- return ctxt
3193
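For reference, a sketch of the context-generator pattern implemented by the module removed above; the relation name and data keys are assumptions.

# Illustrative sketch only; relation name and keys are assumptions.
from charmhelpers.contrib.openstack.context import (
    OSContextGenerator,
    context_complete,
)
from charmhelpers.core.hookenv import related_units, relation_get, relation_ids


class ExampleBackendContext(OSContextGenerator):
    interfaces = ['example-backend']

    def __call__(self):
        for rid in relation_ids('example-backend'):
            for unit in related_units(rid):
                ctxt = {
                    'backend_host': relation_get('hostname', rid=rid, unit=unit),
                    'backend_port': relation_get('port', rid=rid, unit=unit),
                }
                if context_complete(ctxt):
                    return ctxt
        return {}
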
3194=== removed directory 'hooks/install.d/charmhelpers/contrib/openstack/templates'
3195=== removed file 'hooks/install.d/charmhelpers/contrib/openstack/templates/__init__.py'
3196--- hooks/install.d/charmhelpers/contrib/openstack/templates/__init__.py 2015-08-19 19:17:42 +0000
3197+++ hooks/install.d/charmhelpers/contrib/openstack/templates/__init__.py 1970-01-01 00:00:00 +0000
3198@@ -1,2 +0,0 @@
3199-# dummy __init__.py to fool syncer into thinking this is a syncable python
3200-# module
3201
3202=== removed file 'hooks/install.d/charmhelpers/contrib/openstack/templates/ceph.conf'
3203--- hooks/install.d/charmhelpers/contrib/openstack/templates/ceph.conf 2015-08-19 19:17:42 +0000
3204+++ hooks/install.d/charmhelpers/contrib/openstack/templates/ceph.conf 1970-01-01 00:00:00 +0000
3205@@ -1,11 +0,0 @@
3206-###############################################################################
3207-# [ WARNING ]
3208-# cinder configuration file maintained by Juju
3209-# local changes may be overwritten.
3210-###############################################################################
3211-{% if auth -%}
3212-[global]
3213- auth_supported = {{ auth }}
3214- keyring = /etc/ceph/$cluster.$name.keyring
3215- mon host = {{ mon_hosts }}
3216-{% endif -%}
3217
3218=== removed file 'hooks/install.d/charmhelpers/contrib/openstack/templates/haproxy.cfg'
3219--- hooks/install.d/charmhelpers/contrib/openstack/templates/haproxy.cfg 2015-08-19 19:17:42 +0000
3220+++ hooks/install.d/charmhelpers/contrib/openstack/templates/haproxy.cfg 1970-01-01 00:00:00 +0000
3221@@ -1,37 +0,0 @@
3222-global
3223- log 127.0.0.1 local0
3224- log 127.0.0.1 local1 notice
3225- maxconn 20000
3226- user haproxy
3227- group haproxy
3228- spread-checks 0
3229-
3230-defaults
3231- log global
3232- mode http
3233- option httplog
3234- option dontlognull
3235- retries 3
3236- timeout queue 1000
3237- timeout connect 1000
3238- timeout client 30000
3239- timeout server 30000
3240-
3241-listen stats :8888
3242- mode http
3243- stats enable
3244- stats hide-version
3245- stats realm Haproxy\ Statistics
3246- stats uri /
3247- stats auth admin:password
3248-
3249-{% if units -%}
3250-{% for service, ports in service_ports.iteritems() -%}
3251-listen {{ service }} 0.0.0.0:{{ ports[0] }}
3252- balance roundrobin
3253- option tcplog
3254- {% for unit, address in units.iteritems() -%}
3255- server {{ unit }} {{ address }}:{{ ports[1] }} check
3256- {% endfor %}
3257-{% endfor -%}
3258-{% endif -%}
3259
3260=== removed file 'hooks/install.d/charmhelpers/contrib/openstack/templates/openstack_https_frontend'
3261--- hooks/install.d/charmhelpers/contrib/openstack/templates/openstack_https_frontend 2015-08-19 19:17:42 +0000
3262+++ hooks/install.d/charmhelpers/contrib/openstack/templates/openstack_https_frontend 1970-01-01 00:00:00 +0000
3263@@ -1,23 +0,0 @@
3264-{% if endpoints -%}
3265-{% for ext, int in endpoints -%}
3266-Listen {{ ext }}
3267-NameVirtualHost *:{{ ext }}
3268-<VirtualHost *:{{ ext }}>
3269- ServerName {{ private_address }}
3270- SSLEngine on
3271- SSLCertificateFile /etc/apache2/ssl/{{ namespace }}/cert
3272- SSLCertificateKeyFile /etc/apache2/ssl/{{ namespace }}/key
3273- ProxyPass / http://localhost:{{ int }}/
3274- ProxyPassReverse / http://localhost:{{ int }}/
3275- ProxyPreserveHost on
3276-</VirtualHost>
3277-<Proxy *>
3278- Order deny,allow
3279- Allow from all
3280-</Proxy>
3281-<Location />
3282- Order allow,deny
3283- Allow from all
3284-</Location>
3285-{% endfor -%}
3286-{% endif -%}
3287
3288=== removed file 'hooks/install.d/charmhelpers/contrib/openstack/templating.py'
3289--- hooks/install.d/charmhelpers/contrib/openstack/templating.py 2015-08-19 19:17:42 +0000
3290+++ hooks/install.d/charmhelpers/contrib/openstack/templating.py 1970-01-01 00:00:00 +0000
3291@@ -1,261 +0,0 @@
3292-import os
3293-
3294-from charmhelpers.fetch import apt_install
3295-
3296-from charmhelpers.core.hookenv import (
3297- log,
3298- ERROR,
3299- INFO
3300-)
3301-
3302-from charmhelpers.contrib.openstack.utils import OPENSTACK_CODENAMES
3303-
3304-try:
3305- from jinja2 import FileSystemLoader, ChoiceLoader, Environment
3306-except ImportError:
3307- # python-jinja2 may not be installed yet, or we're running unittests.
3308- FileSystemLoader = ChoiceLoader = Environment = None
3309-
3310-
3311-class OSConfigException(Exception):
3312- pass
3313-
3314-
3315-def get_loader(templates_dir, os_release):
3316- """
3317- Create a jinja2.ChoiceLoader containing template dirs up to
3318- and including os_release. If a release's template directory
3319- is missing at templates_dir, it will be omitted from the loader.
3320- templates_dir is added to the bottom of the search list as a base
3321- loading dir.
3322-
3323- A charm may also ship a templates dir with this module
3324- and it will be appended to the bottom of the search list, eg:
3325- hooks/charmhelpers/contrib/openstack/templates.
3326-
3327- :param templates_dir: str: Base template directory containing release
3328- sub-directories.
3329- :param os_release : str: OpenStack release codename to construct template
3330- loader.
3331-
3332- :returns : jinja2.ChoiceLoader constructed with a list of
3333- jinja2.FilesystemLoaders, ordered in descending
3334- order by OpenStack release.
3335- """
3336- tmpl_dirs = [(rel, os.path.join(templates_dir, rel))
3337- for rel in OPENSTACK_CODENAMES.itervalues()]
3338-
3339- if not os.path.isdir(templates_dir):
3340- log('Templates directory not found @ %s.' % templates_dir,
3341- level=ERROR)
3342- raise OSConfigException
3343-
3344- # the bottom contains templates_dir and possibly a common templates dir
3345- # shipped with the helper.
3346- loaders = [FileSystemLoader(templates_dir)]
3347- helper_templates = os.path.join(os.path.dirname(__file__), 'templates')
3348- if os.path.isdir(helper_templates):
3349- loaders.append(FileSystemLoader(helper_templates))
3350-
3351- for rel, tmpl_dir in tmpl_dirs:
3352- if os.path.isdir(tmpl_dir):
3353- loaders.insert(0, FileSystemLoader(tmpl_dir))
3354- if rel == os_release:
3355- break
3356- log('Creating choice loader with dirs: %s' %
3357- [l.searchpath for l in loaders], level=INFO)
3358- return ChoiceLoader(loaders)
3359-
3360-
3361-class OSConfigTemplate(object):
3362- """
3363- Associates a config file template with a list of context generators.
3364- Responsible for constructing a template context based on those generators.
3365- """
3366- def __init__(self, config_file, contexts):
3367- self.config_file = config_file
3368-
3369- if hasattr(contexts, '__call__'):
3370- self.contexts = [contexts]
3371- else:
3372- self.contexts = contexts
3373-
3374- self._complete_contexts = []
3375-
3376- def context(self):
3377- ctxt = {}
3378- for context in self.contexts:
3379- _ctxt = context()
3380- if _ctxt:
3381- ctxt.update(_ctxt)
3382- # track interfaces for every complete context.
3383- [self._complete_contexts.append(interface)
3384- for interface in context.interfaces
3385- if interface not in self._complete_contexts]
3386- return ctxt
3387-
3388- def complete_contexts(self):
3389- '''
3390- Return a list of interfaces that have satisfied contexts.
3391- '''
3392- if self._complete_contexts:
3393- return self._complete_contexts
3394- self.context()
3395- return self._complete_contexts
3396-
3397-
3398-class OSConfigRenderer(object):
3399- """
3400- This class provides a common templating system to be used by OpenStack
3401- charms. It is intended to help charms share common code and templates,
3402- and ease the burden of managing config templates across multiple OpenStack
3403- releases.
3404-
3405- Basic usage:
3406- # import some common context generators from charmhelpers
3407- from charmhelpers.contrib.openstack import context
3408-
3409- # Create a renderer object for a specific OS release.
3410- configs = OSConfigRenderer(templates_dir='/tmp/templates',
3411- openstack_release='folsom')
3412- # register some config files with context generators.
3413- configs.register(config_file='/etc/nova/nova.conf',
3414- contexts=[context.SharedDBContext(),
3415- context.AMQPContext()])
3416- configs.register(config_file='/etc/nova/api-paste.ini',
3417- contexts=[context.IdentityServiceContext()])
3418- configs.register(config_file='/etc/haproxy/haproxy.conf',
3419- contexts=[context.HAProxyContext()])
3420- # write out a single config
3421- configs.write('/etc/nova/nova.conf')
3422- # write out all registered configs
3423- configs.write_all()
3424-
3425- Details:
3426-
3427- OpenStack Releases and template loading
3428- ---------------------------------------
3429- When the object is instantiated, it is associated with a specific OS
3430- release. This dictates how the template loader will be constructed.
3431-
3432- The constructed loader attempts to load the template from several places
3433- in the following order:
3434- - from the most recent OS release-specific template dir (if one exists)
3435- - the base templates_dir
3436- - a template directory shipped in the charm with this helper file.
3437-
3438-
3439- For the example above, '/tmp/templates' contains the following structure:
3440- /tmp/templates/nova.conf
3441- /tmp/templates/api-paste.ini
3442- /tmp/templates/grizzly/api-paste.ini
3443- /tmp/templates/havana/api-paste.ini
3444-
3445- Since it was registered with the grizzly release, it first searches
3446- the grizzly directory for nova.conf, then the templates dir.
3447-
3448- When writing api-paste.ini, it will find the template in the grizzly
3449- directory.
3450-
3451- If the object were created with folsom, it would fall back to the
3452- base templates dir for its api-paste.ini template.
3453-
3454- This system should help manage changes in config files through
3455- openstack releases, allowing charms to fall back to the most recently
3456- updated config template for a given release.
3457-
3458- The haproxy.conf, since it is not shipped in the templates dir, will
3459- be loaded from the module directory's template directory, eg
3460- $CHARM/hooks/charmhelpers/contrib/openstack/templates. This allows
3461- us to ship common templates (haproxy, apache) with the helpers.
3462-
3463- Context generators
3464- ---------------------------------------
3465- Context generators are used to generate template contexts during hook
3466- execution. Doing so may require inspecting service relations, charm
3467- config, etc. When registered, a config file is associated with a list
3468- of generators. When a template is rendered and written, all context
3469- generators are called in a chain to generate the context dictionary
3470- passed to the jinja2 template. See context.py for more info.
3471- """
3472- def __init__(self, templates_dir, openstack_release):
3473- if not os.path.isdir(templates_dir):
3474- log('Could not locate templates dir %s' % templates_dir,
3475- level=ERROR)
3476- raise OSConfigException
3477-
3478- self.templates_dir = templates_dir
3479- self.openstack_release = openstack_release
3480- self.templates = {}
3481- self._tmpl_env = None
3482-
3483- if None in [Environment, ChoiceLoader, FileSystemLoader]:
3484- # if this code is running, the object is created pre-install hook.
3485- # jinja2 shouldn't get touched until the module is reloaded on next
3486- # hook execution, with proper jinja2 bits successfully imported.
3487- apt_install('python-jinja2')
3488-
3489- def register(self, config_file, contexts):
3490- """
3491- Register a config file with a list of context generators to be called
3492- during rendering.
3493- """
3494- self.templates[config_file] = OSConfigTemplate(config_file=config_file,
3495- contexts=contexts)
3496- log('Registered config file: %s' % config_file, level=INFO)
3497-
3498- def _get_tmpl_env(self):
3499- if not self._tmpl_env:
3500- loader = get_loader(self.templates_dir, self.openstack_release)
3501- self._tmpl_env = Environment(loader=loader)
3502-
3503- def _get_template(self, template):
3504- self._get_tmpl_env()
3505- template = self._tmpl_env.get_template(template)
3506- log('Loaded template from %s' % template.filename, level=INFO)
3507- return template
3508-
3509- def render(self, config_file):
3510- if config_file not in self.templates:
3511- log('Config not registered: %s' % config_file, level=ERROR)
3512- raise OSConfigException
3513- ctxt = self.templates[config_file].context()
3514- _tmpl = os.path.basename(config_file)
3515- log('Rendering from template: %s' % _tmpl, level=INFO)
3516- template = self._get_template(_tmpl)
3517- return template.render(ctxt)
3518-
3519- def write(self, config_file):
3520- """
3521- Write a single config file, raises if config file is not registered.
3522- """
3523- if config_file not in self.templates:
3524- log('Config not registered: %s' % config_file, level=ERROR)
3525- raise OSConfigException
3526- with open(config_file, 'wb') as out:
3527- out.write(self.render(config_file))
3528- log('Wrote template %s.' % config_file, level=INFO)
3529-
3530- def write_all(self):
3531- """
3532- Write out all registered config files.
3533- """
3534- [self.write(k) for k in self.templates.iterkeys()]
3535-
3536- def set_release(self, openstack_release):
3537- """
3538- Resets the template environment and generates a new template loader
3539- based on the new openstack release.
3540- """
3541- self._tmpl_env = None
3542- self.openstack_release = openstack_release
3543- self._get_tmpl_env()
3544-
3545- def complete_contexts(self):
3546- '''
3547- Returns a list of context interfaces that yield a complete context.
3548- '''
3549- interfaces = []
3550- [interfaces.extend(i.complete_contexts())
3551- for i in self.templates.itervalues()]
3552- return interfaces
3553
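
For reference, a minimal sketch of how the removed OSConfigRenderer was driven (it mirrors the docstring above; paths and context classes are illustrative and assume the bundled charmhelpers copy were still importable):

    from charmhelpers.contrib.openstack import context
    from charmhelpers.contrib.openstack.templating import OSConfigRenderer

    configs = OSConfigRenderer(templates_dir='/tmp/templates',
                               openstack_release='grizzly')
    configs.register(config_file='/etc/nova/nova.conf',
                     contexts=[context.SharedDBContext(),
                               context.AMQPContext()])
    configs.write('/etc/nova/nova.conf')  # render and write one file
    configs.write_all()                   # or every registered config
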
3554=== removed file 'hooks/install.d/charmhelpers/contrib/openstack/utils.py'
3555--- hooks/install.d/charmhelpers/contrib/openstack/utils.py 2015-08-19 19:17:42 +0000
3556+++ hooks/install.d/charmhelpers/contrib/openstack/utils.py 1970-01-01 00:00:00 +0000
3557@@ -1,276 +0,0 @@
3558-#!/usr/bin/python
3559-
3560-# Common python helper functions used for OpenStack charms.
3561-
3562-from collections import OrderedDict
3563-
3564-import apt_pkg as apt
3565-import subprocess
3566-import os
3567-import sys
3568-
3569-from charmhelpers.core.hookenv import (
3570- config,
3571- log as juju_log,
3572- charm_dir,
3573-)
3574-
3575-from charmhelpers.core.host import (
3576- lsb_release,
3577-)
3578-
3579-from charmhelpers.fetch import (
3580- apt_install,
3581-)
3582-
3583-CLOUD_ARCHIVE_URL = "http://ubuntu-cloud.archive.canonical.com/ubuntu"
3584-CLOUD_ARCHIVE_KEY_ID = '5EDB1B62EC4926EA'
3585-
3586-UBUNTU_OPENSTACK_RELEASE = OrderedDict([
3587- ('oneiric', 'diablo'),
3588- ('precise', 'essex'),
3589- ('quantal', 'folsom'),
3590- ('raring', 'grizzly'),
3591- ('saucy', 'havana'),
3592-])
3593-
3594-
3595-OPENSTACK_CODENAMES = OrderedDict([
3596- ('2011.2', 'diablo'),
3597- ('2012.1', 'essex'),
3598- ('2012.2', 'folsom'),
3599- ('2013.1', 'grizzly'),
3600- ('2013.2', 'havana'),
3601- ('2014.1', 'icehouse'),
3602-])
3603-
3604-# The ugly duckling
3605-SWIFT_CODENAMES = {
3606- '1.4.3': 'diablo',
3607- '1.4.8': 'essex',
3608- '1.7.4': 'folsom',
3609- '1.7.6': 'grizzly',
3610- '1.7.7': 'grizzly',
3611- '1.8.0': 'grizzly',
3612- '1.9.0': 'havana',
3613- '1.9.1': 'havana',
3614-}
3615-
3616-
3617-def error_out(msg):
3618- juju_log("FATAL ERROR: %s" % msg, level='ERROR')
3619- sys.exit(1)
3620-
3621-
3622-def get_os_codename_install_source(src):
3623- '''Derive OpenStack release codename from a given installation source.'''
3624- ubuntu_rel = lsb_release()['DISTRIB_CODENAME']
3625- rel = ''
3626- if src == 'distro':
3627- try:
3628- rel = UBUNTU_OPENSTACK_RELEASE[ubuntu_rel]
3629- except KeyError:
3630- e = 'Could not derive openstack release for '\
3631- 'this Ubuntu release: %s' % ubuntu_rel
3632- error_out(e)
3633- return rel
3634-
3635- if src.startswith('cloud:'):
3636- ca_rel = src.split(':')[1]
3637- ca_rel = ca_rel.split('%s-' % ubuntu_rel)[1].split('/')[0]
3638- return ca_rel
3639-
3640- # Best guess match based on deb string provided
3641- if src.startswith('deb') or src.startswith('ppa'):
3642- for k, v in OPENSTACK_CODENAMES.iteritems():
3643- if v in src:
3644- return v
3645-
3646-
3647-def get_os_version_install_source(src):
3648- codename = get_os_codename_install_source(src)
3649- return get_os_version_codename(codename)
3650-
3651-
3652-def get_os_codename_version(vers):
3653- '''Determine OpenStack codename from version number.'''
3654- try:
3655- return OPENSTACK_CODENAMES[vers]
3656- except KeyError:
3657- e = 'Could not determine OpenStack codename for version %s' % vers
3658- error_out(e)
3659-
3660-
3661-def get_os_version_codename(codename):
3662- '''Determine OpenStack version number from codename.'''
3663- for k, v in OPENSTACK_CODENAMES.iteritems():
3664- if v == codename:
3665- return k
3666- e = 'Could not derive OpenStack version for '\
3667- 'codename: %s' % codename
3668- error_out(e)
3669-
3670-
3671-def get_os_codename_package(package, fatal=True):
3672- '''Derive OpenStack release codename from an installed package.'''
3673- apt.init()
3674- cache = apt.Cache()
3675-
3676- try:
3677- pkg = cache[package]
3678- except:
3679- if not fatal:
3680- return None
3681- # the package is unknown to the current apt cache.
3682- e = 'Could not determine version of package with no installation '\
3683- 'candidate: %s' % package
3684- error_out(e)
3685-
3686- if not pkg.current_ver:
3687- if not fatal:
3688- return None
3689- # package is known, but no version is currently installed.
3690- e = 'Could not determine version of uninstalled package: %s' % package
3691- error_out(e)
3692-
3693- vers = apt.upstream_version(pkg.current_ver.ver_str)
3694-
3695- try:
3696- if 'swift' in pkg.name:
3697- vers = vers[:5]
3698- return SWIFT_CODENAMES[vers]
3699- else:
3700- vers = vers[:6]
3701- return OPENSTACK_CODENAMES[vers]
3702- except KeyError:
3703- e = 'Could not determine OpenStack codename for version %s' % vers
3704- error_out(e)
3705-
3706-
3707-def get_os_version_package(pkg, fatal=True):
3708- '''Derive OpenStack version number from an installed package.'''
3709- codename = get_os_codename_package(pkg, fatal=fatal)
3710-
3711- if not codename:
3712- return None
3713-
3714- if 'swift' in pkg:
3715- vers_map = SWIFT_CODENAMES
3716- else:
3717- vers_map = OPENSTACK_CODENAMES
3718-
3719- for version, cname in vers_map.iteritems():
3720- if cname == codename:
3721- return version
3722- #e = "Could not determine OpenStack version for package: %s" % pkg
3723- #error_out(e)
3724-
3725-
3726-def import_key(keyid):
3727- cmd = "apt-key adv --keyserver keyserver.ubuntu.com " \
3728- "--recv-keys %s" % keyid
3729- try:
3730- subprocess.check_call(cmd.split(' '))
3731- except subprocess.CalledProcessError:
3732- error_out("Error importing repo key %s" % keyid)
3733-
3734-
3735-def configure_installation_source(rel):
3736- '''Configure apt installation source.'''
3737- if rel == 'distro':
3738- return
3739- elif rel[:4] == "ppa:":
3740- src = rel
3741- subprocess.check_call(["add-apt-repository", "-y", src])
3742- elif rel[:3] == "deb":
3743- l = len(rel.split('|'))
3744- if l == 2:
3745- src, key = rel.split('|')
3746- juju_log("Importing PPA key from keyserver for %s" % src)
3747- import_key(key)
3748- elif l == 1:
3749- src = rel
3750- with open('/etc/apt/sources.list.d/juju_deb.list', 'w') as f:
3751- f.write(src)
3752- elif rel[:6] == 'cloud:':
3753- ubuntu_rel = lsb_release()['DISTRIB_CODENAME']
3754- rel = rel.split(':')[1]
3755- u_rel = rel.split('-')[0]
3756- ca_rel = rel.split('-')[1]
3757-
3758- if u_rel != ubuntu_rel:
3759- e = 'Cannot install from Cloud Archive pocket %s on this Ubuntu '\
3760- 'version (%s)' % (ca_rel, ubuntu_rel)
3761- error_out(e)
3762-
3763- if 'staging' in ca_rel:
3764- # staging is just a regular PPA.
3765- os_rel = ca_rel.split('/')[0]
3766- ppa = 'ppa:ubuntu-cloud-archive/%s-staging' % os_rel
3767- cmd = 'add-apt-repository -y %s' % ppa
3768- subprocess.check_call(cmd.split(' '))
3769- return
3770-
3771- # map charm config options to actual archive pockets.
3772- pockets = {
3773- 'folsom': 'precise-updates/folsom',
3774- 'folsom/updates': 'precise-updates/folsom',
3775- 'folsom/proposed': 'precise-proposed/folsom',
3776- 'grizzly': 'precise-updates/grizzly',
3777- 'grizzly/updates': 'precise-updates/grizzly',
3778- 'grizzly/proposed': 'precise-proposed/grizzly',
3779- 'havana': 'precise-updates/havana',
3780- 'havana/updates': 'precise-updates/havana',
3781- 'havana/proposed': 'precise-proposed/havana',
3782- }
3783-
3784- try:
3785- pocket = pockets[ca_rel]
3786- except KeyError:
3787- e = 'Invalid Cloud Archive release specified: %s' % rel
3788- error_out(e)
3789-
3790- src = "deb %s %s main" % (CLOUD_ARCHIVE_URL, pocket)
3791- apt_install('ubuntu-cloud-keyring', fatal=True)
3792-
3793- with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as f:
3794- f.write(src)
3795- else:
3796- error_out("Invalid openstack-release specified: %s" % rel)
3797-
3798-
3799-def save_script_rc(script_path="scripts/scriptrc", **env_vars):
3800- """
3801- Write an rc file in the charm-delivered directory containing
3802- exported environment variables provided by env_vars. Any charm scripts run
3803- outside the juju hook environment can source this scriptrc to obtain
3804- updated config information necessary to perform health checks or
3805- service changes.
3806- """
3807- juju_rc_path = "%s/%s" % (charm_dir(), script_path)
3808- if not os.path.exists(os.path.dirname(juju_rc_path)):
3809- os.mkdir(os.path.dirname(juju_rc_path))
3810- with open(juju_rc_path, 'wb') as rc_script:
3811- rc_script.write(
3812- "#!/bin/bash\n")
3813- [rc_script.write('export %s=%s\n' % (u, p))
3814- for u, p in env_vars.iteritems() if u != "script_path"]
3815-
3816-
3817-def openstack_upgrade_available(package):
3818- """
3819- Determines if an OpenStack upgrade is available from installation
3820- source, based on version of installed package.
3821-
3822- :param package: str: Name of installed package.
3823-
3824- :returns: bool: : Returns True if configured installation source offers
3825- a newer version of package.
3826-
3827- """
3828-
3829- src = config('openstack-origin')
3830- cur_vers = get_os_version_package(package)
3831- available_vers = get_os_version_install_source(src)
3832- apt.init()
3833- return apt.version_compare(available_vers, cur_vers) == 1
3834
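
For reference, a short sketch of the removed install-source helpers (assumes a precise host and the bundled charmhelpers copy still being importable):

    from charmhelpers.contrib.openstack.utils import (
        configure_installation_source,
        get_os_codename_install_source,
    )

    # 'distro' maps via UBUNTU_OPENSTACK_RELEASE (precise -> 'essex');
    # Cloud Archive sources are parsed down to their codename.
    get_os_codename_install_source('cloud:precise-havana')   # -> 'havana'
    configure_installation_source('cloud:precise-havana')    # writes cloud-archive.list
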
3835=== removed directory 'hooks/install.d/charmhelpers/contrib/saltstack'
3836=== removed file 'hooks/install.d/charmhelpers/contrib/saltstack/__init__.py'
3837--- hooks/install.d/charmhelpers/contrib/saltstack/__init__.py 2015-08-19 19:17:42 +0000
3838+++ hooks/install.d/charmhelpers/contrib/saltstack/__init__.py 1970-01-01 00:00:00 +0000
3839@@ -1,149 +0,0 @@
3840-"""Charm Helpers saltstack - declare the state of your machines.
3841-
3842-This helper enables you to declare your machine state, rather than
3843-program it procedurally (and have to test each change to your procedures).
3844-Your install hook can be as simple as:
3845-
3846-{{{
3847-from charmhelpers.contrib.saltstack import (
3848- install_salt_support,
3849- update_machine_state,
3850-)
3851-
3852-
3853-def install():
3854- install_salt_support()
3855- update_machine_state('machine_states/dependencies.yaml')
3856- update_machine_state('machine_states/installed.yaml')
3857-}}}
3858-
3859-and won't need to change (nor will its tests) when you change the machine
3860-state.
3861-
3862-It's using a python package called salt-minion which allows various formats for
3863-specifying resources, such as:
3864-
3865-{{{
3866-/srv/{{ basedir }}:
3867- file.directory:
3868- - group: ubunet
3869- - user: ubunet
3870- - require:
3871- - user: ubunet
3872- - recurse:
3873- - user
3874- - group
3875-
3876-ubunet:
3877- group.present:
3878- - gid: 1500
3879- user.present:
3880- - uid: 1500
3881- - gid: 1500
3882- - createhome: False
3883- - require:
3884- - group: ubunet
3885-}}}
3886-
3887-The docs for all the different state definitions are at:
3888- http://docs.saltstack.com/ref/states/all/
3889-
3890-
3891-TODO:
3892- * Add test helpers which will ensure that machine state definitions
3893- are functionally (but not necessarily logically) correct (i.e. getting
3894- salt to parse all state defs).
3895- * Add a link to a public bootstrap charm example / blogpost.
3896- * Find a way to obviate the need to use the grains['charm_dir'] syntax
3897- in templates.
3898-"""
3899-# Copyright 2013 Canonical Ltd.
3900-#
3901-# Authors:
3902-# Charm Helpers Developers <juju@lists.ubuntu.com>
3903-import os
3904-import subprocess
3905-import yaml
3906-
3907-import charmhelpers.core.host
3908-import charmhelpers.core.hookenv
3909-
3910-
3911-charm_dir = os.environ.get('CHARM_DIR', '')
3912-salt_grains_path = '/etc/salt/grains'
3913-
3914-
3915-def install_salt_support(from_ppa=True):
3916- """Installs the salt-minion helper for machine state.
3917-
3918- By default the salt-minion package is installed from
3919- the saltstack PPA. If from_ppa is False you must ensure
3920- that the salt-minion package is available in the apt cache.
3921- """
3922- if from_ppa:
3923- subprocess.check_call([
3924- '/usr/bin/add-apt-repository',
3925- '--yes',
3926- 'ppa:saltstack/salt',
3927- ])
3928- subprocess.check_call(['/usr/bin/apt-get', 'update'])
3929- # We install salt-common as salt-minion would run the salt-minion
3930- # daemon.
3931- charmhelpers.fetch.apt_install('salt-common')
3932-
3933-
3934-def update_machine_state(state_path):
3935- """Update the machine state using the provided state declaration."""
3936- juju_state_to_yaml(salt_grains_path)
3937- subprocess.check_call([
3938- 'salt-call',
3939- '--local',
3940- 'state.template',
3941- state_path,
3942- ])
3943-
3944-
3945-def juju_state_to_yaml(yaml_path, namespace_separator=':'):
3946- """Update the juju config and state in a yaml file.
3947-
3948- This includes any current relation-get data, and the charm
3949- directory.
3950- """
3951- config = charmhelpers.core.hookenv.config()
3952-
3953- # Add the charm_dir which we will need to refer to charm
3954- # file resources etc.
3955- config['charm_dir'] = charm_dir
3956- config['local_unit'] = charmhelpers.core.hookenv.local_unit()
3957-
3958- # Add any relation data prefixed with the relation type.
3959- relation_type = charmhelpers.core.hookenv.relation_type()
3960- if relation_type is not None:
3961- relation_data = charmhelpers.core.hookenv.relation_get()
3962- relation_data = dict(
3963- ("{relation_type}{namespace_separator}{key}".format(
3964- relation_type=relation_type.replace('-', '_'),
3965- key=key,
3966- namespace_separator=namespace_separator), val)
3967- for key, val in relation_data.items())
3968- config.update(relation_data)
3969-
3970- # Don't use non-standard tags for unicode which will not
3971- # work when salt uses yaml.safe_load.
3972- yaml.add_representer(unicode, lambda dumper,
3973- value: dumper.represent_scalar(
3974- u'tag:yaml.org,2002:str', value))
3975-
3976- yaml_dir = os.path.dirname(yaml_path)
3977- if not os.path.exists(yaml_dir):
3978- os.makedirs(yaml_dir)
3979-
3980- if os.path.exists(yaml_path):
3981- with open(yaml_path, "r") as existing_vars_file:
3982- existing_vars = yaml.load(existing_vars_file.read())
3983- else:
3984- existing_vars = {}
3985-
3986- existing_vars.update(config)
3987- with open(yaml_path, "w+") as fp:
3988- fp.write(yaml.dump(existing_vars))
3989
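
For reference, the only piece of the removed saltstack helper not already exemplified in its docstring is the grains dump, sketched here (assuming the bundled copy were still importable):

    from charmhelpers.contrib.saltstack import juju_state_to_yaml

    # Merge charm config, charm_dir, local_unit and any relation data
    # (prefixed 'relation_type:key') into the salt grains file.
    juju_state_to_yaml('/etc/salt/grains')
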
3990=== removed directory 'hooks/install.d/charmhelpers/contrib/ssl'
3991=== removed file 'hooks/install.d/charmhelpers/contrib/ssl/__init__.py'
3992--- hooks/install.d/charmhelpers/contrib/ssl/__init__.py 2015-08-19 19:17:42 +0000
3993+++ hooks/install.d/charmhelpers/contrib/ssl/__init__.py 1970-01-01 00:00:00 +0000
3994@@ -1,79 +0,0 @@
3995-import subprocess
3996-from charmhelpers.core import hookenv
3997-
3998-
3999-def generate_selfsigned(keyfile, certfile, keysize="1024", config=None, subject=None, cn=None):
4000- """Generate selfsigned SSL keypair
4001-
4002- You must provide one of the 3 optional arguments:
4003- config, subject or cn
4004- If more than one is provided the leftmost will be used
4005-
4006- Arguments:
4007- keyfile -- (required) full path to the keyfile to be created
4008- certfile -- (required) full path to the certfile to be created
4009- keysize -- (optional) SSL key length
4010- config -- (optional) openssl configuration file
4011- subject -- (optional) dictionary with SSL subject variables
4012- cn -- (optional) certificate common name
4013-
4014- Required keys in subject dict:
4015- cn -- Common name (e.g. FQDN)
4016-
4017- Optional keys in subject dict
4018- country -- Country Name (2 letter code)
4019- state -- State or Province Name (full name)
4020- locality -- Locality Name (eg, city)
4021- organization -- Organization Name (eg, company)
4022- organizational_unit -- Organizational Unit Name (eg, section)
4023- email -- Email Address
4024- """
4025-
4026- cmd = []
4027- if config:
4028- cmd = ["/usr/bin/openssl", "req", "-new", "-newkey",
4029- "rsa:{}".format(keysize), "-days", "365", "-nodes", "-x509",
4030- "-keyout", keyfile,
4031- "-out", certfile, "-config", config]
4032- elif subject:
4033- ssl_subject = ""
4034- if "country" in subject:
4035- ssl_subject = ssl_subject + "/C={}".format(subject["country"])
4036- if "state" in subject:
4037- ssl_subject = ssl_subject + "/ST={}".format(subject["state"])
4038- if "locality" in subject:
4039- ssl_subject = ssl_subject + "/L={}".format(subject["locality"])
4040- if "organization" in subject:
4041- ssl_subject = ssl_subject + "/O={}".format(subject["organization"])
4042- if "organizational_unit" in subject:
4043- ssl_subject = ssl_subject + "/OU={}".format(subject["organizational_unit"])
4044- if "cn" in subject:
4045- ssl_subject = ssl_subject + "/CN={}".format(subject["cn"])
4046- else:
4047- hookenv.log("When using \"subject\" argument you must " \
4048- "provide \"cn\" field at very least")
4049- return False
4050- if "email" in subject:
4051- ssl_subject = ssl_subject + "/emailAddress={}".format(subject["email"])
4052-
4053- cmd = ["/usr/bin/openssl", "req", "-new", "-newkey",
4054- "rsa:{}".format(keysize), "-days", "365", "-nodes", "-x509",
4055- "-keyout", keyfile,
4056- "-out", certfile, "-subj", ssl_subject]
4057- elif cn:
4058- cmd = ["/usr/bin/openssl", "req", "-new", "-newkey",
4059- "rsa:{}".format(keysize), "-days", "365", "-nodes", "-x509",
4060- "-keyout", keyfile,
4061- "-out", certfile, "-subj", "/CN={}".format(cn)]
4062-
4063- if not cmd:
4064- hookenv.log("No config, subject or cn provided," \
4065- "unable to generate self signed SSL certificates")
4066- return False
4067- try:
4068- subprocess.check_call(cmd)
4069- return True
4070- except Exception as e:
4071- print "Execution of openssl command failed:\n{}".format(e)
4072- return False
4073-
4074
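
For reference, a minimal usage sketch of the removed generate_selfsigned() helper (paths and names are hypothetical):

    from charmhelpers.contrib.ssl import generate_selfsigned

    # Simplest form: just a common name.
    generate_selfsigned('/etc/ssl/private/unit.key',
                        '/etc/ssl/certs/unit.crt',
                        cn='unit0.example.com')

    # Or pass a subject dict; 'cn' is the only required key.
    generate_selfsigned('/etc/ssl/private/unit.key',
                        '/etc/ssl/certs/unit.crt',
                        keysize="2048",
                        subject={'cn': 'unit0.example.com', 'country': 'GB'})
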
4075=== removed directory 'hooks/install.d/charmhelpers/contrib/storage'
4076=== removed file 'hooks/install.d/charmhelpers/contrib/storage/__init__.py'
4077=== removed directory 'hooks/install.d/charmhelpers/contrib/storage/linux'
4078=== removed file 'hooks/install.d/charmhelpers/contrib/storage/linux/__init__.py'
4079=== removed file 'hooks/install.d/charmhelpers/contrib/storage/linux/loopback.py'
4080--- hooks/install.d/charmhelpers/contrib/storage/linux/loopback.py 2015-08-19 19:17:42 +0000
4081+++ hooks/install.d/charmhelpers/contrib/storage/linux/loopback.py 1970-01-01 00:00:00 +0000
4082@@ -1,62 +0,0 @@
4083-
4084-import os
4085-import re
4086-
4087-from subprocess import (
4088- check_call,
4089- check_output,
4090-)
4091-
4092-
4093-##################################################
4094-# loopback device helpers.
4095-##################################################
4096-def loopback_devices():
4097- '''
4098- Parse through 'losetup -a' output to determine currently mapped
4099- loopback devices. Output is expected to look like:
4100-
4101- /dev/loop0: [0807]:961814 (/tmp/my.img)
4102-
4103- :returns: dict: a dict mapping {loopback_dev: backing_file}
4104- '''
4105- loopbacks = {}
4106- cmd = ['losetup', '-a']
4107- devs = [d.strip().split(' ') for d in
4108- check_output(cmd).splitlines() if d != '']
4109- for dev, _, f in devs:
4110- loopbacks[dev.replace(':', '')] = re.search('\((\S+)\)', f).groups()[0]
4111- return loopbacks
4112-
4113-
4114-def create_loopback(file_path):
4115- '''
4116- Create a loopback device for a given backing file.
4117-
4118- :returns: str: Full path to new loopback device (eg, /dev/loop0)
4119- '''
4120- file_path = os.path.abspath(file_path)
4121- check_call(['losetup', '--find', file_path])
4122- for d, f in loopback_devices().iteritems():
4123- if f == file_path:
4124- return d
4125-
4126-
4127-def ensure_loopback_device(path, size):
4128- '''
4129- Ensure a loopback device exists for a given backing file path and size.
4130- If a loopback device is not already mapped to the file, a new one will be created.
4131-
4132- TODO: Confirm size of found loopback device.
4133-
4134- :returns: str: Full path to the ensured loopback device (eg, /dev/loop0)
4135- '''
4136- for d, f in loopback_devices().iteritems():
4137- if f == path:
4138- return d
4139-
4140- if not os.path.exists(path):
4141- cmd = ['truncate', '--size', size, path]
4142- check_call(cmd)
4143-
4144- return create_loopback(path)
4145
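
For reference, a usage sketch of the removed loopback helpers (device and file names are illustrative):

    from charmhelpers.contrib.storage.linux.loopback import (
        ensure_loopback_device,
        loopback_devices,
    )

    # Create (or reuse) a loopback device backed by a 1G sparse file.
    dev = ensure_loopback_device('/srv/images/test.img', '1G')
    # dev is e.g. '/dev/loop0'; loopback_devices() maps devices to backing files.
    loopback_devices()   # {'/dev/loop0': '/srv/images/test.img', ...}
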
4146=== removed file 'hooks/install.d/charmhelpers/contrib/storage/linux/lvm.py'
4147--- hooks/install.d/charmhelpers/contrib/storage/linux/lvm.py 2015-08-19 19:17:42 +0000
4148+++ hooks/install.d/charmhelpers/contrib/storage/linux/lvm.py 1970-01-01 00:00:00 +0000
4149@@ -1,88 +0,0 @@
4150-from subprocess import (
4151- CalledProcessError,
4152- check_call,
4153- check_output,
4154- Popen,
4155- PIPE,
4156-)
4157-
4158-
4159-##################################################
4160-# LVM helpers.
4161-##################################################
4162-def deactivate_lvm_volume_group(block_device):
4163- '''
4164- Deactivate any volume group associated with an LVM physical volume.
4165-
4166- :param block_device: str: Full path to LVM physical volume
4167- '''
4168- vg = list_lvm_volume_group(block_device)
4169- if vg:
4170- cmd = ['vgchange', '-an', vg]
4171- check_call(cmd)
4172-
4173-
4174-def is_lvm_physical_volume(block_device):
4175- '''
4176- Determine whether a block device is initialized as an LVM PV.
4177-
4178- :param block_device: str: Full path of block device to inspect.
4179-
4180- :returns: boolean: True if block device is a PV, False if not.
4181- '''
4182- try:
4183- check_output(['pvdisplay', block_device])
4184- return True
4185- except CalledProcessError:
4186- return False
4187-
4188-
4189-def remove_lvm_physical_volume(block_device):
4190- '''
4191- Remove LVM PV signatures from a given block device.
4192-
4193- :param block_device: str: Full path of block device to scrub.
4194- '''
4195- p = Popen(['pvremove', '-ff', block_device],
4196- stdin=PIPE)
4197- p.communicate(input='y\n')
4198-
4199-
4200-def list_lvm_volume_group(block_device):
4201- '''
4202- List LVM volume group associated with a given block device.
4203-
4204- Assumes block device is a valid LVM PV.
4205-
4206- :param block_device: str: Full path of block device to inspect.
4207-
4208- :returns: str: Name of volume group associated with block device or None
4209- '''
4210- vg = None
4211- pvd = check_output(['pvdisplay', block_device]).splitlines()
4212- for l in pvd:
4213- if l.strip().startswith('VG Name'):
4214- vg = ' '.join(l.split()).split(' ').pop()
4215- return vg
4216-
4217-
4218-def create_lvm_physical_volume(block_device):
4219- '''
4220- Initialize a block device as an LVM physical volume.
4221-
4222- :param block_device: str: Full path of block device to initialize.
4223-
4224- '''
4225- check_call(['pvcreate', block_device])
4226-
4227-
4228-def create_lvm_volume_group(volume_group, block_device):
4229- '''
4230- Create an LVM volume group backed by a given block device.
4231-
4232- Assumes block device has already been initialized as an LVM PV.
4233-
4234- :param volume_group: str: Name of volume group to create.
4235- :block_device: str: Full path of PV-initialized block device.
4236- '''
4237- check_call(['vgcreate', volume_group, block_device])
4238
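
For reference, a usage sketch of the removed LVM helpers (block device and volume group names are illustrative):

    from charmhelpers.contrib.storage.linux.lvm import (
        create_lvm_physical_volume,
        create_lvm_volume_group,
        is_lvm_physical_volume,
    )

    if not is_lvm_physical_volume('/dev/vdb'):
        create_lvm_physical_volume('/dev/vdb')      # pvcreate /dev/vdb
    create_lvm_volume_group('vg_data', '/dev/vdb')  # vgcreate vg_data /dev/vdb
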
4239=== removed file 'hooks/install.d/charmhelpers/contrib/storage/linux/utils.py'
4240--- hooks/install.d/charmhelpers/contrib/storage/linux/utils.py 2015-08-19 19:17:42 +0000
4241+++ hooks/install.d/charmhelpers/contrib/storage/linux/utils.py 1970-01-01 00:00:00 +0000
4242@@ -1,25 +0,0 @@
4243-from os import stat
4244-from stat import S_ISBLK
4245-
4246-from subprocess import (
4247- check_call
4248-)
4249-
4250-
4251-def is_block_device(path):
4252- '''
4253- Confirm device at path is a valid block device node.
4254-
4255- :returns: boolean: True if path is a block device, False if not.
4256- '''
4257- return S_ISBLK(stat(path).st_mode)
4258-
4259-
4260-def zap_disk(block_device):
4261- '''
4262- Clear a block device of partition table. Relies on sgdisk, which is
4263- installed as part of the 'gdisk' package in Ubuntu.
4264-
4265- :param block_device: str: Full path of block device to clean.
4266- '''
4267- check_call(['sgdisk', '--zap-all', block_device])
4268
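
For reference, a short sketch of the two removed block-device helpers (the device path is illustrative; zap_disk is destructive):

    from charmhelpers.contrib.storage.linux.utils import is_block_device, zap_disk

    if is_block_device('/dev/vdb'):
        zap_disk('/dev/vdb')   # wipes the partition table with sgdisk --zap-all
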
4269=== removed directory 'hooks/install.d/charmhelpers/contrib/templating'
4270=== removed file 'hooks/install.d/charmhelpers/contrib/templating/__init__.py'
4271=== removed file 'hooks/install.d/charmhelpers/contrib/templating/pyformat.py'
4272--- hooks/install.d/charmhelpers/contrib/templating/pyformat.py 2015-08-19 19:17:42 +0000
4273+++ hooks/install.d/charmhelpers/contrib/templating/pyformat.py 1970-01-01 00:00:00 +0000
4274@@ -1,13 +0,0 @@
4275-'''
4276-Templating using standard Python str.format() method.
4277-'''
4278-
4279-from charmhelpers.core import hookenv
4280-
4281-
4282-def render(template, extra={}, **kwargs):
4283- """Return the template rendered using Python's str.format()."""
4284- context = hookenv.execution_environment()
4285- context.update(extra)
4286- context.update(kwargs)
4287- return template.format(**context)
4288
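
For reference, a sketch of the removed str.format()-based render helper; the 'flags' config option and 'owner' keyword are hypothetical:

    from charmhelpers.contrib.templating.pyformat import render

    # The context is hookenv.execution_environment() ('conf', 'unit', 'env', ...)
    # plus anything passed via extra or keyword arguments.
    render('{unit} configured with {conf[flags]} for {owner}', owner='ops-team')
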
4289=== removed directory 'hooks/install.d/charmhelpers/core'
4290=== removed file 'hooks/install.d/charmhelpers/core/__init__.py'
4291=== removed file 'hooks/install.d/charmhelpers/core/hookenv.py'
4292--- hooks/install.d/charmhelpers/core/hookenv.py 2015-08-19 19:17:42 +0000
4293+++ hooks/install.d/charmhelpers/core/hookenv.py 1970-01-01 00:00:00 +0000
4294@@ -1,340 +0,0 @@
4295-"Interactions with the Juju environment"
4296-# Copyright 2013 Canonical Ltd.
4297-#
4298-# Authors:
4299-# Charm Helpers Developers <juju@lists.ubuntu.com>
4300-
4301-import os
4302-import json
4303-import yaml
4304-import subprocess
4305-import UserDict
4306-
4307-CRITICAL = "CRITICAL"
4308-ERROR = "ERROR"
4309-WARNING = "WARNING"
4310-INFO = "INFO"
4311-DEBUG = "DEBUG"
4312-MARKER = object()
4313-
4314-cache = {}
4315-
4316-
4317-def cached(func):
4318- ''' Cache return values for multiple executions of func + args
4319-
4320- For example:
4321-
4322- @cached
4323- def unit_get(attribute):
4324- pass
4325-
4326- unit_get('test')
4327-
4328- will cache the result of unit_get + 'test' for future calls.
4329- '''
4330- def wrapper(*args, **kwargs):
4331- global cache
4332- key = str((func, args, kwargs))
4333- try:
4334- return cache[key]
4335- except KeyError:
4336- res = func(*args, **kwargs)
4337- cache[key] = res
4338- return res
4339- return wrapper
4340-
4341-
4342-def flush(key):
4343- ''' Flushes any entries from function cache where the
4344- key is found in the function+args '''
4345- flush_list = []
4346- for item in cache:
4347- if key in item:
4348- flush_list.append(item)
4349- for item in flush_list:
4350- del cache[item]
4351-
4352-
4353-def log(message, level=None):
4354- "Write a message to the juju log"
4355- command = ['juju-log']
4356- if level:
4357- command += ['-l', level]
4358- command += [message]
4359- subprocess.call(command)
4360-
4361-
4362-class Serializable(UserDict.IterableUserDict):
4363- "Wrapper, an object that can be serialized to yaml or json"
4364-
4365- def __init__(self, obj):
4366- # wrap the object
4367- UserDict.IterableUserDict.__init__(self)
4368- self.data = obj
4369-
4370- def __getattr__(self, attr):
4371- # See if this object has attribute.
4372- if attr in ("json", "yaml", "data"):
4373- return self.__dict__[attr]
4374- # Check for attribute in wrapped object.
4375- got = getattr(self.data, attr, MARKER)
4376- if got is not MARKER:
4377- return got
4378- # Proxy to the wrapped object via dict interface.
4379- try:
4380- return self.data[attr]
4381- except KeyError:
4382- raise AttributeError(attr)
4383-
4384- def __getstate__(self):
4385- # Pickle as a standard dictionary.
4386- return self.data
4387-
4388- def __setstate__(self, state):
4389- # Unpickle into our wrapper.
4390- self.data = state
4391-
4392- def json(self):
4393- "Serialize the object to json"
4394- return json.dumps(self.data)
4395-
4396- def yaml(self):
4397- "Serialize the object to yaml"
4398- return yaml.dump(self.data)
4399-
4400-
4401-def execution_environment():
4402- """A convenient bundling of the current execution context"""
4403- context = {}
4404- context['conf'] = config()
4405- if relation_id():
4406- context['reltype'] = relation_type()
4407- context['relid'] = relation_id()
4408- context['rel'] = relation_get()
4409- context['unit'] = local_unit()
4410- context['rels'] = relations()
4411- context['env'] = os.environ
4412- return context
4413-
4414-
4415-def in_relation_hook():
4416- "Determine whether we're running in a relation hook"
4417- return 'JUJU_RELATION' in os.environ
4418-
4419-
4420-def relation_type():
4421- "The scope for the current relation hook"
4422- return os.environ.get('JUJU_RELATION', None)
4423-
4424-
4425-def relation_id():
4426- "The relation ID for the current relation hook"
4427- return os.environ.get('JUJU_RELATION_ID', None)
4428-
4429-
4430-def local_unit():
4431- "Local unit ID"
4432- return os.environ['JUJU_UNIT_NAME']
4433-
4434-
4435-def remote_unit():
4436- "The remote unit for the current relation hook"
4437- return os.environ['JUJU_REMOTE_UNIT']
4438-
4439-
4440-def service_name():
4441- "The name service group this unit belongs to"
4442- return local_unit().split('/')[0]
4443-
4444-
4445-@cached
4446-def config(scope=None):
4447- "Juju charm configuration"
4448- config_cmd_line = ['config-get']
4449- if scope is not None:
4450- config_cmd_line.append(scope)
4451- config_cmd_line.append('--format=json')
4452- try:
4453- return json.loads(subprocess.check_output(config_cmd_line))
4454- except ValueError:
4455- return None
4456-
4457-
4458-@cached
4459-def relation_get(attribute=None, unit=None, rid=None):
4460- _args = ['relation-get', '--format=json']
4461- if rid:
4462- _args.append('-r')
4463- _args.append(rid)
4464- _args.append(attribute or '-')
4465- if unit:
4466- _args.append(unit)
4467- try:
4468- return json.loads(subprocess.check_output(_args))
4469- except ValueError:
4470- return None
4471-
4472-
4473-def relation_set(relation_id=None, relation_settings={}, **kwargs):
4474- relation_cmd_line = ['relation-set']
4475- if relation_id is not None:
4476- relation_cmd_line.extend(('-r', relation_id))
4477- for k, v in (relation_settings.items() + kwargs.items()):
4478- if v is None:
4479- relation_cmd_line.append('{}='.format(k))
4480- else:
4481- relation_cmd_line.append('{}={}'.format(k, v))
4482- subprocess.check_call(relation_cmd_line)
4483- # Flush cache of any relation-gets for local unit
4484- flush(local_unit())
4485-
4486-
4487-@cached
4488-def relation_ids(reltype=None):
4489- "A list of relation_ids"
4490- reltype = reltype or relation_type()
4491- relid_cmd_line = ['relation-ids', '--format=json']
4492- if reltype is not None:
4493- relid_cmd_line.append(reltype)
4494- return json.loads(subprocess.check_output(relid_cmd_line)) or []
4495- return []
4496-
4497-
4498-@cached
4499-def related_units(relid=None):
4500- "A list of related units"
4501- relid = relid or relation_id()
4502- units_cmd_line = ['relation-list', '--format=json']
4503- if relid is not None:
4504- units_cmd_line.extend(('-r', relid))
4505- return json.loads(subprocess.check_output(units_cmd_line)) or []
4506-
4507-
4508-@cached
4509-def relation_for_unit(unit=None, rid=None):
4510- "Get the json represenation of a unit's relation"
4511- unit = unit or remote_unit()
4512- relation = relation_get(unit=unit, rid=rid)
4513- for key in relation:
4514- if key.endswith('-list'):
4515- relation[key] = relation[key].split()
4516- relation['__unit__'] = unit
4517- return relation
4518-
4519-
4520-@cached
4521-def relations_for_id(relid=None):
4522- "Get relations of a specific relation ID"
4523- relation_data = []
4524- relid = relid or relation_ids()
4525- for unit in related_units(relid):
4526- unit_data = relation_for_unit(unit, relid)
4527- unit_data['__relid__'] = relid
4528- relation_data.append(unit_data)
4529- return relation_data
4530-
4531-
4532-@cached
4533-def relations_of_type(reltype=None):
4534- "Get relations of a specific type"
4535- relation_data = []
4536- reltype = reltype or relation_type()
4537- for relid in relation_ids(reltype):
4538- for relation in relations_for_id(relid):
4539- relation['__relid__'] = relid
4540- relation_data.append(relation)
4541- return relation_data
4542-
4543-
4544-@cached
4545-def relation_types():
4546- "Get a list of relation types supported by this charm"
4547- charmdir = os.environ.get('CHARM_DIR', '')
4548- mdf = open(os.path.join(charmdir, 'metadata.yaml'))
4549- md = yaml.safe_load(mdf)
4550- rel_types = []
4551- for key in ('provides', 'requires', 'peers'):
4552- section = md.get(key)
4553- if section:
4554- rel_types.extend(section.keys())
4555- mdf.close()
4556- return rel_types
4557-
4558-
4559-@cached
4560-def relations():
4561- rels = {}
4562- for reltype in relation_types():
4563- relids = {}
4564- for relid in relation_ids(reltype):
4565- units = {local_unit(): relation_get(unit=local_unit(), rid=relid)}
4566- for unit in related_units(relid):
4567- reldata = relation_get(unit=unit, rid=relid)
4568- units[unit] = reldata
4569- relids[relid] = units
4570- rels[reltype] = relids
4571- return rels
4572-
4573-
4574-def open_port(port, protocol="TCP"):
4575- "Open a service network port"
4576- _args = ['open-port']
4577- _args.append('{}/{}'.format(port, protocol))
4578- subprocess.check_call(_args)
4579-
4580-
4581-def close_port(port, protocol="TCP"):
4582- "Close a service network port"
4583- _args = ['close-port']
4584- _args.append('{}/{}'.format(port, protocol))
4585- subprocess.check_call(_args)
4586-
4587-
4588-@cached
4589-def unit_get(attribute):
4590- _args = ['unit-get', '--format=json', attribute]
4591- try:
4592- return json.loads(subprocess.check_output(_args))
4593- except ValueError:
4594- return None
4595-
4596-
4597-def unit_private_ip():
4598- return unit_get('private-address')
4599-
4600-
4601-class UnregisteredHookError(Exception):
4602- pass
4603-
4604-
4605-class Hooks(object):
4606- def __init__(self):
4607- super(Hooks, self).__init__()
4608- self._hooks = {}
4609-
4610- def register(self, name, function):
4611- self._hooks[name] = function
4612-
4613- def execute(self, args):
4614- hook_name = os.path.basename(args[0])
4615- if hook_name in self._hooks:
4616- self._hooks[hook_name]()
4617- else:
4618- raise UnregisteredHookError(hook_name)
4619-
4620- def hook(self, *hook_names):
4621- def wrapper(decorated):
4622- for hook_name in hook_names:
4623- self.register(hook_name, decorated)
4624- else:
4625- self.register(decorated.__name__, decorated)
4626- if '_' in decorated.__name__:
4627- self.register(
4628- decorated.__name__.replace('_', '-'), decorated)
4629- return decorated
4630- return wrapper
4631-
4632-
4633-def charm_dir():
4634- return os.environ.get('CHARM_DIR')
4635
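
For reference, a sketch of how a hook script dispatched through the removed Hooks class (hook names are illustrative):

    import sys

    from charmhelpers.core.hookenv import Hooks, log

    hooks = Hooks()

    @hooks.hook('install', 'upgrade-charm')
    def install():
        log('running install steps')

    if __name__ == '__main__':
        # Dispatch on the basename this script was invoked as.
        hooks.execute(sys.argv)
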
4636=== removed file 'hooks/install.d/charmhelpers/core/host.py'
4637--- hooks/install.d/charmhelpers/core/host.py 2015-08-19 19:17:42 +0000
4638+++ hooks/install.d/charmhelpers/core/host.py 1970-01-01 00:00:00 +0000
4639@@ -1,241 +0,0 @@
4640-"""Tools for working with the host system"""
4641-# Copyright 2012 Canonical Ltd.
4642-#
4643-# Authors:
4644-# Nick Moffitt <nick.moffitt@canonical.com>
4645-# Matthew Wedgwood <matthew.wedgwood@canonical.com>
4646-
4647-import os
4648-import pwd
4649-import grp
4650-import random
4651-import string
4652-import subprocess
4653-import hashlib
4654-
4655-from collections import OrderedDict
4656-
4657-from hookenv import log
4658-
4659-
4660-def service_start(service_name):
4661- return service('start', service_name)
4662-
4663-
4664-def service_stop(service_name):
4665- return service('stop', service_name)
4666-
4667-
4668-def service_restart(service_name):
4669- return service('restart', service_name)
4670-
4671-
4672-def service_reload(service_name, restart_on_failure=False):
4673- service_result = service('reload', service_name)
4674- if not service_result and restart_on_failure:
4675- service_result = service('restart', service_name)
4676- return service_result
4677-
4678-
4679-def service(action, service_name):
4680- cmd = ['service', service_name, action]
4681- return subprocess.call(cmd) == 0
4682-
4683-
4684-def service_running(service):
4685- try:
4686- output = subprocess.check_output(['service', service, 'status'])
4687- except subprocess.CalledProcessError:
4688- return False
4689- else:
4690- if ("start/running" in output or "is running" in output):
4691- return True
4692- else:
4693- return False
4694-
4695-
4696-def adduser(username, password=None, shell='/bin/bash', system_user=False):
4697- """Add a user"""
4698- try:
4699- user_info = pwd.getpwnam(username)
4700- log('user {0} already exists!'.format(username))
4701- except KeyError:
4702- log('creating user {0}'.format(username))
4703- cmd = ['useradd']
4704- if system_user or password is None:
4705- cmd.append('--system')
4706- else:
4707- cmd.extend([
4708- '--create-home',
4709- '--shell', shell,
4710- '--password', password,
4711- ])
4712- cmd.append(username)
4713- subprocess.check_call(cmd)
4714- user_info = pwd.getpwnam(username)
4715- return user_info
4716-
4717-
4718-def add_user_to_group(username, group):
4719- """Add a user to a group"""
4720- cmd = [
4721- 'gpasswd', '-a',
4722- username,
4723- group
4724- ]
4725- log("Adding user {} to group {}".format(username, group))
4726- subprocess.check_call(cmd)
4727-
4728-
4729-def rsync(from_path, to_path, flags='-r', options=None):
4730- """Replicate the contents of a path"""
4731- options = options or ['--delete', '--executability']
4732- cmd = ['/usr/bin/rsync', flags]
4733- cmd.extend(options)
4734- cmd.append(from_path)
4735- cmd.append(to_path)
4736- log(" ".join(cmd))
4737- return subprocess.check_output(cmd).strip()
4738-
4739-
4740-def symlink(source, destination):
4741- """Create a symbolic link"""
4742- log("Symlinking {} as {}".format(source, destination))
4743- cmd = [
4744- 'ln',
4745- '-sf',
4746- source,
4747- destination,
4748- ]
4749- subprocess.check_call(cmd)
4750-
4751-
4752-def mkdir(path, owner='root', group='root', perms=0555, force=False):
4753- """Create a directory"""
4754- log("Making dir {} {}:{} {:o}".format(path, owner, group,
4755- perms))
4756- uid = pwd.getpwnam(owner).pw_uid
4757- gid = grp.getgrnam(group).gr_gid
4758- realpath = os.path.abspath(path)
4759- if os.path.exists(realpath):
4760- if force and not os.path.isdir(realpath):
4761- log("Removing non-directory file {} prior to mkdir()".format(path))
4762- os.unlink(realpath)
4763- else:
4764- os.makedirs(realpath, perms)
4765- os.chown(realpath, uid, gid)
4766-
4767-
4768-def write_file(path, content, owner='root', group='root', perms=0444):
4769- """Create or overwrite a file with the contents of a string"""
4770- log("Writing file {} {}:{} {:o}".format(path, owner, group, perms))
4771- uid = pwd.getpwnam(owner).pw_uid
4772- gid = grp.getgrnam(group).gr_gid
4773- with open(path, 'w') as target:
4774- os.fchown(target.fileno(), uid, gid)
4775- os.fchmod(target.fileno(), perms)
4776- target.write(content)
4777-
4778-
4779-def mount(device, mountpoint, options=None, persist=False):
4780- '''Mount a filesystem'''
4781- cmd_args = ['mount']
4782- if options is not None:
4783- cmd_args.extend(['-o', options])
4784- cmd_args.extend([device, mountpoint])
4785- try:
4786- subprocess.check_output(cmd_args)
4787- except subprocess.CalledProcessError, e:
4788- log('Error mounting {} at {}\n{}'.format(device, mountpoint, e.output))
4789- return False
4790- if persist:
4791- # TODO: update fstab
4792- pass
4793- return True
4794-
4795-
4796-def umount(mountpoint, persist=False):
4797- '''Unmount a filesystem'''
4798- cmd_args = ['umount', mountpoint]
4799- try:
4800- subprocess.check_output(cmd_args)
4801- except subprocess.CalledProcessError, e:
4802- log('Error unmounting {}\n{}'.format(mountpoint, e.output))
4803- return False
4804- if persist:
4805- # TODO: update fstab
4806- pass
4807- return True
4808-
4809-
4810-def mounts():
4811- '''List of all mounted volumes as [[mountpoint,device],[...]]'''
4812- with open('/proc/mounts') as f:
4813- # [['/mount/point','/dev/path'],[...]]
4814- system_mounts = [m[1::-1] for m in [l.strip().split()
4815- for l in f.readlines()]]
4816- return system_mounts
4817-
4818-
4819-def file_hash(path):
4820- ''' Generate an md5 hash of the contents of 'path' or None if not found '''
4821- if os.path.exists(path):
4822- h = hashlib.md5()
4823- with open(path, 'r') as source:
4824- h.update(source.read()) # IGNORE:E1101 - it does have update
4825- return h.hexdigest()
4826- else:
4827- return None
4828-
4829-
4830-def restart_on_change(restart_map):
4831- ''' Restart services based on configuration files changing
4832-
4833- This function is used as a decorator, for example
4834-
4835- @restart_on_change({
4836- '/etc/ceph/ceph.conf': [ 'cinder-api', 'cinder-volume' ]
4837- })
4838- def ceph_client_changed():
4839- ...
4840-
4841- In this example, the cinder-api and cinder-volume services
4842- would be restarted if /etc/ceph/ceph.conf is changed by the
4843- ceph_client_changed function.
4844- '''
4845- def wrap(f):
4846- def wrapped_f(*args):
4847- checksums = {}
4848- for path in restart_map:
4849- checksums[path] = file_hash(path)
4850- f(*args)
4851- restarts = []
4852- for path in restart_map:
4853- if checksums[path] != file_hash(path):
4854- restarts += restart_map[path]
4855- for service_name in list(OrderedDict.fromkeys(restarts)):
4856- service('restart', service_name)
4857- return wrapped_f
4858- return wrap
4859-
4860-
4861-def lsb_release():
4862- '''Return /etc/lsb-release in a dict'''
4863- d = {}
4864- with open('/etc/lsb-release', 'r') as lsb:
4865- for l in lsb:
4866- k, v = l.split('=')
4867- d[k.strip()] = v.strip()
4868- return d
4869-
4870-
4871-def pwgen(length=None):
4872- '''Generate a random password.'''
4873- if length is None:
4874- length = random.choice(range(35, 45))
4875- alphanumeric_chars = [
4876- l for l in (string.letters + string.digits)
4877- if l not in 'l0QD1vAEIOUaeiou']
4878- random_chars = [
4879- random.choice(alphanumeric_chars) for _ in range(length)]
4880- return(''.join(random_chars))
4881
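
For reference, a sketch combining a few of the removed host helpers (user, paths and service name are hypothetical):

    from charmhelpers.core.host import (
        add_user_to_group,
        adduser,
        mkdir,
        service_restart,
        write_file,
    )

    adduser('jenkins-slave', system_user=True)
    add_user_to_group('jenkins-slave', 'adm')
    mkdir('/srv/jenkins', owner='jenkins-slave', group='jenkins-slave', perms=0o755)
    write_file('/srv/jenkins/README', 'managed by the charm\n',
               owner='jenkins-slave', group='jenkins-slave')
    service_restart('jenkins-slave')
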
4882=== removed directory 'hooks/install.d/charmhelpers/fetch'
4883=== removed file 'hooks/install.d/charmhelpers/fetch/__init__.py'
4884--- hooks/install.d/charmhelpers/fetch/__init__.py 2015-08-19 19:17:42 +0000
4885+++ hooks/install.d/charmhelpers/fetch/__init__.py 1970-01-01 00:00:00 +0000
4886@@ -1,209 +0,0 @@
4887-import importlib
4888-from yaml import safe_load
4889-from charmhelpers.core.host import (
4890- lsb_release
4891-)
4892-from urlparse import (
4893- urlparse,
4894- urlunparse,
4895-)
4896-import subprocess
4897-from charmhelpers.core.hookenv import (
4898- config,
4899- log,
4900-)
4901-import apt_pkg
4902-
4903-CLOUD_ARCHIVE = """# Ubuntu Cloud Archive
4904-deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main
4905-"""
4906-PROPOSED_POCKET = """# Proposed
4907-deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted
4908-"""
4909-
4910-
4911-def filter_installed_packages(packages):
4912- """Returns a list of packages that require installation"""
4913- apt_pkg.init()
4914- cache = apt_pkg.Cache()
4915- _pkgs = []
4916- for package in packages:
4917- try:
4918- p = cache[package]
4919- p.current_ver or _pkgs.append(package)
4920- except KeyError:
4921- log('Package {} has no installation candidate.'.format(package),
4922- level='WARNING')
4923- _pkgs.append(package)
4924- return _pkgs
4925-
4926-
4927-def apt_install(packages, options=None, fatal=False):
4928- """Install one or more packages"""
4929- options = options or []
4930- cmd = ['apt-get', '-y']
4931- cmd.extend(options)
4932- cmd.append('install')
4933- if isinstance(packages, basestring):
4934- cmd.append(packages)
4935- else:
4936- cmd.extend(packages)
4937- log("Installing {} with options: {}".format(packages,
4938- options))
4939- if fatal:
4940- subprocess.check_call(cmd)
4941- else:
4942- subprocess.call(cmd)
4943-
4944-
4945-def apt_update(fatal=False):
4946- """Update local apt cache"""
4947- cmd = ['apt-get', 'update']
4948- if fatal:
4949- subprocess.check_call(cmd)
4950- else:
4951- subprocess.call(cmd)
4952-
4953-
4954-def apt_purge(packages, fatal=False):
4955- """Purge one or more packages"""
4956- cmd = ['apt-get', '-y', 'purge']
4957- if isinstance(packages, basestring):
4958- cmd.append(packages)
4959- else:
4960- cmd.extend(packages)
4961- log("Purging {}".format(packages))
4962- if fatal:
4963- subprocess.check_call(cmd)
4964- else:
4965- subprocess.call(cmd)
4966-
4967-
4968-def add_source(source, key=None):
4969- if ((source.startswith('ppa:') or
4970- source.startswith('http:'))):
4971- subprocess.check_call(['add-apt-repository', '--yes', source])
4972- elif source.startswith('cloud:'):
4973- apt_install(filter_installed_packages(['ubuntu-cloud-keyring']),
4974- fatal=True)
4975- pocket = source.split(':')[-1]
4976- with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt:
4977- apt.write(CLOUD_ARCHIVE.format(pocket))
4978- elif source == 'proposed':
4979- release = lsb_release()['DISTRIB_CODENAME']
4980- with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt:
4981- apt.write(PROPOSED_POCKET.format(release))
4982- if key:
4983- subprocess.check_call(['apt-key', 'import', key])
4984-
4985-
4986-class SourceConfigError(Exception):
4987- pass
4988-
4989-
4990-def configure_sources(update=False,
4991- sources_var='install_sources',
4992- keys_var='install_keys'):
4993- """
4994- Configure multiple sources from charm configuration
4995-
4996- Example config:
4997- install_sources:
4998- - "ppa:foo"
4999- - "http://example.com/repo precise main"
5000- install_keys:
The diff has been truncated for viewing.
