Merge ~aieri/charm-nagios:bug/1864192 into ~nagios-charmers/charm-nagios:master

Proposed by Andrea Ieri
Status: Superseded
Proposed branch: ~aieri/charm-nagios:bug/1864192
Merge into: ~nagios-charmers/charm-nagios:master
Diff against target: 3760 lines (+2529/-266)
32 files modified
Makefile (+1/-2)
bin/charm_helpers_sync.py (+20/-11)
charm-helpers.yaml (+2/-1)
hooks/charmhelpers/__init__.py (+4/-4)
hooks/charmhelpers/contrib/charmsupport/__init__.py (+13/-0)
hooks/charmhelpers/contrib/charmsupport/nrpe.py (+500/-0)
hooks/charmhelpers/contrib/charmsupport/volumes.py (+173/-0)
hooks/charmhelpers/core/hookenv.py (+525/-56)
hooks/charmhelpers/core/host.py (+166/-10)
hooks/charmhelpers/core/host_factory/ubuntu.py (+28/-1)
hooks/charmhelpers/core/kernel.py (+2/-2)
hooks/charmhelpers/core/services/base.py (+18/-7)
hooks/charmhelpers/core/strutils.py (+11/-5)
hooks/charmhelpers/core/sysctl.py (+32/-11)
hooks/charmhelpers/core/templating.py (+18/-9)
hooks/charmhelpers/core/unitdata.py (+8/-1)
hooks/charmhelpers/fetch/__init__.py (+4/-0)
hooks/charmhelpers/fetch/archiveurl.py (+1/-1)
hooks/charmhelpers/fetch/bzrurl.py (+2/-2)
hooks/charmhelpers/fetch/giturl.py (+2/-2)
hooks/charmhelpers/fetch/python/__init__.py (+13/-0)
hooks/charmhelpers/fetch/python/debug.py (+54/-0)
hooks/charmhelpers/fetch/python/packages.py (+154/-0)
hooks/charmhelpers/fetch/python/rpdb.py (+56/-0)
hooks/charmhelpers/fetch/python/version.py (+32/-0)
hooks/charmhelpers/fetch/snap.py (+17/-1)
hooks/charmhelpers/fetch/ubuntu.py (+305/-83)
hooks/charmhelpers/fetch/ubuntu_apt_pkg.py (+267/-0)
hooks/charmhelpers/osplatform.py (+24/-3)
hooks/common.py (+6/-15)
hooks/install (+1/-1)
hooks/monitors-relation-changed (+70/-38)
Reviewer Review Type Date Requested Status
Chris Sanders (community) Needs Fixing
Adam Dyess (community) Approve
Peter Sabaini Approve
Review via email: mp+386533@code.launchpad.net

This proposal has been superseded by a proposal from 2020-07-13.

Peter Sabaini (peter-sabaini) wrote :

LGTM, some nits but nothing blocking imo

review: Approve
Andrea Ieri (aieri) wrote :

Thanks. The charm helpers sync script actually comes from https://raw.githubusercontent.com/juju/charm-helpers/master/tools/charm_helpers_sync/charm_helpers_sync.py; it's not part of this repo.

Adam Dyess (addyess) wrote :

Great. No issues.

review: Approve
Chris Sanders (chris.sanders) wrote :

A few comments inline, and while I hate to do this, I think the charmhelpers sync and this change need to be split. While reviewing it I'm not convinced that this merge isn't confusing local vs charmhelpers functions. For example, 'ingress_address' is defined in this change and I *believe* is only actually used from charmhelpers.

If I'm just having difficulty understanding and you *do* think the change is dependent on charmhelpers, you can make this MP dependent on the charmhelpers MP so the changes specific to this bug can be reviewed separately.

review: Needs Fixing

Unmerged commits

e4bb62e... by Andrea Ieri

Gracefully handle incorrect relation data sent over the nagios relation

Closes-Bug: 1864192
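
The fix itself lives in hooks/monitors-relation-changed (see the file list above). Purely as a hedged sketch of the general pattern, where parse_monitors is a hypothetical helper and not the charm's actual code, tolerating malformed data on the monitors relation could look like this:

    import yaml

    from charmhelpers.core.hookenv import ERROR, log, relation_get

    def parse_monitors(rid, unit):
        """Return the monitors dict published by a remote unit, or None if unusable."""
        raw = relation_get('monitors', unit=unit, rid=rid)
        if not raw:
            return None
        try:
            monitors = yaml.safe_load(raw)
        except yaml.YAMLError as exc:
            log('Ignoring malformed monitors data from {}: {}'.format(unit, exc), level=ERROR)
            return None
        if not isinstance(monitors, dict):
            log('Ignoring non-dict monitors data from {}'.format(unit), level=ERROR)
            return None
        return monitors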

cc67cb4... by Andrea Ieri

Charmhelpers sync

Install enum for python2 as this is needed by hookenv

3a6dc7c... by Andrea Ieri

Revert "Fully switch to the network-get primitives"

This reverts commit 66b8e0577d7f7f5761da4ff7dd50a0d01e04029c.
The fix was completely wrong, because network-get can only retrieve
local data; learning the ingress-address of a remote unit must be done
via relation-get.
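
A minimal sketch of that distinction, assuming the nagios side of a monitors relation (relation and variable names are illustrative, not the charm's actual hook code):

    from charmhelpers.core.hookenv import (
        ingress_address,
        network_get,
        related_units,
        relation_ids,
    )

    # network-get only describes this unit's own bindings; it cannot tell us
    # how to reach a remote unit.
    local_binding = network_get('monitors')

    # The address of a remote unit has to come from the data that unit
    # published on the relation; ingress_address wraps relation-get and
    # falls back to private-address when ingress-address is not set.
    for rid in relation_ids('monitors'):
        for unit in related_units(rid):
            remote_addr = ingress_address(rid=rid, unit=unit)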

Preview Diff

1diff --git a/Makefile b/Makefile
2index 9d48829..038dc57 100644
3--- a/Makefile
4+++ b/Makefile
5@@ -19,8 +19,7 @@ test:
6
7 bin/charm_helpers_sync.py:
8 @mkdir -p bin
9- @bzr cat lp:charm-helpers/tools/charm_helpers_sync/charm_helpers_sync.py \
10- > bin/charm_helpers_sync.py
11+ @curl -o bin/charm_helpers_sync.py https://raw.githubusercontent.com/juju/charm-helpers/master/tools/charm_helpers_sync/charm_helpers_sync.py
12
13 sync: bin/charm_helpers_sync.py
14 @$(PYTHON) bin/charm_helpers_sync.py -c charm-helpers.yaml
15diff --git a/bin/charm_helpers_sync.py b/bin/charm_helpers_sync.py
16index bd79460..7c0c194 100644
17--- a/bin/charm_helpers_sync.py
18+++ b/bin/charm_helpers_sync.py
19@@ -29,7 +29,7 @@ from fnmatch import fnmatch
20
21 import six
22
23-CHARM_HELPERS_BRANCH = 'lp:charm-helpers'
24+CHARM_HELPERS_REPO = 'https://github.com/juju/charm-helpers'
25
26
27 def parse_config(conf_file):
28@@ -39,10 +39,16 @@ def parse_config(conf_file):
29 return yaml.load(open(conf_file).read())
30
31
32-def clone_helpers(work_dir, branch):
33+def clone_helpers(work_dir, repo):
34 dest = os.path.join(work_dir, 'charm-helpers')
35- logging.info('Checking out %s to %s.' % (branch, dest))
36- cmd = ['bzr', 'checkout', '--lightweight', branch, dest]
37+ logging.info('Cloning out %s to %s.' % (repo, dest))
38+ branch = None
39+ if '@' in repo:
40+ repo, branch = repo.split('@', 1)
41+ cmd = ['git', 'clone', '--depth=1']
42+ if branch is not None:
43+ cmd += ['--branch', branch]
44+ cmd += [repo, dest]
45 subprocess.check_call(cmd)
46 return dest
47
48@@ -174,6 +180,9 @@ def extract_options(inc, global_options=None):
49
50
51 def sync_helpers(include, src, dest, options=None):
52+ if os.path.exists(dest):
53+ logging.debug('Removing existing directory: %s' % dest)
54+ shutil.rmtree(dest)
55 if not os.path.isdir(dest):
56 os.makedirs(dest)
57
58@@ -198,8 +207,8 @@ if __name__ == '__main__':
59 default=None, help='helper config file')
60 parser.add_option('-D', '--debug', action='store_true', dest='debug',
61 default=False, help='debug')
62- parser.add_option('-b', '--branch', action='store', dest='branch',
63- help='charm-helpers bzr branch (overrides config)')
64+ parser.add_option('-r', '--repository', action='store', dest='repo',
65+ help='charm-helpers git repository (overrides config)')
66 parser.add_option('-d', '--destination', action='store', dest='dest_dir',
67 help='sync destination dir (overrides config)')
68 (opts, args) = parser.parse_args()
69@@ -218,10 +227,10 @@ if __name__ == '__main__':
70 else:
71 config = {}
72
73- if 'branch' not in config:
74- config['branch'] = CHARM_HELPERS_BRANCH
75- if opts.branch:
76- config['branch'] = opts.branch
77+ if 'repo' not in config:
78+ config['repo'] = CHARM_HELPERS_REPO
79+ if opts.repo:
80+ config['repo'] = opts.repo
81 if opts.dest_dir:
82 config['destination'] = opts.dest_dir
83
84@@ -241,7 +250,7 @@ if __name__ == '__main__':
85 sync_options = config['options']
86 tmpd = tempfile.mkdtemp()
87 try:
88- checkout = clone_helpers(tmpd, config['branch'])
89+ checkout = clone_helpers(tmpd, config['repo'])
90 sync_helpers(config['include'], checkout, config['destination'],
91 options=sync_options)
92 except Exception as e:
93diff --git a/charm-helpers.yaml b/charm-helpers.yaml
94index e5f7760..640679e 100644
95--- a/charm-helpers.yaml
96+++ b/charm-helpers.yaml
97@@ -1,7 +1,8 @@
98+repo: https://github.com/juju/charm-helpers
99 destination: hooks/charmhelpers
100-branch: lp:charm-helpers
101 include:
102 - core
103 - fetch
104 - osplatform
105 - contrib.ssl
106+ - contrib.charmsupport
107diff --git a/hooks/charmhelpers/__init__.py b/hooks/charmhelpers/__init__.py
108index e7aa471..61ef907 100644
109--- a/hooks/charmhelpers/__init__.py
110+++ b/hooks/charmhelpers/__init__.py
111@@ -23,22 +23,22 @@ import subprocess
112 import sys
113
114 try:
115- import six # flake8: noqa
116+ import six # NOQA:F401
117 except ImportError:
118 if sys.version_info.major == 2:
119 subprocess.check_call(['apt-get', 'install', '-y', 'python-six'])
120 else:
121 subprocess.check_call(['apt-get', 'install', '-y', 'python3-six'])
122- import six # flake8: noqa
123+ import six # NOQA:F401
124
125 try:
126- import yaml # flake8: noqa
127+ import yaml # NOQA:F401
128 except ImportError:
129 if sys.version_info.major == 2:
130 subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml'])
131 else:
132 subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml'])
133- import yaml # flake8: noqa
134+ import yaml # NOQA:F401
135
136
137 # Holds a list of mapping of mangled function names that have been deprecated
138diff --git a/hooks/charmhelpers/contrib/charmsupport/__init__.py b/hooks/charmhelpers/contrib/charmsupport/__init__.py
139new file mode 100644
140index 0000000..d7567b8
141--- /dev/null
142+++ b/hooks/charmhelpers/contrib/charmsupport/__init__.py
143@@ -0,0 +1,13 @@
144+# Copyright 2014-2015 Canonical Limited.
145+#
146+# Licensed under the Apache License, Version 2.0 (the "License");
147+# you may not use this file except in compliance with the License.
148+# You may obtain a copy of the License at
149+#
150+# http://www.apache.org/licenses/LICENSE-2.0
151+#
152+# Unless required by applicable law or agreed to in writing, software
153+# distributed under the License is distributed on an "AS IS" BASIS,
154+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
155+# See the License for the specific language governing permissions and
156+# limitations under the License.
157diff --git a/hooks/charmhelpers/contrib/charmsupport/nrpe.py b/hooks/charmhelpers/contrib/charmsupport/nrpe.py
158new file mode 100644
159index 0000000..d775861
160--- /dev/null
161+++ b/hooks/charmhelpers/contrib/charmsupport/nrpe.py
162@@ -0,0 +1,500 @@
163+# Copyright 2014-2015 Canonical Limited.
164+#
165+# Licensed under the Apache License, Version 2.0 (the "License");
166+# you may not use this file except in compliance with the License.
167+# You may obtain a copy of the License at
168+#
169+# http://www.apache.org/licenses/LICENSE-2.0
170+#
171+# Unless required by applicable law or agreed to in writing, software
172+# distributed under the License is distributed on an "AS IS" BASIS,
173+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
174+# See the License for the specific language governing permissions and
175+# limitations under the License.
176+
177+"""Compatibility with the nrpe-external-master charm"""
178+# Copyright 2012 Canonical Ltd.
179+#
180+# Authors:
181+# Matthew Wedgwood <matthew.wedgwood@canonical.com>
182+
183+import subprocess
184+import pwd
185+import grp
186+import os
187+import glob
188+import shutil
189+import re
190+import shlex
191+import yaml
192+
193+from charmhelpers.core.hookenv import (
194+ config,
195+ hook_name,
196+ local_unit,
197+ log,
198+ relation_get,
199+ relation_ids,
200+ relation_set,
201+ relations_of_type,
202+)
203+
204+from charmhelpers.core.host import service
205+from charmhelpers.core import host
206+
207+# This module adds compatibility with the nrpe-external-master and plain nrpe
208+# subordinate charms. To use it in your charm:
209+#
210+# 1. Update metadata.yaml
211+#
212+# provides:
213+# (...)
214+# nrpe-external-master:
215+# interface: nrpe-external-master
216+# scope: container
217+#
218+# and/or
219+#
220+# provides:
221+# (...)
222+# local-monitors:
223+# interface: local-monitors
224+# scope: container
225+
226+#
227+# 2. Add the following to config.yaml
228+#
229+# nagios_context:
230+# default: "juju"
231+# type: string
232+# description: |
233+# Used by the nrpe subordinate charms.
234+# A string that will be prepended to instance name to set the host name
235+# in nagios. So for instance the hostname would be something like:
236+# juju-myservice-0
237+# If you're running multiple environments with the same services in them
238+# this allows you to differentiate between them.
239+# nagios_servicegroups:
240+# default: ""
241+# type: string
242+# description: |
243+# A comma-separated list of nagios servicegroups.
244+# If left empty, the nagios_context will be used as the servicegroup
245+#
246+# 3. Add custom checks (Nagios plugins) to files/nrpe-external-master
247+#
248+# 4. Update your hooks.py with something like this:
249+#
250+# from charmsupport.nrpe import NRPE
251+# (...)
252+# def update_nrpe_config():
253+# nrpe_compat = NRPE()
254+# nrpe_compat.add_check(
255+# shortname = "myservice",
256+# description = "Check MyService",
257+# check_cmd = "check_http -w 2 -c 10 http://localhost"
258+# )
259+# nrpe_compat.add_check(
260+# "myservice_other",
261+# "Check for widget failures",
262+# check_cmd = "/srv/myapp/scripts/widget_check"
263+# )
264+# nrpe_compat.write()
265+#
266+# def config_changed():
267+# (...)
268+# update_nrpe_config()
269+#
270+# def nrpe_external_master_relation_changed():
271+# update_nrpe_config()
272+#
273+# def local_monitors_relation_changed():
274+# update_nrpe_config()
275+#
276+# 4.a If your charm is a subordinate charm set primary=False
277+#
278+# from charmsupport.nrpe import NRPE
279+# (...)
280+# def update_nrpe_config():
281+# nrpe_compat = NRPE(primary=False)
282+#
283+# 5. ln -s hooks.py nrpe-external-master-relation-changed
284+# ln -s hooks.py local-monitors-relation-changed
285+
286+
287+class CheckException(Exception):
288+ pass
289+
290+
291+class Check(object):
292+ shortname_re = '[A-Za-z0-9-_.@]+$'
293+ service_template = ("""
294+#---------------------------------------------------
295+# This file is Juju managed
296+#---------------------------------------------------
297+define service {{
298+ use active-service
299+ host_name {nagios_hostname}
300+ service_description {nagios_hostname}[{shortname}] """
301+ """{description}
302+ check_command check_nrpe!{command}
303+ servicegroups {nagios_servicegroup}
304+}}
305+""")
306+
307+ def __init__(self, shortname, description, check_cmd):
308+ super(Check, self).__init__()
309+ # XXX: could be better to calculate this from the service name
310+ if not re.match(self.shortname_re, shortname):
311+ raise CheckException("shortname must match {}".format(
312+ Check.shortname_re))
313+ self.shortname = shortname
314+ self.command = "check_{}".format(shortname)
315+ # Note: a set of invalid characters is defined by the
316+ # Nagios server config
317+ # The default is: illegal_object_name_chars=`~!$%^&*"|'<>?,()=
318+ self.description = description
319+ self.check_cmd = self._locate_cmd(check_cmd)
320+
321+ def _get_check_filename(self):
322+ return os.path.join(NRPE.nrpe_confdir, '{}.cfg'.format(self.command))
323+
324+ def _get_service_filename(self, hostname):
325+ return os.path.join(NRPE.nagios_exportdir,
326+ 'service__{}_{}.cfg'.format(hostname, self.command))
327+
328+ def _locate_cmd(self, check_cmd):
329+ search_path = (
330+ '/usr/lib/nagios/plugins',
331+ '/usr/local/lib/nagios/plugins',
332+ )
333+ parts = shlex.split(check_cmd)
334+ for path in search_path:
335+ if os.path.exists(os.path.join(path, parts[0])):
336+ command = os.path.join(path, parts[0])
337+ if len(parts) > 1:
338+ command += " " + " ".join(parts[1:])
339+ return command
340+ log('Check command not found: {}'.format(parts[0]))
341+ return ''
342+
343+ def _remove_service_files(self):
344+ if not os.path.exists(NRPE.nagios_exportdir):
345+ return
346+ for f in os.listdir(NRPE.nagios_exportdir):
347+ if f.endswith('_{}.cfg'.format(self.command)):
348+ os.remove(os.path.join(NRPE.nagios_exportdir, f))
349+
350+ def remove(self, hostname):
351+ nrpe_check_file = self._get_check_filename()
352+ if os.path.exists(nrpe_check_file):
353+ os.remove(nrpe_check_file)
354+ self._remove_service_files()
355+
356+ def write(self, nagios_context, hostname, nagios_servicegroups):
357+ nrpe_check_file = self._get_check_filename()
358+ with open(nrpe_check_file, 'w') as nrpe_check_config:
359+ nrpe_check_config.write("# check {}\n".format(self.shortname))
360+ if nagios_servicegroups:
361+ nrpe_check_config.write(
362+ "# The following header was added automatically by juju\n")
363+ nrpe_check_config.write(
364+ "# Modifying it will affect nagios monitoring and alerting\n")
365+ nrpe_check_config.write(
366+ "# servicegroups: {}\n".format(nagios_servicegroups))
367+ nrpe_check_config.write("command[{}]={}\n".format(
368+ self.command, self.check_cmd))
369+
370+ if not os.path.exists(NRPE.nagios_exportdir):
371+ log('Not writing service config as {} is not accessible'.format(
372+ NRPE.nagios_exportdir))
373+ else:
374+ self.write_service_config(nagios_context, hostname,
375+ nagios_servicegroups)
376+
377+ def write_service_config(self, nagios_context, hostname,
378+ nagios_servicegroups):
379+ self._remove_service_files()
380+
381+ templ_vars = {
382+ 'nagios_hostname': hostname,
383+ 'nagios_servicegroup': nagios_servicegroups,
384+ 'description': self.description,
385+ 'shortname': self.shortname,
386+ 'command': self.command,
387+ }
388+ nrpe_service_text = Check.service_template.format(**templ_vars)
389+ nrpe_service_file = self._get_service_filename(hostname)
390+ with open(nrpe_service_file, 'w') as nrpe_service_config:
391+ nrpe_service_config.write(str(nrpe_service_text))
392+
393+ def run(self):
394+ subprocess.call(self.check_cmd)
395+
396+
397+class NRPE(object):
398+ nagios_logdir = '/var/log/nagios'
399+ nagios_exportdir = '/var/lib/nagios/export'
400+ nrpe_confdir = '/etc/nagios/nrpe.d'
401+ homedir = '/var/lib/nagios' # home dir provided by nagios-nrpe-server
402+
403+ def __init__(self, hostname=None, primary=True):
404+ super(NRPE, self).__init__()
405+ self.config = config()
406+ self.primary = primary
407+ self.nagios_context = self.config['nagios_context']
408+ if 'nagios_servicegroups' in self.config and self.config['nagios_servicegroups']:
409+ self.nagios_servicegroups = self.config['nagios_servicegroups']
410+ else:
411+ self.nagios_servicegroups = self.nagios_context
412+ self.unit_name = local_unit().replace('/', '-')
413+ if hostname:
414+ self.hostname = hostname
415+ else:
416+ nagios_hostname = get_nagios_hostname()
417+ if nagios_hostname:
418+ self.hostname = nagios_hostname
419+ else:
420+ self.hostname = "{}-{}".format(self.nagios_context, self.unit_name)
421+ self.checks = []
422+ # Iff in an nrpe-external-master relation hook, set primary status
423+ relation = relation_ids('nrpe-external-master')
424+ if relation:
425+ log("Setting charm primary status {}".format(primary))
426+ for rid in relation:
427+ relation_set(relation_id=rid, relation_settings={'primary': self.primary})
428+ self.remove_check_queue = set()
429+
430+ def add_check(self, *args, **kwargs):
431+ shortname = None
432+ if kwargs.get('shortname') is None:
433+ if len(args) > 0:
434+ shortname = args[0]
435+ else:
436+ shortname = kwargs['shortname']
437+
438+ self.checks.append(Check(*args, **kwargs))
439+ try:
440+ self.remove_check_queue.remove(shortname)
441+ except KeyError:
442+ pass
443+
444+ def remove_check(self, *args, **kwargs):
445+ if kwargs.get('shortname') is None:
446+ raise ValueError('shortname of check must be specified')
447+
448+ # Use sensible defaults if they're not specified - these are not
449+ # actually used during removal, but they're required for constructing
450+ # the Check object; check_disk is chosen because it's part of the
451+ # nagios-plugins-basic package.
452+ if kwargs.get('check_cmd') is None:
453+ kwargs['check_cmd'] = 'check_disk'
454+ if kwargs.get('description') is None:
455+ kwargs['description'] = ''
456+
457+ check = Check(*args, **kwargs)
458+ check.remove(self.hostname)
459+ self.remove_check_queue.add(kwargs['shortname'])
460+
461+ def write(self):
462+ try:
463+ nagios_uid = pwd.getpwnam('nagios').pw_uid
464+ nagios_gid = grp.getgrnam('nagios').gr_gid
465+ except Exception:
466+ log("Nagios user not set up, nrpe checks not updated")
467+ return
468+
469+ if not os.path.exists(NRPE.nagios_logdir):
470+ os.mkdir(NRPE.nagios_logdir)
471+ os.chown(NRPE.nagios_logdir, nagios_uid, nagios_gid)
472+
473+ nrpe_monitors = {}
474+ monitors = {"monitors": {"remote": {"nrpe": nrpe_monitors}}}
475+ for nrpecheck in self.checks:
476+ nrpecheck.write(self.nagios_context, self.hostname,
477+ self.nagios_servicegroups)
478+ nrpe_monitors[nrpecheck.shortname] = {
479+ "command": nrpecheck.command,
480+ }
481+
482+ # update-status hooks are configured to firing every 5 minutes by
483+ # default. When nagios-nrpe-server is restarted, the nagios server
484+ # reports checks failing causing unnecessary alerts. Let's not restart
485+ # on update-status hooks.
486+ if not hook_name() == 'update-status':
487+ service('restart', 'nagios-nrpe-server')
488+
489+ monitor_ids = relation_ids("local-monitors") + \
490+ relation_ids("nrpe-external-master")
491+ for rid in monitor_ids:
492+ reldata = relation_get(unit=local_unit(), rid=rid)
493+ if 'monitors' in reldata:
494+ # update the existing set of monitors with the new data
495+ old_monitors = yaml.safe_load(reldata['monitors'])
496+ old_nrpe_monitors = old_monitors['monitors']['remote']['nrpe']
497+ # remove keys that are in the remove_check_queue
498+ old_nrpe_monitors = {k: v for k, v in old_nrpe_monitors.items()
499+ if k not in self.remove_check_queue}
500+ # update/add nrpe_monitors
501+ old_nrpe_monitors.update(nrpe_monitors)
502+ old_monitors['monitors']['remote']['nrpe'] = old_nrpe_monitors
503+ # write back to the relation
504+ relation_set(relation_id=rid, monitors=yaml.dump(old_monitors))
505+ else:
506+ # write a brand new set of monitors, as no existing ones.
507+ relation_set(relation_id=rid, monitors=yaml.dump(monitors))
508+
509+ self.remove_check_queue.clear()
510+
511+
512+def get_nagios_hostcontext(relation_name='nrpe-external-master'):
513+ """
514+ Query relation with nrpe subordinate, return the nagios_host_context
515+
516+ :param str relation_name: Name of relation nrpe sub joined to
517+ """
518+ for rel in relations_of_type(relation_name):
519+ if 'nagios_host_context' in rel:
520+ return rel['nagios_host_context']
521+
522+
523+def get_nagios_hostname(relation_name='nrpe-external-master'):
524+ """
525+ Query relation with nrpe subordinate, return the nagios_hostname
526+
527+ :param str relation_name: Name of relation nrpe sub joined to
528+ """
529+ for rel in relations_of_type(relation_name):
530+ if 'nagios_hostname' in rel:
531+ return rel['nagios_hostname']
532+
533+
534+def get_nagios_unit_name(relation_name='nrpe-external-master'):
535+ """
536+ Return the nagios unit name prepended with host_context if needed
537+
538+ :param str relation_name: Name of relation nrpe sub joined to
539+ """
540+ host_context = get_nagios_hostcontext(relation_name)
541+ if host_context:
542+ unit = "%s:%s" % (host_context, local_unit())
543+ else:
544+ unit = local_unit()
545+ return unit
546+
547+
548+def add_init_service_checks(nrpe, services, unit_name, immediate_check=True):
549+ """
550+ Add checks for each service in list
551+
552+ :param NRPE nrpe: NRPE object to add check to
553+ :param list services: List of services to check
554+ :param str unit_name: Unit name to use in check description
555+ :param bool immediate_check: For sysv init, run the service check immediately
556+ """
557+ for svc in services:
558+ # Don't add a check for these services from neutron-gateway
559+ if svc in ['ext-port', 'os-charm-phy-nic-mtu']:
560+ next
561+
562+ upstart_init = '/etc/init/%s.conf' % svc
563+ sysv_init = '/etc/init.d/%s' % svc
564+
565+ if host.init_is_systemd():
566+ nrpe.add_check(
567+ shortname=svc,
568+ description='process check {%s}' % unit_name,
569+ check_cmd='check_systemd.py %s' % svc
570+ )
571+ elif os.path.exists(upstart_init):
572+ nrpe.add_check(
573+ shortname=svc,
574+ description='process check {%s}' % unit_name,
575+ check_cmd='check_upstart_job %s' % svc
576+ )
577+ elif os.path.exists(sysv_init):
578+ cronpath = '/etc/cron.d/nagios-service-check-%s' % svc
579+ checkpath = '%s/service-check-%s.txt' % (nrpe.homedir, svc)
580+ croncmd = (
581+ '/usr/local/lib/nagios/plugins/check_exit_status.pl '
582+ '-e -s /etc/init.d/%s status' % svc
583+ )
584+ cron_file = '*/5 * * * * root %s > %s\n' % (croncmd, checkpath)
585+ f = open(cronpath, 'w')
586+ f.write(cron_file)
587+ f.close()
588+ nrpe.add_check(
589+ shortname=svc,
590+ description='service check {%s}' % unit_name,
591+ check_cmd='check_status_file.py -f %s' % checkpath,
592+ )
593+ # if /var/lib/nagios doesn't exist open(checkpath, 'w') will fail
594+ # (LP: #1670223).
595+ if immediate_check and os.path.isdir(nrpe.homedir):
596+ f = open(checkpath, 'w')
597+ subprocess.call(
598+ croncmd.split(),
599+ stdout=f,
600+ stderr=subprocess.STDOUT
601+ )
602+ f.close()
603+ os.chmod(checkpath, 0o644)
604+
605+
606+def copy_nrpe_checks(nrpe_files_dir=None):
607+ """
608+ Copy the nrpe checks into place
609+
610+ """
611+ NAGIOS_PLUGINS = '/usr/local/lib/nagios/plugins'
612+ if nrpe_files_dir is None:
613+ # determine if "charmhelpers" is in CHARMDIR or CHARMDIR/hooks
614+ for segment in ['.', 'hooks']:
615+ nrpe_files_dir = os.path.abspath(os.path.join(
616+ os.getenv('CHARM_DIR'),
617+ segment,
618+ 'charmhelpers',
619+ 'contrib',
620+ 'openstack',
621+ 'files'))
622+ if os.path.isdir(nrpe_files_dir):
623+ break
624+ else:
625+ raise RuntimeError("Couldn't find charmhelpers directory")
626+ if not os.path.exists(NAGIOS_PLUGINS):
627+ os.makedirs(NAGIOS_PLUGINS)
628+ for fname in glob.glob(os.path.join(nrpe_files_dir, "check_*")):
629+ if os.path.isfile(fname):
630+ shutil.copy2(fname,
631+ os.path.join(NAGIOS_PLUGINS, os.path.basename(fname)))
632+
633+
634+def add_haproxy_checks(nrpe, unit_name):
635+ """
636+ Add checks for each service in list
637+
638+ :param NRPE nrpe: NRPE object to add check to
639+ :param str unit_name: Unit name to use in check description
640+ """
641+ nrpe.add_check(
642+ shortname='haproxy_servers',
643+ description='Check HAProxy {%s}' % unit_name,
644+ check_cmd='check_haproxy.sh')
645+ nrpe.add_check(
646+ shortname='haproxy_queue',
647+ description='Check HAProxy queue depth {%s}' % unit_name,
648+ check_cmd='check_haproxy_queue_depth.sh')
649+
650+
651+def remove_deprecated_check(nrpe, deprecated_services):
652+ """
653+ Remove checks fro deprecated services in list
654+
655+ :param nrpe: NRPE object to remove check from
656+ :type nrpe: NRPE
657+ :param deprecated_services: List of deprecated services that are removed
658+ :type deprecated_services: list
659+ """
660+ for dep_svc in deprecated_services:
661+ log('Deprecated service: {}'.format(dep_svc))
662+ nrpe.remove_check(shortname=dep_svc)
663diff --git a/hooks/charmhelpers/contrib/charmsupport/volumes.py b/hooks/charmhelpers/contrib/charmsupport/volumes.py
664new file mode 100644
665index 0000000..7ea43f0
666--- /dev/null
667+++ b/hooks/charmhelpers/contrib/charmsupport/volumes.py
668@@ -0,0 +1,173 @@
669+# Copyright 2014-2015 Canonical Limited.
670+#
671+# Licensed under the Apache License, Version 2.0 (the "License");
672+# you may not use this file except in compliance with the License.
673+# You may obtain a copy of the License at
674+#
675+# http://www.apache.org/licenses/LICENSE-2.0
676+#
677+# Unless required by applicable law or agreed to in writing, software
678+# distributed under the License is distributed on an "AS IS" BASIS,
679+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
680+# See the License for the specific language governing permissions and
681+# limitations under the License.
682+
683+'''
684+Functions for managing volumes in juju units. One volume is supported per unit.
685+Subordinates may have their own storage, provided it is on its own partition.
686+
687+Configuration stanzas::
688+
689+ volume-ephemeral:
690+ type: boolean
691+ default: true
692+ description: >
693+ If false, a volume is mounted as sepecified in "volume-map"
694+ If true, ephemeral storage will be used, meaning that log data
695+ will only exist as long as the machine. YOU HAVE BEEN WARNED.
696+ volume-map:
697+ type: string
698+ default: {}
699+ description: >
700+ YAML map of units to device names, e.g:
701+ "{ rsyslog/0: /dev/vdb, rsyslog/1: /dev/vdb }"
702+ Service units will raise a configure-error if volume-ephemeral
703+ is 'true' and no volume-map value is set. Use 'juju set' to set a
704+ value and 'juju resolved' to complete configuration.
705+
706+Usage::
707+
708+ from charmsupport.volumes import configure_volume, VolumeConfigurationError
709+ from charmsupport.hookenv import log, ERROR
710+ def post_mount_hook():
711+ stop_service('myservice')
712+ def post_mount_hook():
713+ start_service('myservice')
714+
715+ if __name__ == '__main__':
716+ try:
717+ configure_volume(before_change=pre_mount_hook,
718+ after_change=post_mount_hook)
719+ except VolumeConfigurationError:
720+ log('Storage could not be configured', ERROR)
721+
722+'''
723+
724+# XXX: Known limitations
725+# - fstab is neither consulted nor updated
726+
727+import os
728+from charmhelpers.core import hookenv
729+from charmhelpers.core import host
730+import yaml
731+
732+
733+MOUNT_BASE = '/srv/juju/volumes'
734+
735+
736+class VolumeConfigurationError(Exception):
737+ '''Volume configuration data is missing or invalid'''
738+ pass
739+
740+
741+def get_config():
742+ '''Gather and sanity-check volume configuration data'''
743+ volume_config = {}
744+ config = hookenv.config()
745+
746+ errors = False
747+
748+ if config.get('volume-ephemeral') in (True, 'True', 'true', 'Yes', 'yes'):
749+ volume_config['ephemeral'] = True
750+ else:
751+ volume_config['ephemeral'] = False
752+
753+ try:
754+ volume_map = yaml.safe_load(config.get('volume-map', '{}'))
755+ except yaml.YAMLError as e:
756+ hookenv.log("Error parsing YAML volume-map: {}".format(e),
757+ hookenv.ERROR)
758+ errors = True
759+ if volume_map is None:
760+ # probably an empty string
761+ volume_map = {}
762+ elif not isinstance(volume_map, dict):
763+ hookenv.log("Volume-map should be a dictionary, not {}".format(
764+ type(volume_map)))
765+ errors = True
766+
767+ volume_config['device'] = volume_map.get(os.environ['JUJU_UNIT_NAME'])
768+ if volume_config['device'] and volume_config['ephemeral']:
769+ # asked for ephemeral storage but also defined a volume ID
770+ hookenv.log('A volume is defined for this unit, but ephemeral '
771+ 'storage was requested', hookenv.ERROR)
772+ errors = True
773+ elif not volume_config['device'] and not volume_config['ephemeral']:
774+ # asked for permanent storage but did not define volume ID
775+ hookenv.log('Ephemeral storage was requested, but there is no volume '
776+ 'defined for this unit.', hookenv.ERROR)
777+ errors = True
778+
779+ unit_mount_name = hookenv.local_unit().replace('/', '-')
780+ volume_config['mountpoint'] = os.path.join(MOUNT_BASE, unit_mount_name)
781+
782+ if errors:
783+ return None
784+ return volume_config
785+
786+
787+def mount_volume(config):
788+ if os.path.exists(config['mountpoint']):
789+ if not os.path.isdir(config['mountpoint']):
790+ hookenv.log('Not a directory: {}'.format(config['mountpoint']))
791+ raise VolumeConfigurationError()
792+ else:
793+ host.mkdir(config['mountpoint'])
794+ if os.path.ismount(config['mountpoint']):
795+ unmount_volume(config)
796+ if not host.mount(config['device'], config['mountpoint'], persist=True):
797+ raise VolumeConfigurationError()
798+
799+
800+def unmount_volume(config):
801+ if os.path.ismount(config['mountpoint']):
802+ if not host.umount(config['mountpoint'], persist=True):
803+ raise VolumeConfigurationError()
804+
805+
806+def managed_mounts():
807+ '''List of all mounted managed volumes'''
808+ return filter(lambda mount: mount[0].startswith(MOUNT_BASE), host.mounts())
809+
810+
811+def configure_volume(before_change=lambda: None, after_change=lambda: None):
812+ '''Set up storage (or don't) according to the charm's volume configuration.
813+ Returns the mount point or "ephemeral". before_change and after_change
814+ are optional functions to be called if the volume configuration changes.
815+ '''
816+
817+ config = get_config()
818+ if not config:
819+ hookenv.log('Failed to read volume configuration', hookenv.CRITICAL)
820+ raise VolumeConfigurationError()
821+
822+ if config['ephemeral']:
823+ if os.path.ismount(config['mountpoint']):
824+ before_change()
825+ unmount_volume(config)
826+ after_change()
827+ return 'ephemeral'
828+ else:
829+ # persistent storage
830+ if os.path.ismount(config['mountpoint']):
831+ mounts = dict(managed_mounts())
832+ if mounts.get(config['mountpoint']) != config['device']:
833+ before_change()
834+ unmount_volume(config)
835+ mount_volume(config)
836+ after_change()
837+ else:
838+ before_change()
839+ mount_volume(config)
840+ after_change()
841+ return config['mountpoint']
842diff --git a/hooks/charmhelpers/core/hookenv.py b/hooks/charmhelpers/core/hookenv.py
843index 67ad691..d7c37c1 100644
844--- a/hooks/charmhelpers/core/hookenv.py
845+++ b/hooks/charmhelpers/core/hookenv.py
846@@ -21,23 +21,29 @@
847 from __future__ import print_function
848 import copy
849 from distutils.version import LooseVersion
850+from enum import Enum
851 from functools import wraps
852+from collections import namedtuple
853 import glob
854 import os
855 import json
856 import yaml
857+import re
858 import subprocess
859 import sys
860 import errno
861 import tempfile
862 from subprocess import CalledProcessError
863
864+from charmhelpers import deprecate
865+
866 import six
867 if not six.PY3:
868 from UserDict import UserDict
869 else:
870 from collections import UserDict
871
872+
873 CRITICAL = "CRITICAL"
874 ERROR = "ERROR"
875 WARNING = "WARNING"
876@@ -45,6 +51,20 @@ INFO = "INFO"
877 DEBUG = "DEBUG"
878 TRACE = "TRACE"
879 MARKER = object()
880+SH_MAX_ARG = 131071
881+
882+
883+RANGE_WARNING = ('Passing NO_PROXY string that includes a cidr. '
884+ 'This may not be compatible with software you are '
885+ 'running in your shell.')
886+
887+
888+class WORKLOAD_STATES(Enum):
889+ ACTIVE = 'active'
890+ BLOCKED = 'blocked'
891+ MAINTENANCE = 'maintenance'
892+ WAITING = 'waiting'
893+
894
895 cache = {}
896
897@@ -65,7 +85,7 @@ def cached(func):
898 @wraps(func)
899 def wrapper(*args, **kwargs):
900 global cache
901- key = str((func, args, kwargs))
902+ key = json.dumps((func, args, kwargs), sort_keys=True, default=str)
903 try:
904 return cache[key]
905 except KeyError:
906@@ -95,7 +115,7 @@ def log(message, level=None):
907 command += ['-l', level]
908 if not isinstance(message, six.string_types):
909 message = repr(message)
910- command += [message]
911+ command += [message[:SH_MAX_ARG]]
912 # Missing juju-log should not cause failures in unit tests
913 # Send log output to stderr
914 try:
915@@ -110,6 +130,24 @@ def log(message, level=None):
916 raise
917
918
919+def function_log(message):
920+ """Write a function progress message"""
921+ command = ['function-log']
922+ if not isinstance(message, six.string_types):
923+ message = repr(message)
924+ command += [message[:SH_MAX_ARG]]
925+ # Missing function-log should not cause failures in unit tests
926+ # Send function_log output to stderr
927+ try:
928+ subprocess.call(command)
929+ except OSError as e:
930+ if e.errno == errno.ENOENT:
931+ message = "function-log: {}".format(message)
932+ print(message, file=sys.stderr)
933+ else:
934+ raise
935+
936+
937 class Serializable(UserDict):
938 """Wrapper, an object that can be serialized to yaml or json"""
939
940@@ -198,11 +236,35 @@ def remote_unit():
941 return os.environ.get('JUJU_REMOTE_UNIT', None)
942
943
944-def service_name():
945- """The name service group this unit belongs to"""
946+def application_name():
947+ """
948+ The name of the deployed application this unit belongs to.
949+ """
950 return local_unit().split('/')[0]
951
952
953+def service_name():
954+ """
955+ .. deprecated:: 0.19.1
956+ Alias for :func:`application_name`.
957+ """
958+ return application_name()
959+
960+
961+def model_name():
962+ """
963+ Name of the model that this unit is deployed in.
964+ """
965+ return os.environ['JUJU_MODEL_NAME']
966+
967+
968+def model_uuid():
969+ """
970+ UUID of the model that this unit is deployed in.
971+ """
972+ return os.environ['JUJU_MODEL_UUID']
973+
974+
975 def principal_unit():
976 """Returns the principal unit of this unit, otherwise None"""
977 # Juju 2.2 and above provides JUJU_PRINCIPAL_UNIT
978@@ -287,7 +349,7 @@ class Config(dict):
979 self.implicit_save = True
980 self._prev_dict = None
981 self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME)
982- if os.path.exists(self.path):
983+ if os.path.exists(self.path) and os.stat(self.path).st_size:
984 self.load_previous()
985 atexit(self._implicit_save)
986
987@@ -307,7 +369,11 @@ class Config(dict):
988 """
989 self.path = path or self.path
990 with open(self.path) as f:
991- self._prev_dict = json.load(f)
992+ try:
993+ self._prev_dict = json.load(f)
994+ except ValueError as e:
995+ log('Unable to parse previous config data - {}'.format(str(e)),
996+ level=ERROR)
997 for k, v in copy.deepcopy(self._prev_dict).items():
998 if k not in self:
999 self[k] = v
1000@@ -343,6 +409,7 @@ class Config(dict):
1001
1002 """
1003 with open(self.path, 'w') as f:
1004+ os.fchmod(f.fileno(), 0o600)
1005 json.dump(self, f)
1006
1007 def _implicit_save(self):
1008@@ -350,22 +417,40 @@ class Config(dict):
1009 self.save()
1010
1011
1012-@cached
1013+_cache_config = None
1014+
1015+
1016 def config(scope=None):
1017- """Juju charm configuration"""
1018- config_cmd_line = ['config-get']
1019- if scope is not None:
1020- config_cmd_line.append(scope)
1021- else:
1022- config_cmd_line.append('--all')
1023- config_cmd_line.append('--format=json')
1024+ """
1025+ Get the juju charm configuration (scope==None) or individual key,
1026+ (scope=str). The returned value is a Python data structure loaded as
1027+ JSON from the Juju config command.
1028+
1029+ :param scope: If set, return the value for the specified key.
1030+ :type scope: Optional[str]
1031+ :returns: Either the whole config as a Config, or a key from it.
1032+ :rtype: Any
1033+ """
1034+ global _cache_config
1035+ config_cmd_line = ['config-get', '--all', '--format=json']
1036+ try:
1037+ # JSON Decode Exception for Python3.5+
1038+ exc_json = json.decoder.JSONDecodeError
1039+ except AttributeError:
1040+ # JSON Decode Exception for Python2.7 through Python3.4
1041+ exc_json = ValueError
1042 try:
1043- config_data = json.loads(
1044- subprocess.check_output(config_cmd_line).decode('UTF-8'))
1045+ if _cache_config is None:
1046+ config_data = json.loads(
1047+ subprocess.check_output(config_cmd_line).decode('UTF-8'))
1048+ _cache_config = Config(config_data)
1049 if scope is not None:
1050- return config_data
1051- return Config(config_data)
1052- except ValueError:
1053+ return _cache_config.get(scope)
1054+ return _cache_config
1055+ except (exc_json, UnicodeDecodeError) as e:
1056+ log('Unable to parse output from config-get: config_cmd_line="{}" '
1057+ 'message="{}"'
1058+ .format(config_cmd_line, str(e)), level=ERROR)
1059 return None
1060
1061
1062@@ -459,6 +544,67 @@ def related_units(relid=None):
1063 subprocess.check_output(units_cmd_line).decode('UTF-8')) or []
1064
1065
1066+def expected_peer_units():
1067+ """Get a generator for units we expect to join peer relation based on
1068+ goal-state.
1069+
1070+ The local unit is excluded from the result to make it easy to gauge
1071+ completion of all peers joining the relation with existing hook tools.
1072+
1073+ Example usage:
1074+ log('peer {} of {} joined peer relation'
1075+ .format(len(related_units()),
1076+ len(list(expected_peer_units()))))
1077+
1078+ This function will raise NotImplementedError if used with juju versions
1079+ without goal-state support.
1080+
1081+ :returns: iterator
1082+ :rtype: types.GeneratorType
1083+ :raises: NotImplementedError
1084+ """
1085+ if not has_juju_version("2.4.0"):
1086+ # goal-state first appeared in 2.4.0.
1087+ raise NotImplementedError("goal-state")
1088+ _goal_state = goal_state()
1089+ return (key for key in _goal_state['units']
1090+ if '/' in key and key != local_unit())
1091+
1092+
1093+def expected_related_units(reltype=None):
1094+ """Get a generator for units we expect to join relation based on
1095+ goal-state.
1096+
1097+ Note that you can not use this function for the peer relation, take a look
1098+ at expected_peer_units() for that.
1099+
1100+ This function will raise KeyError if you request information for a
1101+ relation type for which juju goal-state does not have information. It will
1102+ raise NotImplementedError if used with juju versions without goal-state
1103+ support.
1104+
1105+ Example usage:
1106+ log('participant {} of {} joined relation {}'
1107+ .format(len(related_units()),
1108+ len(list(expected_related_units())),
1109+ relation_type()))
1110+
1111+ :param reltype: Relation type to list data for, default is to list data for
1112+ the realtion type we are currently executing a hook for.
1113+ :type reltype: str
1114+ :returns: iterator
1115+ :rtype: types.GeneratorType
1116+ :raises: KeyError, NotImplementedError
1117+ """
1118+ if not has_juju_version("2.4.4"):
1119+ # goal-state existed in 2.4.0, but did not list individual units to
1120+ # join a relation in 2.4.1 through 2.4.3. (LP: #1794739)
1121+ raise NotImplementedError("goal-state relation unit count")
1122+ reltype = reltype or relation_type()
1123+ _goal_state = goal_state()
1124+ return (key for key in _goal_state['relations'][reltype] if '/' in key)
1125+
1126+
1127 @cached
1128 def relation_for_unit(unit=None, rid=None):
1129 """Get the json represenation of a unit's relation"""
1130@@ -644,18 +790,31 @@ def is_relation_made(relation, keys='private-address'):
1131 return False
1132
1133
1134+def _port_op(op_name, port, protocol="TCP"):
1135+ """Open or close a service network port"""
1136+ _args = [op_name]
1137+ icmp = protocol.upper() == "ICMP"
1138+ if icmp:
1139+ _args.append(protocol)
1140+ else:
1141+ _args.append('{}/{}'.format(port, protocol))
1142+ try:
1143+ subprocess.check_call(_args)
1144+ except subprocess.CalledProcessError:
1145+ # Older Juju pre 2.3 doesn't support ICMP
1146+ # so treat it as a no-op if it fails.
1147+ if not icmp:
1148+ raise
1149+
1150+
1151 def open_port(port, protocol="TCP"):
1152 """Open a service network port"""
1153- _args = ['open-port']
1154- _args.append('{}/{}'.format(port, protocol))
1155- subprocess.check_call(_args)
1156+ _port_op('open-port', port, protocol)
1157
1158
1159 def close_port(port, protocol="TCP"):
1160 """Close a service network port"""
1161- _args = ['close-port']
1162- _args.append('{}/{}'.format(port, protocol))
1163- subprocess.check_call(_args)
1164+ _port_op('close-port', port, protocol)
1165
1166
1167 def open_ports(start, end, protocol="TCP"):
1168@@ -672,6 +831,17 @@ def close_ports(start, end, protocol="TCP"):
1169 subprocess.check_call(_args)
1170
1171
1172+def opened_ports():
1173+ """Get the opened ports
1174+
1175+ *Note that this will only show ports opened in a previous hook*
1176+
1177+ :returns: Opened ports as a list of strings: ``['8080/tcp', '8081-8083/tcp']``
1178+ """
1179+ _args = ['opened-ports', '--format=json']
1180+ return json.loads(subprocess.check_output(_args).decode('UTF-8'))
1181+
1182+
1183 @cached
1184 def unit_get(attribute):
1185 """Get the unit ID for the remote unit"""
1186@@ -793,6 +963,10 @@ class Hooks(object):
1187 return wrapper
1188
1189
1190+class NoNetworkBinding(Exception):
1191+ pass
1192+
1193+
1194 def charm_dir():
1195 """Return the root directory of the current charm"""
1196 d = os.environ.get('JUJU_CHARM_DIR')
1197@@ -801,9 +975,23 @@ def charm_dir():
1198 return os.environ.get('CHARM_DIR')
1199
1200
1201+def cmd_exists(cmd):
1202+ """Return True if the specified cmd exists in the path"""
1203+ return any(
1204+ os.access(os.path.join(path, cmd), os.X_OK)
1205+ for path in os.environ["PATH"].split(os.pathsep)
1206+ )
1207+
1208+
1209 @cached
1210+@deprecate("moved to function_get()", log=log)
1211 def action_get(key=None):
1212- """Gets the value of an action parameter, or all key/value param pairs"""
1213+ """
1214+ .. deprecated:: 0.20.7
1215+ Alias for :func:`function_get`.
1216+
1217+ Gets the value of an action parameter, or all key/value param pairs.
1218+ """
1219 cmd = ['action-get']
1220 if key is not None:
1221 cmd.append(key)
1222@@ -812,52 +1000,130 @@ def action_get(key=None):
1223 return action_data
1224
1225
1226+@cached
1227+def function_get(key=None):
1228+ """Gets the value of an action parameter, or all key/value param pairs"""
1229+ cmd = ['function-get']
1230+ # Fallback for older charms.
1231+ if not cmd_exists('function-get'):
1232+ cmd = ['action-get']
1233+
1234+ if key is not None:
1235+ cmd.append(key)
1236+ cmd.append('--format=json')
1237+ function_data = json.loads(subprocess.check_output(cmd).decode('UTF-8'))
1238+ return function_data
1239+
1240+
1241+@deprecate("moved to function_set()", log=log)
1242 def action_set(values):
1243- """Sets the values to be returned after the action finishes"""
1244+ """
1245+ .. deprecated:: 0.20.7
1246+ Alias for :func:`function_set`.
1247+
1248+ Sets the values to be returned after the action finishes.
1249+ """
1250 cmd = ['action-set']
1251 for k, v in list(values.items()):
1252 cmd.append('{}={}'.format(k, v))
1253 subprocess.check_call(cmd)
1254
1255
1256+def function_set(values):
1257+ """Sets the values to be returned after the function finishes"""
1258+ cmd = ['function-set']
1259+ # Fallback for older charms.
1260+ if not cmd_exists('function-get'):
1261+ cmd = ['action-set']
1262+
1263+ for k, v in list(values.items()):
1264+ cmd.append('{}={}'.format(k, v))
1265+ subprocess.check_call(cmd)
1266+
1267+
1268+@deprecate("moved to function_fail()", log=log)
1269 def action_fail(message):
1270- """Sets the action status to failed and sets the error message.
1271+ """
1272+ .. deprecated:: 0.20.7
1273+ Alias for :func:`function_fail`.
1274+
1275+ Sets the action status to failed and sets the error message.
1276
1277- The results set by action_set are preserved."""
1278+ The results set by action_set are preserved.
1279+ """
1280 subprocess.check_call(['action-fail', message])
1281
1282
1283+def function_fail(message):
1284+ """Sets the function status to failed and sets the error message.
1285+
1286+ The results set by function_set are preserved."""
1287+ cmd = ['function-fail']
1288+ # Fallback for older charms.
1289+ if not cmd_exists('function-fail'):
1290+ cmd = ['action-fail']
1291+ cmd.append(message)
1292+
1293+ subprocess.check_call(cmd)
1294+
1295+
1296 def action_name():
1297 """Get the name of the currently executing action."""
1298 return os.environ.get('JUJU_ACTION_NAME')
1299
1300
1301+def function_name():
1302+ """Get the name of the currently executing function."""
1303+ return os.environ.get('JUJU_FUNCTION_NAME') or action_name()
1304+
1305+
1306 def action_uuid():
1307 """Get the UUID of the currently executing action."""
1308 return os.environ.get('JUJU_ACTION_UUID')
1309
1310
1311+def function_id():
1312+ """Get the ID of the currently executing function."""
1313+ return os.environ.get('JUJU_FUNCTION_ID') or action_uuid()
1314+
1315+
1316 def action_tag():
1317 """Get the tag for the currently executing action."""
1318 return os.environ.get('JUJU_ACTION_TAG')
1319
1320
1321-def status_set(workload_state, message):
1322+def function_tag():
1323+ """Get the tag for the currently executing function."""
1324+ return os.environ.get('JUJU_FUNCTION_TAG') or action_tag()
1325+
1326+
1327+def status_set(workload_state, message, application=False):
1328 """Set the workload state with a message
1329
1330 Use status-set to set the workload state with a message which is visible
1331 to the user via juju status. If the status-set command is not found then
1332- assume this is juju < 1.23 and juju-log the message unstead.
1333+ assume this is juju < 1.23 and juju-log the message instead.
1334
1335- workload_state -- valid juju workload state.
1336- message -- status update message
1337+ workload_state -- valid juju workload state. str or WORKLOAD_STATES
1338+ message -- status update message
1339+ application -- Whether this is an application state set
1340 """
1341- valid_states = ['maintenance', 'blocked', 'waiting', 'active']
1342- if workload_state not in valid_states:
1343- raise ValueError(
1344- '{!r} is not a valid workload state'.format(workload_state)
1345- )
1346- cmd = ['status-set', workload_state, message]
1347+ bad_state_msg = '{!r} is not a valid workload state'
1348+
1349+ if isinstance(workload_state, str):
1350+ try:
1351+ # Convert string to enum.
1352+ workload_state = WORKLOAD_STATES[workload_state.upper()]
1353+ except KeyError:
1354+ raise ValueError(bad_state_msg.format(workload_state))
1355+
1356+ if workload_state not in WORKLOAD_STATES:
1357+ raise ValueError(bad_state_msg.format(workload_state))
1358+
1359+ cmd = ['status-set']
1360+ if application:
1361+ cmd.append('--application')
1362+ cmd.extend([workload_state.value, message])
1363 try:
1364 ret = subprocess.call(cmd)
1365 if ret == 0:
1366@@ -865,7 +1131,7 @@ def status_set(workload_state, message):
1367 except OSError as e:
1368 if e.errno != errno.ENOENT:
1369 raise
1370- log_message = 'status-set failed: {} {}'.format(workload_state,
1371+ log_message = 'status-set failed: {} {}'.format(workload_state.value,
1372 message)
1373 log(log_message, level='INFO')
1374
1375@@ -919,6 +1185,14 @@ def application_version_set(version):
1376
1377
1378 @translate_exc(from_exc=OSError, to_exc=NotImplementedError)
1379+@cached
1380+def goal_state():
1381+ """Juju goal state values"""
1382+ cmd = ['goal-state', '--format=json']
1383+ return json.loads(subprocess.check_output(cmd).decode('UTF-8'))
1384+
1385+
1386+@translate_exc(from_exc=OSError, to_exc=NotImplementedError)
1387 def is_leader():
1388 """Does the current unit hold the juju leadership
1389
1390@@ -1012,7 +1286,6 @@ def juju_version():
1391 universal_newlines=True).strip()
1392
1393
1394-@cached
1395 def has_juju_version(minimum_version):
1396 """Return True if the Juju version is at least the provided version"""
1397 return LooseVersion(juju_version()) >= LooseVersion(minimum_version)
1398@@ -1072,6 +1345,8 @@ def _run_atexit():
1399 @translate_exc(from_exc=OSError, to_exc=NotImplementedError)
1400 def network_get_primary_address(binding):
1401 '''
1402+ Deprecated since Juju 2.3; use network_get()
1403+
1404 Retrieve the primary network address for a named binding
1405
1406 :param binding: string. The name of a relation of extra-binding
1407@@ -1079,10 +1354,19 @@ def network_get_primary_address(binding):
1408 :raise: NotImplementedError if run on Juju < 2.0
1409 '''
1410 cmd = ['network-get', '--primary-address', binding]
1411- return subprocess.check_output(cmd).decode('UTF-8').strip()
1412+ try:
1413+ response = subprocess.check_output(
1414+ cmd,
1415+ stderr=subprocess.STDOUT).decode('UTF-8').strip()
1416+ except CalledProcessError as e:
1417+ if 'no network config found for binding' in e.output.decode('UTF-8'):
1418+ raise NoNetworkBinding("No network binding for {}"
1419+ .format(binding))
1420+ else:
1421+ raise
1422+ return response
1423
1424
1425-@translate_exc(from_exc=OSError, to_exc=NotImplementedError)
1426 def network_get(endpoint, relation_id=None):
1427 """
1428 Retrieve the network details for a relation endpoint
1429@@ -1090,24 +1374,20 @@ def network_get(endpoint, relation_id=None):
1430 :param endpoint: string. The name of a relation endpoint
1431 :param relation_id: int. The ID of the relation for the current context.
1432 :return: dict. The loaded YAML output of the network-get query.
1433- :raise: NotImplementedError if run on Juju < 2.1
1434+ :raise: NotImplementedError if request not supported by the Juju version.
1435 """
1436+ if not has_juju_version('2.2'):
1437+ raise NotImplementedError(juju_version()) # earlier versions require --primary-address
1438+ if relation_id and not has_juju_version('2.3'):
1439+ raise NotImplementedError # 2.3 added the -r option
1440+
1441 cmd = ['network-get', endpoint, '--format', 'yaml']
1442 if relation_id:
1443 cmd.append('-r')
1444 cmd.append(relation_id)
1445- try:
1446- response = subprocess.check_output(
1447- cmd,
1448- stderr=subprocess.STDOUT).decode('UTF-8').strip()
1449- except CalledProcessError as e:
1450- # Early versions of Juju 2.0.x required the --primary-address argument.
1451- # We catch that condition here and raise NotImplementedError since
1452- # the requested semantics are not available - the caller can then
1453- # use the network_get_primary_address() method instead.
1454- if '--primary-address is currently required' in e.output.decode('UTF-8'):
1455- raise NotImplementedError
1456- raise
1457+ response = subprocess.check_output(
1458+ cmd,
1459+ stderr=subprocess.STDOUT).decode('UTF-8').strip()
1460 return yaml.safe_load(response)
1461
1462
1463@@ -1140,3 +1420,192 @@ def meter_info():
1464 """Get the meter status information, if running in the meter-status-changed
1465 hook."""
1466 return os.environ.get('JUJU_METER_INFO')
1467+
1468+
1469+def iter_units_for_relation_name(relation_name):
1470+ """Iterate through all units in a relation
1471+
1472+ Generator that iterates through all the units in a relation and yields
1473+ a named tuple with rid and unit field names.
1474+
1475+ Usage:
1476+ data = [(u.rid, u.unit)
1477+ for u in iter_units_for_relation_name(relation_name)]
1478+
1479+ :param relation_name: string relation name
1480+ :yield: Named Tuple with rid and unit field names
1481+ """
1482+ RelatedUnit = namedtuple('RelatedUnit', 'rid, unit')
1483+ for rid in relation_ids(relation_name):
1484+ for unit in related_units(rid):
1485+ yield RelatedUnit(rid, unit)
1486+
1487+
1488+def ingress_address(rid=None, unit=None):
1489+ """
1490+ Retrieve the ingress-address from a relation when available.
1491+ Otherwise, return the private-address.
1492+
1493+ When used on the consuming side of the relation (unit is a remote
1494+ unit), the ingress-address is the IP address that this unit needs
1495+ to use to reach the provided service on the remote unit.
1496+
1497+ When used on the providing side of the relation (unit == local_unit()),
1498+ the ingress-address is the IP address that is advertised to remote
1499+ units on this relation. Remote units need to use this address to
1500+ reach the local provided service on this unit.
1501+
1502+ Note that charms may document some other method to use in
1503+ preference to the ingress_address(), such as an address provided
1504+ on a different relation attribute or a service discovery mechanism.
1505+ This allows charms to redirect inbound connections to their peers
1506+ or different applications such as load balancers.
1507+
1508+ Usage:
1509+ addresses = [ingress_address(rid=u.rid, unit=u.unit)
1510+ for u in iter_units_for_relation_name(relation_name)]
1511+
1512+ :param rid: string relation id
1513+ :param unit: string unit name
1514+ :side effect: calls relation_get
1515+ :return: string IP address
1516+ """
1517+ settings = relation_get(rid=rid, unit=unit)
1518+ return (settings.get('ingress-address') or
1519+ settings.get('private-address'))
1520+
1521+
1522+def egress_subnets(rid=None, unit=None):
1523+ """
1524+ Retrieve the egress-subnets from a relation.
1525+
1526+ This function is to be used on the providing side of the
1527+ relation, and provides the ranges of addresses that client
1528+ connections may come from. The result is uninteresting on
1529+ the consuming side of a relation (unit == local_unit()).
1530+
1531+ Returns a stable list of subnets in CIDR format.
1532+ eg. ['192.168.1.0/24', '2001::F00F/128']
1533+
1534+ If egress-subnets is not available, falls back to using the published
1535+ ingress-address, or finally private-address.
1536+
1537+ :param rid: string relation id
1538+ :param unit: string unit name
1539+ :side effect: calls relation_get
1540+ :return: list of subnets in CIDR format. eg. ['192.168.1.0/24', '2001::F00F/128']
1541+ """
1542+ def _to_range(addr):
1543+ if re.search(r'^(?:\d{1,3}\.){3}\d{1,3}$', addr) is not None:
1544+ addr += '/32'
1545+ elif ':' in addr and '/' not in addr: # IPv6
1546+ addr += '/128'
1547+ return addr
1548+
1549+ settings = relation_get(rid=rid, unit=unit)
1550+ if 'egress-subnets' in settings:
1551+ return [n.strip() for n in settings['egress-subnets'].split(',') if n.strip()]
1552+ if 'ingress-address' in settings:
1553+ return [_to_range(settings['ingress-address'])]
1554+ if 'private-address' in settings:
1555+ return [_to_range(settings['private-address'])]
1556+ return [] # Should never happen
1557+
1558+
1559+def unit_doomed(unit=None):
1560+ """Determines if the unit is being removed from the model
1561+
1562+ Requires Juju 2.4.1.
1563+
1564+ :param unit: string unit name, defaults to local_unit
1565+ :side effect: calls goal_state
1566+ :side effect: calls local_unit
1567+ :side effect: calls has_juju_version
1568+ :return: True if the unit is being removed, already gone, or never existed
1569+ """
1570+ if not has_juju_version("2.4.1"):
1571+ # We cannot risk blindly returning False for 'we don't know',
1572+ # because that could cause data loss; if call sites don't
1573+ # need an accurate answer, they likely don't need this helper
1574+ # at all.
1575+ # goal-state existed in 2.4.0, but did not handle removals
1576+ # correctly until 2.4.1.
1577+ raise NotImplementedError("is_doomed")
1578+ if unit is None:
1579+ unit = local_unit()
1580+ gs = goal_state()
1581+ units = gs.get('units', {})
1582+ if unit not in units:
1583+ return True
1584+ # I don't think 'dead' units ever show up in the goal-state, but
1585+ # check anyway in addition to 'dying'.
1586+ return units[unit]['status'] in ('dying', 'dead')
1587+
1588+
1589+def env_proxy_settings(selected_settings=None):
1590+ """Get proxy settings from process environment variables.
1591+
1592+ Get charm proxy settings from environment variables that correspond to
1593+    juju-http-proxy, juju-https-proxy, juju-no-proxy (available as of 2.4.2, see
1594+ lp:1782236) and juju-ftp-proxy in a format suitable for passing to an
1595+ application that reacts to proxy settings passed as environment variables.
1596+ Some applications support lowercase or uppercase notation (e.g. curl), some
1597+ support only lowercase (e.g. wget), there are also subjectively rare cases
1598+    support only lowercase (e.g. wget), and there are also rare cases where only
1599+    the uppercase notation is supported. no_proxy CIDR and wildcard support also
1600+
1601+ Some applications may connect to multiple destinations and expose config
1602+    options that would affect only proxy settings for a specific destination;
1603+ these should be handled in charms in an application-specific manner.
1604+
1605+ :param selected_settings: format only a subset of possible settings
1606+ :type selected_settings: list
1607+ :rtype: Option(None, dict[str, str])
1608+ """
1609+ SUPPORTED_SETTINGS = {
1610+ 'http': 'HTTP_PROXY',
1611+ 'https': 'HTTPS_PROXY',
1612+ 'no_proxy': 'NO_PROXY',
1613+ 'ftp': 'FTP_PROXY'
1614+ }
1615+ if selected_settings is None:
1616+ selected_settings = SUPPORTED_SETTINGS
1617+
1618+ selected_vars = [v for k, v in SUPPORTED_SETTINGS.items()
1619+ if k in selected_settings]
1620+ proxy_settings = {}
1621+ for var in selected_vars:
1622+ var_val = os.getenv(var)
1623+ if var_val:
1624+ proxy_settings[var] = var_val
1625+ proxy_settings[var.lower()] = var_val
1626+ # Now handle juju-prefixed environment variables. The legacy vs new
1627+ # environment variable usage is mutually exclusive
1628+ charm_var_val = os.getenv('JUJU_CHARM_{}'.format(var))
1629+ if charm_var_val:
1630+ proxy_settings[var] = charm_var_val
1631+ proxy_settings[var.lower()] = charm_var_val
1632+ if 'no_proxy' in proxy_settings:
1633+ if _contains_range(proxy_settings['no_proxy']):
1634+ log(RANGE_WARNING, level=WARNING)
1635+ return proxy_settings if proxy_settings else None
1636+
1637+
1638+def _contains_range(addresses):
1639+ """Check for cidr or wildcard domain in a string.
1640+
1641+    Given a string comprising a comma-separated list of IP addresses
1642+ and domain names, determine whether the string contains IP ranges
1643+ or wildcard domains.
1644+
1645+    :param addresses: comma-separated list of domains and IP addresses.
1646+ :type addresses: str
1647+ """
1648+ return (
1649+ # Test for cidr (e.g. 10.20.20.0/24)
1650+ "/" in addresses or
1651+ # Test for wildcard domains (*.foo.com or .foo.com)
1652+ "*" in addresses or
1653+ addresses.startswith(".") or
1654+ ",." in addresses or
1655+ " ." in addresses)
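
As a usage sketch (illustrative only, not part of the proposed change), the new hookenv helpers above could be consumed from hook code roughly as follows; the 'monitors' relation name is only an assumption:

    from charmhelpers.core.hookenv import (
        egress_subnets,
        env_proxy_settings,
        ingress_address,
        iter_units_for_relation_name,
    )

    # One ingress address per remote unit, falling back to private-address.
    addresses = [ingress_address(rid=u.rid, unit=u.unit)
                 for u in iter_units_for_relation_name('monitors')]

    # CIDR ranges that client traffic may originate from, per remote unit.
    subnets = [cidr
               for u in iter_units_for_relation_name('monitors')
               for cidr in egress_subnets(rid=u.rid, unit=u.unit)]

    # Proxy environment (juju-https-proxy etc.) suitable for subprocess env=.
    proxies = env_proxy_settings(['https']) or {}
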
1656diff --git a/hooks/charmhelpers/core/host.py b/hooks/charmhelpers/core/host.py
1657index 5656e2f..b33ac90 100644
1658--- a/hooks/charmhelpers/core/host.py
1659+++ b/hooks/charmhelpers/core/host.py
1660@@ -34,21 +34,23 @@ import six
1661
1662 from contextlib import contextmanager
1663 from collections import OrderedDict
1664-from .hookenv import log, DEBUG
1665+from .hookenv import log, INFO, DEBUG, local_unit, charm_name
1666 from .fstab import Fstab
1667 from charmhelpers.osplatform import get_platform
1668
1669 __platform__ = get_platform()
1670 if __platform__ == "ubuntu":
1671- from charmhelpers.core.host_factory.ubuntu import (
1672+ from charmhelpers.core.host_factory.ubuntu import ( # NOQA:F401
1673 service_available,
1674 add_new_group,
1675 lsb_release,
1676 cmp_pkgrevno,
1677 CompareHostReleases,
1678+ get_distrib_codename,
1679+ arch
1680 ) # flake8: noqa -- ignore F401 for this import
1681 elif __platform__ == "centos":
1682- from charmhelpers.core.host_factory.centos import (
1683+ from charmhelpers.core.host_factory.centos import ( # NOQA:F401
1684 service_available,
1685 add_new_group,
1686 lsb_release,
1687@@ -58,6 +60,7 @@ elif __platform__ == "centos":
1688
1689 UPDATEDB_PATH = '/etc/updatedb.conf'
1690
1691+
1692 def service_start(service_name, **kwargs):
1693 """Start a system service.
1694
1695@@ -287,8 +290,8 @@ def service_running(service_name, **kwargs):
1696 for key, value in six.iteritems(kwargs):
1697 parameter = '%s=%s' % (key, value)
1698 cmd.append(parameter)
1699- output = subprocess.check_output(cmd,
1700- stderr=subprocess.STDOUT).decode('UTF-8')
1701+ output = subprocess.check_output(
1702+ cmd, stderr=subprocess.STDOUT).decode('UTF-8')
1703 except subprocess.CalledProcessError:
1704 return False
1705 else:
1706@@ -441,6 +444,51 @@ def add_user_to_group(username, group):
1707 subprocess.check_call(cmd)
1708
1709
1710+def chage(username, lastday=None, expiredate=None, inactive=None,
1711+ mindays=None, maxdays=None, root=None, warndays=None):
1712+ """Change user password expiry information
1713+
1714+ :param str username: User to update
1715+ :param str lastday: Set when password was changed in YYYY-MM-DD format
1716+ :param str expiredate: Set when user's account will no longer be
1717+ accessible in YYYY-MM-DD format.
1718+ -1 will remove an account expiration date.
1719+ :param str inactive: Set the number of days of inactivity after a password
1720+ has expired before the account is locked.
1721+ -1 will remove an account's inactivity.
1722+ :param str mindays: Set the minimum number of days between password
1723+ changes to MIN_DAYS.
1724+ 0 indicates the password can be changed anytime.
1725+ :param str maxdays: Set the maximum number of days during which a
1726+ password is valid.
1727+ -1 as MAX_DAYS will remove checking maxdays
1728+ :param str root: Apply changes in the CHROOT_DIR directory
1729+ :param str warndays: Set the number of days of warning before a password
1730+ change is required
1731+ :raises subprocess.CalledProcessError: if call to chage fails
1732+ """
1733+ cmd = ['chage']
1734+ if root:
1735+ cmd.extend(['--root', root])
1736+ if lastday:
1737+ cmd.extend(['--lastday', lastday])
1738+ if expiredate:
1739+ cmd.extend(['--expiredate', expiredate])
1740+ if inactive:
1741+ cmd.extend(['--inactive', inactive])
1742+ if mindays:
1743+ cmd.extend(['--mindays', mindays])
1744+ if maxdays:
1745+ cmd.extend(['--maxdays', maxdays])
1746+ if warndays:
1747+ cmd.extend(['--warndays', warndays])
1748+ cmd.append(username)
1749+ subprocess.check_call(cmd)
1750+
1751+
1752+remove_password_expiry = functools.partial(chage, expiredate='-1', inactive='-1', mindays='0', maxdays='-1')
1753+
1754+
1755 def rsync(from_path, to_path, flags='-r', options=None, timeout=None):
1756 """Replicate the contents of a path"""
1757 options = options or ['--delete', '--executability']
1758@@ -492,13 +540,15 @@ def write_file(path, content, owner='root', group='root', perms=0o444):
1759 # lets see if we can grab the file and compare the context, to avoid doing
1760 # a write.
1761 existing_content = None
1762- existing_uid, existing_gid = None, None
1763+ existing_uid, existing_gid, existing_perms = None, None, None
1764 try:
1765 with open(path, 'rb') as target:
1766 existing_content = target.read()
1767 stat = os.stat(path)
1768- existing_uid, existing_gid = stat.st_uid, stat.st_gid
1769- except:
1770+ existing_uid, existing_gid, existing_perms = (
1771+ stat.st_uid, stat.st_gid, stat.st_mode
1772+ )
1773+ except Exception:
1774 pass
1775 if content != existing_content:
1776 log("Writing file {} {}:{} {:o}".format(path, owner, group, perms),
1777@@ -506,10 +556,12 @@ def write_file(path, content, owner='root', group='root', perms=0o444):
1778 with open(path, 'wb') as target:
1779 os.fchown(target.fileno(), uid, gid)
1780 os.fchmod(target.fileno(), perms)
1781+ if six.PY3 and isinstance(content, six.string_types):
1782+ content = content.encode('UTF-8')
1783 target.write(content)
1784 return
1785 # the contents were the same, but we might still need to change the
1786- # ownership.
1787+ # ownership or permissions.
1788 if existing_uid != uid:
1789 log("Changing uid on already existing content: {} -> {}"
1790 .format(existing_uid, uid), level=DEBUG)
1791@@ -518,6 +570,10 @@ def write_file(path, content, owner='root', group='root', perms=0o444):
1792 log("Changing gid on already existing content: {} -> {}"
1793 .format(existing_gid, gid), level=DEBUG)
1794 os.chown(path, -1, gid)
1795+ if existing_perms != perms:
1796+ log("Changing permissions on existing content: {} -> {}"
1797+ .format(existing_perms, perms), level=DEBUG)
1798+ os.chmod(path, perms)
1799
1800
1801 def fstab_remove(mp):
1802@@ -782,7 +838,7 @@ def list_nics(nic_type=None):
1803 ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n')
1804 ip_output = (line.strip() for line in ip_output if line)
1805
1806- key = re.compile('^[0-9]+:\s+(.+):')
1807+ key = re.compile(r'^[0-9]+:\s+(.+):')
1808 for line in ip_output:
1809 matched = re.search(key, line)
1810 if matched:
1811@@ -927,6 +983,20 @@ def is_container():
1812
1813
1814 def add_to_updatedb_prunepath(path, updatedb_path=UPDATEDB_PATH):
1815+    """Adds the specified path to mlocate's updatedb.conf PRUNEPATHS list.
1816+
1817+ This method has no effect if the path specified by updatedb_path does not
1818+ exist or is not a file.
1819+
1820+ @param path: string the path to add to the updatedb.conf PRUNEPATHS value
1821+    @param updatedb_path: the path to the updatedb.conf file
1822+ """
1823+ if not os.path.exists(updatedb_path) or os.path.isdir(updatedb_path):
1824+ # If the updatedb.conf file doesn't exist then don't attempt to update
1825+ # the file as the package providing mlocate may not be installed on
1826+ # the local system
1827+ return
1828+
1829 with open(updatedb_path, 'r+') as f_id:
1830 updatedb_text = f_id.read()
1831 output = updatedb(updatedb_text, path)
1832@@ -946,3 +1016,89 @@ def updatedb(updatedb_text, new_path):
1833 lines[i] = 'PRUNEPATHS="{}"'.format(' '.join(paths))
1834 output = "\n".join(lines)
1835 return output
1836+
1837+
1838+def modulo_distribution(modulo=3, wait=30, non_zero_wait=False):
1839+ """ Modulo distribution
1840+
1841+ This helper uses the unit number, a modulo value and a constant wait time
1842+ to produce a calculated wait time distribution. This is useful in large
1843+ scale deployments to distribute load during an expensive operation such as
1844+ service restarts.
1845+
1846+    If you have 1000 nodes that need to restart, 100 at a time, one minute
1847+    apart:
1848+
1849+ time.wait(modulo_distribution(modulo=100, wait=60))
1850+ restart()
1851+
1852+ If you need restarts to happen serially set modulo to the exact number of
1853+ nodes and set a high constant wait time:
1854+
1855+ time.wait(modulo_distribution(modulo=10, wait=120))
1856+ restart()
1857+
1858+ @param modulo: int The modulo number creates the group distribution
1859+ @param wait: int The constant time wait value
1860+ @param non_zero_wait: boolean Override unit % modulo == 0,
1861+ return modulo * wait. Used to avoid collisions with
1862+ leader nodes which are often given priority.
1863+ @return: int Calculated time to wait for unit operation
1864+ """
1865+ unit_number = int(local_unit().split('/')[1])
1866+ calculated_wait_time = (unit_number % modulo) * wait
1867+ if non_zero_wait and calculated_wait_time == 0:
1868+ return modulo * wait
1869+ else:
1870+ return calculated_wait_time
1871+
1872+
1873+def install_ca_cert(ca_cert, name=None):
1874+ """
1875+ Install the given cert as a trusted CA.
1876+
1877+ The ``name`` is the stem of the filename where the cert is written, and if
1878+ not provided, it will default to ``juju-{charm_name}``.
1879+
1880+ If the cert is empty or None, or is unchanged, nothing is done.
1881+ """
1882+ if not ca_cert:
1883+ return
1884+ if not isinstance(ca_cert, bytes):
1885+ ca_cert = ca_cert.encode('utf8')
1886+ if not name:
1887+ name = 'juju-{}'.format(charm_name())
1888+ cert_file = '/usr/local/share/ca-certificates/{}.crt'.format(name)
1889+ new_hash = hashlib.md5(ca_cert).hexdigest()
1890+ if file_hash(cert_file) == new_hash:
1891+ return
1892+ log("Installing new CA cert at: {}".format(cert_file), level=INFO)
1893+ write_file(cert_file, ca_cert)
1894+ subprocess.check_call(['update-ca-certificates', '--fresh'])
1895+
1896+
1897+def get_system_env(key, default=None):
1898+ """Get data from system environment as represented in ``/etc/environment``.
1899+
1900+ :param key: Key to look up
1901+ :type key: str
1902+ :param default: Value to return if key is not found
1903+ :type default: any
1904+ :returns: Value for key if found or contents of default parameter
1905+ :rtype: any
1906+ :raises: subprocess.CalledProcessError
1907+ """
1908+ env_file = '/etc/environment'
1909+ # use the shell and env(1) to parse the global environments file. This is
1910+ # done to get the correct result even if the user has shell variable
1911+ # substitutions or other shell logic in that file.
1912+ output = subprocess.check_output(
1913+ ['env', '-i', '/bin/bash', '-c',
1914+ 'set -a && source {} && env'.format(env_file)],
1915+ universal_newlines=True)
1916+ for k, v in (line.split('=', 1)
1917+ for line in output.splitlines() if '=' in line):
1918+ if k == key:
1919+ return v
1920+ else:
1921+ return default
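
A minimal sketch of the staggered-restart pattern described in the modulo_distribution() docstring, combined with get_system_env(); the service name 'nagios3' and the sleep parameters are assumed only for illustration:

    import time

    from charmhelpers.core.host import (
        get_system_env,
        modulo_distribution,
        service_restart,
    )

    # Spread restarts over 10 groups, 30s apart; non_zero_wait=True makes
    # unit 0 wait a full cycle instead of restarting immediately.
    time.sleep(modulo_distribution(modulo=10, wait=30, non_zero_wait=True))
    service_restart('nagios3')

    # Read a value from /etc/environment, with a fallback default.
    path = get_system_env('PATH', default='/usr/bin:/bin')
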
1922diff --git a/hooks/charmhelpers/core/host_factory/ubuntu.py b/hooks/charmhelpers/core/host_factory/ubuntu.py
1923index d8dc378..3edc068 100644
1924--- a/hooks/charmhelpers/core/host_factory/ubuntu.py
1925+++ b/hooks/charmhelpers/core/host_factory/ubuntu.py
1926@@ -1,5 +1,6 @@
1927 import subprocess
1928
1929+from charmhelpers.core.hookenv import cached
1930 from charmhelpers.core.strutils import BasicStringComparator
1931
1932
1933@@ -20,6 +21,11 @@ UBUNTU_RELEASES = (
1934 'yakkety',
1935 'zesty',
1936 'artful',
1937+ 'bionic',
1938+ 'cosmic',
1939+ 'disco',
1940+ 'eoan',
1941+ 'focal'
1942 )
1943
1944
1945@@ -70,6 +76,14 @@ def lsb_release():
1946 return d
1947
1948
1949+def get_distrib_codename():
1950+ """Return the codename of the distribution
1951+ :returns: The codename
1952+ :rtype: str
1953+ """
1954+ return lsb_release()['DISTRIB_CODENAME'].lower()
1955+
1956+
1957 def cmp_pkgrevno(package, revno, pkgcache=None):
1958 """Compare supplied revno with the revno of the installed package.
1959
1960@@ -81,9 +95,22 @@ def cmp_pkgrevno(package, revno, pkgcache=None):
1961 the pkgcache argument is None. Be sure to add charmhelpers.fetch if
1962 you call this function, or pass an apt_pkg.Cache() instance.
1963 """
1964- import apt_pkg
1965+ from charmhelpers.fetch import apt_pkg
1966 if not pkgcache:
1967 from charmhelpers.fetch import apt_cache
1968 pkgcache = apt_cache()
1969 pkg = pkgcache[package]
1970 return apt_pkg.version_compare(pkg.current_ver.ver_str, revno)
1971+
1972+
1973+@cached
1974+def arch():
1975+ """Return the package architecture as a string.
1976+
1977+ :returns: the architecture
1978+ :rtype: str
1979+ :raises: subprocess.CalledProcessError if dpkg command fails
1980+ """
1981+ return subprocess.check_output(
1982+ ['dpkg', '--print-architecture']
1983+ ).rstrip().decode('UTF-8')
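
For reference, the new arch() and get_distrib_codename() helpers combine with cmp_pkgrevno() as in this sketch (package name and version are illustrative):

    from charmhelpers.core.host import arch, cmp_pkgrevno, get_distrib_codename

    if get_distrib_codename() == 'bionic' and arch() == 'amd64':
        # cmp_pkgrevno returns >0 if the installed package is newer than the
        # reference revision, 0 if equal, <0 if older.
        if cmp_pkgrevno('nagios3', '3.5.1') >= 0:
            pass  # safe to rely on 3.5.1+ behaviour here
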
1984diff --git a/hooks/charmhelpers/core/kernel.py b/hooks/charmhelpers/core/kernel.py
1985index 2d40452..e01f4f8 100644
1986--- a/hooks/charmhelpers/core/kernel.py
1987+++ b/hooks/charmhelpers/core/kernel.py
1988@@ -26,12 +26,12 @@ from charmhelpers.core.hookenv import (
1989
1990 __platform__ = get_platform()
1991 if __platform__ == "ubuntu":
1992- from charmhelpers.core.kernel_factory.ubuntu import (
1993+ from charmhelpers.core.kernel_factory.ubuntu import ( # NOQA:F401
1994 persistent_modprobe,
1995 update_initramfs,
1996 ) # flake8: noqa -- ignore F401 for this import
1997 elif __platform__ == "centos":
1998- from charmhelpers.core.kernel_factory.centos import (
1999+ from charmhelpers.core.kernel_factory.centos import ( # NOQA:F401
2000 persistent_modprobe,
2001 update_initramfs,
2002 ) # flake8: noqa -- ignore F401 for this import
2003diff --git a/hooks/charmhelpers/core/services/base.py b/hooks/charmhelpers/core/services/base.py
2004index ca9dc99..179ad4f 100644
2005--- a/hooks/charmhelpers/core/services/base.py
2006+++ b/hooks/charmhelpers/core/services/base.py
2007@@ -307,23 +307,34 @@ class PortManagerCallback(ManagerCallback):
2008 """
2009 def __call__(self, manager, service_name, event_name):
2010 service = manager.get_service(service_name)
2011- new_ports = service.get('ports', [])
2012+ # turn this generator into a list,
2013+ # as we'll be going over it multiple times
2014+ new_ports = list(service.get('ports', []))
2015 port_file = os.path.join(hookenv.charm_dir(), '.{}.ports'.format(service_name))
2016 if os.path.exists(port_file):
2017 with open(port_file) as fp:
2018 old_ports = fp.read().split(',')
2019 for old_port in old_ports:
2020- if bool(old_port):
2021- old_port = int(old_port)
2022- if old_port not in new_ports:
2023- hookenv.close_port(old_port)
2024+ if bool(old_port) and not self.ports_contains(old_port, new_ports):
2025+ hookenv.close_port(old_port)
2026 with open(port_file, 'w') as fp:
2027 fp.write(','.join(str(port) for port in new_ports))
2028 for port in new_ports:
2029+ # A port is either a number or 'ICMP'
2030+ protocol = 'TCP'
2031+ if str(port).upper() == 'ICMP':
2032+ protocol = 'ICMP'
2033 if event_name == 'start':
2034- hookenv.open_port(port)
2035+ hookenv.open_port(port, protocol)
2036 elif event_name == 'stop':
2037- hookenv.close_port(port)
2038+ hookenv.close_port(port, protocol)
2039+
2040+ def ports_contains(self, port, ports):
2041+ if not bool(port):
2042+ return False
2043+ if str(port).upper() != 'ICMP':
2044+ port = int(port)
2045+ return port in ports
2046
2047
2048 def service_stop(service_name):
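
A sketch of a services-framework definition exercising the new ICMP handling in PortManagerCallback; the service name and port list are hypothetical:

    from charmhelpers.core.services.base import ServiceManager

    manager = ServiceManager([{
        'service': 'my-service',      # hypothetical service name
        # Opened on start, closed on stop; 'ICMP' is now treated as a
        # protocol instead of being coerced with int().
        'ports': [80, 443, 'ICMP'],
    }])
    manager.manage()
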
2049diff --git a/hooks/charmhelpers/core/strutils.py b/hooks/charmhelpers/core/strutils.py
2050index 685dabd..e8df045 100644
2051--- a/hooks/charmhelpers/core/strutils.py
2052+++ b/hooks/charmhelpers/core/strutils.py
2053@@ -61,13 +61,19 @@ def bytes_from_string(value):
2054 if isinstance(value, six.string_types):
2055 value = six.text_type(value)
2056 else:
2057- msg = "Unable to interpret non-string value '%s' as boolean" % (value)
2058+ msg = "Unable to interpret non-string value '%s' as bytes" % (value)
2059 raise ValueError(msg)
2060 matches = re.match("([0-9]+)([a-zA-Z]+)", value)
2061- if not matches:
2062- msg = "Unable to interpret string value '%s' as bytes" % (value)
2063- raise ValueError(msg)
2064- return int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)])
2065+ if matches:
2066+ size = int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)])
2067+ else:
2068+ # Assume that value passed in is bytes
2069+ try:
2070+ size = int(value)
2071+ except ValueError:
2072+ msg = "Unable to interpret string value '%s' as bytes" % (value)
2073+ raise ValueError(msg)
2074+ return size
2075
2076
2077 class BasicStringComparator(object):
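
With this change bytes_from_string() also accepts plain byte counts, e.g.:

    from charmhelpers.core.strutils import bytes_from_string

    bytes_from_string('3M')     # 3145728
    bytes_from_string('1024')   # now valid: interpreted as 1024 bytes
    bytes_from_string('bogus')  # still raises ValueError
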
2078diff --git a/hooks/charmhelpers/core/sysctl.py b/hooks/charmhelpers/core/sysctl.py
2079index 6e413e3..386428d 100644
2080--- a/hooks/charmhelpers/core/sysctl.py
2081+++ b/hooks/charmhelpers/core/sysctl.py
2082@@ -17,38 +17,59 @@
2083
2084 import yaml
2085
2086-from subprocess import check_call
2087+from subprocess import check_call, CalledProcessError
2088
2089 from charmhelpers.core.hookenv import (
2090 log,
2091 DEBUG,
2092 ERROR,
2093+ WARNING,
2094 )
2095
2096+from charmhelpers.core.host import is_container
2097+
2098 __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>'
2099
2100
2101-def create(sysctl_dict, sysctl_file):
2102+def create(sysctl_dict, sysctl_file, ignore=False):
2103 """Creates a sysctl.conf file from a YAML associative array
2104
2105- :param sysctl_dict: a YAML-formatted string of sysctl options eg "{ 'kernel.max_pid': 1337 }"
2106+ :param sysctl_dict: a dict or YAML-formatted string of sysctl
2107+ options eg "{ 'kernel.max_pid': 1337 }"
2108 :type sysctl_dict: str
2109 :param sysctl_file: path to the sysctl file to be saved
2110 :type sysctl_file: str or unicode
2111+ :param ignore: If True, ignore "unknown variable" errors.
2112+ :type ignore: bool
2113 :returns: None
2114 """
2115- try:
2116- sysctl_dict_parsed = yaml.safe_load(sysctl_dict)
2117- except yaml.YAMLError:
2118- log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict),
2119- level=ERROR)
2120- return
2121+ if type(sysctl_dict) is not dict:
2122+ try:
2123+ sysctl_dict_parsed = yaml.safe_load(sysctl_dict)
2124+ except yaml.YAMLError:
2125+ log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict),
2126+ level=ERROR)
2127+ return
2128+ else:
2129+ sysctl_dict_parsed = sysctl_dict
2130
2131 with open(sysctl_file, "w") as fd:
2132 for key, value in sysctl_dict_parsed.items():
2133 fd.write("{}={}\n".format(key, value))
2134
2135- log("Updating sysctl_file: %s values: %s" % (sysctl_file, sysctl_dict_parsed),
2136+ log("Updating sysctl_file: {} values: {}".format(sysctl_file,
2137+ sysctl_dict_parsed),
2138 level=DEBUG)
2139
2140- check_call(["sysctl", "-p", sysctl_file])
2141+ call = ["sysctl", "-p", sysctl_file]
2142+ if ignore:
2143+ call.append("-e")
2144+
2145+ try:
2146+ check_call(call)
2147+ except CalledProcessError as e:
2148+ if is_container():
2149+ log("Error setting some sysctl keys in this container: {}".format(e.output),
2150+ level=WARNING)
2151+ else:
2152+ raise e
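
The updated create() accepts a dict directly and can tolerate unknown keys, which is mainly useful inside containers; the target filename below is illustrative:

    from charmhelpers.core.sysctl import create

    # ignore=True adds 'sysctl -e' so unknown keys are not fatal.
    create({'kernel.pid_max': 4194303, 'vm.swappiness': 1},
           '/etc/sysctl.d/50-charm.conf', ignore=True)
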
2153diff --git a/hooks/charmhelpers/core/templating.py b/hooks/charmhelpers/core/templating.py
2154index 7b801a3..9014015 100644
2155--- a/hooks/charmhelpers/core/templating.py
2156+++ b/hooks/charmhelpers/core/templating.py
2157@@ -20,7 +20,8 @@ from charmhelpers.core import hookenv
2158
2159
2160 def render(source, target, context, owner='root', group='root',
2161- perms=0o444, templates_dir=None, encoding='UTF-8', template_loader=None):
2162+ perms=0o444, templates_dir=None, encoding='UTF-8',
2163+ template_loader=None, config_template=None):
2164 """
2165 Render a template.
2166
2167@@ -32,6 +33,9 @@ def render(source, target, context, owner='root', group='root',
2168 The context should be a dict containing the values to be replaced in the
2169 template.
2170
2171+ config_template may be provided to render from a provided template instead
2172+ of loading from a file.
2173+
2174 The `owner`, `group`, and `perms` options will be passed to `write_file`.
2175
2176 If omitted, `templates_dir` defaults to the `templates` folder in the charm.
2177@@ -65,14 +69,19 @@ def render(source, target, context, owner='root', group='root',
2178 if templates_dir is None:
2179 templates_dir = os.path.join(hookenv.charm_dir(), 'templates')
2180 template_env = Environment(loader=FileSystemLoader(templates_dir))
2181- try:
2182- source = source
2183- template = template_env.get_template(source)
2184- except exceptions.TemplateNotFound as e:
2185- hookenv.log('Could not load template %s from %s.' %
2186- (source, templates_dir),
2187- level=hookenv.ERROR)
2188- raise e
2189+
2190+ # load from a string if provided explicitly
2191+ if config_template is not None:
2192+ template = template_env.from_string(config_template)
2193+ else:
2194+ try:
2195+ source = source
2196+ template = template_env.get_template(source)
2197+ except exceptions.TemplateNotFound as e:
2198+ hookenv.log('Could not load template %s from %s.' %
2199+ (source, templates_dir),
2200+ level=hookenv.ERROR)
2201+ raise e
2202 content = template.render(context)
2203 if target is not None:
2204 target_dir = os.path.dirname(target)
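
The new config_template argument renders from an in-memory string instead of a file under templates/; the template text and target path here are illustrative:

    from charmhelpers.core.templating import render

    render(source=None,
           target='/etc/nagios3/conf.d/extra.cfg',
           context={'host_name': 'web-0'},
           config_template='define host {\n  host_name {{ host_name }}\n}\n',
           perms=0o644)
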
2205diff --git a/hooks/charmhelpers/core/unitdata.py b/hooks/charmhelpers/core/unitdata.py
2206index 54ec969..ab55432 100644
2207--- a/hooks/charmhelpers/core/unitdata.py
2208+++ b/hooks/charmhelpers/core/unitdata.py
2209@@ -166,6 +166,10 @@ class Storage(object):
2210
2211 To support dicts, lists, integer, floats, and booleans values
2212 are automatically json encoded/decoded.
2213+
2214+ Note: to facilitate unit testing, ':memory:' can be passed as the
2215+ path parameter which causes sqlite3 to only build the db in memory.
2216+ This should only be used for testing purposes.
2217 """
2218 def __init__(self, path=None):
2219 self.db_path = path
2220@@ -175,6 +179,9 @@ class Storage(object):
2221 else:
2222 self.db_path = os.path.join(
2223 os.environ.get('CHARM_DIR', ''), '.unit-state.db')
2224+ if self.db_path != ':memory:':
2225+ with open(self.db_path, 'a') as f:
2226+ os.fchmod(f.fileno(), 0o600)
2227 self.conn = sqlite3.connect('%s' % self.db_path)
2228 self.cursor = self.conn.cursor()
2229 self.revision = None
2230@@ -358,7 +365,7 @@ class Storage(object):
2231 try:
2232 yield self.revision
2233 self.revision = None
2234- except:
2235+ except Exception:
2236 self.flush(False)
2237 self.revision = None
2238 raise
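
The ':memory:' path makes the kv store easy to exercise in unit tests, along the lines of:

    from charmhelpers.core import unitdata

    db = unitdata.Storage(path=':memory:')   # nothing written to disk
    db.set('nagios.checks', ['load', 'disk'])
    assert db.get('nagios.checks') == ['load', 'disk']
    db.flush()
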
2239diff --git a/hooks/charmhelpers/fetch/__init__.py b/hooks/charmhelpers/fetch/__init__.py
2240index 480a627..0cc7fc8 100644
2241--- a/hooks/charmhelpers/fetch/__init__.py
2242+++ b/hooks/charmhelpers/fetch/__init__.py
2243@@ -84,6 +84,7 @@ module = "charmhelpers.fetch.%s" % __platform__
2244 fetch = importlib.import_module(module)
2245
2246 filter_installed_packages = fetch.filter_installed_packages
2247+filter_missing_packages = fetch.filter_missing_packages
2248 install = fetch.apt_install
2249 upgrade = fetch.apt_upgrade
2250 update = _fetch_update = fetch.apt_update
2251@@ -96,11 +97,14 @@ if __platform__ == "ubuntu":
2252 apt_update = fetch.apt_update
2253 apt_upgrade = fetch.apt_upgrade
2254 apt_purge = fetch.apt_purge
2255+ apt_autoremove = fetch.apt_autoremove
2256 apt_mark = fetch.apt_mark
2257 apt_hold = fetch.apt_hold
2258 apt_unhold = fetch.apt_unhold
2259 import_key = fetch.import_key
2260 get_upstream_version = fetch.get_upstream_version
2261+ apt_pkg = fetch.ubuntu_apt_pkg
2262+ get_apt_dpkg_env = fetch.get_apt_dpkg_env
2263 elif __platform__ == "centos":
2264 yum_search = fetch.yum_search
2265
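
The newly exported helpers make a "purge only what is installed, then autoremove" flow straightforward; the package names are illustrative:

    from charmhelpers.fetch import (
        apt_autoremove,
        apt_purge,
        filter_missing_packages,
    )

    installed = filter_missing_packages(['nagios3', 'nagios-images'])
    if installed:
        apt_purge(installed, fatal=True)
    apt_autoremove(purge=True, fatal=True)
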
2266diff --git a/hooks/charmhelpers/fetch/archiveurl.py b/hooks/charmhelpers/fetch/archiveurl.py
2267index dd24f9e..d25587a 100644
2268--- a/hooks/charmhelpers/fetch/archiveurl.py
2269+++ b/hooks/charmhelpers/fetch/archiveurl.py
2270@@ -89,7 +89,7 @@ class ArchiveUrlFetchHandler(BaseFetchHandler):
2271 :param str source: URL pointing to an archive file.
2272 :param str dest: Local path location to download archive file to.
2273 """
2274- # propogate all exceptions
2275+ # propagate all exceptions
2276 # URLError, OSError, etc
2277 proto, netloc, path, params, query, fragment = urlparse(source)
2278 if proto in ('http', 'https'):
2279diff --git a/hooks/charmhelpers/fetch/bzrurl.py b/hooks/charmhelpers/fetch/bzrurl.py
2280index 07cd029..c4ab3ff 100644
2281--- a/hooks/charmhelpers/fetch/bzrurl.py
2282+++ b/hooks/charmhelpers/fetch/bzrurl.py
2283@@ -13,7 +13,7 @@
2284 # limitations under the License.
2285
2286 import os
2287-from subprocess import check_call
2288+from subprocess import STDOUT, check_output
2289 from charmhelpers.fetch import (
2290 BaseFetchHandler,
2291 UnhandledSource,
2292@@ -55,7 +55,7 @@ class BzrUrlFetchHandler(BaseFetchHandler):
2293 cmd = ['bzr', 'branch']
2294 cmd += cmd_opts
2295 cmd += [source, dest]
2296- check_call(cmd)
2297+ check_output(cmd, stderr=STDOUT)
2298
2299 def install(self, source, dest=None, revno=None):
2300 url_parts = self.parse_url(source)
2301diff --git a/hooks/charmhelpers/fetch/giturl.py b/hooks/charmhelpers/fetch/giturl.py
2302index 4cf21bc..070ca9b 100644
2303--- a/hooks/charmhelpers/fetch/giturl.py
2304+++ b/hooks/charmhelpers/fetch/giturl.py
2305@@ -13,7 +13,7 @@
2306 # limitations under the License.
2307
2308 import os
2309-from subprocess import check_call, CalledProcessError
2310+from subprocess import check_output, CalledProcessError, STDOUT
2311 from charmhelpers.fetch import (
2312 BaseFetchHandler,
2313 UnhandledSource,
2314@@ -50,7 +50,7 @@ class GitUrlFetchHandler(BaseFetchHandler):
2315 cmd = ['git', 'clone', source, dest, '--branch', branch]
2316 if depth:
2317 cmd.extend(['--depth', depth])
2318- check_call(cmd)
2319+ check_output(cmd, stderr=STDOUT)
2320
2321 def install(self, source, branch="master", dest=None, depth=None):
2322 url_parts = self.parse_url(source)
2323diff --git a/hooks/charmhelpers/fetch/python/__init__.py b/hooks/charmhelpers/fetch/python/__init__.py
2324new file mode 100644
2325index 0000000..bff99dc
2326--- /dev/null
2327+++ b/hooks/charmhelpers/fetch/python/__init__.py
2328@@ -0,0 +1,13 @@
2329+# Copyright 2014-2019 Canonical Limited.
2330+#
2331+# Licensed under the Apache License, Version 2.0 (the "License");
2332+# you may not use this file except in compliance with the License.
2333+# You may obtain a copy of the License at
2334+#
2335+# http://www.apache.org/licenses/LICENSE-2.0
2336+#
2337+# Unless required by applicable law or agreed to in writing, software
2338+# distributed under the License is distributed on an "AS IS" BASIS,
2339+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
2340+# See the License for the specific language governing permissions and
2341+# limitations under the License.
2342diff --git a/hooks/charmhelpers/fetch/python/debug.py b/hooks/charmhelpers/fetch/python/debug.py
2343new file mode 100644
2344index 0000000..757135e
2345--- /dev/null
2346+++ b/hooks/charmhelpers/fetch/python/debug.py
2347@@ -0,0 +1,54 @@
2348+#!/usr/bin/env python
2349+# coding: utf-8
2350+
2351+# Copyright 2014-2015 Canonical Limited.
2352+#
2353+# Licensed under the Apache License, Version 2.0 (the "License");
2354+# you may not use this file except in compliance with the License.
2355+# You may obtain a copy of the License at
2356+#
2357+# http://www.apache.org/licenses/LICENSE-2.0
2358+#
2359+# Unless required by applicable law or agreed to in writing, software
2360+# distributed under the License is distributed on an "AS IS" BASIS,
2361+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
2362+# See the License for the specific language governing permissions and
2363+# limitations under the License.
2364+
2365+from __future__ import print_function
2366+
2367+import atexit
2368+import sys
2369+
2370+from charmhelpers.fetch.python.rpdb import Rpdb
2371+from charmhelpers.core.hookenv import (
2372+ open_port,
2373+ close_port,
2374+ ERROR,
2375+ log
2376+)
2377+
2378+__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>"
2379+
2380+DEFAULT_ADDR = "0.0.0.0"
2381+DEFAULT_PORT = 4444
2382+
2383+
2384+def _error(message):
2385+ log(message, level=ERROR)
2386+
2387+
2388+def set_trace(addr=DEFAULT_ADDR, port=DEFAULT_PORT):
2389+ """
2390+ Set a trace point using the remote debugger
2391+ """
2392+ atexit.register(close_port, port)
2393+ try:
2394+ log("Starting a remote python debugger session on %s:%s" % (addr,
2395+ port))
2396+ open_port(port)
2397+ debugger = Rpdb(addr=addr, port=port)
2398+ debugger.set_trace(sys._getframe().f_back)
2399+ except Exception:
2400+ _error("Cannot start a remote debug session on %s:%s" % (addr,
2401+ port))
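
A sketch of attaching the remote debugger from hook code; the port is the module default and the client command is just one possibility:

    from charmhelpers.fetch.python.debug import set_trace

    # Opens the Juju port, starts Rpdb on 0.0.0.0:4444 and blocks until a
    # client connects, e.g. with:  nc <unit-ip> 4444
    set_trace()
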
2402diff --git a/hooks/charmhelpers/fetch/python/packages.py b/hooks/charmhelpers/fetch/python/packages.py
2403new file mode 100644
2404index 0000000..6e95028
2405--- /dev/null
2406+++ b/hooks/charmhelpers/fetch/python/packages.py
2407@@ -0,0 +1,154 @@
2408+#!/usr/bin/env python
2409+# coding: utf-8
2410+
2411+# Copyright 2014-2015 Canonical Limited.
2412+#
2413+# Licensed under the Apache License, Version 2.0 (the "License");
2414+# you may not use this file except in compliance with the License.
2415+# You may obtain a copy of the License at
2416+#
2417+# http://www.apache.org/licenses/LICENSE-2.0
2418+#
2419+# Unless required by applicable law or agreed to in writing, software
2420+# distributed under the License is distributed on an "AS IS" BASIS,
2421+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
2422+# See the License for the specific language governing permissions and
2423+# limitations under the License.
2424+
2425+import os
2426+import six
2427+import subprocess
2428+import sys
2429+
2430+from charmhelpers.fetch import apt_install, apt_update
2431+from charmhelpers.core.hookenv import charm_dir, log
2432+
2433+__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>"
2434+
2435+
2436+def pip_execute(*args, **kwargs):
2437+    """Overridden pip_execute() to stop sys.path being changed.
2438+
2439+    The act of importing main from the pip module seems to add wheels
2440+    from /usr/share/python-wheels (installed by various tools) to sys.path.
2441+ This function ensures that sys.path remains the same after the call is
2442+ executed.
2443+ """
2444+ try:
2445+ _path = sys.path
2446+ try:
2447+ from pip import main as _pip_execute
2448+ except ImportError:
2449+ apt_update()
2450+ if six.PY2:
2451+ apt_install('python-pip')
2452+ else:
2453+ apt_install('python3-pip')
2454+ from pip import main as _pip_execute
2455+ _pip_execute(*args, **kwargs)
2456+ finally:
2457+ sys.path = _path
2458+
2459+
2460+def parse_options(given, available):
2461+ """Given a set of options, check if available"""
2462+ for key, value in sorted(given.items()):
2463+ if not value:
2464+ continue
2465+ if key in available:
2466+ yield "--{0}={1}".format(key, value)
2467+
2468+
2469+def pip_install_requirements(requirements, constraints=None, **options):
2470+ """Install a requirements file.
2471+
2472+ :param constraints: Path to pip constraints file.
2473+ http://pip.readthedocs.org/en/stable/user_guide/#constraints-files
2474+ """
2475+ command = ["install"]
2476+
2477+ available_options = ('proxy', 'src', 'log', )
2478+ for option in parse_options(options, available_options):
2479+ command.append(option)
2480+
2481+ command.append("-r {0}".format(requirements))
2482+ if constraints:
2483+ command.append("-c {0}".format(constraints))
2484+ log("Installing from file: {} with constraints {} "
2485+ "and options: {}".format(requirements, constraints, command))
2486+ else:
2487+ log("Installing from file: {} with options: {}".format(requirements,
2488+ command))
2489+ pip_execute(command)
2490+
2491+
2492+def pip_install(package, fatal=False, upgrade=False, venv=None,
2493+ constraints=None, **options):
2494+ """Install a python package"""
2495+ if venv:
2496+ venv_python = os.path.join(venv, 'bin/pip')
2497+ command = [venv_python, "install"]
2498+ else:
2499+ command = ["install"]
2500+
2501+ available_options = ('proxy', 'src', 'log', 'index-url', )
2502+ for option in parse_options(options, available_options):
2503+ command.append(option)
2504+
2505+ if upgrade:
2506+ command.append('--upgrade')
2507+
2508+ if constraints:
2509+ command.extend(['-c', constraints])
2510+
2511+ if isinstance(package, list):
2512+ command.extend(package)
2513+ else:
2514+ command.append(package)
2515+
2516+ log("Installing {} package with options: {}".format(package,
2517+ command))
2518+ if venv:
2519+ subprocess.check_call(command)
2520+ else:
2521+ pip_execute(command)
2522+
2523+
2524+def pip_uninstall(package, **options):
2525+ """Uninstall a python package"""
2526+ command = ["uninstall", "-q", "-y"]
2527+
2528+ available_options = ('proxy', 'log', )
2529+ for option in parse_options(options, available_options):
2530+ command.append(option)
2531+
2532+ if isinstance(package, list):
2533+ command.extend(package)
2534+ else:
2535+ command.append(package)
2536+
2537+ log("Uninstalling {} package with options: {}".format(package,
2538+ command))
2539+ pip_execute(command)
2540+
2541+
2542+def pip_list():
2543+ """Returns the list of current python installed packages
2544+ """
2545+ return pip_execute(["list"])
2546+
2547+
2548+def pip_create_virtualenv(path=None):
2549+ """Create an isolated Python environment."""
2550+ if six.PY2:
2551+ apt_install('python-virtualenv')
2552+ else:
2553+ apt_install('python3-virtualenv')
2554+
2555+ if path:
2556+ venv_path = path
2557+ else:
2558+ venv_path = os.path.join(charm_dir(), 'venv')
2559+
2560+ if not os.path.exists(venv_path):
2561+ subprocess.check_call(['virtualenv', venv_path])
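
Typical usage of the pip helpers, e.g. installing into a charm-local virtualenv (the path and package names are illustrative):

    from charmhelpers.fetch.python.packages import (
        pip_create_virtualenv,
        pip_install,
    )

    pip_create_virtualenv('/opt/charm-venv')
    pip_install(['requests', 'pyyaml'], venv='/opt/charm-venv', upgrade=True)
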
2562diff --git a/hooks/charmhelpers/fetch/python/rpdb.py b/hooks/charmhelpers/fetch/python/rpdb.py
2563new file mode 100644
2564index 0000000..9b31610
2565--- /dev/null
2566+++ b/hooks/charmhelpers/fetch/python/rpdb.py
2567@@ -0,0 +1,56 @@
2568+# Copyright 2014-2015 Canonical Limited.
2569+#
2570+# Licensed under the Apache License, Version 2.0 (the "License");
2571+# you may not use this file except in compliance with the License.
2572+# You may obtain a copy of the License at
2573+#
2574+# http://www.apache.org/licenses/LICENSE-2.0
2575+#
2576+# Unless required by applicable law or agreed to in writing, software
2577+# distributed under the License is distributed on an "AS IS" BASIS,
2578+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
2579+# See the License for the specific language governing permissions and
2580+# limitations under the License.
2581+
2582+"""Remote Python Debugger (pdb wrapper)."""
2583+
2584+import pdb
2585+import socket
2586+import sys
2587+
2588+__author__ = "Bertrand Janin <b@janin.com>"
2589+__version__ = "0.1.3"
2590+
2591+
2592+class Rpdb(pdb.Pdb):
2593+
2594+ def __init__(self, addr="127.0.0.1", port=4444):
2595+ """Initialize the socket and initialize pdb."""
2596+
2597+ # Backup stdin and stdout before replacing them by the socket handle
2598+ self.old_stdout = sys.stdout
2599+ self.old_stdin = sys.stdin
2600+
2601+ # Open a 'reusable' socket to let the webapp reload on the same port
2602+ self.skt = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
2603+ self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True)
2604+ self.skt.bind((addr, port))
2605+ self.skt.listen(1)
2606+ (clientsocket, address) = self.skt.accept()
2607+ handle = clientsocket.makefile('rw')
2608+ pdb.Pdb.__init__(self, completekey='tab', stdin=handle, stdout=handle)
2609+ sys.stdout = sys.stdin = handle
2610+
2611+ def shutdown(self):
2612+ """Revert stdin and stdout, close the socket."""
2613+ sys.stdout = self.old_stdout
2614+ sys.stdin = self.old_stdin
2615+ self.skt.close()
2616+ self.set_continue()
2617+
2618+ def do_continue(self, arg):
2619+ """Stop all operation on ``continue``."""
2620+ self.shutdown()
2621+ return 1
2622+
2623+ do_EOF = do_quit = do_exit = do_c = do_cont = do_continue
2624diff --git a/hooks/charmhelpers/fetch/python/version.py b/hooks/charmhelpers/fetch/python/version.py
2625new file mode 100644
2626index 0000000..3eb4210
2627--- /dev/null
2628+++ b/hooks/charmhelpers/fetch/python/version.py
2629@@ -0,0 +1,32 @@
2630+#!/usr/bin/env python
2631+# coding: utf-8
2632+
2633+# Copyright 2014-2015 Canonical Limited.
2634+#
2635+# Licensed under the Apache License, Version 2.0 (the "License");
2636+# you may not use this file except in compliance with the License.
2637+# You may obtain a copy of the License at
2638+#
2639+# http://www.apache.org/licenses/LICENSE-2.0
2640+#
2641+# Unless required by applicable law or agreed to in writing, software
2642+# distributed under the License is distributed on an "AS IS" BASIS,
2643+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
2644+# See the License for the specific language governing permissions and
2645+# limitations under the License.
2646+
2647+import sys
2648+
2649+__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>"
2650+
2651+
2652+def current_version():
2653+ """Current system python version"""
2654+ return sys.version_info
2655+
2656+
2657+def current_version_string():
2658+ """Current system python version as string major.minor.micro"""
2659+ return "{0}.{1}.{2}".format(sys.version_info.major,
2660+ sys.version_info.minor,
2661+ sys.version_info.micro)
2662diff --git a/hooks/charmhelpers/fetch/snap.py b/hooks/charmhelpers/fetch/snap.py
2663index 112a54c..fc70aa9 100644
2664--- a/hooks/charmhelpers/fetch/snap.py
2665+++ b/hooks/charmhelpers/fetch/snap.py
2666@@ -41,6 +41,10 @@ class CouldNotAcquireLockException(Exception):
2667 pass
2668
2669
2670+class InvalidSnapChannel(Exception):
2671+ pass
2672+
2673+
2674 def _snap_exec(commands):
2675 """
2676 Execute snap commands.
2677@@ -65,7 +69,7 @@ def _snap_exec(commands):
2678 .format(SNAP_NO_LOCK_RETRY_COUNT))
2679 return_code = e.returncode
2680 log('Snap failed to acquire lock, trying again in {} seconds.'
2681- .format(SNAP_NO_LOCK_RETRY_DELAY, level='WARN'))
2682+ .format(SNAP_NO_LOCK_RETRY_DELAY), level='WARN')
2683 sleep(SNAP_NO_LOCK_RETRY_DELAY)
2684
2685 return return_code
2686@@ -132,3 +136,15 @@ def snap_refresh(packages, *flags):
2687
2688 log(message, level='INFO')
2689 return _snap_exec(['refresh'] + flags + packages)
2690+
2691+
2692+def valid_snap_channel(channel):
2693+ """ Validate snap channel exists
2694+
2695+ :raises InvalidSnapChannel: When channel does not exist
2696+ :return: Boolean
2697+ """
2698+ if channel.lower() in SNAP_CHANNELS:
2699+ return True
2700+ else:
2701+ raise InvalidSnapChannel("Invalid Snap Channel: {}".format(channel))
2702diff --git a/hooks/charmhelpers/fetch/ubuntu.py b/hooks/charmhelpers/fetch/ubuntu.py
2703index 40e1cb5..3ddaf0d 100644
2704--- a/hooks/charmhelpers/fetch/ubuntu.py
2705+++ b/hooks/charmhelpers/fetch/ubuntu.py
2706@@ -13,23 +13,23 @@
2707 # limitations under the License.
2708
2709 from collections import OrderedDict
2710-import os
2711 import platform
2712 import re
2713 import six
2714-import time
2715 import subprocess
2716-from tempfile import NamedTemporaryFile
2717+import sys
2718+import time
2719+
2720+from charmhelpers.core.host import get_distrib_codename, get_system_env
2721
2722-from charmhelpers.core.host import (
2723- lsb_release
2724-)
2725 from charmhelpers.core.hookenv import (
2726 log,
2727 DEBUG,
2728 WARNING,
2729+ env_proxy_settings,
2730 )
2731 from charmhelpers.fetch import SourceConfigError, GPGKeyError
2732+from charmhelpers.fetch import ubuntu_apt_pkg
2733
2734 PROPOSED_POCKET = (
2735 "# Proposed\n"
2736@@ -44,6 +44,7 @@ ARCH_TO_PROPOSED_POCKET = {
2737 'x86_64': PROPOSED_POCKET,
2738 'ppc64le': PROPOSED_PORTS_POCKET,
2739 'aarch64': PROPOSED_PORTS_POCKET,
2740+ 's390x': PROPOSED_PORTS_POCKET,
2741 }
2742 CLOUD_ARCHIVE_URL = "http://ubuntu-cloud.archive.canonical.com/ubuntu"
2743 CLOUD_ARCHIVE_KEY_ID = '5EDB1B62EC4926EA'
2744@@ -157,6 +158,38 @@ CLOUD_ARCHIVE_POCKETS = {
2745 'queens/proposed': 'xenial-proposed/queens',
2746 'xenial-queens/proposed': 'xenial-proposed/queens',
2747 'xenial-proposed/queens': 'xenial-proposed/queens',
2748+ # Rocky
2749+ 'rocky': 'bionic-updates/rocky',
2750+ 'bionic-rocky': 'bionic-updates/rocky',
2751+ 'bionic-rocky/updates': 'bionic-updates/rocky',
2752+ 'bionic-updates/rocky': 'bionic-updates/rocky',
2753+ 'rocky/proposed': 'bionic-proposed/rocky',
2754+ 'bionic-rocky/proposed': 'bionic-proposed/rocky',
2755+ 'bionic-proposed/rocky': 'bionic-proposed/rocky',
2756+ # Stein
2757+ 'stein': 'bionic-updates/stein',
2758+ 'bionic-stein': 'bionic-updates/stein',
2759+ 'bionic-stein/updates': 'bionic-updates/stein',
2760+ 'bionic-updates/stein': 'bionic-updates/stein',
2761+ 'stein/proposed': 'bionic-proposed/stein',
2762+ 'bionic-stein/proposed': 'bionic-proposed/stein',
2763+ 'bionic-proposed/stein': 'bionic-proposed/stein',
2764+ # Train
2765+ 'train': 'bionic-updates/train',
2766+ 'bionic-train': 'bionic-updates/train',
2767+ 'bionic-train/updates': 'bionic-updates/train',
2768+ 'bionic-updates/train': 'bionic-updates/train',
2769+ 'train/proposed': 'bionic-proposed/train',
2770+ 'bionic-train/proposed': 'bionic-proposed/train',
2771+ 'bionic-proposed/train': 'bionic-proposed/train',
2772+ # Ussuri
2773+ 'ussuri': 'bionic-updates/ussuri',
2774+ 'bionic-ussuri': 'bionic-updates/ussuri',
2775+ 'bionic-ussuri/updates': 'bionic-updates/ussuri',
2776+ 'bionic-updates/ussuri': 'bionic-updates/ussuri',
2777+ 'ussuri/proposed': 'bionic-proposed/ussuri',
2778+ 'bionic-ussuri/proposed': 'bionic-proposed/ussuri',
2779+ 'bionic-proposed/ussuri': 'bionic-proposed/ussuri',
2780 }
2781
2782
2783@@ -180,18 +213,54 @@ def filter_installed_packages(packages):
2784 return _pkgs
2785
2786
2787-def apt_cache(in_memory=True, progress=None):
2788- """Build and return an apt cache."""
2789- from apt import apt_pkg
2790- apt_pkg.init()
2791- if in_memory:
2792- apt_pkg.config.set("Dir::Cache::pkgcache", "")
2793- apt_pkg.config.set("Dir::Cache::srcpkgcache", "")
2794- return apt_pkg.Cache(progress)
2795+def filter_missing_packages(packages):
2796+ """Return a list of packages that are installed.
2797+
2798+ :param packages: list of packages to evaluate.
2799+ :returns list: Packages that are installed.
2800+ """
2801+ return list(
2802+ set(packages) -
2803+ set(filter_installed_packages(packages))
2804+ )
2805+
2806+
2807+def apt_cache(*_, **__):
2808+ """Shim returning an object simulating the apt_pkg Cache.
2809+
2810+    :param _: Accept arguments for compatibility, not used.
2811+    :type _: any
2812+    :param __: Accept keyword arguments for compatibility, not used.
2813+    :type __: any
2814+    :returns: Object used to interrogate the system apt and dpkg databases.
2815+    :rtype: ubuntu_apt_pkg.Cache
2816+ """
2817+ if 'apt_pkg' in sys.modules:
2818+ # NOTE(fnordahl): When our consumer use the upstream ``apt_pkg`` module
2819+        # NOTE(fnordahl): When our consumers use the upstream ``apt_pkg`` module
2820+ # to call ``apt_pkg.init()`` for them.
2821+ #
2822+ # Detect this situation, log a warning and make the call to
2823+ # ``apt_pkg.init()`` to avoid the consumer Python interpreter from
2824+ # crashing with a segmentation fault.
2825+        log('Support for use of upstream ``apt_pkg`` module in conjunction '
2826+ 'with charm-helpers is deprecated since 2019-06-25', level=WARNING)
2827+ sys.modules['apt_pkg'].init()
2828+ return ubuntu_apt_pkg.Cache()
2829
2830
2831 def apt_install(packages, options=None, fatal=False):
2832- """Install one or more packages."""
2833+ """Install one or more packages.
2834+
2835+ :param packages: Package(s) to install
2836+ :type packages: Option[str, List[str]]
2837+ :param options: Options to pass on to apt-get
2838+ :type options: Option[None, List[str]]
2839+ :param fatal: Whether the command's output should be checked and
2840+ retried.
2841+ :type fatal: bool
2842+ :raises: subprocess.CalledProcessError
2843+ """
2844 if options is None:
2845 options = ['--option=Dpkg::Options::=--force-confold']
2846
2847@@ -208,7 +277,17 @@ def apt_install(packages, options=None, fatal=False):
2848
2849
2850 def apt_upgrade(options=None, fatal=False, dist=False):
2851- """Upgrade all packages."""
2852+ """Upgrade all packages.
2853+
2854+ :param options: Options to pass on to apt-get
2855+ :type options: Option[None, List[str]]
2856+ :param fatal: Whether the command's output should be checked and
2857+ retried.
2858+ :type fatal: bool
2859+ :param dist: Whether ``dist-upgrade`` should be used over ``upgrade``
2860+ :type dist: bool
2861+ :raises: subprocess.CalledProcessError
2862+ """
2863 if options is None:
2864 options = ['--option=Dpkg::Options::=--force-confold']
2865
2866@@ -229,7 +308,15 @@ def apt_update(fatal=False):
2867
2868
2869 def apt_purge(packages, fatal=False):
2870- """Purge one or more packages."""
2871+ """Purge one or more packages.
2872+
2873+    :param packages: Package(s) to purge
2874+ :type packages: Option[str, List[str]]
2875+ :param fatal: Whether the command's output should be checked and
2876+ retried.
2877+ :type fatal: bool
2878+ :raises: subprocess.CalledProcessError
2879+ """
2880 cmd = ['apt-get', '--assume-yes', 'purge']
2881 if isinstance(packages, six.string_types):
2882 cmd.append(packages)
2883@@ -239,6 +326,21 @@ def apt_purge(packages, fatal=False):
2884 _run_apt_command(cmd, fatal)
2885
2886
2887+def apt_autoremove(purge=True, fatal=False):
2888+    """Remove packages that are no longer required.
2889+ :param purge: Whether the ``--purge`` option should be passed on or not.
2890+ :type purge: bool
2891+ :param fatal: Whether the command's output should be checked and
2892+ retried.
2893+ :type fatal: bool
2894+ :raises: subprocess.CalledProcessError
2895+ """
2896+ cmd = ['apt-get', '--assume-yes', 'autoremove']
2897+ if purge:
2898+ cmd.append('--purge')
2899+ _run_apt_command(cmd, fatal)
2900+
2901+
2902 def apt_mark(packages, mark, fatal=False):
2903 """Flag one or more packages using apt-mark."""
2904 log("Marking {} as {}".format(packages, mark))
2905@@ -265,13 +367,18 @@ def apt_unhold(packages, fatal=False):
2906 def import_key(key):
2907 """Import an ASCII Armor key.
2908
2909- /!\ A Radix64 format keyid is also supported for backwards
2910- compatibility, but should never be used; the key retrieval
2911- mechanism is insecure and subject to man-in-the-middle attacks
2912- voiding all signature checks using that key.
2913-
2914- :param keyid: The key in ASCII armor format,
2915- including BEGIN and END markers.
2916+ A Radix64 format keyid is also supported for backwards
2917+ compatibility. In this case Ubuntu keyserver will be
2918+ queried for a key via HTTPS by its keyid. This method
2919+    is less preferable because https proxy servers may
2920+ require traffic decryption which is equivalent to a
2921+ man-in-the-middle attack (a proxy server impersonates
2922+ keyserver TLS certificates and has to be explicitly
2923+ trusted by the system).
2924+
2925+ :param key: A GPG key in ASCII armor format,
2926+ including BEGIN and END markers or a keyid.
2927+ :type key: (bytes, str)
2928 :raises: GPGKeyError if the key could not be imported
2929 """
2930 key = key.strip()
2931@@ -282,35 +389,131 @@ def import_key(key):
2932 log("PGP key found (looks like ASCII Armor format)", level=DEBUG)
2933 if ('-----BEGIN PGP PUBLIC KEY BLOCK-----' in key and
2934 '-----END PGP PUBLIC KEY BLOCK-----' in key):
2935- log("Importing ASCII Armor PGP key", level=DEBUG)
2936- with NamedTemporaryFile() as keyfile:
2937- with open(keyfile.name, 'w') as fd:
2938- fd.write(key)
2939- fd.write("\n")
2940- cmd = ['apt-key', 'add', keyfile.name]
2941- try:
2942- subprocess.check_call(cmd)
2943- except subprocess.CalledProcessError:
2944- error = "Error importing PGP key '{}'".format(key)
2945- log(error)
2946- raise GPGKeyError(error)
2947+ log("Writing provided PGP key in the binary format", level=DEBUG)
2948+ if six.PY3:
2949+ key_bytes = key.encode('utf-8')
2950+ else:
2951+ key_bytes = key
2952+ key_name = _get_keyid_by_gpg_key(key_bytes)
2953+ key_gpg = _dearmor_gpg_key(key_bytes)
2954+ _write_apt_gpg_keyfile(key_name=key_name, key_material=key_gpg)
2955 else:
2956 raise GPGKeyError("ASCII armor markers missing from GPG key")
2957 else:
2958- # We should only send things obviously not a keyid offsite
2959- # via this unsecured protocol, as it may be a secret or part
2960- # of one.
2961 log("PGP key found (looks like Radix64 format)", level=WARNING)
2962- log("INSECURLY importing PGP key from keyserver; "
2963+ log("SECURELY importing PGP key from keyserver; "
2964 "full key not provided.", level=WARNING)
2965- cmd = ['apt-key', 'adv', '--keyserver',
2966- 'hkp://keyserver.ubuntu.com:80', '--recv-keys', key]
2967- try:
2968- subprocess.check_call(cmd)
2969- except subprocess.CalledProcessError:
2970- error = "Error importing PGP key '{}'".format(key)
2971- log(error)
2972- raise GPGKeyError(error)
2973+ # as of bionic add-apt-repository uses curl with an HTTPS keyserver URL
2974+ # to retrieve GPG keys. `apt-key adv` command is deprecated as is
2975+ # apt-key in general as noted in its manpage. See lp:1433761 for more
2976+ # history. Instead, /etc/apt/trusted.gpg.d is used directly to drop
2977+ # gpg
2978+ key_asc = _get_key_by_keyid(key)
2979+ # write the key in GPG format so that apt-key list shows it
2980+ key_gpg = _dearmor_gpg_key(key_asc)
2981+ _write_apt_gpg_keyfile(key_name=key, key_material=key_gpg)
2982+
2983+
2984+def _get_keyid_by_gpg_key(key_material):
2985+ """Get a GPG key fingerprint by GPG key material.
2986+ Gets a GPG key fingerprint (40-digit, 160-bit) by the ASCII armor-encoded
2987+ or binary GPG key material. Can be used, for example, to generate file
2988+ names for keys passed via charm options.
2989+
2990+ :param key_material: ASCII armor-encoded or binary GPG key material
2991+ :type key_material: bytes
2992+ :raises: GPGKeyError if invalid key material has been provided
2993+ :returns: A GPG key fingerprint
2994+ :rtype: str
2995+ """
2996+ # Use the same gpg command for both Xenial and Bionic
2997+ cmd = 'gpg --with-colons --with-fingerprint'
2998+ ps = subprocess.Popen(cmd.split(),
2999+ stdout=subprocess.PIPE,
3000+ stderr=subprocess.PIPE,
3001+ stdin=subprocess.PIPE)
3002+ out, err = ps.communicate(input=key_material)
3003+ if six.PY3:
3004+ out = out.decode('utf-8')
3005+ err = err.decode('utf-8')
3006+ if 'gpg: no valid OpenPGP data found.' in err:
3007+ raise GPGKeyError('Invalid GPG key material provided')
3008+ # from gnupg2 docs: fpr :: Fingerprint (fingerprint is in field 10)
3009+ return re.search(r"^fpr:{9}([0-9A-F]{40}):$", out, re.MULTILINE).group(1)
3010+
3011+
3012+def _get_key_by_keyid(keyid):
3013+ """Get a key via HTTPS from the Ubuntu keyserver.
3014+ Different key ID formats are supported by SKS keyservers (the longer ones
3015+ are more secure, see "dead beef attack" and https://evil32.com/). Since
3016+ HTTPS is used, if SSLBump-like HTTPS proxies are in place, they will
3017+ impersonate keyserver.ubuntu.com and generate a certificate with
3018+ keyserver.ubuntu.com in the CN field or in SubjAltName fields of a
3019+ certificate. If such proxy behavior is expected it is necessary to add the
3020+ CA certificate chain containing the intermediate CA of the SSLBump proxy to
3021+ every machine that this code runs on via ca-certs cloud-init directive (via
3022+ cloudinit-userdata model-config) or via other means (such as through a
3023+ custom charm option). Also note that DNS resolution for the hostname in a
3024+ URL is done at a proxy server - not at the client side.
3025+
3026+ 8-digit (32 bit) key ID
3027+ https://keyserver.ubuntu.com/pks/lookup?search=0x4652B4E6
3028+ 16-digit (64 bit) key ID
3029+ https://keyserver.ubuntu.com/pks/lookup?search=0x6E85A86E4652B4E6
3030+ 40-digit key ID:
3031+ https://keyserver.ubuntu.com/pks/lookup?search=0x35F77D63B5CEC106C577ED856E85A86E4652B4E6
3032+
3033+ :param keyid: An 8, 16 or 40 hex digit keyid to find a key for
3034+ :type keyid: (bytes, str)
3035+ :returns: A key material for the specified GPG key id
3036+ :rtype: (str, bytes)
3037+ :raises: subprocess.CalledProcessError
3038+ """
3039+ # options=mr - machine-readable output (disables html wrappers)
3040+ keyserver_url = ('https://keyserver.ubuntu.com'
3041+ '/pks/lookup?op=get&options=mr&exact=on&search=0x{}')
3042+ curl_cmd = ['curl', keyserver_url.format(keyid)]
3043+ # use proxy server settings in order to retrieve the key
3044+ return subprocess.check_output(curl_cmd,
3045+ env=env_proxy_settings(['https']))
3046+
3047+
3048+def _dearmor_gpg_key(key_asc):
3049+ """Converts a GPG key in the ASCII armor format to the binary format.
3050+
3051+ :param key_asc: A GPG key in ASCII armor format.
3052+ :type key_asc: (str, bytes)
3053+ :returns: A GPG key in binary format
3054+ :rtype: (str, bytes)
3055+ :raises: GPGKeyError
3056+ """
3057+ ps = subprocess.Popen(['gpg', '--dearmor'],
3058+ stdout=subprocess.PIPE,
3059+ stderr=subprocess.PIPE,
3060+ stdin=subprocess.PIPE)
3061+ out, err = ps.communicate(input=key_asc)
3062+ # no need to decode output as it is binary (invalid utf-8), only error
3063+ if six.PY3:
3064+ err = err.decode('utf-8')
3065+ if 'gpg: no valid OpenPGP data found.' in err:
3066+ raise GPGKeyError('Invalid GPG key material. Check your network setup'
3067+ ' (MTU, routing, DNS) and/or proxy server settings'
3068+ ' as well as destination keyserver status.')
3069+ else:
3070+ return out
3071+
3072+
3073+def _write_apt_gpg_keyfile(key_name, key_material):
3074+ """Writes GPG key material into a file at a provided path.
3075+
3076+ :param key_name: A key name to use for a key file (could be a fingerprint)
3077+ :type key_name: str
3078+ :param key_material: A GPG key material (binary)
3079+ :type key_material: (str, bytes)
3080+ """
3081+ with open('/etc/apt/trusted.gpg.d/{}.gpg'.format(key_name),
3082+ 'wb') as keyf:
3083+ keyf.write(key_material)
3084
3085
3086 def add_source(source, key=None, fail_invalid=False):
3087@@ -385,14 +588,16 @@ def add_source(source, key=None, fail_invalid=False):
3088 for r, fn in six.iteritems(_mapping):
3089 m = re.match(r, source)
3090 if m:
3091- # call the assoicated function with the captured groups
3092- # raises SourceConfigError on error.
3093- fn(*m.groups())
3094 if key:
3095+ # Import key before adding the source which depends on it,
3096+ # as refreshing packages could fail otherwise.
3097 try:
3098 import_key(key)
3099 except GPGKeyError as e:
3100 raise SourceConfigError(str(e))
3101+ # call the associated function with the captured groups
3102+ # raises SourceConfigError on error.
3103+ fn(*m.groups())
3104 break
3105 else:
3106 # nothing matched. log an error and maybe sys.exit
3107@@ -405,13 +610,13 @@ def add_source(source, key=None, fail_invalid=False):
3108 def _add_proposed():
3109 """Add the PROPOSED_POCKET as /etc/apt/source.list.d/proposed.list
3110
3111- Uses lsb_release()['DISTRIB_CODENAME'] to determine the correct staza for
3112+ Uses get_distrib_codename to determine the correct stanza for
3113 the deb line.
3114
3115 For intel architecutres PROPOSED_POCKET is used for the release, but for
3116 other architectures PROPOSED_PORTS_POCKET is used for the release.
3117 """
3118- release = lsb_release()['DISTRIB_CODENAME']
3119+ release = get_distrib_codename()
3120 arch = platform.machine()
3121 if arch not in six.iterkeys(ARCH_TO_PROPOSED_POCKET):
3122 raise SourceConfigError("Arch {} not supported for (distro-)proposed"
3123@@ -424,8 +629,16 @@ def _add_apt_repository(spec):
3124 """Add the spec using add_apt_repository
3125
3126 :param spec: the parameter to pass to add_apt_repository
3127+ :type spec: str
3128 """
3129- _run_with_retries(['add-apt-repository', '--yes', spec])
3130+ if '{series}' in spec:
3131+ series = get_distrib_codename()
3132+ spec = spec.replace('{series}', series)
3133+ # software-properties package for bionic properly reacts to proxy settings
3134+ # passed as environment variables (See lp:1433761). This is not the case
3135+ # LTS and non-LTS releases below bionic.
3137+    # for LTS and non-LTS releases below bionic.
3137+ cmd_env=env_proxy_settings(['https']))
3138
3139
3140 def _add_cloud_pocket(pocket):
3141@@ -494,7 +707,7 @@ def _verify_is_ubuntu_rel(release, os_release):
3142 :raises: SourceConfigError if the release is not the same as the ubuntu
3143 release.
3144 """
3145- ubuntu_rel = lsb_release()['DISTRIB_CODENAME']
3146+ ubuntu_rel = get_distrib_codename()
3147 if release != ubuntu_rel:
3148 raise SourceConfigError(
3149 'Invalid Cloud Archive release specified: {}-{} on this Ubuntu'
3150@@ -505,21 +718,22 @@ def _run_with_retries(cmd, max_retries=CMD_RETRY_COUNT, retry_exitcodes=(1,),
3151 retry_message="", cmd_env=None):
3152 """Run a command and retry until success or max_retries is reached.
3153
3154- :param: cmd: str: The apt command to run.
3155- :param: max_retries: int: The number of retries to attempt on a fatal
3156- command. Defaults to CMD_RETRY_COUNT.
3157- :param: retry_exitcodes: tuple: Optional additional exit codes to retry.
3158- Defaults to retry on exit code 1.
3159- :param: retry_message: str: Optional log prefix emitted during retries.
3160- :param: cmd_env: dict: Environment variables to add to the command run.
3161+ :param cmd: The apt command to run.
3162+ :type cmd: str
3163+ :param max_retries: The number of retries to attempt on a fatal
3164+ command. Defaults to CMD_RETRY_COUNT.
3165+ :type max_retries: int
3166+ :param retry_exitcodes: Optional additional exit codes to retry.
3167+ Defaults to retry on exit code 1.
3168+ :type retry_exitcodes: tuple
3169+ :param retry_message: Optional log prefix emitted during retries.
3170+ :type retry_message: str
3172+    :param cmd_env: Environment variables to add to the command run.
3173+    :type cmd_env: Optional[Dict[str, str]]
3173 """
3174-
3175- env = None
3176- kwargs = {}
3177+ env = get_apt_dpkg_env()
3178 if cmd_env:
3179- env = os.environ.copy()
3180 env.update(cmd_env)
3181- kwargs['env'] = env
3182
3183 if not retry_message:
3184 retry_message = "Failed executing '{}'".format(" ".join(cmd))
3185@@ -531,8 +745,7 @@ def _run_with_retries(cmd, max_retries=CMD_RETRY_COUNT, retry_exitcodes=(1,),
3186 retry_results = (None,) + retry_exitcodes
3187 while result in retry_results:
3188 try:
3189- # result = subprocess.check_call(cmd, env=env)
3190- result = subprocess.check_call(cmd, **kwargs)
3191+ result = subprocess.check_call(cmd, env=env)
3192 except subprocess.CalledProcessError as e:
3193 retry_count = retry_count + 1
3194 if retry_count > max_retries:
3195@@ -545,22 +758,18 @@ def _run_with_retries(cmd, max_retries=CMD_RETRY_COUNT, retry_exitcodes=(1,),
3196 def _run_apt_command(cmd, fatal=False):
3197 """Run an apt command with optional retries.
3198
3199- :param: cmd: str: The apt command to run.
3200- :param: fatal: bool: Whether the command's output should be checked and
3201- retried.
3202+ :param cmd: The apt command to run.
3203+ :type cmd: str
3204+ :param fatal: Whether the command's output should be checked and
3205+ retried.
3206+ :type fatal: bool
3207 """
3208- # Provide DEBIAN_FRONTEND=noninteractive if not present in the environment.
3209- cmd_env = {
3210- 'DEBIAN_FRONTEND': os.environ.get('DEBIAN_FRONTEND', 'noninteractive')}
3211-
3212 if fatal:
3213 _run_with_retries(
3214- cmd, cmd_env=cmd_env, retry_exitcodes=(1, APT_NO_LOCK,),
3215+ cmd, retry_exitcodes=(1, APT_NO_LOCK,),
3216 retry_message="Couldn't acquire DPKG lock")
3217 else:
3218- env = os.environ.copy()
3219- env.update(cmd_env)
3220- subprocess.call(cmd, env=env)
3221+ subprocess.call(cmd, env=get_apt_dpkg_env())
3222
3223
3224 def get_upstream_version(package):
3225@@ -568,11 +777,10 @@ def get_upstream_version(package):
3226
3227 @returns None (if not installed) or the upstream version
3228 """
3229- import apt_pkg
3230 cache = apt_cache()
3231 try:
3232 pkg = cache[package]
3233- except:
3234+ except Exception:
3235 # the package is unknown to the current apt cache.
3236 return None
3237
3238@@ -580,4 +788,18 @@ def get_upstream_version(package):
3239 # package is known, but no version is currently installed.
3240 return None
3241
3242- return apt_pkg.upstream_version(pkg.current_ver.ver_str)
3243+ return ubuntu_apt_pkg.upstream_version(pkg.current_ver.ver_str)
3244+
3245+
3246+def get_apt_dpkg_env():
3247+ """Get environment suitable for execution of APT and DPKG tools.
3248+
3249+ We keep this in a helper function instead of in a global constant to
3250+ avoid execution on import of the library.
3251+ :returns: Environment suitable for execution of APT and DPKG tools.
3252+ :rtype: Dict[str, str]
3253+ """
3254+ # The fallback is used in the event of ``/etc/environment`` not containing
3255+    # a valid PATH variable.
3256+ return {'DEBIAN_FRONTEND': 'noninteractive',
3257+ 'PATH': get_system_env('PATH', '/usr/sbin:/usr/bin:/sbin:/bin')}
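
The reordering in add_source() means the signing key is imported before the new source line lands, so an immediate refresh can authenticate the repository; a minimal usage sketch, with a placeholder PPA and key id rather than anything from this charm:

# Illustrative only: key import now precedes the source addition.
from charmhelpers.fetch.ubuntu import add_source, apt_update

add_source('ppa:example-team/example', key='4652B4E6')  # placeholder values
apt_update(fatal=True)  # refresh can succeed because the key is already trusted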
3258diff --git a/hooks/charmhelpers/fetch/ubuntu_apt_pkg.py b/hooks/charmhelpers/fetch/ubuntu_apt_pkg.py
3259new file mode 100644
3260index 0000000..929a75d
3261--- /dev/null
3262+++ b/hooks/charmhelpers/fetch/ubuntu_apt_pkg.py
3263@@ -0,0 +1,267 @@
3264+# Copyright 2019 Canonical Ltd
3265+#
3266+# Licensed under the Apache License, Version 2.0 (the "License");
3267+# you may not use this file except in compliance with the License.
3268+# You may obtain a copy of the License at
3269+#
3270+# http://www.apache.org/licenses/LICENSE-2.0
3271+#
3272+# Unless required by applicable law or agreed to in writing, software
3273+# distributed under the License is distributed on an "AS IS" BASIS,
3274+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
3275+# See the License for the specific language governing permissions and
3276+# limitations under the License.
3277+
3278+"""Provide a subset of the ``python-apt`` module API.
3279+
3280+Data collection is done through subprocess calls to ``apt-cache`` and
3281+``dpkg-query`` commands.
3282+
3283+The main purpose for this module is to avoid dependency on the
3284+``python-apt`` python module.
3285+
3286+The indicated python module is a wrapper around the ``apt`` C++ library
3287+which is tightly connected to the version of the distribution it was
3288+shipped on. It is not developed in a backward/forward compatible manner.
3289+
3290+This in turn makes it incredibly hard to distribute as a wheel for a piece
3291+of python software that supports a span of distro releases [0][1].
3292+
3293+Upstream feedback like [2] does not give confidence that this will ever change,
3294+so with this we get rid of the dependency.
3295+
3296+0: https://github.com/juju-solutions/layer-basic/pull/135
3297+1: https://bugs.launchpad.net/charm-octavia/+bug/1824112
3298+2: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=845330#10
3299+"""
3300+
3301+import locale
3302+import os
3303+import subprocess
3304+import sys
3305+
3306+
3307+class _container(dict):
3308+ """Simple container for attributes."""
3309+ __getattr__ = dict.__getitem__
3310+ __setattr__ = dict.__setitem__
3311+
3312+
3313+class Package(_container):
3314+ """Simple container for package attributes."""
3315+
3316+
3317+class Version(_container):
3318+ """Simple container for version attributes."""
3319+
3320+
3321+class Cache(object):
3322+ """Simulation of ``apt_pkg`` Cache object."""
3323+ def __init__(self, progress=None):
3324+ pass
3325+
3326+ def __contains__(self, package):
3327+ try:
3328+ pkg = self.__getitem__(package)
3329+ return pkg is not None
3330+ except KeyError:
3331+ return False
3332+
3333+ def __getitem__(self, package):
3334+ """Get information about a package from apt and dpkg databases.
3335+
3336+ :param package: Name of package
3337+ :type package: str
3338+ :returns: Package object
3339+ :rtype: object
3340+ :raises: KeyError, subprocess.CalledProcessError
3341+ """
3342+ apt_result = self._apt_cache_show([package])[package]
3343+ apt_result['name'] = apt_result.pop('package')
3344+ pkg = Package(apt_result)
3345+ dpkg_result = self._dpkg_list([package]).get(package, {})
3346+ current_ver = None
3347+ installed_version = dpkg_result.get('version')
3348+ if installed_version:
3349+ current_ver = Version({'ver_str': installed_version})
3350+ pkg.current_ver = current_ver
3351+ pkg.architecture = dpkg_result.get('architecture')
3352+ return pkg
3353+
3354+ def _dpkg_list(self, packages):
3355+ """Get data from system dpkg database for package.
3356+
3357+ :param packages: Packages to get data from
3358+ :type packages: List[str]
3359+ :returns: Structured data about installed packages, keys like
3360+ ``dpkg-query --list``
3361+ :rtype: dict
3362+ :raises: subprocess.CalledProcessError
3363+ """
3364+ pkgs = {}
3365+ cmd = ['dpkg-query', '--list']
3366+ cmd.extend(packages)
3367+ if locale.getlocale() == (None, None):
3368+ # subprocess calls out to locale.getpreferredencoding(False) to
3369+ # determine encoding. Workaround for Trusty where the
3370+ # environment appears to not be set up correctly.
3371+ locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
3372+ try:
3373+ output = subprocess.check_output(cmd,
3374+ stderr=subprocess.STDOUT,
3375+ universal_newlines=True)
3376+ except subprocess.CalledProcessError as cp:
3377+ # ``dpkg-query`` may return error and at the same time have
3378+ # produced useful output, for example when asked for multiple
3379+ # packages where some are not installed
3380+ if cp.returncode != 1:
3381+ raise
3382+ output = cp.output
3383+ headings = []
3384+ for line in output.splitlines():
3385+ if line.startswith('||/'):
3386+ headings = line.split()
3387+ headings.pop(0)
3388+ continue
3389+ elif (line.startswith('|') or line.startswith('+') or
3390+ line.startswith('dpkg-query:')):
3391+ continue
3392+ else:
3393+ data = line.split(None, 4)
3394+ status = data.pop(0)
3395+ if status != 'ii':
3396+ continue
3397+ pkg = {}
3398+ pkg.update({k.lower(): v for k, v in zip(headings, data)})
3399+ if 'name' in pkg:
3400+ pkgs.update({pkg['name']: pkg})
3401+ return pkgs
3402+
3403+ def _apt_cache_show(self, packages):
3404+ """Get data from system apt cache for package.
3405+
3406+ :param packages: Packages to get data from
3407+ :type packages: List[str]
3408+ :returns: Structured data about package, keys like
3409+ ``apt-cache show``
3410+ :rtype: dict
3411+ :raises: subprocess.CalledProcessError
3412+ """
3413+ pkgs = {}
3414+ cmd = ['apt-cache', 'show', '--no-all-versions']
3415+ cmd.extend(packages)
3416+ if locale.getlocale() == (None, None):
3417+ # subprocess calls out to locale.getpreferredencoding(False) to
3418+ # determine encoding. Workaround for Trusty where the
3419+ # environment appears to not be set up correctly.
3420+ locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
3421+ try:
3422+ output = subprocess.check_output(cmd,
3423+ stderr=subprocess.STDOUT,
3424+ universal_newlines=True)
3425+ previous = None
3426+ pkg = {}
3427+ for line in output.splitlines():
3428+ if not line:
3429+ if 'package' in pkg:
3430+ pkgs.update({pkg['package']: pkg})
3431+ pkg = {}
3432+ continue
3433+ if line.startswith(' '):
3434+ if previous and previous in pkg:
3435+ pkg[previous] += os.linesep + line.lstrip()
3436+ continue
3437+ if ':' in line:
3438+ kv = line.split(':', 1)
3439+ key = kv[0].lower()
3440+ if key == 'n':
3441+ continue
3442+ previous = key
3443+ pkg.update({key: kv[1].lstrip()})
3444+ except subprocess.CalledProcessError as cp:
3445+ # ``apt-cache`` returns 100 if none of the packages asked for
3446+ # exist in the apt cache.
3447+ if cp.returncode != 100:
3448+ raise
3449+ return pkgs
3450+
3451+
3452+class Config(_container):
3453+ def __init__(self):
3454+ super(Config, self).__init__(self._populate())
3455+
3456+ def _populate(self):
3457+ cfgs = {}
3458+ cmd = ['apt-config', 'dump']
3459+ output = subprocess.check_output(cmd,
3460+ stderr=subprocess.STDOUT,
3461+ universal_newlines=True)
3462+ for line in output.splitlines():
3463+ if not line.startswith("CommandLine"):
3464+ k, v = line.split(" ", 1)
3465+ cfgs[k] = v.strip(";").strip("\"")
3466+
3467+ return cfgs
3468+
3469+
3470+# Backwards compatibility with old apt_pkg module
3471+sys.modules[__name__].config = Config()
3472+
3473+
3474+def init():
3475+    """Compatibility shim that does nothing."""
3476+ pass
3477+
3478+
3479+def upstream_version(version):
3480+ """Extracts upstream version from a version string.
3481+
3482+ Upstream reference: https://salsa.debian.org/apt-team/apt/blob/master/
3483+ apt-pkg/deb/debversion.cc#L259
3484+
3485+ :param version: Version string
3486+ :type version: str
3487+ :returns: Upstream version
3488+ :rtype: str
3489+ """
3490+ if version:
3491+ version = version.split(':')[-1]
3492+ version = version.split('-')[0]
3493+ return version
3494+
3495+
3496+def version_compare(a, b):
3497+ """Compare the given versions.
3498+
3499+ Call out to ``dpkg`` to make sure the code doing the comparison is
3500+ compatible with what the ``apt`` library would do. Mimic the return
3501+ values.
3502+
3503+ Upstream reference:
3504+ https://apt-team.pages.debian.net/python-apt/library/apt_pkg.html
3505+ ?highlight=version_compare#apt_pkg.version_compare
3506+
3507+ :param a: version string
3508+ :type a: str
3509+ :param b: version string
3510+ :type b: str
3511+ :returns: >0 if ``a`` is greater than ``b``, 0 if a equals b,
3512+ <0 if ``a`` is smaller than ``b``
3513+ :rtype: int
3514+ :raises: subprocess.CalledProcessError, RuntimeError
3515+ """
3516+ for op in ('gt', 1), ('eq', 0), ('lt', -1):
3517+ try:
3518+ subprocess.check_call(['dpkg', '--compare-versions',
3519+ a, op[0], b],
3520+ stderr=subprocess.STDOUT,
3521+ universal_newlines=True)
3522+ return op[1]
3523+ except subprocess.CalledProcessError as cp:
3524+ if cp.returncode == 1:
3525+ continue
3526+ raise
3527+ else:
3528+ raise RuntimeError('Unable to compare "{}" and "{}", according to '
3529+                           'our logic they are neither greater, equal nor '
3530+                           'less than each other.'.format(a, b))
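
For reviewers unfamiliar with the shim, a short sketch of how it stands in for python-apt (the package name here is just an example):

# Illustrative only: the shim exposes Cache, upstream_version and
# version_compare backed by apt-cache, dpkg-query and dpkg.
from charmhelpers.fetch import ubuntu_apt_pkg as apt_pkg

cache = apt_pkg.Cache()
if 'nagios3' in cache:
    pkg = cache['nagios3']
    if pkg.current_ver:                          # None when known but not installed
        print(apt_pkg.upstream_version(pkg.current_ver.ver_str))

assert apt_pkg.version_compare('2.0-1', '1.9-3') > 0   # defers to `dpkg --compare-versions`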
3531diff --git a/hooks/charmhelpers/osplatform.py b/hooks/charmhelpers/osplatform.py
3532index d9a4d5c..78c81af 100644
3533--- a/hooks/charmhelpers/osplatform.py
3534+++ b/hooks/charmhelpers/osplatform.py
3535@@ -1,4 +1,5 @@
3536 import platform
3537+import os
3538
3539
3540 def get_platform():
3541@@ -9,9 +10,13 @@ def get_platform():
3542 This string is used to decide which platform module should be imported.
3543 """
3544 # linux_distribution is deprecated and will be removed in Python 3.7
3545- # Warings *not* disabled, as we certainly need to fix this.
3546- tuple_platform = platform.linux_distribution()
3547- current_platform = tuple_platform[0]
3548+ # Warnings *not* disabled, as we certainly need to fix this.
3549+ if hasattr(platform, 'linux_distribution'):
3550+ tuple_platform = platform.linux_distribution()
3551+ current_platform = tuple_platform[0]
3552+ else:
3553+ current_platform = _get_platform_from_fs()
3554+
3555 if "Ubuntu" in current_platform:
3556 return "ubuntu"
3557 elif "CentOS" in current_platform:
3558@@ -20,6 +25,22 @@ def get_platform():
3559 # Stock Python does not detect Ubuntu and instead returns debian.
3560 # Or at least it does in some build environments like Travis CI
3561 return "ubuntu"
3562+ elif "elementary" in current_platform:
3563+ # ElementaryOS fails to run tests locally without this.
3564+ return "ubuntu"
3565 else:
3566 raise RuntimeError("This module is not supported on {}."
3567 .format(current_platform))
3568+
3569+
3570+def _get_platform_from_fs():
3571+ """Get Platform from /etc/os-release."""
3572+ with open(os.path.join(os.sep, 'etc', 'os-release')) as fin:
3573+ content = dict(
3574+ line.split('=', 1)
3575+ for line in fin.read().splitlines()
3576+ if '=' in line
3577+ )
3578+ for k, v in content.items():
3579+ content[k] = v.strip('"')
3580+ return content["NAME"]
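
The new fallback simply parses /etc/os-release when platform.linux_distribution() is no longer available (Python 3.8+); shown standalone below, this is the same parsing _get_platform_from_fs() performs:

# Illustrative only: read NAME from /etc/os-release, e.g. "Ubuntu".
with open('/etc/os-release') as fin:
    content = dict(line.split('=', 1)
                   for line in fin.read().splitlines() if '=' in line)
print(content['NAME'].strip('"'))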
3581diff --git a/hooks/common.py b/hooks/common.py
3582index 66d41ec..c2280a3 100644
3583--- a/hooks/common.py
3584+++ b/hooks/common.py
3585@@ -43,6 +43,12 @@ def check_ip(n):
3586 return False
3587
3588
3589+def ingress_address(relation_data):
3590+ if 'ingress-address' in relation_data:
3591+ return relation_data['ingress-address']
3592+ return relation_data['private-address']
3593+
3594+
3595 def get_local_ingress_address(binding='website'):
3596 # using network-get to retrieve the address details if available.
3597 log('Getting hostname for binding %s' % binding)
3598@@ -342,21 +348,6 @@ def apply_host_policy(target_id, owner_unit, owner_relation):
3599 ssh_service.save()
3600
3601
3602-def get_valid_relations():
3603- for x in subprocess.Popen(['relation-ids', 'monitors'],
3604- stdout=subprocess.PIPE).stdout:
3605- yield x.strip()
3606- for x in subprocess.Popen(['relation-ids', 'nagios'],
3607- stdout=subprocess.PIPE).stdout:
3608- yield x.strip()
3609-
3610-
3611-def get_valid_units(relation_id):
3612- for x in subprocess.Popen(['relation-list', '-r', relation_id],
3613- stdout=subprocess.PIPE).stdout:
3614- yield x.strip()
3615-
3616-
3617 def _replace_in_config(find_me, replacement):
3618 with open(INPROGRESS_CFG) as cf:
3619 with tempfile.NamedTemporaryFile(dir=INPROGRESS_DIR, delete=False) as new_cf:
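
The ingress_address() helper added to hooks/common.py prefers the relation's ingress-address and falls back to private-address; a self-contained sketch of that behaviour with made-up relation data:

# Illustrative only: same logic as the helper added to hooks/common.py.
def ingress_address(relation_data):
    if 'ingress-address' in relation_data:
        return relation_data['ingress-address']
    return relation_data['private-address']

assert ingress_address({'ingress-address': '10.0.0.2',
                        'private-address': '192.0.2.2'}) == '10.0.0.2'
assert ingress_address({'private-address': '192.0.2.2'}) == '192.0.2.2'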
3620diff --git a/hooks/install b/hooks/install
3621index f002e46..a8900a3 100755
3622--- a/hooks/install
3623+++ b/hooks/install
3624@@ -29,7 +29,7 @@ echo nagios3-cgi nagios3/adminpassword password $PASSWORD | debconf-set-selectio
3625 echo nagios3-cgi nagios3/adminpassword-repeat password $PASSWORD | debconf-set-selections
3626
3627 DEBIAN_FRONTEND=noninteractive apt-get -qy \
3628- install nagios3 nagios-plugins python-cheetah python-jinja2 dnsutils debconf-utils nagios-nrpe-plugin pynag python-apt python-yaml
3629+ install nagios3 nagios-plugins python-cheetah python-jinja2 dnsutils debconf-utils nagios-nrpe-plugin pynag python-apt python-yaml python-enum34
3630
3631 scripts/postfix_loopback_only.sh
3632
3633diff --git a/hooks/monitors-relation-changed b/hooks/monitors-relation-changed
3634index 13cb96c..e16589d 100755
3635--- a/hooks/monitors-relation-changed
3636+++ b/hooks/monitors-relation-changed
3637@@ -18,17 +18,77 @@
3638
3639 import sys
3640 import os
3641-import subprocess
3642 import yaml
3643-import json
3644 import re
3645-
3646-
3647-from common import (customize_service, get_pynag_host,
3648- get_pynag_service, refresh_hostgroups,
3649- get_valid_relations, get_valid_units,
3650- initialize_inprogress_config, flush_inprogress_config,
3651- get_local_ingress_address)
3652+from collections import defaultdict
3653+
3654+from charmhelpers.core.hookenv import (
3655+ relation_get,
3656+ ingress_address,
3657+ related_units,
3658+ relation_ids,
3659+ log,
3660+ DEBUG
3661+)
3662+
3663+from common import (
3664+ customize_service,
3665+ get_pynag_host,
3666+ get_pynag_service,
3667+ refresh_hostgroups,
3668+ initialize_inprogress_config,
3669+ flush_inprogress_config
3670+)
3671+
3672+
3673+REQUIRED_REL_DATA_KEYS = [
3674+ 'target-address',
3675+ 'monitors',
3676+ 'target-id',
3677+]
3678+
3679+
3680+def _prepare_relation_data(unit, rid):
3681+ relation_data = relation_get(unit=unit, rid=rid)
3682+
3683+ if not relation_data:
3684+ msg = (
3685+ 'no relation data found for unit {} in relation {} - '
3686+ 'skipping'.format(unit, rid)
3687+ )
3688+ log(msg, level=DEBUG)
3689+ return {}
3690+
3691+ if rid.split(':')[0] == 'nagios':
3692+        # Fake it for the more generic 'nagios' relation
3693+ relation_data['target-id'] = unit.replace('/', '-')
3694+ relation_data['monitors'] = {'monitors': {'remote': {}}}
3695+
3696+ if not relation_data.get('target-address'):
3697+ relation_data['target-address'] = ingress_address(unit=unit, rid=rid)
3698+
3699+ for key in REQUIRED_REL_DATA_KEYS:
3700+ if not relation_data.get(key):
3701+ msg = (
3702+ '{} not found for unit {} in relation {} - '
3703+ 'skipping'.format(key, unit, rid)
3704+ )
3705+ log(msg, level=DEBUG)
3706+ return {}
3707+
3708+ return relation_data
3709+
3710+
3711+def _collect_relation_data():
3712+ all_relations = defaultdict(dict)
3713+ for relname in ['nagios', 'monitors']:
3714+ for relid in relation_ids(relname):
3715+ for unit in related_units(relid):
3716+ relation_data = _prepare_relation_data(unit=unit, rid=relid)
3717+ if relation_data:
3718+ all_relations[relid][unit] = relation_data
3719+
3720+ return all_relations
3721
3722
3723 def main(argv):
3724@@ -43,35 +103,7 @@ def main(argv):
3725 relation_settings['target-address'] = argv[3]
3726 all_relations = {'monitors:99': {'testing/0': relation_settings}}
3727 else:
3728- all_relations = {}
3729- for relid in get_valid_relations():
3730- (relname, relnum) = relid.split(':')
3731- for unit in get_valid_units(relid):
3732- relation_settings = json.loads(
3733- subprocess.check_output(['relation-get', '--format=json',
3734- '-r', relid,
3735- '-',unit]).strip())
3736-
3737- if relation_settings is None or relation_settings == '':
3738- continue
3739-
3740- if relname == 'monitors':
3741- if ('monitors' not in relation_settings
3742- or 'target-id' not in relation_settings):
3743- continue
3744- if ('target-id' in relation_settings and 'target-address' not in relation_settings):
3745- relation_settings['target-address'] = get_local_ingress_address('monitors')
3746-
3747- else:
3748- # Fake it for the more generic 'nagios' relation'
3749- relation_settings['target-id'] = unit.replace('/','-')
3750- relation_settings['target-address'] = get_local_ingress_address('monitors')
3751- relation_settings['monitors'] = {'monitors': {'remote': {} } }
3752-
3753- if relid not in all_relations:
3754- all_relations[relid] = {}
3755-
3756- all_relations[relid][unit] = relation_settings
3757+ all_relations = _collect_relation_data()
3758
3759 # Hack to work around http://pad.lv/1025478
3760 targets_with_addresses = set()
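
To make the skip-instead-of-crash behaviour concrete, here is a standalone sketch of the required-key check that _prepare_relation_data() applies (unit names and addresses are invented):

# Illustrative only: incomplete relation data is logged at DEBUG and skipped.
REQUIRED_REL_DATA_KEYS = ['target-address', 'monitors', 'target-id']

def is_complete(relation_data):
    return all(relation_data.get(key) for key in REQUIRED_REL_DATA_KEYS)

good = {'target-id': 'ubuntu-0', 'target-address': '10.0.0.5',
        'monitors': {'monitors': {'remote': {}}}}
bad = {'target-id': 'ubuntu-1'}                 # missing address and monitors

assert is_complete(good)
assert not is_complete(bad)                     # would be skipped with a DEBUG log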

Subscribers

People subscribed via source and target branches