Merge lp:~barryprice/charm-haproxy/charm-helpers-sync into lp:charm-haproxy

Proposed by Barry Price
Status: Superseded
Proposed branch: lp:~barryprice/charm-haproxy/charm-helpers-sync
Merge into: lp:charm-haproxy
Diff against target: 2894 lines (+1735/-187)
29 files modified
.bzrignore (+1/-0)
Makefile (+1/-1)
hooks/charmhelpers/__init__.py (+65/-4)
hooks/charmhelpers/contrib/charmsupport/nrpe.py (+32/-9)
hooks/charmhelpers/core/hookenv.py (+450/-28)
hooks/charmhelpers/core/host.py (+168/-11)
hooks/charmhelpers/core/host_factory/centos.py (+16/-0)
hooks/charmhelpers/core/host_factory/ubuntu.py (+57/-0)
hooks/charmhelpers/core/kernel.py (+2/-2)
hooks/charmhelpers/core/services/base.py (+18/-7)
hooks/charmhelpers/core/strutils.py (+64/-5)
hooks/charmhelpers/core/sysctl.py (+21/-10)
hooks/charmhelpers/core/templating.py (+18/-9)
hooks/charmhelpers/core/unitdata.py (+8/-1)
hooks/charmhelpers/fetch/__init__.py (+19/-9)
hooks/charmhelpers/fetch/archiveurl.py (+1/-1)
hooks/charmhelpers/fetch/bzrurl.py (+2/-2)
hooks/charmhelpers/fetch/centos.py (+1/-1)
hooks/charmhelpers/fetch/giturl.py (+2/-2)
hooks/charmhelpers/fetch/python/__init__.py (+13/-0)
hooks/charmhelpers/fetch/python/debug.py (+54/-0)
hooks/charmhelpers/fetch/python/packages.py (+154/-0)
hooks/charmhelpers/fetch/python/rpdb.py (+56/-0)
hooks/charmhelpers/fetch/python/version.py (+32/-0)
hooks/charmhelpers/fetch/snap.py (+33/-5)
hooks/charmhelpers/fetch/ubuntu.py (+426/-62)
hooks/hooks.py (+8/-5)
hooks/tests/test_config_changed_hooks.py (+5/-5)
hooks/tests/test_helpers.py (+8/-8)
To merge this branch: bzr merge lp:~barryprice/charm-haproxy/charm-helpers-sync
Reviewer Review Type Date Requested Status
haproxy-team Pending
Review via email: mp+364880@code.launchpad.net

This proposal has been superseded by a proposal from 2019-03-21.

Commit message

charm-helpers sync

To post a comment you must log in.
Revision history for this message
🤖 Canonical IS Merge Bot (canonical-is-mergebot) wrote :

This merge proposal is being monitored by mergebot. Change the status to Approved to merge.

Unmerged revisions

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1=== modified file '.bzrignore'
2--- .bzrignore 2015-05-15 16:54:14 +0000
3+++ .bzrignore 2019-03-21 11:40:09 +0000
4@@ -8,3 +8,4 @@
5 exec.d
6 build/charm-helpers
7 revision
8+.venv
9
10=== modified file 'Makefile'
11--- Makefile 2019-03-19 10:48:26 +0000
12+++ Makefile 2019-03-21 11:40:09 +0000
13@@ -24,7 +24,7 @@
14
15 lint: .venv
16 @echo Checking for Python syntax...
17- @python -m flake8 $(HOOKS_DIR) --ignore=E123 --exclude=$(HOOKS_DIR)/charmhelpers
18+ @python -m flake8 $(HOOKS_DIR) --extend-ignore=E123 --exclude=$(HOOKS_DIR)/charmhelpers
19
20 sourcedeps:
21 @echo Updating source dependencies...
22
23=== modified file 'hooks/charmhelpers/__init__.py'
24--- hooks/charmhelpers/__init__.py 2017-03-21 15:08:36 +0000
25+++ hooks/charmhelpers/__init__.py 2019-03-21 11:40:09 +0000
26@@ -14,23 +14,84 @@
27
28 # Bootstrap charm-helpers, installing its dependencies if necessary using
29 # only standard libraries.
30+from __future__ import print_function
31+from __future__ import absolute_import
32+
33+import functools
34+import inspect
35 import subprocess
36 import sys
37
38 try:
39- import six # flake8: noqa
40+ import six # NOQA:F401
41 except ImportError:
42 if sys.version_info.major == 2:
43 subprocess.check_call(['apt-get', 'install', '-y', 'python-six'])
44 else:
45 subprocess.check_call(['apt-get', 'install', '-y', 'python3-six'])
46- import six # flake8: noqa
47+ import six # NOQA:F401
48
49 try:
50- import yaml # flake8: noqa
51+ import yaml # NOQA:F401
52 except ImportError:
53 if sys.version_info.major == 2:
54 subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml'])
55 else:
56 subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml'])
57- import yaml # flake8: noqa
58+ import yaml # NOQA:F401
59+
60+
61+# Holds a list of mapping of mangled function names that have been deprecated
62+# using the @deprecate decorator below. This is so that the warning is only
63+# printed once for each usage of the function.
64+__deprecated_functions = {}
65+
66+
67+def deprecate(warning, date=None, log=None):
68+ """Add a deprecation warning the first time the function is used.
69+ The date, which is a string in semi-ISO8601 format, indicates the year-month
70+ that the function is officially going to be removed.
71+
72+ usage:
73+
74+ @deprecate('use core/fetch/add_source() instead', '2017-04')
75+ def contributed_add_source_thing(...):
76+ ...
77+
78+ And it then prints to the log ONCE that the function is deprecated.
79+ The reason for passing the logging function (log) is so that hookenv.log
80+ can be used for a charm if needed.
81+
82+ :param warning: String to indicate where it has moved to.
83+ :param date: optional string, in YYYY-MM format to indicate when the
84+ function will definitely (probably) be removed.
85+ :param log: The log function to call to log. If not set, logs to stdout
86+ """
87+ def wrap(f):
88+
89+ @functools.wraps(f)
90+ def wrapped_f(*args, **kwargs):
91+ try:
92+ module = inspect.getmodule(f)
93+ file = inspect.getsourcefile(f)
94+ lines = inspect.getsourcelines(f)
95+ f_name = "{}-{}-{}..{}-{}".format(
96+ module.__name__, file, lines[0], lines[-1], f.__name__)
97+ except (IOError, TypeError):
98+ # assume it was local, so just use the name of the function
99+ f_name = f.__name__
100+ if f_name not in __deprecated_functions:
101+ __deprecated_functions[f_name] = True
102+ s = "DEPRECATION WARNING: Function {} is being removed".format(
103+ f.__name__)
104+ if date:
105+ s = "{} on/around {}".format(s, date)
106+ if warning:
107+ s = "{} : {}".format(s, warning)
108+ if log:
109+ log(s)
110+ else:
111+ print(s)
112+ return f(*args, **kwargs)
113+ return wrapped_f
114+ return wrap
115
116=== modified file 'hooks/charmhelpers/contrib/charmsupport/nrpe.py'
117--- hooks/charmhelpers/contrib/charmsupport/nrpe.py 2017-03-21 15:08:36 +0000
118+++ hooks/charmhelpers/contrib/charmsupport/nrpe.py 2019-03-21 11:40:09 +0000
119@@ -30,6 +30,7 @@
120
121 from charmhelpers.core.hookenv import (
122 config,
123+ hook_name,
124 local_unit,
125 log,
126 relation_ids,
127@@ -125,7 +126,7 @@
128
129
130 class Check(object):
131- shortname_re = '[A-Za-z0-9-_]+$'
132+ shortname_re = '[A-Za-z0-9-_.@]+$'
133 service_template = ("""
134 #---------------------------------------------------
135 # This file is Juju managed
136@@ -193,6 +194,13 @@
137 nrpe_check_file = self._get_check_filename()
138 with open(nrpe_check_file, 'w') as nrpe_check_config:
139 nrpe_check_config.write("# check {}\n".format(self.shortname))
140+ if nagios_servicegroups:
141+ nrpe_check_config.write(
142+ "# The following header was added automatically by juju\n")
143+ nrpe_check_config.write(
144+ "# Modifying it will affect nagios monitoring and alerting\n")
145+ nrpe_check_config.write(
146+ "# servicegroups: {}\n".format(nagios_servicegroups))
147 nrpe_check_config.write("command[{}]={}\n".format(
148 self.command, self.check_cmd))
149
150@@ -278,7 +286,7 @@
151 try:
152 nagios_uid = pwd.getpwnam('nagios').pw_uid
153 nagios_gid = grp.getgrnam('nagios').gr_gid
154- except:
155+ except Exception:
156 log("Nagios user not set up, nrpe checks not updated")
157 return
158
159@@ -295,7 +303,12 @@
160 "command": nrpecheck.command,
161 }
162
163- service('restart', 'nagios-nrpe-server')
164+ # update-status hooks are configured to fire every 5 minutes by
165+ # default. When nagios-nrpe-server is restarted, the nagios server
166+ # reports checks failing causing unnecessary alerts. Let's not restart
167+ # on update-status hooks.
168+ if not hook_name() == 'update-status':
169+ service('restart', 'nagios-nrpe-server')
170
171 monitor_ids = relation_ids("local-monitors") + \
172 relation_ids("nrpe-external-master")
173@@ -373,7 +386,7 @@
174 checkpath = '%s/service-check-%s.txt' % (nrpe.homedir, svc)
175 croncmd = (
176 '/usr/local/lib/nagios/plugins/check_exit_status.pl '
177- '-s /etc/init.d/%s status' % svc
178+ '-e -s /etc/init.d/%s status' % svc
179 )
180 cron_file = '*/5 * * * * root %s > %s\n' % (croncmd, checkpath)
181 f = open(cronpath, 'w')
182@@ -397,16 +410,26 @@
183 os.chmod(checkpath, 0o644)
184
185
186-def copy_nrpe_checks():
187+def copy_nrpe_checks(nrpe_files_dir=None):
188 """
189 Copy the nrpe checks into place
190
191 """
192 NAGIOS_PLUGINS = '/usr/local/lib/nagios/plugins'
193- nrpe_files_dir = os.path.join(os.getenv('CHARM_DIR'), 'hooks',
194- 'charmhelpers', 'contrib', 'openstack',
195- 'files')
196-
197+ if nrpe_files_dir is None:
198+ # determine if "charmhelpers" is in CHARMDIR or CHARMDIR/hooks
199+ for segment in ['.', 'hooks']:
200+ nrpe_files_dir = os.path.abspath(os.path.join(
201+ os.getenv('CHARM_DIR'),
202+ segment,
203+ 'charmhelpers',
204+ 'contrib',
205+ 'openstack',
206+ 'files'))
207+ if os.path.isdir(nrpe_files_dir):
208+ break
209+ else:
210+ raise RuntimeError("Couldn't find charmhelpers directory")
211 if not os.path.exists(NAGIOS_PLUGINS):
212 os.makedirs(NAGIOS_PLUGINS)
213 for fname in glob.glob(os.path.join(nrpe_files_dir, "check_*")):
214
215=== modified file 'hooks/charmhelpers/core/hookenv.py'
216--- hooks/charmhelpers/core/hookenv.py 2017-03-02 15:18:31 +0000
217+++ hooks/charmhelpers/core/hookenv.py 2019-03-21 11:40:09 +0000
218@@ -22,10 +22,12 @@
219 import copy
220 from distutils.version import LooseVersion
221 from functools import wraps
222+from collections import namedtuple
223 import glob
224 import os
225 import json
226 import yaml
227+import re
228 import subprocess
229 import sys
230 import errno
231@@ -38,12 +40,20 @@
232 else:
233 from collections import UserDict
234
235+
236 CRITICAL = "CRITICAL"
237 ERROR = "ERROR"
238 WARNING = "WARNING"
239 INFO = "INFO"
240 DEBUG = "DEBUG"
241+TRACE = "TRACE"
242 MARKER = object()
243+SH_MAX_ARG = 131071
244+
245+
246+RANGE_WARNING = ('Passing NO_PROXY string that includes a cidr. '
247+ 'This may not be compatible with software you are '
248+ 'running in your shell.')
249
250 cache = {}
251
252@@ -64,7 +74,7 @@
253 @wraps(func)
254 def wrapper(*args, **kwargs):
255 global cache
256- key = str((func, args, kwargs))
257+ key = json.dumps((func, args, kwargs), sort_keys=True, default=str)
258 try:
259 return cache[key]
260 except KeyError:
261@@ -94,7 +104,7 @@
262 command += ['-l', level]
263 if not isinstance(message, six.string_types):
264 message = repr(message)
265- command += [message]
266+ command += [message[:SH_MAX_ARG]]
267 # Missing juju-log should not cause failures in unit tests
268 # Send log output to stderr
269 try:
270@@ -197,9 +207,56 @@
271 return os.environ.get('JUJU_REMOTE_UNIT', None)
272
273
274+def application_name():
275+ """
276+ The name of the deployed application this unit belongs to.
277+ """
278+ return local_unit().split('/')[0]
279+
280+
281 def service_name():
282- """The name service group this unit belongs to"""
283- return local_unit().split('/')[0]
284+ """
285+ .. deprecated:: 0.19.1
286+ Alias for :func:`application_name`.
287+ """
288+ return application_name()
289+
290+
291+def model_name():
292+ """
293+ Name of the model that this unit is deployed in.
294+ """
295+ return os.environ['JUJU_MODEL_NAME']
296+
297+
298+def model_uuid():
299+ """
300+ UUID of the model that this unit is deployed in.
301+ """
302+ return os.environ['JUJU_MODEL_UUID']
303+
304+
305+def principal_unit():
306+ """Returns the principal unit of this unit, otherwise None"""
307+ # Juju 2.2 and above provides JUJU_PRINCIPAL_UNIT
308+ principal_unit = os.environ.get('JUJU_PRINCIPAL_UNIT', None)
309+ # If it's empty, then this unit is the principal
310+ if principal_unit == '':
311+ return os.environ['JUJU_UNIT_NAME']
312+ elif principal_unit is not None:
313+ return principal_unit
314+ # For Juju 2.1 and below, let's try to work out the principal unit by
315+ # the various charms' metadata.yaml.
316+ for reltype in relation_types():
317+ for rid in relation_ids(reltype):
318+ for unit in related_units(rid):
319+ md = _metadata_unit(unit)
320+ if not md:
321+ continue
322+ subordinate = md.pop('subordinate', None)
323+ if not subordinate:
324+ return unit
325+ return None
326
327
328 @cached
329@@ -263,7 +320,7 @@
330 self.implicit_save = True
331 self._prev_dict = None
332 self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME)
333- if os.path.exists(self.path):
334+ if os.path.exists(self.path) and os.stat(self.path).st_size:
335 self.load_previous()
336 atexit(self._implicit_save)
337
338@@ -283,7 +340,11 @@
339 """
340 self.path = path or self.path
341 with open(self.path) as f:
342- self._prev_dict = json.load(f)
343+ try:
344+ self._prev_dict = json.load(f)
345+ except ValueError as e:
346+ log('Unable to parse previous config data - {}'.format(str(e)),
347+ level=ERROR)
348 for k, v in copy.deepcopy(self._prev_dict).items():
349 if k not in self:
350 self[k] = v
351@@ -319,6 +380,7 @@
352
353 """
354 with open(self.path, 'w') as f:
355+ os.fchmod(f.fileno(), 0o600)
356 json.dump(self, f)
357
358 def _implicit_save(self):
359@@ -326,22 +388,40 @@
360 self.save()
361
362
363-@cached
364+_cache_config = None
365+
366+
367 def config(scope=None):
368- """Juju charm configuration"""
369- config_cmd_line = ['config-get']
370- if scope is not None:
371- config_cmd_line.append(scope)
372- else:
373- config_cmd_line.append('--all')
374- config_cmd_line.append('--format=json')
375- try:
376- config_data = json.loads(
377- subprocess.check_output(config_cmd_line).decode('UTF-8'))
378+ """
379+ Get the juju charm configuration (scope==None) or individual key,
380+ (scope=str). The returned value is a Python data structure loaded as
381+ JSON from the Juju config command.
382+
383+ :param scope: If set, return the value for the specified key.
384+ :type scope: Optional[str]
385+ :returns: Either the whole config as a Config, or a key from it.
386+ :rtype: Any
387+ """
388+ global _cache_config
389+ config_cmd_line = ['config-get', '--all', '--format=json']
390+ try:
391+ # JSON Decode Exception for Python3.5+
392+ exc_json = json.decoder.JSONDecodeError
393+ except AttributeError:
394+ # JSON Decode Exception for Python2.7 through Python3.4
395+ exc_json = ValueError
396+ try:
397+ if _cache_config is None:
398+ config_data = json.loads(
399+ subprocess.check_output(config_cmd_line).decode('UTF-8'))
400+ _cache_config = Config(config_data)
401 if scope is not None:
402- return config_data
403- return Config(config_data)
404- except ValueError:
405+ return _cache_config.get(scope)
406+ return _cache_config
407+ except (exc_json, UnicodeDecodeError) as e:
408+ log('Unable to parse output from config-get: config_cmd_line="{}" '
409+ 'message="{}"'
410+ .format(config_cmd_line, str(e)), level=ERROR)
411 return None
412
413
414@@ -435,6 +515,67 @@
415 subprocess.check_output(units_cmd_line).decode('UTF-8')) or []
416
417
418+def expected_peer_units():
419+ """Get a generator for units we expect to join peer relation based on
420+ goal-state.
421+
422+ The local unit is excluded from the result to make it easy to gauge
423+ completion of all peers joining the relation with existing hook tools.
424+
425+ Example usage:
426+ log('peer {} of {} joined peer relation'
427+ .format(len(related_units()),
428+ len(list(expected_peer_units()))))
429+
430+ This function will raise NotImplementedError if used with juju versions
431+ without goal-state support.
432+
433+ :returns: iterator
434+ :rtype: types.GeneratorType
435+ :raises: NotImplementedError
436+ """
437+ if not has_juju_version("2.4.0"):
438+ # goal-state first appeared in 2.4.0.
439+ raise NotImplementedError("goal-state")
440+ _goal_state = goal_state()
441+ return (key for key in _goal_state['units']
442+ if '/' in key and key != local_unit())
443+
444+
445+def expected_related_units(reltype=None):
446+ """Get a generator for units we expect to join relation based on
447+ goal-state.
448+
449+ Note that you can not use this function for the peer relation, take a look
450+ at expected_peer_units() for that.
451+
452+ This function will raise KeyError if you request information for a
453+ relation type for which juju goal-state does not have information. It will
454+ raise NotImplementedError if used with juju versions without goal-state
455+ support.
456+
457+ Example usage:
458+ log('participant {} of {} joined relation {}'
459+ .format(len(related_units()),
460+ len(list(expected_related_units())),
461+ relation_type()))
462+
463+ :param reltype: Relation type to list data for, default is to list data for
464+ the relation type we are currently executing a hook for.
465+ :type reltype: str
466+ :returns: iterator
467+ :rtype: types.GeneratorType
468+ :raises: KeyError, NotImplementedError
469+ """
470+ if not has_juju_version("2.4.4"):
471+ # goal-state existed in 2.4.0, but did not list individual units to
472+ # join a relation in 2.4.1 through 2.4.3. (LP: #1794739)
473+ raise NotImplementedError("goal-state relation unit count")
474+ reltype = reltype or relation_type()
475+ _goal_state = goal_state()
476+ return (key for key in _goal_state['relations'][reltype] if '/' in key)
477+
478+
479 @cached
480 def relation_for_unit(unit=None, rid=None):
481 """Get the json represenation of a unit's relation"""
482@@ -478,6 +619,24 @@
483 return yaml.safe_load(md)
484
485
486+def _metadata_unit(unit):
487+ """Given the name of a unit (e.g. apache2/0), get the unit charm's
488+ metadata.yaml. Very similar to metadata() but allows us to inspect
489+ other units. Unit needs to be co-located, such as a subordinate or
490+ principal/primary.
491+
492+ :returns: metadata.yaml as a python object.
493+
494+ """
495+ basedir = os.sep.join(charm_dir().split(os.sep)[:-2])
496+ unitdir = 'unit-{}'.format(unit.replace(os.sep, '-'))
497+ joineddir = os.path.join(basedir, unitdir, 'charm', 'metadata.yaml')
498+ if not os.path.exists(joineddir):
499+ return None
500+ with open(joineddir) as md:
501+ return yaml.safe_load(md)
502+
503+
504 @cached
505 def relation_types():
506 """Get a list of relation types supported by this charm"""
507@@ -602,18 +761,31 @@
508 return False
509
510
511+def _port_op(op_name, port, protocol="TCP"):
512+ """Open or close a service network port"""
513+ _args = [op_name]
514+ icmp = protocol.upper() == "ICMP"
515+ if icmp:
516+ _args.append(protocol)
517+ else:
518+ _args.append('{}/{}'.format(port, protocol))
519+ try:
520+ subprocess.check_call(_args)
521+ except subprocess.CalledProcessError:
522+ # Older Juju pre 2.3 doesn't support ICMP
523+ # so treat it as a no-op if it fails.
524+ if not icmp:
525+ raise
526+
527+
528 def open_port(port, protocol="TCP"):
529 """Open a service network port"""
530- _args = ['open-port']
531- _args.append('{}/{}'.format(port, protocol))
532- subprocess.check_call(_args)
533+ _port_op('open-port', port, protocol)
534
535
536 def close_port(port, protocol="TCP"):
537 """Close a service network port"""
538- _args = ['close-port']
539- _args.append('{}/{}'.format(port, protocol))
540- subprocess.check_call(_args)
541+ _port_op('close-port', port, protocol)
542
543
544 def open_ports(start, end, protocol="TCP"):
545@@ -630,6 +802,17 @@
546 subprocess.check_call(_args)
547
548
549+def opened_ports():
550+ """Get the opened ports
551+
552+ *Note that this will only show ports opened in a previous hook*
553+
554+ :returns: Opened ports as a list of strings: ``['8080/tcp', '8081-8083/tcp']``
555+ """
556+ _args = ['opened-ports', '--format=json']
557+ return json.loads(subprocess.check_output(_args).decode('UTF-8'))
558+
559+
560 @cached
561 def unit_get(attribute):
562 """Get the unit ID for the remote unit"""
563@@ -751,8 +934,15 @@
564 return wrapper
565
566
567+class NoNetworkBinding(Exception):
568+ pass
569+
570+
571 def charm_dir():
572 """Return the root directory of the current charm"""
573+ d = os.environ.get('JUJU_CHARM_DIR')
574+ if d is not None:
575+ return d
576 return os.environ.get('CHARM_DIR')
577
578
579@@ -874,6 +1064,14 @@
580
581
582 @translate_exc(from_exc=OSError, to_exc=NotImplementedError)
583+@cached
584+def goal_state():
585+ """Juju goal state values"""
586+ cmd = ['goal-state', '--format=json']
587+ return json.loads(subprocess.check_output(cmd).decode('UTF-8'))
588+
589+
590+@translate_exc(from_exc=OSError, to_exc=NotImplementedError)
591 def is_leader():
592 """Does the current unit hold the juju leadership
593
594@@ -967,7 +1165,6 @@
595 universal_newlines=True).strip()
596
597
598-@cached
599 def has_juju_version(minimum_version):
600 """Return True if the Juju version is at least the provided version"""
601 return LooseVersion(juju_version()) >= LooseVersion(minimum_version)
602@@ -1027,6 +1224,8 @@
603 @translate_exc(from_exc=OSError, to_exc=NotImplementedError)
604 def network_get_primary_address(binding):
605 '''
606+ Deprecated since Juju 2.3; use network_get()
607+
608 Retrieve the primary network address for a named binding
609
610 :param binding: string. The name of a relation of extra-binding
611@@ -1034,7 +1233,41 @@
612 :raise: NotImplementedError if run on Juju < 2.0
613 '''
614 cmd = ['network-get', '--primary-address', binding]
615- return subprocess.check_output(cmd).decode('UTF-8').strip()
616+ try:
617+ response = subprocess.check_output(
618+ cmd,
619+ stderr=subprocess.STDOUT).decode('UTF-8').strip()
620+ except CalledProcessError as e:
621+ if 'no network config found for binding' in e.output.decode('UTF-8'):
622+ raise NoNetworkBinding("No network binding for {}"
623+ .format(binding))
624+ else:
625+ raise
626+ return response
627+
628+
629+def network_get(endpoint, relation_id=None):
630+ """
631+ Retrieve the network details for a relation endpoint
632+
633+ :param endpoint: string. The name of a relation endpoint
634+ :param relation_id: int. The ID of the relation for the current context.
635+ :return: dict. The loaded YAML output of the network-get query.
636+ :raise: NotImplementedError if request not supported by the Juju version.
637+ """
638+ if not has_juju_version('2.2'):
639+ raise NotImplementedError(juju_version()) # earlier versions require --primary-address
640+ if relation_id and not has_juju_version('2.3'):
641+ raise NotImplementedError # 2.3 added the -r option
642+
643+ cmd = ['network-get', endpoint, '--format', 'yaml']
644+ if relation_id:
645+ cmd.append('-r')
646+ cmd.append(relation_id)
647+ response = subprocess.check_output(
648+ cmd,
649+ stderr=subprocess.STDOUT).decode('UTF-8').strip()
650+ return yaml.safe_load(response)
651
652
653 def add_metric(*args, **kwargs):
654@@ -1066,3 +1299,192 @@
655 """Get the meter status information, if running in the meter-status-changed
656 hook."""
657 return os.environ.get('JUJU_METER_INFO')
658+
659+
660+def iter_units_for_relation_name(relation_name):
661+ """Iterate through all units in a relation
662+
663+ Generator that iterates through all the units in a relation and yields
664+ a named tuple with rid and unit field names.
665+
666+ Usage:
667+ data = [(u.rid, u.unit)
668+ for u in iter_units_for_relation_name(relation_name)]
669+
670+ :param relation_name: string relation name
671+ :yield: Named Tuple with rid and unit field names
672+ """
673+ RelatedUnit = namedtuple('RelatedUnit', 'rid, unit')
674+ for rid in relation_ids(relation_name):
675+ for unit in related_units(rid):
676+ yield RelatedUnit(rid, unit)
677+
678+
679+def ingress_address(rid=None, unit=None):
680+ """
681+ Retrieve the ingress-address from a relation when available.
682+ Otherwise, return the private-address.
683+
684+ When used on the consuming side of the relation (unit is a remote
685+ unit), the ingress-address is the IP address that this unit needs
686+ to use to reach the provided service on the remote unit.
687+
688+ When used on the providing side of the relation (unit == local_unit()),
689+ the ingress-address is the IP address that is advertised to remote
690+ units on this relation. Remote units need to use this address to
691+ reach the local provided service on this unit.
692+
693+ Note that charms may document some other method to use in
694+ preference to the ingress_address(), such as an address provided
695+ on a different relation attribute or a service discovery mechanism.
696+ This allows charms to redirect inbound connections to their peers
697+ or different applications such as load balancers.
698+
699+ Usage:
700+ addresses = [ingress_address(rid=u.rid, unit=u.unit)
701+ for u in iter_units_for_relation_name(relation_name)]
702+
703+ :param rid: string relation id
704+ :param unit: string unit name
705+ :side effect: calls relation_get
706+ :return: string IP address
707+ """
708+ settings = relation_get(rid=rid, unit=unit)
709+ return (settings.get('ingress-address') or
710+ settings.get('private-address'))
711+
712+
713+def egress_subnets(rid=None, unit=None):
714+ """
715+ Retrieve the egress-subnets from a relation.
716+
717+ This function is to be used on the providing side of the
718+ relation, and provides the ranges of addresses that client
719+ connections may come from. The result is uninteresting on
720+ the consuming side of a relation (unit == local_unit()).
721+
722+ Returns a stable list of subnets in CIDR format.
723+ eg. ['192.168.1.0/24', '2001::F00F/128']
724+
725+ If egress-subnets is not available, falls back to using the published
726+ ingress-address, or finally private-address.
727+
728+ :param rid: string relation id
729+ :param unit: string unit name
730+ :side effect: calls relation_get
731+ :return: list of subnets in CIDR format. eg. ['192.168.1.0/24', '2001::F00F/128']
732+ """
733+ def _to_range(addr):
734+ if re.search(r'^(?:\d{1,3}\.){3}\d{1,3}$', addr) is not None:
735+ addr += '/32'
736+ elif ':' in addr and '/' not in addr: # IPv6
737+ addr += '/128'
738+ return addr
739+
740+ settings = relation_get(rid=rid, unit=unit)
741+ if 'egress-subnets' in settings:
742+ return [n.strip() for n in settings['egress-subnets'].split(',') if n.strip()]
743+ if 'ingress-address' in settings:
744+ return [_to_range(settings['ingress-address'])]
745+ if 'private-address' in settings:
746+ return [_to_range(settings['private-address'])]
747+ return [] # Should never happen
748+
749+
750+def unit_doomed(unit=None):
751+ """Determines if the unit is being removed from the model
752+
753+ Requires Juju 2.4.1.
754+
755+ :param unit: string unit name, defaults to local_unit
756+ :side effect: calls goal_state
757+ :side effect: calls local_unit
758+ :side effect: calls has_juju_version
759+ :return: True if the unit is being removed, already gone, or never existed
760+ """
761+ if not has_juju_version("2.4.1"):
762+ # We cannot risk blindly returning False for 'we don't know',
763+ # because that could cause data loss; if call sites don't
764+ # need an accurate answer, they likely don't need this helper
765+ # at all.
766+ # goal-state existed in 2.4.0, but did not handle removals
767+ # correctly until 2.4.1.
768+ raise NotImplementedError("is_doomed")
769+ if unit is None:
770+ unit = local_unit()
771+ gs = goal_state()
772+ units = gs.get('units', {})
773+ if unit not in units:
774+ return True
775+ # I don't think 'dead' units ever show up in the goal-state, but
776+ # check anyway in addition to 'dying'.
777+ return units[unit]['status'] in ('dying', 'dead')
778+
779+
780+def env_proxy_settings(selected_settings=None):
781+ """Get proxy settings from process environment variables.
782+
783+ Get charm proxy settings from environment variables that correspond to
784+ juju-http-proxy, juju-https-proxy and juju-no-proxy (available as of 2.4.2,
785+ see lp:1782236) in a format suitable for passing to an application that
786+ reacts to proxy settings passed as environment variables. Some applications
787+ support lowercase or uppercase notation (e.g. curl), some support only
788+ lowercase (e.g. wget), there are also subjectively rare cases of only
789+ uppercase notation support. no_proxy CIDR and wildcard support also varies
790+ between runtimes and applications as there is no enforced standard.
791+
792+ Some applications may connect to multiple destinations and expose config
793+ options that would affect only proxy settings for a specific destination
794+ these should be handled in charms in an application-specific manner.
795+
796+ :param selected_settings: format only a subset of possible settings
797+ :type selected_settings: list
798+ :rtype: Option(None, dict[str, str])
799+ """
800+ SUPPORTED_SETTINGS = {
801+ 'http': 'HTTP_PROXY',
802+ 'https': 'HTTPS_PROXY',
803+ 'no_proxy': 'NO_PROXY',
804+ 'ftp': 'FTP_PROXY'
805+ }
806+ if selected_settings is None:
807+ selected_settings = SUPPORTED_SETTINGS
808+
809+ selected_vars = [v for k, v in SUPPORTED_SETTINGS.items()
810+ if k in selected_settings]
811+ proxy_settings = {}
812+ for var in selected_vars:
813+ var_val = os.getenv(var)
814+ if var_val:
815+ proxy_settings[var] = var_val
816+ proxy_settings[var.lower()] = var_val
817+ # Now handle juju-prefixed environment variables. The legacy vs new
818+ # environment variable usage is mutually exclusive
819+ charm_var_val = os.getenv('JUJU_CHARM_{}'.format(var))
820+ if charm_var_val:
821+ proxy_settings[var] = charm_var_val
822+ proxy_settings[var.lower()] = charm_var_val
823+ if 'no_proxy' in proxy_settings:
824+ if _contains_range(proxy_settings['no_proxy']):
825+ log(RANGE_WARNING, level=WARNING)
826+ return proxy_settings if proxy_settings else None
827+
828+
829+def _contains_range(addresses):
830+ """Check for cidr or wildcard domain in a string.
831+
832+ Given a string comprising a comma separated list of ip addresses
833+ and domain names, determine whether the string contains IP ranges
834+ or wildcard domains.
835+
836+ :param addresses: comma separated list of domains and ip addresses.
837+ :type addresses: str
838+ """
839+ return (
840+ # Test for cidr (e.g. 10.20.20.0/24)
841+ "/" in addresses or
842+ # Test for wildcard domains (*.foo.com or .foo.com)
843+ "*" in addresses or
844+ addresses.startswith(".") or
845+ ",." in addresses or
846+ " ." in addresses)
847
848=== modified file 'hooks/charmhelpers/core/host.py'
849--- hooks/charmhelpers/core/host.py 2017-03-21 15:08:36 +0000
850+++ hooks/charmhelpers/core/host.py 2019-03-21 11:40:09 +0000
851@@ -34,28 +34,33 @@
852
853 from contextlib import contextmanager
854 from collections import OrderedDict
855-from .hookenv import log
856+from .hookenv import log, INFO, DEBUG, local_unit, charm_name
857 from .fstab import Fstab
858 from charmhelpers.osplatform import get_platform
859
860 __platform__ = get_platform()
861 if __platform__ == "ubuntu":
862- from charmhelpers.core.host_factory.ubuntu import (
863+ from charmhelpers.core.host_factory.ubuntu import ( # NOQA:F401
864 service_available,
865 add_new_group,
866 lsb_release,
867 cmp_pkgrevno,
868+ CompareHostReleases,
869+ get_distrib_codename,
870+ arch
871 ) # flake8: noqa -- ignore F401 for this import
872 elif __platform__ == "centos":
873- from charmhelpers.core.host_factory.centos import (
874+ from charmhelpers.core.host_factory.centos import ( # NOQA:F401
875 service_available,
876 add_new_group,
877 lsb_release,
878 cmp_pkgrevno,
879+ CompareHostReleases,
880 ) # flake8: noqa -- ignore F401 for this import
881
882 UPDATEDB_PATH = '/etc/updatedb.conf'
883
884+
885 def service_start(service_name, **kwargs):
886 """Start a system service.
887
888@@ -190,6 +195,7 @@
889 sysv_file = os.path.join(initd_dir, service_name)
890 if init_is_systemd():
891 service('disable', service_name)
892+ service('mask', service_name)
893 elif os.path.exists(upstart_file):
894 override_path = os.path.join(
895 init_dir, '{}.override'.format(service_name))
896@@ -222,6 +228,7 @@
897 upstart_file = os.path.join(init_dir, "{}.conf".format(service_name))
898 sysv_file = os.path.join(initd_dir, service_name)
899 if init_is_systemd():
900+ service('unmask', service_name)
901 service('enable', service_name)
902 elif os.path.exists(upstart_file):
903 override_path = os.path.join(
904@@ -283,8 +290,8 @@
905 for key, value in six.iteritems(kwargs):
906 parameter = '%s=%s' % (key, value)
907 cmd.append(parameter)
908- output = subprocess.check_output(cmd,
909- stderr=subprocess.STDOUT).decode('UTF-8')
910+ output = subprocess.check_output(
911+ cmd, stderr=subprocess.STDOUT).decode('UTF-8')
912 except subprocess.CalledProcessError:
913 return False
914 else:
915@@ -437,6 +444,51 @@
916 subprocess.check_call(cmd)
917
918
919+def chage(username, lastday=None, expiredate=None, inactive=None,
920+ mindays=None, maxdays=None, root=None, warndays=None):
921+ """Change user password expiry information
922+
923+ :param str username: User to update
924+ :param str lastday: Set when password was changed in YYYY-MM-DD format
925+ :param str expiredate: Set when user's account will no longer be
926+ accessible in YYYY-MM-DD format.
927+ -1 will remove an account expiration date.
928+ :param str inactive: Set the number of days of inactivity after a password
929+ has expired before the account is locked.
930+ -1 will remove an account's inactivity.
931+ :param str mindays: Set the minimum number of days between password
932+ changes to MIN_DAYS.
933+ 0 indicates the password can be changed anytime.
934+ :param str maxdays: Set the maximum number of days during which a
935+ password is valid.
936+ -1 as MAX_DAYS will remove checking maxdays
937+ :param str root: Apply changes in the CHROOT_DIR directory
938+ :param str warndays: Set the number of days of warning before a password
939+ change is required
940+ :raises subprocess.CalledProcessError: if call to chage fails
941+ """
942+ cmd = ['chage']
943+ if root:
944+ cmd.extend(['--root', root])
945+ if lastday:
946+ cmd.extend(['--lastday', lastday])
947+ if expiredate:
948+ cmd.extend(['--expiredate', expiredate])
949+ if inactive:
950+ cmd.extend(['--inactive', inactive])
951+ if mindays:
952+ cmd.extend(['--mindays', mindays])
953+ if maxdays:
954+ cmd.extend(['--maxdays', maxdays])
955+ if warndays:
956+ cmd.extend(['--warndays', warndays])
957+ cmd.append(username)
958+ subprocess.check_call(cmd)
959+
960+
961+remove_password_expiry = functools.partial(chage, expiredate='-1', inactive='-1', mindays='0', maxdays='-1')
962+
963+
964 def rsync(from_path, to_path, flags='-r', options=None, timeout=None):
965 """Replicate the contents of a path"""
966 options = options or ['--delete', '--executability']
967@@ -483,13 +535,45 @@
968
969 def write_file(path, content, owner='root', group='root', perms=0o444):
970 """Create or overwrite a file with the contents of a byte string."""
971- log("Writing file {} {}:{} {:o}".format(path, owner, group, perms))
972 uid = pwd.getpwnam(owner).pw_uid
973 gid = grp.getgrnam(group).gr_gid
974- with open(path, 'wb') as target:
975- os.fchown(target.fileno(), uid, gid)
976- os.fchmod(target.fileno(), perms)
977- target.write(content)
978+ # lets see if we can grab the file and compare the context, to avoid doing
979+ # a write.
980+ existing_content = None
981+ existing_uid, existing_gid, existing_perms = None, None, None
982+ try:
983+ with open(path, 'rb') as target:
984+ existing_content = target.read()
985+ stat = os.stat(path)
986+ existing_uid, existing_gid, existing_perms = (
987+ stat.st_uid, stat.st_gid, stat.st_mode
988+ )
989+ except Exception:
990+ pass
991+ if content != existing_content:
992+ log("Writing file {} {}:{} {:o}".format(path, owner, group, perms),
993+ level=DEBUG)
994+ with open(path, 'wb') as target:
995+ os.fchown(target.fileno(), uid, gid)
996+ os.fchmod(target.fileno(), perms)
997+ if six.PY3 and isinstance(content, six.string_types):
998+ content = content.encode('UTF-8')
999+ target.write(content)
1000+ return
1001+ # the contents were the same, but we might still need to change the
1002+ # ownership or permissions.
1003+ if existing_uid != uid:
1004+ log("Changing uid on already existing content: {} -> {}"
1005+ .format(existing_uid, uid), level=DEBUG)
1006+ os.chown(path, uid, -1)
1007+ if existing_gid != gid:
1008+ log("Changing gid on already existing content: {} -> {}"
1009+ .format(existing_gid, gid), level=DEBUG)
1010+ os.chown(path, -1, gid)
1011+ if existing_perms != perms:
1012+ log("Changing permissions on existing content: {} -> {}"
1013+ .format(existing_perms, perms), level=DEBUG)
1014+ os.chmod(path, perms)
1015
1016
1017 def fstab_remove(mp):
1018@@ -754,7 +838,7 @@
1019 ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n')
1020 ip_output = (line.strip() for line in ip_output if line)
1021
1022- key = re.compile('^[0-9]+:\s+(.+):')
1023+ key = re.compile(r'^[0-9]+:\s+(.+):')
1024 for line in ip_output:
1025 matched = re.search(key, line)
1026 if matched:
1027@@ -899,6 +983,20 @@
1028
1029
1030 def add_to_updatedb_prunepath(path, updatedb_path=UPDATEDB_PATH):
1031+ """Adds the specified path to the mlocate's updatedb.conf PRUNEPATH list.
1032+
1033+ This method has no effect if the path specified by updatedb_path does not
1034+ exist or is not a file.
1035+
1036+ @param path: string the path to add to the updatedb.conf PRUNEPATHS value
1037+ @param updatedb_path: the path to the updatedb.conf file
1038+ """
1039+ if not os.path.exists(updatedb_path) or os.path.isdir(updatedb_path):
1040+ # If the updatedb.conf file doesn't exist then don't attempt to update
1041+ # the file as the package providing mlocate may not be installed on
1042+ # the local system
1043+ return
1044+
1045 with open(updatedb_path, 'r+') as f_id:
1046 updatedb_text = f_id.read()
1047 output = updatedb(updatedb_text, path)
1048@@ -918,3 +1016,62 @@
1049 lines[i] = 'PRUNEPATHS="{}"'.format(' '.join(paths))
1050 output = "\n".join(lines)
1051 return output
1052+
1053+
1054+def modulo_distribution(modulo=3, wait=30, non_zero_wait=False):
1055+ """ Modulo distribution
1056+
1057+ This helper uses the unit number, a modulo value and a constant wait time
1058+ to produce a calculated wait time distribution. This is useful in large
1059+ scale deployments to distribute load during an expensive operation such as
1060+ service restarts.
1061+
1062+ If you have 1000 nodes that need to restart 100 at a time 1 minute at a
1063+ time:
1064+
1065+ time.wait(modulo_distribution(modulo=100, wait=60))
1066+ restart()
1067+
1068+ If you need restarts to happen serially set modulo to the exact number of
1069+ nodes and set a high constant wait time:
1070+
1071+ time.wait(modulo_distribution(modulo=10, wait=120))
1072+ restart()
1073+
1074+ @param modulo: int The modulo number creates the group distribution
1075+ @param wait: int The constant time wait value
1076+ @param non_zero_wait: boolean Override unit % modulo == 0,
1077+ return modulo * wait. Used to avoid collisions with
1078+ leader nodes which are often given priority.
1079+ @return: int Calculated time to wait for unit operation
1080+ """
1081+ unit_number = int(local_unit().split('/')[1])
1082+ calculated_wait_time = (unit_number % modulo) * wait
1083+ if non_zero_wait and calculated_wait_time == 0:
1084+ return modulo * wait
1085+ else:
1086+ return calculated_wait_time
1087+
1088+
1089+def install_ca_cert(ca_cert, name=None):
1090+ """
1091+ Install the given cert as a trusted CA.
1092+
1093+ The ``name`` is the stem of the filename where the cert is written, and if
1094+ not provided, it will default to ``juju-{charm_name}``.
1095+
1096+ If the cert is empty or None, or is unchanged, nothing is done.
1097+ """
1098+ if not ca_cert:
1099+ return
1100+ if not isinstance(ca_cert, bytes):
1101+ ca_cert = ca_cert.encode('utf8')
1102+ if not name:
1103+ name = 'juju-{}'.format(charm_name())
1104+ cert_file = '/usr/local/share/ca-certificates/{}.crt'.format(name)
1105+ new_hash = hashlib.md5(ca_cert).hexdigest()
1106+ if file_hash(cert_file) == new_hash:
1107+ return
1108+ log("Installing new CA cert at: {}".format(cert_file), level=INFO)
1109+ write_file(cert_file, ca_cert)
1110+ subprocess.check_call(['update-ca-certificates', '--fresh'])
1111
1112=== modified file 'hooks/charmhelpers/core/host_factory/centos.py'
1113--- hooks/charmhelpers/core/host_factory/centos.py 2017-03-02 15:18:31 +0000
1114+++ hooks/charmhelpers/core/host_factory/centos.py 2019-03-21 11:40:09 +0000
1115@@ -2,6 +2,22 @@
1116 import yum
1117 import os
1118
1119+from charmhelpers.core.strutils import BasicStringComparator
1120+
1121+
1122+class CompareHostReleases(BasicStringComparator):
1123+ """Provide comparisons of Host releases.
1124+
1125+ Use in the form of
1126+
1127+ if CompareHostReleases(release) > 'trusty':
1128+ # do something with mitaka
1129+ """
1130+
1131+ def __init__(self, item):
1132+ raise NotImplementedError(
1133+ "CompareHostReleases() is not implemented for CentOS")
1134+
1135
1136 def service_available(service_name):
1137 # """Determine whether a system service is available."""
1138
1139=== modified file 'hooks/charmhelpers/core/host_factory/ubuntu.py'
1140--- hooks/charmhelpers/core/host_factory/ubuntu.py 2017-03-02 15:18:31 +0000
1141+++ hooks/charmhelpers/core/host_factory/ubuntu.py 2019-03-21 11:40:09 +0000
1142@@ -1,5 +1,41 @@
1143 import subprocess
1144
1145+from charmhelpers.core.hookenv import cached
1146+from charmhelpers.core.strutils import BasicStringComparator
1147+
1148+
1149+UBUNTU_RELEASES = (
1150+ 'lucid',
1151+ 'maverick',
1152+ 'natty',
1153+ 'oneiric',
1154+ 'precise',
1155+ 'quantal',
1156+ 'raring',
1157+ 'saucy',
1158+ 'trusty',
1159+ 'utopic',
1160+ 'vivid',
1161+ 'wily',
1162+ 'xenial',
1163+ 'yakkety',
1164+ 'zesty',
1165+ 'artful',
1166+ 'bionic',
1167+ 'cosmic',
1168+)
1169+
1170+
1171+class CompareHostReleases(BasicStringComparator):
1172+ """Provide comparisons of Ubuntu releases.
1173+
1174+ Use in the form of
1175+
1176+ if CompareHostReleases(release) > 'trusty':
1177+ # do something with mitaka
1178+ """
1179+ _list = UBUNTU_RELEASES
1180+
1181
1182 def service_available(service_name):
1183 """Determine whether a system service is available"""
1184@@ -37,6 +73,14 @@
1185 return d
1186
1187
1188+def get_distrib_codename():
1189+ """Return the codename of the distribution
1190+ :returns: The codename
1191+ :rtype: str
1192+ """
1193+ return lsb_release()['DISTRIB_CODENAME'].lower()
1194+
1195+
1196 def cmp_pkgrevno(package, revno, pkgcache=None):
1197 """Compare supplied revno with the revno of the installed package.
1198
1199@@ -54,3 +98,16 @@
1200 pkgcache = apt_cache()
1201 pkg = pkgcache[package]
1202 return apt_pkg.version_compare(pkg.current_ver.ver_str, revno)
1203+
1204+
1205+@cached
1206+def arch():
1207+ """Return the package architecture as a string.
1208+
1209+ :returns: the architecture
1210+ :rtype: str
1211+ :raises: subprocess.CalledProcessError if dpkg command fails
1212+ """
1213+ return subprocess.check_output(
1214+ ['dpkg', '--print-architecture']
1215+ ).rstrip().decode('UTF-8')
1216
1217=== modified file 'hooks/charmhelpers/core/kernel.py'
1218--- hooks/charmhelpers/core/kernel.py 2017-03-02 15:18:31 +0000
1219+++ hooks/charmhelpers/core/kernel.py 2019-03-21 11:40:09 +0000
1220@@ -26,12 +26,12 @@
1221
1222 __platform__ = get_platform()
1223 if __platform__ == "ubuntu":
1224- from charmhelpers.core.kernel_factory.ubuntu import (
1225+ from charmhelpers.core.kernel_factory.ubuntu import ( # NOQA:F401
1226 persistent_modprobe,
1227 update_initramfs,
1228 ) # flake8: noqa -- ignore F401 for this import
1229 elif __platform__ == "centos":
1230- from charmhelpers.core.kernel_factory.centos import (
1231+ from charmhelpers.core.kernel_factory.centos import ( # NOQA:F401
1232 persistent_modprobe,
1233 update_initramfs,
1234 ) # flake8: noqa -- ignore F401 for this import
1235
1236=== modified file 'hooks/charmhelpers/core/services/base.py'
1237--- hooks/charmhelpers/core/services/base.py 2017-03-02 15:18:31 +0000
1238+++ hooks/charmhelpers/core/services/base.py 2019-03-21 11:40:09 +0000
1239@@ -307,23 +307,34 @@
1240 """
1241 def __call__(self, manager, service_name, event_name):
1242 service = manager.get_service(service_name)
1243- new_ports = service.get('ports', [])
1244+ # turn this generator into a list,
1245+ # as we'll be going over it multiple times
1246+ new_ports = list(service.get('ports', []))
1247 port_file = os.path.join(hookenv.charm_dir(), '.{}.ports'.format(service_name))
1248 if os.path.exists(port_file):
1249 with open(port_file) as fp:
1250 old_ports = fp.read().split(',')
1251 for old_port in old_ports:
1252- if bool(old_port):
1253- old_port = int(old_port)
1254- if old_port not in new_ports:
1255- hookenv.close_port(old_port)
1256+ if bool(old_port) and not self.ports_contains(old_port, new_ports):
1257+ hookenv.close_port(old_port)
1258 with open(port_file, 'w') as fp:
1259 fp.write(','.join(str(port) for port in new_ports))
1260 for port in new_ports:
1261+ # A port is either a number or 'ICMP'
1262+ protocol = 'TCP'
1263+ if str(port).upper() == 'ICMP':
1264+ protocol = 'ICMP'
1265 if event_name == 'start':
1266- hookenv.open_port(port)
1267+ hookenv.open_port(port, protocol)
1268 elif event_name == 'stop':
1269- hookenv.close_port(port)
1270+ hookenv.close_port(port, protocol)
1271+
1272+ def ports_contains(self, port, ports):
1273+ if not bool(port):
1274+ return False
1275+ if str(port).upper() != 'ICMP':
1276+ port = int(port)
1277+ return port in ports
1278
1279
1280 def service_stop(service_name):
1281
1282=== modified file 'hooks/charmhelpers/core/strutils.py'
1283--- hooks/charmhelpers/core/strutils.py 2017-03-02 15:18:31 +0000
1284+++ hooks/charmhelpers/core/strutils.py 2019-03-21 11:40:09 +0000
1285@@ -61,10 +61,69 @@
1286 if isinstance(value, six.string_types):
1287 value = six.text_type(value)
1288 else:
1289- msg = "Unable to interpret non-string value '%s' as boolean" % (value)
1290+ msg = "Unable to interpret non-string value '%s' as bytes" % (value)
1291 raise ValueError(msg)
1292 matches = re.match("([0-9]+)([a-zA-Z]+)", value)
1293- if not matches:
1294- msg = "Unable to interpret string value '%s' as bytes" % (value)
1295- raise ValueError(msg)
1296- return int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)])
1297+ if matches:
1298+ size = int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)])
1299+ else:
1300+ # Assume that value passed in is bytes
1301+ try:
1302+ size = int(value)
1303+ except ValueError:
1304+ msg = "Unable to interpret string value '%s' as bytes" % (value)
1305+ raise ValueError(msg)
1306+ return size
1307+
1308+
1309+class BasicStringComparator(object):
1310+ """Provides a class that will compare strings from an iterator type object.
1311+ Used to provide > and < comparisons on strings that may not necessarily be
1312+ alphanumerically ordered. e.g. OpenStack or Ubuntu releases AFTER the
1313+ z-wrap.
1314+ """
1315+
1316+ _list = None
1317+
1318+ def __init__(self, item):
1319+ if self._list is None:
1320+ raise Exception("Must define the _list in the class definition!")
1321+ try:
1322+ self.index = self._list.index(item)
1323+ except Exception:
1324+ raise KeyError("Item '{}' is not in list '{}'"
1325+ .format(item, self._list))
1326+
1327+ def __eq__(self, other):
1328+ assert isinstance(other, str) or isinstance(other, self.__class__)
1329+ return self.index == self._list.index(other)
1330+
1331+ def __ne__(self, other):
1332+ return not self.__eq__(other)
1333+
1334+ def __lt__(self, other):
1335+ assert isinstance(other, str) or isinstance(other, self.__class__)
1336+ return self.index < self._list.index(other)
1337+
1338+ def __ge__(self, other):
1339+ return not self.__lt__(other)
1340+
1341+ def __gt__(self, other):
1342+ assert isinstance(other, str) or isinstance(other, self.__class__)
1343+ return self.index > self._list.index(other)
1344+
1345+ def __le__(self, other):
1346+ return not self.__gt__(other)
1347+
1348+ def __str__(self):
1349+ """Always give back the item at the index so it can be used in
1350+ comparisons like:
1351+
1352+ s_mitaka = CompareOpenStack('mitaka')
1353+ s_newton = CompareOpenstack('newton')
1354+
1355+ assert s_newton > s_mitaka
1356+
1357+ @returns: <string>
1358+ """
1359+ return self._list[self.index]
1360
1361=== modified file 'hooks/charmhelpers/core/sysctl.py'
1362--- hooks/charmhelpers/core/sysctl.py 2017-03-02 15:18:31 +0000
1363+++ hooks/charmhelpers/core/sysctl.py 2019-03-21 11:40:09 +0000
1364@@ -28,27 +28,38 @@
1365 __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>'
1366
1367
1368-def create(sysctl_dict, sysctl_file):
1369+def create(sysctl_dict, sysctl_file, ignore=False):
1370 """Creates a sysctl.conf file from a YAML associative array
1371
1372- :param sysctl_dict: a YAML-formatted string of sysctl options eg "{ 'kernel.max_pid': 1337 }"
1373+ :param sysctl_dict: a dict or YAML-formatted string of sysctl
1374+ options eg "{ 'kernel.max_pid': 1337 }"
1375 :type sysctl_dict: str
1376 :param sysctl_file: path to the sysctl file to be saved
1377 :type sysctl_file: str or unicode
1378+ :param ignore: If True, ignore "unknown variable" errors.
1379+ :type ignore: bool
1380 :returns: None
1381 """
1382- try:
1383- sysctl_dict_parsed = yaml.safe_load(sysctl_dict)
1384- except yaml.YAMLError:
1385- log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict),
1386- level=ERROR)
1387- return
1388+ if type(sysctl_dict) is not dict:
1389+ try:
1390+ sysctl_dict_parsed = yaml.safe_load(sysctl_dict)
1391+ except yaml.YAMLError:
1392+ log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict),
1393+ level=ERROR)
1394+ return
1395+ else:
1396+ sysctl_dict_parsed = sysctl_dict
1397
1398 with open(sysctl_file, "w") as fd:
1399 for key, value in sysctl_dict_parsed.items():
1400 fd.write("{}={}\n".format(key, value))
1401
1402- log("Updating sysctl_file: %s values: %s" % (sysctl_file, sysctl_dict_parsed),
1403+ log("Updating sysctl_file: {} values: {}".format(sysctl_file,
1404+ sysctl_dict_parsed),
1405 level=DEBUG)
1406
1407- check_call(["sysctl", "-p", sysctl_file])
1408+ call = ["sysctl", "-p", sysctl_file]
1409+ if ignore:
1410+ call.append("-e")
1411+
1412+ check_call(call)
1413
1414=== modified file 'hooks/charmhelpers/core/templating.py'
1415--- hooks/charmhelpers/core/templating.py 2017-03-02 15:18:31 +0000
1416+++ hooks/charmhelpers/core/templating.py 2019-03-21 11:40:09 +0000
1417@@ -20,7 +20,8 @@
1418
1419
1420 def render(source, target, context, owner='root', group='root',
1421- perms=0o444, templates_dir=None, encoding='UTF-8', template_loader=None):
1422+ perms=0o444, templates_dir=None, encoding='UTF-8',
1423+ template_loader=None, config_template=None):
1424 """
1425 Render a template.
1426
1427@@ -32,6 +33,9 @@
1428 The context should be a dict containing the values to be replaced in the
1429 template.
1430
1431+ config_template may be provided to render from a provided template instead
1432+ of loading from a file.
1433+
1434 The `owner`, `group`, and `perms` options will be passed to `write_file`.
1435
1436 If omitted, `templates_dir` defaults to the `templates` folder in the charm.
1437@@ -65,14 +69,19 @@
1438 if templates_dir is None:
1439 templates_dir = os.path.join(hookenv.charm_dir(), 'templates')
1440 template_env = Environment(loader=FileSystemLoader(templates_dir))
1441- try:
1442- source = source
1443- template = template_env.get_template(source)
1444- except exceptions.TemplateNotFound as e:
1445- hookenv.log('Could not load template %s from %s.' %
1446- (source, templates_dir),
1447- level=hookenv.ERROR)
1448- raise e
1449+
1450+ # load from a string if provided explicitly
1451+ if config_template is not None:
1452+ template = template_env.from_string(config_template)
1453+ else:
1454+ try:
1455+ source = source
1456+ template = template_env.get_template(source)
1457+ except exceptions.TemplateNotFound as e:
1458+ hookenv.log('Could not load template %s from %s.' %
1459+ (source, templates_dir),
1460+ level=hookenv.ERROR)
1461+ raise e
1462 content = template.render(context)
1463 if target is not None:
1464 target_dir = os.path.dirname(target)
1465
1466=== modified file 'hooks/charmhelpers/core/unitdata.py'
1467--- hooks/charmhelpers/core/unitdata.py 2017-03-02 15:18:31 +0000
1468+++ hooks/charmhelpers/core/unitdata.py 2019-03-21 11:40:09 +0000
1469@@ -166,6 +166,10 @@
1470
1471 To support dicts, lists, integer, floats, and booleans values
1472 are automatically json encoded/decoded.
1473+
1474+ Note: to facilitate unit testing, ':memory:' can be passed as the
1475+ path parameter which causes sqlite3 to only build the db in memory.
1476+ This should only be used for testing purposes.
1477 """
1478 def __init__(self, path=None):
1479 self.db_path = path
1480@@ -175,6 +179,9 @@
1481 else:
1482 self.db_path = os.path.join(
1483 os.environ.get('CHARM_DIR', ''), '.unit-state.db')
1484+ if self.db_path != ':memory:':
1485+ with open(self.db_path, 'a') as f:
1486+ os.fchmod(f.fileno(), 0o600)
1487 self.conn = sqlite3.connect('%s' % self.db_path)
1488 self.cursor = self.conn.cursor()
1489 self.revision = None
1490@@ -358,7 +365,7 @@
1491 try:
1492 yield self.revision
1493 self.revision = None
1494- except:
1495+ except Exception:
1496 self.flush(False)
1497 self.revision = None
1498 raise
1499
1500=== modified file 'hooks/charmhelpers/fetch/__init__.py'
1501--- hooks/charmhelpers/fetch/__init__.py 2017-03-02 15:18:31 +0000
1502+++ hooks/charmhelpers/fetch/__init__.py 2019-03-21 11:40:09 +0000
1503@@ -48,6 +48,13 @@
1504 pass
1505
1506
1507+class GPGKeyError(Exception):
1508+ """Exception occurs when a GPG key cannot be fetched or used. The message
1509+ indicates what the problem is.
1510+ """
1511+ pass
1512+
1513+
1514 class BaseFetchHandler(object):
1515
1516 """Base class for FetchHandler implementations in fetch plugins"""
1517@@ -77,21 +84,24 @@
1518 fetch = importlib.import_module(module)
1519
1520 filter_installed_packages = fetch.filter_installed_packages
1521-install = fetch.install
1522-upgrade = fetch.upgrade
1523-update = fetch.update
1524-purge = fetch.purge
1525+filter_missing_packages = fetch.filter_missing_packages
1526+install = fetch.apt_install
1527+upgrade = fetch.apt_upgrade
1528+update = _fetch_update = fetch.apt_update
1529+purge = fetch.apt_purge
1530 add_source = fetch.add_source
1531
1532 if __platform__ == "ubuntu":
1533 apt_cache = fetch.apt_cache
1534- apt_install = fetch.install
1535- apt_update = fetch.update
1536- apt_upgrade = fetch.upgrade
1537- apt_purge = fetch.purge
1538+ apt_install = fetch.apt_install
1539+ apt_update = fetch.apt_update
1540+ apt_upgrade = fetch.apt_upgrade
1541+ apt_purge = fetch.apt_purge
1542+ apt_autoremove = fetch.apt_autoremove
1543 apt_mark = fetch.apt_mark
1544 apt_hold = fetch.apt_hold
1545 apt_unhold = fetch.apt_unhold
1546+ import_key = fetch.import_key
1547 get_upstream_version = fetch.get_upstream_version
1548 elif __platform__ == "centos":
1549 yum_search = fetch.yum_search
1550@@ -135,7 +145,7 @@
1551 for source, key in zip(sources, keys):
1552 add_source(source, key)
1553 if update:
1554- fetch.update(fatal=True)
1555+ _fetch_update(fatal=True)
1556
1557
1558 def install_remote(source, *args, **kwargs):
1559
1560=== modified file 'hooks/charmhelpers/fetch/archiveurl.py'
1561--- hooks/charmhelpers/fetch/archiveurl.py 2017-03-02 15:18:31 +0000
1562+++ hooks/charmhelpers/fetch/archiveurl.py 2019-03-21 11:40:09 +0000
1563@@ -89,7 +89,7 @@
1564 :param str source: URL pointing to an archive file.
1565 :param str dest: Local path location to download archive file to.
1566 """
1567- # propogate all exceptions
1568+ # propagate all exceptions
1569 # URLError, OSError, etc
1570 proto, netloc, path, params, query, fragment = urlparse(source)
1571 if proto in ('http', 'https'):
1572
1573=== modified file 'hooks/charmhelpers/fetch/bzrurl.py'
1574--- hooks/charmhelpers/fetch/bzrurl.py 2017-03-02 15:18:31 +0000
1575+++ hooks/charmhelpers/fetch/bzrurl.py 2019-03-21 11:40:09 +0000
1576@@ -13,7 +13,7 @@
1577 # limitations under the License.
1578
1579 import os
1580-from subprocess import check_call
1581+from subprocess import STDOUT, check_output
1582 from charmhelpers.fetch import (
1583 BaseFetchHandler,
1584 UnhandledSource,
1585@@ -55,7 +55,7 @@
1586 cmd = ['bzr', 'branch']
1587 cmd += cmd_opts
1588 cmd += [source, dest]
1589- check_call(cmd)
1590+ check_output(cmd, stderr=STDOUT)
1591
1592 def install(self, source, dest=None, revno=None):
1593 url_parts = self.parse_url(source)
1594
1595=== modified file 'hooks/charmhelpers/fetch/centos.py'
1596--- hooks/charmhelpers/fetch/centos.py 2017-03-02 15:18:31 +0000
1597+++ hooks/charmhelpers/fetch/centos.py 2019-03-21 11:40:09 +0000
1598@@ -132,7 +132,7 @@
1599 key_file.write(key)
1600 key_file.flush()
1601 key_file.seek(0)
1602- subprocess.check_call(['rpm', '--import', key_file])
1603+ subprocess.check_call(['rpm', '--import', key_file.name])
1604 else:
1605 subprocess.check_call(['rpm', '--import', key])
1606
1607
1608=== modified file 'hooks/charmhelpers/fetch/giturl.py'
1609--- hooks/charmhelpers/fetch/giturl.py 2017-03-02 15:18:31 +0000
1610+++ hooks/charmhelpers/fetch/giturl.py 2019-03-21 11:40:09 +0000
1611@@ -13,7 +13,7 @@
1612 # limitations under the License.
1613
1614 import os
1615-from subprocess import check_call, CalledProcessError
1616+from subprocess import check_output, CalledProcessError, STDOUT
1617 from charmhelpers.fetch import (
1618 BaseFetchHandler,
1619 UnhandledSource,
1620@@ -50,7 +50,7 @@
1621 cmd = ['git', 'clone', source, dest, '--branch', branch]
1622 if depth:
1623 cmd.extend(['--depth', depth])
1624- check_call(cmd)
1625+ check_output(cmd, stderr=STDOUT)
1626
1627 def install(self, source, branch="master", dest=None, depth=None):
1628 url_parts = self.parse_url(source)
1629
1630=== added directory 'hooks/charmhelpers/fetch/python'
1631=== added file 'hooks/charmhelpers/fetch/python/__init__.py'
1632--- hooks/charmhelpers/fetch/python/__init__.py 1970-01-01 00:00:00 +0000
1633+++ hooks/charmhelpers/fetch/python/__init__.py 2019-03-21 11:40:09 +0000
1634@@ -0,0 +1,13 @@
1635+# Copyright 2014-2019 Canonical Limited.
1636+#
1637+# Licensed under the Apache License, Version 2.0 (the "License");
1638+# you may not use this file except in compliance with the License.
1639+# You may obtain a copy of the License at
1640+#
1641+# http://www.apache.org/licenses/LICENSE-2.0
1642+#
1643+# Unless required by applicable law or agreed to in writing, software
1644+# distributed under the License is distributed on an "AS IS" BASIS,
1645+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1646+# See the License for the specific language governing permissions and
1647+# limitations under the License.
1648
1649=== added file 'hooks/charmhelpers/fetch/python/debug.py'
1650--- hooks/charmhelpers/fetch/python/debug.py 1970-01-01 00:00:00 +0000
1651+++ hooks/charmhelpers/fetch/python/debug.py 2019-03-21 11:40:09 +0000
1652@@ -0,0 +1,54 @@
1653+#!/usr/bin/env python
1654+# coding: utf-8
1655+
1656+# Copyright 2014-2015 Canonical Limited.
1657+#
1658+# Licensed under the Apache License, Version 2.0 (the "License");
1659+# you may not use this file except in compliance with the License.
1660+# You may obtain a copy of the License at
1661+#
1662+# http://www.apache.org/licenses/LICENSE-2.0
1663+#
1664+# Unless required by applicable law or agreed to in writing, software
1665+# distributed under the License is distributed on an "AS IS" BASIS,
1666+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1667+# See the License for the specific language governing permissions and
1668+# limitations under the License.
1669+
1670+from __future__ import print_function
1671+
1672+import atexit
1673+import sys
1674+
1675+from charmhelpers.fetch.python.rpdb import Rpdb
1676+from charmhelpers.core.hookenv import (
1677+ open_port,
1678+ close_port,
1679+ ERROR,
1680+ log
1681+)
1682+
1683+__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>"
1684+
1685+DEFAULT_ADDR = "0.0.0.0"
1686+DEFAULT_PORT = 4444
1687+
1688+
1689+def _error(message):
1690+ log(message, level=ERROR)
1691+
1692+
1693+def set_trace(addr=DEFAULT_ADDR, port=DEFAULT_PORT):
1694+ """
1695+ Set a trace point using the remote debugger
1696+ """
1697+ atexit.register(close_port, port)
1698+ try:
1699+ log("Starting a remote python debugger session on %s:%s" % (addr,
1700+ port))
1701+ open_port(port)
1702+ debugger = Rpdb(addr=addr, port=port)
1703+ debugger.set_trace(sys._getframe().f_back)
1704+ except Exception:
1705+ _error("Cannot start a remote debug session on %s:%s" % (addr,
1706+ port))
1707
1708=== added file 'hooks/charmhelpers/fetch/python/packages.py'
1709--- hooks/charmhelpers/fetch/python/packages.py 1970-01-01 00:00:00 +0000
1710+++ hooks/charmhelpers/fetch/python/packages.py 2019-03-21 11:40:09 +0000
1711@@ -0,0 +1,154 @@
1712+#!/usr/bin/env python
1713+# coding: utf-8
1714+
1715+# Copyright 2014-2015 Canonical Limited.
1716+#
1717+# Licensed under the Apache License, Version 2.0 (the "License");
1718+# you may not use this file except in compliance with the License.
1719+# You may obtain a copy of the License at
1720+#
1721+# http://www.apache.org/licenses/LICENSE-2.0
1722+#
1723+# Unless required by applicable law or agreed to in writing, software
1724+# distributed under the License is distributed on an "AS IS" BASIS,
1725+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1726+# See the License for the specific language governing permissions and
1727+# limitations under the License.
1728+
1729+import os
1730+import six
1731+import subprocess
1732+import sys
1733+
1734+from charmhelpers.fetch import apt_install, apt_update
1735+from charmhelpers.core.hookenv import charm_dir, log
1736+
1737+__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>"
1738+
1739+
1740+def pip_execute(*args, **kwargs):
1741+ """Overridden pip_execute() to stop sys.path being changed.
1742+
1743+ The act of importing main from the pip module seems to add wheels
1744+ from the /usr/share/python-wheels which are installed by various tools.
1745+ This function ensures that sys.path remains the same after the call is
1746+ executed.
1747+ """
1748+ try:
1749+ _path = sys.path
1750+ try:
1751+ from pip import main as _pip_execute
1752+ except ImportError:
1753+ apt_update()
1754+ if six.PY2:
1755+ apt_install('python-pip')
1756+ else:
1757+ apt_install('python3-pip')
1758+ from pip import main as _pip_execute
1759+ _pip_execute(*args, **kwargs)
1760+ finally:
1761+ sys.path = _path
1762+
1763+
1764+def parse_options(given, available):
1765+ """Given a set of options, check if available"""
1766+ for key, value in sorted(given.items()):
1767+ if not value:
1768+ continue
1769+ if key in available:
1770+ yield "--{0}={1}".format(key, value)
1771+
1772+
1773+def pip_install_requirements(requirements, constraints=None, **options):
1774+ """Install a requirements file.
1775+
1776+ :param constraints: Path to pip constraints file.
1777+ http://pip.readthedocs.org/en/stable/user_guide/#constraints-files
1778+ """
1779+ command = ["install"]
1780+
1781+ available_options = ('proxy', 'src', 'log', )
1782+ for option in parse_options(options, available_options):
1783+ command.append(option)
1784+
1785+ command.append("-r {0}".format(requirements))
1786+ if constraints:
1787+ command.append("-c {0}".format(constraints))
1788+ log("Installing from file: {} with constraints {} "
1789+ "and options: {}".format(requirements, constraints, command))
1790+ else:
1791+ log("Installing from file: {} with options: {}".format(requirements,
1792+ command))
1793+ pip_execute(command)
1794+
1795+
1796+def pip_install(package, fatal=False, upgrade=False, venv=None,
1797+ constraints=None, **options):
1798+ """Install a python package"""
1799+ if venv:
1800+ venv_python = os.path.join(venv, 'bin/pip')
1801+ command = [venv_python, "install"]
1802+ else:
1803+ command = ["install"]
1804+
1805+ available_options = ('proxy', 'src', 'log', 'index-url', )
1806+ for option in parse_options(options, available_options):
1807+ command.append(option)
1808+
1809+ if upgrade:
1810+ command.append('--upgrade')
1811+
1812+ if constraints:
1813+ command.extend(['-c', constraints])
1814+
1815+ if isinstance(package, list):
1816+ command.extend(package)
1817+ else:
1818+ command.append(package)
1819+
1820+ log("Installing {} package with options: {}".format(package,
1821+ command))
1822+ if venv:
1823+ subprocess.check_call(command)
1824+ else:
1825+ pip_execute(command)
1826+
1827+
1828+def pip_uninstall(package, **options):
1829+ """Uninstall a python package"""
1830+ command = ["uninstall", "-q", "-y"]
1831+
1832+ available_options = ('proxy', 'log', )
1833+ for option in parse_options(options, available_options):
1834+ command.append(option)
1835+
1836+ if isinstance(package, list):
1837+ command.extend(package)
1838+ else:
1839+ command.append(package)
1840+
1841+ log("Uninstalling {} package with options: {}".format(package,
1842+ command))
1843+ pip_execute(command)
1844+
1845+
1846+def pip_list():
1847+ """Returns the list of current python installed packages
1848+ """
1849+ return pip_execute(["list"])
1850+
1851+
1852+def pip_create_virtualenv(path=None):
1853+ """Create an isolated Python environment."""
1854+ if six.PY2:
1855+ apt_install('python-virtualenv')
1856+ else:
1857+ apt_install('python3-virtualenv')
1858+
1859+ if path:
1860+ venv_path = path
1861+ else:
1862+ venv_path = os.path.join(charm_dir(), 'venv')
1863+
1864+ if not os.path.exists(venv_path):
1865+ subprocess.check_call(['virtualenv', venv_path])
1866
1867=== added file 'hooks/charmhelpers/fetch/python/rpdb.py'
1868--- hooks/charmhelpers/fetch/python/rpdb.py 1970-01-01 00:00:00 +0000
1869+++ hooks/charmhelpers/fetch/python/rpdb.py 2019-03-21 11:40:09 +0000
1870@@ -0,0 +1,56 @@
1871+# Copyright 2014-2015 Canonical Limited.
1872+#
1873+# Licensed under the Apache License, Version 2.0 (the "License");
1874+# you may not use this file except in compliance with the License.
1875+# You may obtain a copy of the License at
1876+#
1877+# http://www.apache.org/licenses/LICENSE-2.0
1878+#
1879+# Unless required by applicable law or agreed to in writing, software
1880+# distributed under the License is distributed on an "AS IS" BASIS,
1881+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1882+# See the License for the specific language governing permissions and
1883+# limitations under the License.
1884+
1885+"""Remote Python Debugger (pdb wrapper)."""
1886+
1887+import pdb
1888+import socket
1889+import sys
1890+
1891+__author__ = "Bertrand Janin <b@janin.com>"
1892+__version__ = "0.1.3"
1893+
1894+
1895+class Rpdb(pdb.Pdb):
1896+
1897+ def __init__(self, addr="127.0.0.1", port=4444):
1898+ """Initialize the socket and initialize pdb."""
1899+
1900+ # Backup stdin and stdout before replacing them by the socket handle
1901+ self.old_stdout = sys.stdout
1902+ self.old_stdin = sys.stdin
1903+
1904+ # Open a 'reusable' socket to let the webapp reload on the same port
1905+ self.skt = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
1906+ self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True)
1907+ self.skt.bind((addr, port))
1908+ self.skt.listen(1)
1909+ (clientsocket, address) = self.skt.accept()
1910+ handle = clientsocket.makefile('rw')
1911+ pdb.Pdb.__init__(self, completekey='tab', stdin=handle, stdout=handle)
1912+ sys.stdout = sys.stdin = handle
1913+
1914+ def shutdown(self):
1915+ """Revert stdin and stdout, close the socket."""
1916+ sys.stdout = self.old_stdout
1917+ sys.stdin = self.old_stdin
1918+ self.skt.close()
1919+ self.set_continue()
1920+
1921+ def do_continue(self, arg):
1922+ """Stop all operation on ``continue``."""
1923+ self.shutdown()
1924+ return 1
1925+
1926+ do_EOF = do_quit = do_exit = do_c = do_cont = do_continue
1927
1928=== added file 'hooks/charmhelpers/fetch/python/version.py'
1929--- hooks/charmhelpers/fetch/python/version.py 1970-01-01 00:00:00 +0000
1930+++ hooks/charmhelpers/fetch/python/version.py 2019-03-21 11:40:09 +0000
1931@@ -0,0 +1,32 @@
1932+#!/usr/bin/env python
1933+# coding: utf-8
1934+
1935+# Copyright 2014-2015 Canonical Limited.
1936+#
1937+# Licensed under the Apache License, Version 2.0 (the "License");
1938+# you may not use this file except in compliance with the License.
1939+# You may obtain a copy of the License at
1940+#
1941+# http://www.apache.org/licenses/LICENSE-2.0
1942+#
1943+# Unless required by applicable law or agreed to in writing, software
1944+# distributed under the License is distributed on an "AS IS" BASIS,
1945+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1946+# See the License for the specific language governing permissions and
1947+# limitations under the License.
1948+
1949+import sys
1950+
1951+__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>"
1952+
1953+
1954+def current_version():
1955+ """Current system python version"""
1956+ return sys.version_info
1957+
1958+
1959+def current_version_string():
1960+ """Current system python version as string major.minor.micro"""
1961+ return "{0}.{1}.{2}".format(sys.version_info.major,
1962+ sys.version_info.minor,
1963+ sys.version_info.micro)
1964
1965=== modified file 'hooks/charmhelpers/fetch/snap.py'
1966--- hooks/charmhelpers/fetch/snap.py 2017-03-21 15:08:36 +0000
1967+++ hooks/charmhelpers/fetch/snap.py 2019-03-21 11:40:09 +0000
1968@@ -18,21 +18,33 @@
1969 https://lists.ubuntu.com/archives/snapcraft/2016-September/001114.html
1970 """
1971 import subprocess
1972-from os import environ
1973+import os
1974 from time import sleep
1975 from charmhelpers.core.hookenv import log
1976
1977 __author__ = 'Joseph Borg <joseph.borg@canonical.com>'
1978
1979-SNAP_NO_LOCK = 1 # The return code for "couldn't acquire lock" in Snap (hopefully this will be improved).
1980+# The return code for "couldn't acquire lock" in Snap
1981+# (hopefully this will be improved).
1982+SNAP_NO_LOCK = 1
1983 SNAP_NO_LOCK_RETRY_DELAY = 10 # Wait X seconds between Snap lock checks.
1984 SNAP_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times.
1985+SNAP_CHANNELS = [
1986+ 'edge',
1987+ 'beta',
1988+ 'candidate',
1989+ 'stable',
1990+]
1991
1992
1993 class CouldNotAcquireLockException(Exception):
1994 pass
1995
1996
1997+class InvalidSnapChannel(Exception):
1998+ pass
1999+
2000+
2001 def _snap_exec(commands):
2002 """
2003 Execute snap commands.
2004@@ -47,13 +59,17 @@
2005
2006 while return_code is None or return_code == SNAP_NO_LOCK:
2007 try:
2008- return_code = subprocess.check_call(['snap'] + commands, env=environ)
2009+ return_code = subprocess.check_call(['snap'] + commands,
2010+ env=os.environ)
2011 except subprocess.CalledProcessError as e:
2012 retry_count += + 1
2013 if retry_count > SNAP_NO_LOCK_RETRY_COUNT:
2014- raise CouldNotAcquireLockException('Could not aquire lock after %s attempts' % SNAP_NO_LOCK_RETRY_COUNT)
2015+ raise CouldNotAcquireLockException(
2016+ 'Could not aquire lock after {} attempts'
2017+ .format(SNAP_NO_LOCK_RETRY_COUNT))
2018 return_code = e.returncode
2019- log('Snap failed to acquire lock, trying again in %s seconds.' % SNAP_NO_LOCK_RETRY_DELAY, level='WARN')
2020+ log('Snap failed to acquire lock, trying again in {} seconds.'
2021+ .format(SNAP_NO_LOCK_RETRY_DELAY, level='WARN'))
2022 sleep(SNAP_NO_LOCK_RETRY_DELAY)
2023
2024 return return_code
2025@@ -120,3 +136,15 @@
2026
2027 log(message, level='INFO')
2028 return _snap_exec(['refresh'] + flags + packages)
2029+
2030+
2031+def valid_snap_channel(channel):
2032+    """ Validate that the given snap channel exists
2033+
2034+ :raises InvalidSnapChannel: When channel does not exist
2035+ :return: Boolean
2036+ """
2037+ if channel.lower() in SNAP_CHANNELS:
2038+ return True
2039+ else:
2040+ raise InvalidSnapChannel("Invalid Snap Channel: {}".format(channel))
2041
2042=== modified file 'hooks/charmhelpers/fetch/ubuntu.py'
2043--- hooks/charmhelpers/fetch/ubuntu.py 2017-03-21 15:08:36 +0000
2044+++ hooks/charmhelpers/fetch/ubuntu.py 2019-03-21 11:40:09 +0000
2045@@ -12,29 +12,48 @@
2046 # See the License for the specific language governing permissions and
2047 # limitations under the License.
2048
2049+from collections import OrderedDict
2050 import os
2051+import platform
2052+import re
2053 import six
2054 import time
2055 import subprocess
2056
2057-from tempfile import NamedTemporaryFile
2058-from charmhelpers.core.host import (
2059- lsb_release
2060+from charmhelpers.core.host import get_distrib_codename
2061+
2062+from charmhelpers.core.hookenv import (
2063+ log,
2064+ DEBUG,
2065+ WARNING,
2066+ env_proxy_settings,
2067 )
2068-from charmhelpers.core.hookenv import log
2069-from charmhelpers.fetch import SourceConfigError
2070+from charmhelpers.fetch import SourceConfigError, GPGKeyError
2071
2072+PROPOSED_POCKET = (
2073+ "# Proposed\n"
2074+ "deb http://archive.ubuntu.com/ubuntu {}-proposed main universe "
2075+ "multiverse restricted\n")
2076+PROPOSED_PORTS_POCKET = (
2077+ "# Proposed\n"
2078+ "deb http://ports.ubuntu.com/ubuntu-ports {}-proposed main universe "
2079+ "multiverse restricted\n")
2080+# Only supports 64bit and ppc64 at the moment.
2081+ARCH_TO_PROPOSED_POCKET = {
2082+ 'x86_64': PROPOSED_POCKET,
2083+ 'ppc64le': PROPOSED_PORTS_POCKET,
2084+ 'aarch64': PROPOSED_PORTS_POCKET,
2085+ 's390x': PROPOSED_PORTS_POCKET,
2086+}
2087+CLOUD_ARCHIVE_URL = "http://ubuntu-cloud.archive.canonical.com/ubuntu"
2088+CLOUD_ARCHIVE_KEY_ID = '5EDB1B62EC4926EA'
2089 CLOUD_ARCHIVE = """# Ubuntu Cloud Archive
2090 deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main
2091 """
2092-
2093-PROPOSED_POCKET = """# Proposed
2094-deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted
2095-"""
2096-
2097 CLOUD_ARCHIVE_POCKETS = {
2098 # Folsom
2099 'folsom': 'precise-updates/folsom',
2100+ 'folsom/updates': 'precise-updates/folsom',
2101 'precise-folsom': 'precise-updates/folsom',
2102 'precise-folsom/updates': 'precise-updates/folsom',
2103 'precise-updates/folsom': 'precise-updates/folsom',
2104@@ -43,6 +62,7 @@
2105 'precise-proposed/folsom': 'precise-proposed/folsom',
2106 # Grizzly
2107 'grizzly': 'precise-updates/grizzly',
2108+ 'grizzly/updates': 'precise-updates/grizzly',
2109 'precise-grizzly': 'precise-updates/grizzly',
2110 'precise-grizzly/updates': 'precise-updates/grizzly',
2111 'precise-updates/grizzly': 'precise-updates/grizzly',
2112@@ -51,6 +71,7 @@
2113 'precise-proposed/grizzly': 'precise-proposed/grizzly',
2114 # Havana
2115 'havana': 'precise-updates/havana',
2116+ 'havana/updates': 'precise-updates/havana',
2117 'precise-havana': 'precise-updates/havana',
2118 'precise-havana/updates': 'precise-updates/havana',
2119 'precise-updates/havana': 'precise-updates/havana',
2120@@ -59,6 +80,7 @@
2121 'precise-proposed/havana': 'precise-proposed/havana',
2122 # Icehouse
2123 'icehouse': 'precise-updates/icehouse',
2124+ 'icehouse/updates': 'precise-updates/icehouse',
2125 'precise-icehouse': 'precise-updates/icehouse',
2126 'precise-icehouse/updates': 'precise-updates/icehouse',
2127 'precise-updates/icehouse': 'precise-updates/icehouse',
2128@@ -67,6 +89,7 @@
2129 'precise-proposed/icehouse': 'precise-proposed/icehouse',
2130 # Juno
2131 'juno': 'trusty-updates/juno',
2132+ 'juno/updates': 'trusty-updates/juno',
2133 'trusty-juno': 'trusty-updates/juno',
2134 'trusty-juno/updates': 'trusty-updates/juno',
2135 'trusty-updates/juno': 'trusty-updates/juno',
2136@@ -75,6 +98,7 @@
2137 'trusty-proposed/juno': 'trusty-proposed/juno',
2138 # Kilo
2139 'kilo': 'trusty-updates/kilo',
2140+ 'kilo/updates': 'trusty-updates/kilo',
2141 'trusty-kilo': 'trusty-updates/kilo',
2142 'trusty-kilo/updates': 'trusty-updates/kilo',
2143 'trusty-updates/kilo': 'trusty-updates/kilo',
2144@@ -83,6 +107,7 @@
2145 'trusty-proposed/kilo': 'trusty-proposed/kilo',
2146 # Liberty
2147 'liberty': 'trusty-updates/liberty',
2148+ 'liberty/updates': 'trusty-updates/liberty',
2149 'trusty-liberty': 'trusty-updates/liberty',
2150 'trusty-liberty/updates': 'trusty-updates/liberty',
2151 'trusty-updates/liberty': 'trusty-updates/liberty',
2152@@ -91,6 +116,7 @@
2153 'trusty-proposed/liberty': 'trusty-proposed/liberty',
2154 # Mitaka
2155 'mitaka': 'trusty-updates/mitaka',
2156+ 'mitaka/updates': 'trusty-updates/mitaka',
2157 'trusty-mitaka': 'trusty-updates/mitaka',
2158 'trusty-mitaka/updates': 'trusty-updates/mitaka',
2159 'trusty-updates/mitaka': 'trusty-updates/mitaka',
2160@@ -99,6 +125,7 @@
2161 'trusty-proposed/mitaka': 'trusty-proposed/mitaka',
2162 # Newton
2163 'newton': 'xenial-updates/newton',
2164+ 'newton/updates': 'xenial-updates/newton',
2165 'xenial-newton': 'xenial-updates/newton',
2166 'xenial-newton/updates': 'xenial-updates/newton',
2167 'xenial-updates/newton': 'xenial-updates/newton',
2168@@ -107,17 +134,51 @@
2169 'xenial-proposed/newton': 'xenial-proposed/newton',
2170 # Ocata
2171 'ocata': 'xenial-updates/ocata',
2172+ 'ocata/updates': 'xenial-updates/ocata',
2173 'xenial-ocata': 'xenial-updates/ocata',
2174 'xenial-ocata/updates': 'xenial-updates/ocata',
2175 'xenial-updates/ocata': 'xenial-updates/ocata',
2176 'ocata/proposed': 'xenial-proposed/ocata',
2177 'xenial-ocata/proposed': 'xenial-proposed/ocata',
2178- 'xenial-ocata/newton': 'xenial-proposed/ocata',
2179+ 'xenial-proposed/ocata': 'xenial-proposed/ocata',
2180+ # Pike
2181+ 'pike': 'xenial-updates/pike',
2182+ 'xenial-pike': 'xenial-updates/pike',
2183+ 'xenial-pike/updates': 'xenial-updates/pike',
2184+ 'xenial-updates/pike': 'xenial-updates/pike',
2185+ 'pike/proposed': 'xenial-proposed/pike',
2186+ 'xenial-pike/proposed': 'xenial-proposed/pike',
2187+ 'xenial-proposed/pike': 'xenial-proposed/pike',
2188+ # Queens
2189+ 'queens': 'xenial-updates/queens',
2190+ 'xenial-queens': 'xenial-updates/queens',
2191+ 'xenial-queens/updates': 'xenial-updates/queens',
2192+ 'xenial-updates/queens': 'xenial-updates/queens',
2193+ 'queens/proposed': 'xenial-proposed/queens',
2194+ 'xenial-queens/proposed': 'xenial-proposed/queens',
2195+ 'xenial-proposed/queens': 'xenial-proposed/queens',
2196+ # Rocky
2197+ 'rocky': 'bionic-updates/rocky',
2198+ 'bionic-rocky': 'bionic-updates/rocky',
2199+ 'bionic-rocky/updates': 'bionic-updates/rocky',
2200+ 'bionic-updates/rocky': 'bionic-updates/rocky',
2201+ 'rocky/proposed': 'bionic-proposed/rocky',
2202+ 'bionic-rocky/proposed': 'bionic-proposed/rocky',
2203+ 'bionic-proposed/rocky': 'bionic-proposed/rocky',
2204+ # Stein
2205+ 'stein': 'bionic-updates/stein',
2206+ 'bionic-stein': 'bionic-updates/stein',
2207+ 'bionic-stein/updates': 'bionic-updates/stein',
2208+ 'bionic-updates/stein': 'bionic-updates/stein',
2209+ 'stein/proposed': 'bionic-proposed/stein',
2210+ 'bionic-stein/proposed': 'bionic-proposed/stein',
2211+ 'bionic-proposed/stein': 'bionic-proposed/stein',
2212 }
2213
2214+
2215 APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT.
2216 CMD_RETRY_DELAY = 10 # Wait 10 seconds between command retries.
2217-CMD_RETRY_COUNT = 30 # Retry a failing fatal command X times.
2218+CMD_RETRY_COUNT = 3 # Retry a failing fatal command X times.
2219
2220
2221 def filter_installed_packages(packages):
2222@@ -135,6 +196,18 @@
2223 return _pkgs
2224
2225
2226+def filter_missing_packages(packages):
2227+ """Return a list of packages that are installed.
2228+
2229+ :param packages: list of packages to evaluate.
2230+ :returns list: Packages that are installed.
2231+ """
2232+ return list(
2233+ set(packages) -
2234+ set(filter_installed_packages(packages))
2235+ )
2236+
2237+
2238 def apt_cache(in_memory=True, progress=None):
2239 """Build and return an apt cache."""
2240 from apt import apt_pkg
2241@@ -145,7 +218,7 @@
2242 return apt_pkg.Cache(progress)
2243
2244
2245-def install(packages, options=None, fatal=False):
2246+def apt_install(packages, options=None, fatal=False):
2247 """Install one or more packages."""
2248 if options is None:
2249 options = ['--option=Dpkg::Options::=--force-confold']
2250@@ -162,7 +235,7 @@
2251 _run_apt_command(cmd, fatal)
2252
2253
2254-def upgrade(options=None, fatal=False, dist=False):
2255+def apt_upgrade(options=None, fatal=False, dist=False):
2256 """Upgrade all packages."""
2257 if options is None:
2258 options = ['--option=Dpkg::Options::=--force-confold']
2259@@ -177,13 +250,13 @@
2260 _run_apt_command(cmd, fatal)
2261
2262
2263-def update(fatal=False):
2264+def apt_update(fatal=False):
2265 """Update local apt cache."""
2266 cmd = ['apt-get', 'update']
2267 _run_apt_command(cmd, fatal)
2268
2269
2270-def purge(packages, fatal=False):
2271+def apt_purge(packages, fatal=False):
2272 """Purge one or more packages."""
2273 cmd = ['apt-get', '--assume-yes', 'purge']
2274 if isinstance(packages, six.string_types):
2275@@ -194,6 +267,14 @@
2276 _run_apt_command(cmd, fatal)
2277
2278
2279+def apt_autoremove(purge=True, fatal=False):
2280+    """Remove packages that are no longer required, optionally purging config."""
2281+ cmd = ['apt-get', '--assume-yes', 'autoremove']
2282+ if purge:
2283+ cmd.append('--purge')
2284+ _run_apt_command(cmd, fatal)
2285+
2286+
2287 def apt_mark(packages, mark, fatal=False):
2288 """Flag one or more packages using apt-mark."""
2289 log("Marking {} as {}".format(packages, mark))
2290@@ -217,7 +298,159 @@
2291 return apt_mark(packages, 'unhold', fatal=fatal)
2292
2293
2294-def add_source(source, key=None):
2295+def import_key(key):
2296+ """Import an ASCII Armor key.
2297+
2298+ A Radix64 format keyid is also supported for backwards
2299+ compatibility. In this case Ubuntu keyserver will be
2300+ queried for a key via HTTPS by its keyid. This method
2301+    is less preferable because https proxy servers may
2302+ require traffic decryption which is equivalent to a
2303+ man-in-the-middle attack (a proxy server impersonates
2304+ keyserver TLS certificates and has to be explicitly
2305+ trusted by the system).
2306+
2307+ :param key: A GPG key in ASCII armor format,
2308+ including BEGIN and END markers or a keyid.
2309+ :type key: (bytes, str)
2310+ :raises: GPGKeyError if the key could not be imported
2311+ """
2312+ key = key.strip()
2313+ if '-' in key or '\n' in key:
2314+ # Send everything not obviously a keyid to GPG to import, as
2315+ # we trust its validation better than our own. eg. handling
2316+ # comments before the key.
2317+ log("PGP key found (looks like ASCII Armor format)", level=DEBUG)
2318+ if ('-----BEGIN PGP PUBLIC KEY BLOCK-----' in key and
2319+ '-----END PGP PUBLIC KEY BLOCK-----' in key):
2320+ log("Writing provided PGP key in the binary format", level=DEBUG)
2321+ if six.PY3:
2322+ key_bytes = key.encode('utf-8')
2323+ else:
2324+ key_bytes = key
2325+ key_name = _get_keyid_by_gpg_key(key_bytes)
2326+ key_gpg = _dearmor_gpg_key(key_bytes)
2327+ _write_apt_gpg_keyfile(key_name=key_name, key_material=key_gpg)
2328+ else:
2329+ raise GPGKeyError("ASCII armor markers missing from GPG key")
2330+ else:
2331+ log("PGP key found (looks like Radix64 format)", level=WARNING)
2332+ log("SECURELY importing PGP key from keyserver; "
2333+ "full key not provided.", level=WARNING)
2334+ # as of bionic add-apt-repository uses curl with an HTTPS keyserver URL
2335+ # to retrieve GPG keys. `apt-key adv` command is deprecated as is
2336+ # apt-key in general as noted in its manpage. See lp:1433761 for more
2337+ # history. Instead, /etc/apt/trusted.gpg.d is used directly to drop
2338+ # gpg
2339+ key_asc = _get_key_by_keyid(key)
2340+ # write the key in GPG format so that apt-key list shows it
2341+ key_gpg = _dearmor_gpg_key(key_asc)
2342+ _write_apt_gpg_keyfile(key_name=key, key_material=key_gpg)
2343+
2344+
2345+def _get_keyid_by_gpg_key(key_material):
2346+ """Get a GPG key fingerprint by GPG key material.
2347+ Gets a GPG key fingerprint (40-digit, 160-bit) by the ASCII armor-encoded
2348+ or binary GPG key material. Can be used, for example, to generate file
2349+ names for keys passed via charm options.
2350+
2351+ :param key_material: ASCII armor-encoded or binary GPG key material
2352+ :type key_material: bytes
2353+ :raises: GPGKeyError if invalid key material has been provided
2354+ :returns: A GPG key fingerprint
2355+ :rtype: str
2356+ """
2357+ # Use the same gpg command for both Xenial and Bionic
2358+ cmd = 'gpg --with-colons --with-fingerprint'
2359+ ps = subprocess.Popen(cmd.split(),
2360+ stdout=subprocess.PIPE,
2361+ stderr=subprocess.PIPE,
2362+ stdin=subprocess.PIPE)
2363+ out, err = ps.communicate(input=key_material)
2364+ if six.PY3:
2365+ out = out.decode('utf-8')
2366+ err = err.decode('utf-8')
2367+ if 'gpg: no valid OpenPGP data found.' in err:
2368+ raise GPGKeyError('Invalid GPG key material provided')
2369+ # from gnupg2 docs: fpr :: Fingerprint (fingerprint is in field 10)
2370+ return re.search(r"^fpr:{9}([0-9A-F]{40}):$", out, re.MULTILINE).group(1)
2371+
2372+
2373+def _get_key_by_keyid(keyid):
2374+ """Get a key via HTTPS from the Ubuntu keyserver.
2375+ Different key ID formats are supported by SKS keyservers (the longer ones
2376+ are more secure, see "dead beef attack" and https://evil32.com/). Since
2377+ HTTPS is used, if SSLBump-like HTTPS proxies are in place, they will
2378+ impersonate keyserver.ubuntu.com and generate a certificate with
2379+ keyserver.ubuntu.com in the CN field or in SubjAltName fields of a
2380+ certificate. If such proxy behavior is expected it is necessary to add the
2381+ CA certificate chain containing the intermediate CA of the SSLBump proxy to
2382+ every machine that this code runs on via ca-certs cloud-init directive (via
2383+ cloudinit-userdata model-config) or via other means (such as through a
2384+ custom charm option). Also note that DNS resolution for the hostname in a
2385+ URL is done at a proxy server - not at the client side.
2386+
2387+ 8-digit (32 bit) key ID
2388+ https://keyserver.ubuntu.com/pks/lookup?search=0x4652B4E6
2389+ 16-digit (64 bit) key ID
2390+ https://keyserver.ubuntu.com/pks/lookup?search=0x6E85A86E4652B4E6
2391+ 40-digit key ID:
2392+ https://keyserver.ubuntu.com/pks/lookup?search=0x35F77D63B5CEC106C577ED856E85A86E4652B4E6
2393+
2394+ :param keyid: An 8, 16 or 40 hex digit keyid to find a key for
2395+ :type keyid: (bytes, str)
2396+ :returns: A key material for the specified GPG key id
2397+ :rtype: (str, bytes)
2398+ :raises: subprocess.CalledProcessError
2399+ """
2400+ # options=mr - machine-readable output (disables html wrappers)
2401+ keyserver_url = ('https://keyserver.ubuntu.com'
2402+ '/pks/lookup?op=get&options=mr&exact=on&search=0x{}')
2403+ curl_cmd = ['curl', keyserver_url.format(keyid)]
2404+ # use proxy server settings in order to retrieve the key
2405+ return subprocess.check_output(curl_cmd,
2406+ env=env_proxy_settings(['https']))
2407+
2408+
2409+def _dearmor_gpg_key(key_asc):
2410+ """Converts a GPG key in the ASCII armor format to the binary format.
2411+
2412+ :param key_asc: A GPG key in ASCII armor format.
2413+ :type key_asc: (str, bytes)
2414+ :returns: A GPG key in binary format
2415+ :rtype: (str, bytes)
2416+ :raises: GPGKeyError
2417+ """
2418+ ps = subprocess.Popen(['gpg', '--dearmor'],
2419+ stdout=subprocess.PIPE,
2420+ stderr=subprocess.PIPE,
2421+ stdin=subprocess.PIPE)
2422+ out, err = ps.communicate(input=key_asc)
2423+ # no need to decode output as it is binary (invalid utf-8), only error
2424+ if six.PY3:
2425+ err = err.decode('utf-8')
2426+ if 'gpg: no valid OpenPGP data found.' in err:
2427+ raise GPGKeyError('Invalid GPG key material. Check your network setup'
2428+ ' (MTU, routing, DNS) and/or proxy server settings'
2429+ ' as well as destination keyserver status.')
2430+ else:
2431+ return out
2432+
2433+
2434+def _write_apt_gpg_keyfile(key_name, key_material):
2435+ """Writes GPG key material into a file at a provided path.
2436+
2437+ :param key_name: A key name to use for a key file (could be a fingerprint)
2438+ :type key_name: str
2439+ :param key_material: A GPG key material (binary)
2440+ :type key_material: (str, bytes)
2441+ """
2442+ with open('/etc/apt/trusted.gpg.d/{}.gpg'.format(key_name),
2443+ 'wb') as keyf:
2444+ keyf.write(key_material)
2445+
2446+
2447+def add_source(source, key=None, fail_invalid=False):
2448 """Add a package source to this system.
2449
2450 @param source: a URL or sources.list entry, as supported by
2451@@ -233,6 +466,33 @@
2452 such as 'cloud:icehouse'
2453 'distro' may be used as a noop
2454
2455+ Full list of source specifications supported by the function are:
2456+
2457+ 'distro': A NOP; i.e. it has no effect.
2458+      'proposed':    the proposed deb spec [2] is written to
2459+        /etc/apt/sources.list.d/proposed.list
2460+ 'distro-proposed': adds <version>-proposed to the debs [2]
2461+ 'ppa:<ppa-name>': add-apt-repository --yes <ppa_name>
2462+ 'deb <deb-spec>': add-apt-repository --yes deb <deb-spec>
2463+ 'http://....': add-apt-repository --yes http://...
2464+      'cloud-archive:<spec>': add-apt-repository --yes cloud-archive:<spec>
2465+ 'cloud:<release>[-staging]': specify a Cloud Archive pocket <release> with
2466+ optional staging version. If staging is used then the staging PPA [2]
2467+        will be used.  If staging is NOT used then the cloud archive [3] will be
2468+ added, and the 'ubuntu-cloud-keyring' package will be added for the
2469+ current distro.
2470+
2471+ Otherwise the source is not recognised and this is logged to the juju log.
2472+    However, no error is raised, unless fail_invalid is True.
2473+
2474+ [1] deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main
2475+ where {} is replaced with the derived pocket name.
2476+ [2] deb http://archive.ubuntu.com/ubuntu {}-proposed \
2477+ main universe multiverse restricted
2478+ where {} is replaced with the lsb_release codename (e.g. xenial)
2479+ [3] deb http://ubuntu-cloud.archive.canonical.com/ubuntu <pocket>
2480+ to /etc/apt/sources.list.d/cloud-archive-list
2481+
2482 @param key: A key to be added to the system's APT keyring and used
2483 to verify the signatures on packages. Ideally, this should be an
2484 ASCII format GPG public key including the block headers. A GPG key
2485@@ -240,51 +500,150 @@
2486 available to retrieve the actual public key from a public keyserver
2487 placing your Juju environment at risk. ppa and cloud archive keys
2488+    are securely added automatically, so should not be provided.
2489+
2490+ @param fail_invalid: (boolean) if True, then the function raises a
2491+ SourceConfigError is there is no matching installation source.
2492+
2493+ @raises SourceConfigError() if for cloud:<pocket>, the <pocket> is not a
2494+ valid pocket in CLOUD_ARCHIVE_POCKETS
2495 """
2496+ _mapping = OrderedDict([
2497+ (r"^distro$", lambda: None), # This is a NOP
2498+ (r"^(?:proposed|distro-proposed)$", _add_proposed),
2499+ (r"^cloud-archive:(.*)$", _add_apt_repository),
2500+ (r"^((?:deb |http:|https:|ppa:).*)$", _add_apt_repository),
2501+ (r"^cloud:(.*)-(.*)\/staging$", _add_cloud_staging),
2502+ (r"^cloud:(.*)-(.*)$", _add_cloud_distro_check),
2503+ (r"^cloud:(.*)$", _add_cloud_pocket),
2504+ (r"^snap:.*-(.*)-(.*)$", _add_cloud_distro_check),
2505+ ])
2506 if source is None:
2507- log('Source is not present. Skipping')
2508- return
2509-
2510- if (source.startswith('ppa:') or
2511- source.startswith('http') or
2512- source.startswith('deb ') or
2513- source.startswith('cloud-archive:')):
2514- cmd = ['add-apt-repository', '--yes', source]
2515- _run_with_retries(cmd)
2516- elif source.startswith('cloud:'):
2517- install(filter_installed_packages(['ubuntu-cloud-keyring']),
2518+ source = ''
2519+ for r, fn in six.iteritems(_mapping):
2520+ m = re.match(r, source)
2521+ if m:
2522+            # call the associated function with the captured groups
2523+ # raises SourceConfigError on error.
2524+ fn(*m.groups())
2525+ if key:
2526+ try:
2527+ import_key(key)
2528+ except GPGKeyError as e:
2529+ raise SourceConfigError(str(e))
2530+ break
2531+ else:
2532+ # nothing matched. log an error and maybe sys.exit
2533+ err = "Unknown source: {!r}".format(source)
2534+ log(err)
2535+ if fail_invalid:
2536+ raise SourceConfigError(err)
2537+
2538+
2539+def _add_proposed():
2540+    """Add the PROPOSED_POCKET as /etc/apt/sources.list.d/proposed.list
2541+
2542+ Uses get_distrib_codename to determine the correct stanza for
2543+ the deb line.
2544+
2545+    For intel architectures PROPOSED_POCKET is used for the release, but for
2546+ other architectures PROPOSED_PORTS_POCKET is used for the release.
2547+ """
2548+ release = get_distrib_codename()
2549+ arch = platform.machine()
2550+ if arch not in six.iterkeys(ARCH_TO_PROPOSED_POCKET):
2551+ raise SourceConfigError("Arch {} not supported for (distro-)proposed"
2552+ .format(arch))
2553+ with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt:
2554+ apt.write(ARCH_TO_PROPOSED_POCKET[arch].format(release))
2555+
2556+
2557+def _add_apt_repository(spec):
2558+ """Add the spec using add_apt_repository
2559+
2560+ :param spec: the parameter to pass to add_apt_repository
2561+ :type spec: str
2562+ """
2563+ if '{series}' in spec:
2564+ series = get_distrib_codename()
2565+ spec = spec.replace('{series}', series)
2566+ # software-properties package for bionic properly reacts to proxy settings
2567+    # passed as environment variables (See lp:1433761). This is not the case for
2568+ # LTS and non-LTS releases below bionic.
2569+ _run_with_retries(['add-apt-repository', '--yes', spec],
2570+ cmd_env=env_proxy_settings(['https']))
2571+
2572+
2573+def _add_cloud_pocket(pocket):
2574+    """Add a cloud pocket as /etc/apt/sources.list.d/cloud-archive.list
2575+
2576+ Note that this overwrites the existing file if there is one.
2577+
2578+ This function also converts the simple pocket in to the actual pocket using
2579+ the CLOUD_ARCHIVE_POCKETS mapping.
2580+
2581+ :param pocket: string representing the pocket to add a deb spec for.
2582+ :raises: SourceConfigError if the cloud pocket doesn't exist or the
2583+ requested release doesn't match the current distro version.
2584+ """
2585+ apt_install(filter_installed_packages(['ubuntu-cloud-keyring']),
2586 fatal=True)
2587- pocket = source.split(':')[-1]
2588- if pocket not in CLOUD_ARCHIVE_POCKETS:
2589- raise SourceConfigError(
2590- 'Unsupported cloud: source option %s' %
2591- pocket)
2592- actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket]
2593- with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt:
2594- apt.write(CLOUD_ARCHIVE.format(actual_pocket))
2595- elif source == 'proposed':
2596- release = lsb_release()['DISTRIB_CODENAME']
2597- with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt:
2598- apt.write(PROPOSED_POCKET.format(release))
2599- elif source == 'distro':
2600- pass
2601- else:
2602- log("Unknown source: {!r}".format(source))
2603-
2604- if key:
2605- if '-----BEGIN PGP PUBLIC KEY BLOCK-----' in key:
2606- with NamedTemporaryFile('w+') as key_file:
2607- key_file.write(key)
2608- key_file.flush()
2609- key_file.seek(0)
2610- subprocess.check_call(['apt-key', 'add', '-'], stdin=key_file)
2611- else:
2612- # Note that hkp: is in no way a secure protocol. Using a
2613- # GPG key id is pointless from a security POV unless you
2614- # absolutely trust your network and DNS.
2615- subprocess.check_call(['apt-key', 'adv', '--keyserver',
2616- 'hkp://keyserver.ubuntu.com:80', '--recv',
2617- key])
2618+ if pocket not in CLOUD_ARCHIVE_POCKETS:
2619+ raise SourceConfigError(
2620+ 'Unsupported cloud: source option %s' %
2621+ pocket)
2622+ actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket]
2623+ with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt:
2624+ apt.write(CLOUD_ARCHIVE.format(actual_pocket))
2625+
2626+
2627+def _add_cloud_staging(cloud_archive_release, openstack_release):
2628+ """Add the cloud staging repository which is in
2629+ ppa:ubuntu-cloud-archive/<openstack_release>-staging
2630+
2631+ This function checks that the cloud_archive_release matches the current
2632+ codename for the distro that charm is being installed on.
2633+
2634+ :param cloud_archive_release: string, codename for the release.
2635+ :param openstack_release: String, codename for the openstack release.
2636+ :raises: SourceConfigError if the cloud_archive_release doesn't match the
2637+ current version of the os.
2638+ """
2639+ _verify_is_ubuntu_rel(cloud_archive_release, openstack_release)
2640+ ppa = 'ppa:ubuntu-cloud-archive/{}-staging'.format(openstack_release)
2641+ cmd = 'add-apt-repository -y {}'.format(ppa)
2642+ _run_with_retries(cmd.split(' '))
2643+
2644+
2645+def _add_cloud_distro_check(cloud_archive_release, openstack_release):
2646+ """Add the cloud pocket, but also check the cloud_archive_release against
2647+ the current distro, and use the openstack_release as the full lookup.
2648+
2649+ This just calls _add_cloud_pocket() with the openstack_release as pocket
2650+ to get the correct cloud-archive.list for dpkg to work with.
2651+
2652+    :param cloud_archive_release: String, codename for the distro release.
2653+ :param openstack_release: String, spec for the release to look up in the
2654+ CLOUD_ARCHIVE_POCKETS
2655+ :raises: SourceConfigError if this is the wrong distro, or the pocket spec
2656+ doesn't exist.
2657+ """
2658+ _verify_is_ubuntu_rel(cloud_archive_release, openstack_release)
2659+ _add_cloud_pocket("{}-{}".format(cloud_archive_release, openstack_release))
2660+
2661+
2662+def _verify_is_ubuntu_rel(release, os_release):
2663+    """Verify that the release is the same as the current ubuntu release.
2664+
2665+ :param release: String, lowercase for the release.
2666+ :param os_release: String, the os_release being asked for
2667+ :raises: SourceConfigError if the release is not the same as the ubuntu
2668+ release.
2669+ """
2670+ ubuntu_rel = get_distrib_codename()
2671+ if release != ubuntu_rel:
2672+ raise SourceConfigError(
2673+ 'Invalid Cloud Archive release specified: {}-{} on this Ubuntu'
2674+ 'version ({})'.format(release, os_release, ubuntu_rel))
2675
2676
2677 def _run_with_retries(cmd, max_retries=CMD_RETRY_COUNT, retry_exitcodes=(1,),
2678@@ -300,9 +659,12 @@
2679 :param: cmd_env: dict: Environment variables to add to the command run.
2680 """
2681
2682- env = os.environ.copy()
2683+ env = None
2684+ kwargs = {}
2685 if cmd_env:
2686+ env = os.environ.copy()
2687 env.update(cmd_env)
2688+ kwargs['env'] = env
2689
2690 if not retry_message:
2691 retry_message = "Failed executing '{}'".format(" ".join(cmd))
2692@@ -314,7 +676,8 @@
2693 retry_results = (None,) + retry_exitcodes
2694 while result in retry_results:
2695 try:
2696- result = subprocess.check_call(cmd, env=env)
2697+ # result = subprocess.check_call(cmd, env=env)
2698+ result = subprocess.check_call(cmd, **kwargs)
2699 except subprocess.CalledProcessError as e:
2700 retry_count = retry_count + 1
2701 if retry_count > max_retries:
2702@@ -327,6 +690,7 @@
2703 def _run_apt_command(cmd, fatal=False):
2704 """Run an apt command with optional retries.
2705
2706+ :param: cmd: str: The apt command to run.
2707 :param: fatal: bool: Whether the command's output should be checked and
2708 retried.
2709 """
2710@@ -353,7 +717,7 @@
2711 cache = apt_cache()
2712 try:
2713 pkg = cache[package]
2714- except:
2715+ except Exception:
2716 # the package is unknown to the current apt cache.
2717 return None
2718
2719
2720=== modified file 'hooks/hooks.py'
2721--- hooks/hooks.py 2019-03-21 10:59:11 +0000
2722+++ hooks/hooks.py 2019-03-21 11:40:09 +0000
2723@@ -218,7 +218,7 @@
2724 haproxy_config = load_haproxy_config(haproxy_config_file)
2725 if haproxy_config is None:
2726 return None
2727- m = re.search("stats auth\s+(\w+):(\w+)", haproxy_config)
2728+ m = re.search(r"stats auth\s+(\w+):(\w+)", haproxy_config)
2729 if m is not None:
2730 return m.group(2)
2731 else:
2732@@ -246,7 +246,7 @@
2733 if haproxy_config is None:
2734 return ()
2735 listen_stanzas = re.findall(
2736- "listen\s+([^\s]+)\s+([^:]+):(.*)",
2737+ r"listen\s+([^\s]+)\s+([^:]+):(.*)",
2738 haproxy_config)
2739 # Match bind stanzas like:
2740 #
2741@@ -254,7 +254,7 @@
2742 # bind 2001:db8::1:80
2743 # bind 1.2.3.4:123 ssl crt /foo/bar
2744 bind_stanzas = re.findall(
2745- "\s+bind\s+([a-fA-F0-9\.:\*]+):(\d+).*\n\s+default_backend\s+([^\s]+)",
2746+ r"\s+bind\s+([a-fA-F0-9\.:\*]+):(\d+).*\n\s+default_backend\s+([^\s]+)",
2747 haproxy_config, re.M)
2748 return (tuple(((service, addr, int(port))
2749 for service, addr, port in listen_stanzas)) +
2750@@ -454,7 +454,7 @@
2751 monitoring_config.append("http-request deny unless allowed_cidr")
2752 monitoring_config.append("stats enable")
2753 monitoring_config.append("stats uri /")
2754- monitoring_config.append("stats realm Haproxy\ Statistics")
2755+ monitoring_config.append("stats realm Haproxy\ Statistics") # noqa: W605
2756 monitoring_config.append("stats auth %s:%s" %
2757 (config_data['monitoring_username'],
2758 monitoring_password))
2759@@ -1269,7 +1269,7 @@
2760 # to turn it into a string for comparison
2761 component_addr = octet_parser(name.getComponent())
2762 cert_addresses.add(str(component_addr))
2763- except:
2764+ except Exception:
2765 pass
2766 if cert_addresses != unit_addresses:
2767 log('subjAltName: Cert (%s) != Unit (%s), assuming stale' % (
2768@@ -1289,11 +1289,13 @@
2769 """
2770 try:
2771 import ipaddress
2772+
2773 def ipaddress_parser(octet):
2774 return str(ipaddress.ip_address(str(octet)))
2775 return ipaddress_parser
2776 except ImportError:
2777 import ipaddr
2778+
2779 def trusty_ipaddress_parser(octet):
2780 return str(ipaddr.IPAddress(ipaddr.Bytes(str(octet))))
2781 return trusty_ipaddress_parser
2782@@ -1405,6 +1407,7 @@
2783 print("Unknown hook")
2784 sys.exit(1)
2785
2786+
2787 if __name__ == "__main__":
2788 hook_name = os.path.basename(sys.argv[0])
2789 # Also support being invoked directly with hook as argument name.
2790
2791=== modified file 'hooks/tests/test_config_changed_hooks.py'
2792--- hooks/tests/test_config_changed_hooks.py 2019-03-21 10:08:36 +0000
2793+++ hooks/tests/test_config_changed_hooks.py 2019-03-21 11:40:09 +0000
2794@@ -46,7 +46,7 @@
2795 self.addCleanup(mock_controller.stop)
2796 return mock
2797
2798- @patch('hooks.opened_ports', return_value=['443/tcp',])
2799+ @patch('hooks.opened_ports', return_value=['443/tcp', ])
2800 def test_config_changed_notify_website_changed_stanzas(self, opened_ports):
2801 self.service_haproxy.return_value = True
2802 self.get_listen_stanzas.side_effect = (
2803@@ -59,7 +59,7 @@
2804 self.notify_website.assert_called_once_with()
2805 self.notify_peer.assert_called_once_with()
2806
2807- @patch('hooks.opened_ports', return_value=['443/tcp',])
2808+ @patch('hooks.opened_ports', return_value=['443/tcp', ])
2809 def test_config_changed_no_notify_website_not_changed(self, opened_ports):
2810 self.service_haproxy.return_value = True
2811 self.get_listen_stanzas.side_effect = (
2812@@ -71,7 +71,7 @@
2813 self.notify_website.assert_not_called()
2814 self.notify_peer.assert_not_called()
2815
2816- @patch('hooks.opened_ports', return_value=['443/tcp',])
2817+ @patch('hooks.opened_ports', return_value=['443/tcp', ])
2818 def test_config_changed_no_notify_website_failed_check(self, opened_ports):
2819 self.service_haproxy.return_value = False
2820 self.get_listen_stanzas.side_effect = (
2821@@ -87,7 +87,7 @@
2822 "HAProxy configuration check failed, exiting.")
2823 self.sys_exit.assert_called_once_with(1)
2824
2825- @patch('hooks.opened_ports', return_value=['443/tcp',])
2826+ @patch('hooks.opened_ports', return_value=['443/tcp', ])
2827 def test_config_changed_notify_reverseproxy(self, opened_ports):
2828 """
2829 If the ssl_cert config value changes, the reverseproxy relations get
2830@@ -103,7 +103,7 @@
2831 _notify_reverseproxy.assert_called_once_with()
2832 service_restart.assert_called_once_with('rsyslog')
2833
2834- @patch('hooks.opened_ports', return_value=['443/tcp',])
2835+ @patch('hooks.opened_ports', return_value=['443/tcp', ])
2836 def test_config_changed_restart_rsyslog(self, opened_ports):
2837 """
2838         If the global_log or source config value changes, rsyslog is
2839
2840=== modified file 'hooks/tests/test_helpers.py'
2841--- hooks/tests/test_helpers.py 2017-10-09 17:38:17 +0000
2842+++ hooks/tests/test_helpers.py 2019-03-21 11:40:09 +0000
2843@@ -680,10 +680,10 @@
2844 safe_load.return_value = [
2845 {
2846 'service_name': 'foo',
2847- 'service_options': {
2848+ 'service_options': { # noqa: F601
2849 'foo-1': 123,
2850 },
2851- 'service_options': ['foo1', 'foo2'],
2852+ 'service_options': ['foo1', 'foo2'], # noqa: F601
2853 'server_options': ['baz1', 'baz2'],
2854 },
2855 {
2856@@ -724,10 +724,10 @@
2857 safe_load.return_value = [
2858 {
2859 'service_name': 'foo',
2860- 'service_options': {
2861+ 'service_options': { # noqa: F601
2862 'foo-1': 123,
2863 },
2864- 'service_options': ['foo1', 'foo2'],
2865+ 'service_options': ['foo1', 'foo2'], # noqa: F601
2866 'server_options': ['baz1', 'baz2'],
2867 },
2868 {
2869@@ -768,10 +768,10 @@
2870 safe_load.return_value = [
2871 {
2872 'service_name': 'foo',
2873- 'service_options': {
2874+ 'service_options': { # noqa: F601
2875 'foo-1': 123,
2876 },
2877- 'service_options': ['foo1', 'foo2'],
2878+ 'service_options': ['foo1', 'foo2'], # noqa: F601
2879 'server_options': 'baz1, baz2',
2880 },
2881 {
2882@@ -812,10 +812,10 @@
2883 safe_load.return_value = [
2884 {
2885 'service_name': 'foo',
2886- 'service_options': {
2887+ 'service_options': { # noqa: F601
2888 'foo-1': 123,
2889 },
2890- 'service_options': ['foo1', 'foo2'],
2891+ 'service_options': ['foo1', 'foo2'], # noqa: F601
2892 'server_options': 'baz1, baz2',
2893 },
2894 {

Subscribers

People subscribed via source and target branches

to all changes: