Merge ~ziyiwang/charm-mongodb:bug/1879842 into charm-mongodb:master

Proposed by Celia Wang
Status: Merged
Approved by: Paul Goins
Approved revision: d0f92fe9721d2d7bcc372298d0d594e3bb2e353a
Merged at revision: be66c637ac0e53c6ea140a59904bfc41997bf5bf
Proposed branch: ~ziyiwang/charm-mongodb:bug/1879842
Merge into: charm-mongodb:master
Diff against target: 3367 lines (+2135/-227)
27 files modified
Makefile (+1/-1)
charm-helpers-sync.yaml (+1/-2)
charmhelpers/__init__.py (+65/-4)
charmhelpers/contrib/charmsupport/nrpe.py (+71/-10)
charmhelpers/contrib/hahelpers/cluster.py (+88/-0)
charmhelpers/core/hookenv.py (+585/-42)
charmhelpers/core/host.py (+191/-11)
charmhelpers/core/host_factory/ubuntu.py (+28/-1)
charmhelpers/core/kernel.py (+2/-2)
charmhelpers/core/services/base.py (+18/-7)
charmhelpers/core/strutils.py (+11/-5)
charmhelpers/core/sysctl.py (+32/-11)
charmhelpers/core/templating.py (+18/-9)
charmhelpers/core/unitdata.py (+8/-1)
charmhelpers/fetch/__init__.py (+21/-9)
charmhelpers/fetch/archiveurl.py (+1/-1)
charmhelpers/fetch/bzrurl.py (+2/-2)
charmhelpers/fetch/centos.py (+1/-1)
charmhelpers/fetch/giturl.py (+2/-2)
charmhelpers/fetch/python/__init__.py (+1/-1)
charmhelpers/fetch/python/debug.py (+54/-0)
charmhelpers/fetch/python/rpdb.py (+56/-0)
charmhelpers/fetch/python/version.py (+32/-0)
charmhelpers/fetch/snap.py (+33/-5)
charmhelpers/fetch/ubuntu.py (+522/-97)
charmhelpers/fetch/ubuntu_apt_pkg.py (+267/-0)
charmhelpers/osplatform.py (+24/-3)
Reviewer Review Type Date Requested Status
Paul Goins Approve
Review via email: mp+384328@code.launchpad.net

Commit message

Sync charmhelpers for release 20.05

To post a comment you must log in.
Revision history for this message
🤖 Canonical IS Merge Bot (canonical-is-mergebot) wrote :

This merge proposal is being monitored by mergebot. Change the status to Approved to merge.

Revision history for this message
Paul Goins (vultaire) :
review: Approve
Revision history for this message
🤖 Canonical IS Merge Bot (canonical-is-mergebot) wrote :

Change successfully merged at revision be66c637ac0e53c6ea140a59904bfc41997bf5bf

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1diff --git a/Makefile b/Makefile
2index 8e20153..4d444f8 100644
3--- a/Makefile
4+++ b/Makefile
5@@ -43,7 +43,7 @@ functional:
6
7 sync:
8 @mkdir -p bin
9- @bzr cat lp:charm-helpers/tools/charm_helpers_sync/charm_helpers_sync.py > bin/charm_helpers_sync.py
10+ @curl -o bin/charm_helpers_sync.py https://raw.githubusercontent.com/juju/charm-helpers/master/tools/charm_helpers_sync/charm_helpers_sync.py
11 @$(PYTHON) bin/charm_helpers_sync.py -c charm-helpers-sync.yaml
12
13 publish: lint unit_test
14diff --git a/charm-helpers-sync.yaml b/charm-helpers-sync.yaml
15index ed5ea5f..64273d7 100644
16--- a/charm-helpers-sync.yaml
17+++ b/charm-helpers-sync.yaml
18@@ -1,10 +1,9 @@
19-branch: lp:charm-helpers
20+repo: https://github.com/juju/charm-helpers
21 destination: charmhelpers
22 include:
23 - core
24 - fetch
25 - contrib.hahelpers.cluster
26- - contrib.python.packages
27 - payload.execd
28 - contrib.charmsupport
29 - osplatform
30diff --git a/charmhelpers/__init__.py b/charmhelpers/__init__.py
31index 4886788..61ef907 100644
32--- a/charmhelpers/__init__.py
33+++ b/charmhelpers/__init__.py
34@@ -14,23 +14,84 @@
35
36 # Bootstrap charm-helpers, installing its dependencies if necessary using
37 # only standard libraries.
38+from __future__ import print_function
39+from __future__ import absolute_import
40+
41+import functools
42+import inspect
43 import subprocess
44 import sys
45
46 try:
47- import six # flake8: noqa
48+ import six # NOQA:F401
49 except ImportError:
50 if sys.version_info.major == 2:
51 subprocess.check_call(['apt-get', 'install', '-y', 'python-six'])
52 else:
53 subprocess.check_call(['apt-get', 'install', '-y', 'python3-six'])
54- import six # flake8: noqa
55+ import six # NOQA:F401
56
57 try:
58- import yaml # flake8: noqa
59+ import yaml # NOQA:F401
60 except ImportError:
61 if sys.version_info.major == 2:
62 subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml'])
63 else:
64 subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml'])
65- import yaml # flake8: noqa
66+ import yaml # NOQA:F401
67+
68+
69+# Holds a list of mapping of mangled function names that have been deprecated
70+# using the @deprecate decorator below. This is so that the warning is only
71+# printed once for each usage of the function.
72+__deprecated_functions = {}
73+
74+
75+def deprecate(warning, date=None, log=None):
76+ """Add a deprecation warning the first time the function is used.
77+ The date, which is a string in semi-ISO8601 format, indicates the year-month
78+ that the function is officially going to be removed.
79+
80+ usage:
81+
82+ @deprecate('use core/fetch/add_source() instead', '2017-04')
83+ def contributed_add_source_thing(...):
84+ ...
85+
86+ And it then prints to the log ONCE that the function is deprecated.
87+ The reason for passing the logging function (log) is so that hookenv.log
88+ can be used for a charm if needed.
89+
90+ :param warning: String to indicate where it has moved to.
91+ :param date: optional string, in YYYY-MM format to indicate when the
92+ function will definitely (probably) be removed.
93+ :param log: The log function to call to log. If not, logs to stdout
94+ """
95+ def wrap(f):
96+
97+ @functools.wraps(f)
98+ def wrapped_f(*args, **kwargs):
99+ try:
100+ module = inspect.getmodule(f)
101+ file = inspect.getsourcefile(f)
102+ lines = inspect.getsourcelines(f)
103+ f_name = "{}-{}-{}..{}-{}".format(
104+ module.__name__, file, lines[0], lines[-1], f.__name__)
105+ except (IOError, TypeError):
106+ # assume it was local, so just use the name of the function
107+ f_name = f.__name__
108+ if f_name not in __deprecated_functions:
109+ __deprecated_functions[f_name] = True
110+ s = "DEPRECATION WARNING: Function {} is being removed".format(
111+ f.__name__)
112+ if date:
113+ s = "{} on/around {}".format(s, date)
114+ if warning:
115+ s = "{} : {}".format(s, warning)
116+ if log:
117+ log(s)
118+ else:
119+ print(s)
120+ return f(*args, **kwargs)
121+ return wrapped_f
122+ return wrap
123diff --git a/charmhelpers/contrib/charmsupport/nrpe.py b/charmhelpers/contrib/charmsupport/nrpe.py
124index 424b7f7..d775861 100644
125--- a/charmhelpers/contrib/charmsupport/nrpe.py
126+++ b/charmhelpers/contrib/charmsupport/nrpe.py
127@@ -30,8 +30,10 @@ import yaml
128
129 from charmhelpers.core.hookenv import (
130 config,
131+ hook_name,
132 local_unit,
133 log,
134+ relation_get,
135 relation_ids,
136 relation_set,
137 relations_of_type,
138@@ -125,7 +127,7 @@ class CheckException(Exception):
139
140
141 class Check(object):
142- shortname_re = '[A-Za-z0-9-_]+$'
143+ shortname_re = '[A-Za-z0-9-_.@]+$'
144 service_template = ("""
145 #---------------------------------------------------
146 # This file is Juju managed
147@@ -259,11 +261,23 @@ class NRPE(object):
148 relation = relation_ids('nrpe-external-master')
149 if relation:
150 log("Setting charm primary status {}".format(primary))
151- for rid in relation_ids('nrpe-external-master'):
152+ for rid in relation:
153 relation_set(relation_id=rid, relation_settings={'primary': self.primary})
154+ self.remove_check_queue = set()
155
156 def add_check(self, *args, **kwargs):
157+ shortname = None
158+ if kwargs.get('shortname') is None:
159+ if len(args) > 0:
160+ shortname = args[0]
161+ else:
162+ shortname = kwargs['shortname']
163+
164 self.checks.append(Check(*args, **kwargs))
165+ try:
166+ self.remove_check_queue.remove(shortname)
167+ except KeyError:
168+ pass
169
170 def remove_check(self, *args, **kwargs):
171 if kwargs.get('shortname') is None:
172@@ -280,12 +294,13 @@ class NRPE(object):
173
174 check = Check(*args, **kwargs)
175 check.remove(self.hostname)
176+ self.remove_check_queue.add(kwargs['shortname'])
177
178 def write(self):
179 try:
180 nagios_uid = pwd.getpwnam('nagios').pw_uid
181 nagios_gid = grp.getgrnam('nagios').gr_gid
182- except:
183+ except Exception:
184 log("Nagios user not set up, nrpe checks not updated")
185 return
186
187@@ -302,12 +317,34 @@ class NRPE(object):
188 "command": nrpecheck.command,
189 }
190
191- service('restart', 'nagios-nrpe-server')
192+ # update-status hooks are configured to firing every 5 minutes by
193+ # default. When nagios-nrpe-server is restarted, the nagios server
194+ # reports checks failing causing unnecessary alerts. Let's not restart
195+ # on update-status hooks.
196+ if not hook_name() == 'update-status':
197+ service('restart', 'nagios-nrpe-server')
198
199 monitor_ids = relation_ids("local-monitors") + \
200 relation_ids("nrpe-external-master")
201 for rid in monitor_ids:
202- relation_set(relation_id=rid, monitors=yaml.dump(monitors))
203+ reldata = relation_get(unit=local_unit(), rid=rid)
204+ if 'monitors' in reldata:
205+ # update the existing set of monitors with the new data
206+ old_monitors = yaml.safe_load(reldata['monitors'])
207+ old_nrpe_monitors = old_monitors['monitors']['remote']['nrpe']
208+ # remove keys that are in the remove_check_queue
209+ old_nrpe_monitors = {k: v for k, v in old_nrpe_monitors.items()
210+ if k not in self.remove_check_queue}
211+ # update/add nrpe_monitors
212+ old_nrpe_monitors.update(nrpe_monitors)
213+ old_monitors['monitors']['remote']['nrpe'] = old_nrpe_monitors
214+ # write back to the relation
215+ relation_set(relation_id=rid, monitors=yaml.dump(old_monitors))
216+ else:
217+ # write a brand new set of monitors, as no existing ones.
218+ relation_set(relation_id=rid, monitors=yaml.dump(monitors))
219+
220+ self.remove_check_queue.clear()
221
222
223 def get_nagios_hostcontext(relation_name='nrpe-external-master'):
224@@ -404,16 +441,26 @@ def add_init_service_checks(nrpe, services, unit_name, immediate_check=True):
225 os.chmod(checkpath, 0o644)
226
227
228-def copy_nrpe_checks():
229+def copy_nrpe_checks(nrpe_files_dir=None):
230 """
231 Copy the nrpe checks into place
232
233 """
234 NAGIOS_PLUGINS = '/usr/local/lib/nagios/plugins'
235- nrpe_files_dir = os.path.join(os.getenv('CHARM_DIR'), 'hooks',
236- 'charmhelpers', 'contrib', 'openstack',
237- 'files')
238-
239+ if nrpe_files_dir is None:
240+ # determine if "charmhelpers" is in CHARMDIR or CHARMDIR/hooks
241+ for segment in ['.', 'hooks']:
242+ nrpe_files_dir = os.path.abspath(os.path.join(
243+ os.getenv('CHARM_DIR'),
244+ segment,
245+ 'charmhelpers',
246+ 'contrib',
247+ 'openstack',
248+ 'files'))
249+ if os.path.isdir(nrpe_files_dir):
250+ break
251+ else:
252+ raise RuntimeError("Couldn't find charmhelpers directory")
253 if not os.path.exists(NAGIOS_PLUGINS):
254 os.makedirs(NAGIOS_PLUGINS)
255 for fname in glob.glob(os.path.join(nrpe_files_dir, "check_*")):
256@@ -437,3 +484,17 @@ def add_haproxy_checks(nrpe, unit_name):
257 shortname='haproxy_queue',
258 description='Check HAProxy queue depth {%s}' % unit_name,
259 check_cmd='check_haproxy_queue_depth.sh')
260+
261+
262+def remove_deprecated_check(nrpe, deprecated_services):
263+ """
264+ Remove checks for deprecated services in list
265+
266+ :param nrpe: NRPE object to remove check from
267+ :type nrpe: NRPE
268+ :param deprecated_services: List of deprecated services that are removed
269+ :type deprecated_services: list
270+ """
271+ for dep_svc in deprecated_services:
272+ log('Deprecated service: {}'.format(dep_svc))
273+ nrpe.remove_check(shortname=dep_svc)
274diff --git a/charmhelpers/contrib/hahelpers/cluster.py b/charmhelpers/contrib/hahelpers/cluster.py
275index e02350e..ba34fba 100644
276--- a/charmhelpers/contrib/hahelpers/cluster.py
277+++ b/charmhelpers/contrib/hahelpers/cluster.py
278@@ -25,8 +25,10 @@ Helpers for clustering and determining "cluster leadership" and other
279 clustering-related helpers.
280 """
281
282+import functools
283 import subprocess
284 import os
285+import time
286
287 from socket import gethostname as get_unit_hostname
288
289@@ -45,6 +47,9 @@ from charmhelpers.core.hookenv import (
290 is_leader as juju_is_leader,
291 status_set,
292 )
293+from charmhelpers.core.host import (
294+ modulo_distribution,
295+)
296 from charmhelpers.core.decorators import (
297 retry_on_exception,
298 )
299@@ -219,6 +224,11 @@ def https():
300 return True
301 if config_get('ssl_cert') and config_get('ssl_key'):
302 return True
303+ for r_id in relation_ids('certificates'):
304+ for unit in relation_list(r_id):
305+ ca = relation_get('ca', rid=r_id, unit=unit)
306+ if ca:
307+ return True
308 for r_id in relation_ids('identity-service'):
309 for unit in relation_list(r_id):
310 # TODO - needs fixing for new helper as ssl_cert/key suffixes with CN
311@@ -272,6 +282,10 @@ def determine_apache_port(public_port, singlenode_mode=False):
312 return public_port - (i * 10)
313
314
315+determine_apache_port_single = functools.partial(
316+ determine_apache_port, singlenode_mode=True)
317+
318+
319 def get_hacluster_config(exclude_keys=None):
320 '''
321 Obtains all relevant configuration from charm configuration required
322@@ -361,3 +375,77 @@ def canonical_url(configs, vip_setting='vip'):
323 else:
324 addr = unit_get('private-address')
325 return '%s://%s' % (scheme, addr)
326+
327+
328+def distributed_wait(modulo=None, wait=None, operation_name='operation'):
329+ ''' Distribute operations by waiting based on modulo_distribution
330+
331+ If modulo and or wait are not set, check config_get for those values.
332+ If config values are not set, default to modulo=3 and wait=30.
333+
334+ :param modulo: int The modulo number creates the group distribution
335+ :param wait: int The constant time wait value
336+ :param operation_name: string Operation name for status message
337+ i.e. 'restart'
338+ :side effect: Calls config_get()
339+ :side effect: Calls log()
340+ :side effect: Calls status_set()
341+ :side effect: Calls time.sleep()
342+ '''
343+ if modulo is None:
344+ modulo = config_get('modulo-nodes') or 3
345+ if wait is None:
346+ wait = config_get('known-wait') or 30
347+ if juju_is_leader():
348+ # The leader should never wait
349+ calculated_wait = 0
350+ else:
351+ # non_zero_wait=True guarantees the non-leader who gets modulo 0
352+ # will still wait
353+ calculated_wait = modulo_distribution(modulo=modulo, wait=wait,
354+ non_zero_wait=True)
355+ msg = "Waiting {} seconds for {} ...".format(calculated_wait,
356+ operation_name)
357+ log(msg, DEBUG)
358+ status_set('maintenance', msg)
359+ time.sleep(calculated_wait)
360+
361+
362+def get_managed_services_and_ports(services, external_ports,
363+ external_services=None,
364+ port_conv_f=determine_apache_port_single):
365+ """Get the services and ports managed by this charm.
366+
367+ Return only the services and corresponding ports that are managed by this
368+ charm. This excludes haproxy when there is a relation with hacluster. This
369+ is because this charm passes responsibility for stopping and starting
370+ haproxy to hacluster.
371+
372+ Similarly, if a relation with hacluster exists then the ports returned by
373+ this method correspond to those managed by the apache server rather than
374+ haproxy.
375+
376+ :param services: List of services.
377+ :type services: List[str]
378+ :param external_ports: List of ports managed by external services.
379+ :type external_ports: List[int]
380+ :param external_services: List of services to be removed if ha relation is
381+ present.
382+ :type external_services: List[str]
383+ :param port_conv_f: Function to apply to ports to calculate the ports
384+ managed by services controlled by this charm.
385+ :type port_convert_func: f()
386+ :returns: A tuple containing a list of services first followed by a list of
387+ ports.
388+ :rtype: Tuple[List[str], List[int]]
389+ """
390+ if external_services is None:
391+ external_services = ['haproxy']
392+ if relation_ids('ha'):
393+ for svc in external_services:
394+ try:
395+ services.remove(svc)
396+ except ValueError:
397+ pass
398+ external_ports = [port_conv_f(p) for p in external_ports]
399+ return services, external_ports
400diff --git a/charmhelpers/core/hookenv.py b/charmhelpers/core/hookenv.py
401index e44e22b..d7c37c1 100644
402--- a/charmhelpers/core/hookenv.py
403+++ b/charmhelpers/core/hookenv.py
404@@ -21,29 +21,50 @@
405 from __future__ import print_function
406 import copy
407 from distutils.version import LooseVersion
408+from enum import Enum
409 from functools import wraps
410+from collections import namedtuple
411 import glob
412 import os
413 import json
414 import yaml
415+import re
416 import subprocess
417 import sys
418 import errno
419 import tempfile
420 from subprocess import CalledProcessError
421
422+from charmhelpers import deprecate
423+
424 import six
425 if not six.PY3:
426 from UserDict import UserDict
427 else:
428 from collections import UserDict
429
430+
431 CRITICAL = "CRITICAL"
432 ERROR = "ERROR"
433 WARNING = "WARNING"
434 INFO = "INFO"
435 DEBUG = "DEBUG"
436+TRACE = "TRACE"
437 MARKER = object()
438+SH_MAX_ARG = 131071
439+
440+
441+RANGE_WARNING = ('Passing NO_PROXY string that includes a cidr. '
442+ 'This may not be compatible with software you are '
443+ 'running in your shell.')
444+
445+
446+class WORKLOAD_STATES(Enum):
447+ ACTIVE = 'active'
448+ BLOCKED = 'blocked'
449+ MAINTENANCE = 'maintenance'
450+ WAITING = 'waiting'
451+
452
453 cache = {}
454
455@@ -64,7 +85,7 @@ def cached(func):
456 @wraps(func)
457 def wrapper(*args, **kwargs):
458 global cache
459- key = str((func, args, kwargs))
460+ key = json.dumps((func, args, kwargs), sort_keys=True, default=str)
461 try:
462 return cache[key]
463 except KeyError:
464@@ -94,7 +115,7 @@ def log(message, level=None):
465 command += ['-l', level]
466 if not isinstance(message, six.string_types):
467 message = repr(message)
468- command += [message]
469+ command += [message[:SH_MAX_ARG]]
470 # Missing juju-log should not cause failures in unit tests
471 # Send log output to stderr
472 try:
473@@ -109,6 +130,24 @@ def log(message, level=None):
474 raise
475
476
477+def function_log(message):
478+ """Write a function progress message"""
479+ command = ['function-log']
480+ if not isinstance(message, six.string_types):
481+ message = repr(message)
482+ command += [message[:SH_MAX_ARG]]
483+ # Missing function-log should not cause failures in unit tests
484+ # Send function_log output to stderr
485+ try:
486+ subprocess.call(command)
487+ except OSError as e:
488+ if e.errno == errno.ENOENT:
489+ message = "function-log: {}".format(message)
490+ print(message, file=sys.stderr)
491+ else:
492+ raise
493+
494+
495 class Serializable(UserDict):
496 """Wrapper, an object that can be serialized to yaml or json"""
497
498@@ -197,11 +236,58 @@ def remote_unit():
499 return os.environ.get('JUJU_REMOTE_UNIT', None)
500
501
502-def service_name():
503- """The name service group this unit belongs to"""
504+def application_name():
505+ """
506+ The name of the deployed application this unit belongs to.
507+ """
508 return local_unit().split('/')[0]
509
510
511+def service_name():
512+ """
513+ .. deprecated:: 0.19.1
514+ Alias for :func:`application_name`.
515+ """
516+ return application_name()
517+
518+
519+def model_name():
520+ """
521+ Name of the model that this unit is deployed in.
522+ """
523+ return os.environ['JUJU_MODEL_NAME']
524+
525+
526+def model_uuid():
527+ """
528+ UUID of the model that this unit is deployed in.
529+ """
530+ return os.environ['JUJU_MODEL_UUID']
531+
532+
533+def principal_unit():
534+ """Returns the principal unit of this unit, otherwise None"""
535+ # Juju 2.2 and above provides JUJU_PRINCIPAL_UNIT
536+ principal_unit = os.environ.get('JUJU_PRINCIPAL_UNIT', None)
537+ # If it's empty, then this unit is the principal
538+ if principal_unit == '':
539+ return os.environ['JUJU_UNIT_NAME']
540+ elif principal_unit is not None:
541+ return principal_unit
542+ # For Juju 2.1 and below, let's try to work out the principal unit by
543+ # the various charms' metadata.yaml.
544+ for reltype in relation_types():
545+ for rid in relation_ids(reltype):
546+ for unit in related_units(rid):
547+ md = _metadata_unit(unit)
548+ if not md:
549+ continue
550+ subordinate = md.pop('subordinate', None)
551+ if not subordinate:
552+ return unit
553+ return None
554+
555+
556 @cached
557 def remote_service_name(relid=None):
558 """The remote service name for a given relation-id (or the current relation)"""
559@@ -263,7 +349,7 @@ class Config(dict):
560 self.implicit_save = True
561 self._prev_dict = None
562 self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME)
563- if os.path.exists(self.path):
564+ if os.path.exists(self.path) and os.stat(self.path).st_size:
565 self.load_previous()
566 atexit(self._implicit_save)
567
568@@ -283,7 +369,11 @@ class Config(dict):
569 """
570 self.path = path or self.path
571 with open(self.path) as f:
572- self._prev_dict = json.load(f)
573+ try:
574+ self._prev_dict = json.load(f)
575+ except ValueError as e:
576+ log('Unable to parse previous config data - {}'.format(str(e)),
577+ level=ERROR)
578 for k, v in copy.deepcopy(self._prev_dict).items():
579 if k not in self:
580 self[k] = v
581@@ -319,6 +409,7 @@ class Config(dict):
582
583 """
584 with open(self.path, 'w') as f:
585+ os.fchmod(f.fileno(), 0o600)
586 json.dump(self, f)
587
588 def _implicit_save(self):
589@@ -326,22 +417,40 @@ class Config(dict):
590 self.save()
591
592
593-@cached
594+_cache_config = None
595+
596+
597 def config(scope=None):
598- """Juju charm configuration"""
599- config_cmd_line = ['config-get']
600- if scope is not None:
601- config_cmd_line.append(scope)
602- else:
603- config_cmd_line.append('--all')
604- config_cmd_line.append('--format=json')
605+ """
606+ Get the juju charm configuration (scope==None) or individual key,
607+ (scope=str). The returned value is a Python data structure loaded as
608+ JSON from the Juju config command.
609+
610+ :param scope: If set, return the value for the specified key.
611+ :type scope: Optional[str]
612+ :returns: Either the whole config as a Config, or a key from it.
613+ :rtype: Any
614+ """
615+ global _cache_config
616+ config_cmd_line = ['config-get', '--all', '--format=json']
617 try:
618- config_data = json.loads(
619- subprocess.check_output(config_cmd_line).decode('UTF-8'))
620+ # JSON Decode Exception for Python3.5+
621+ exc_json = json.decoder.JSONDecodeError
622+ except AttributeError:
623+ # JSON Decode Exception for Python2.7 through Python3.4
624+ exc_json = ValueError
625+ try:
626+ if _cache_config is None:
627+ config_data = json.loads(
628+ subprocess.check_output(config_cmd_line).decode('UTF-8'))
629+ _cache_config = Config(config_data)
630 if scope is not None:
631- return config_data
632- return Config(config_data)
633- except ValueError:
634+ return _cache_config.get(scope)
635+ return _cache_config
636+ except (exc_json, UnicodeDecodeError) as e:
637+ log('Unable to parse output from config-get: config_cmd_line="{}" '
638+ 'message="{}"'
639+ .format(config_cmd_line, str(e)), level=ERROR)
640 return None
641
642
643@@ -435,6 +544,67 @@ def related_units(relid=None):
644 subprocess.check_output(units_cmd_line).decode('UTF-8')) or []
645
646
647+def expected_peer_units():
648+ """Get a generator for units we expect to join peer relation based on
649+ goal-state.
650+
651+ The local unit is excluded from the result to make it easy to gauge
652+ completion of all peers joining the relation with existing hook tools.
653+
654+ Example usage:
655+ log('peer {} of {} joined peer relation'
656+ .format(len(related_units()),
657+ len(list(expected_peer_units()))))
658+
659+ This function will raise NotImplementedError if used with juju versions
660+ without goal-state support.
661+
662+ :returns: iterator
663+ :rtype: types.GeneratorType
664+ :raises: NotImplementedError
665+ """
666+ if not has_juju_version("2.4.0"):
667+ # goal-state first appeared in 2.4.0.
668+ raise NotImplementedError("goal-state")
669+ _goal_state = goal_state()
670+ return (key for key in _goal_state['units']
671+ if '/' in key and key != local_unit())
672+
673+
674+def expected_related_units(reltype=None):
675+ """Get a generator for units we expect to join relation based on
676+ goal-state.
677+
678+ Note that you can not use this function for the peer relation, take a look
679+ at expected_peer_units() for that.
680+
681+ This function will raise KeyError if you request information for a
682+ relation type for which juju goal-state does not have information. It will
683+ raise NotImplementedError if used with juju versions without goal-state
684+ support.
685+
686+ Example usage:
687+ log('participant {} of {} joined relation {}'
688+ .format(len(related_units()),
689+ len(list(expected_related_units())),
690+ relation_type()))
691+
692+ :param reltype: Relation type to list data for, default is to list data for
693+ the relation type we are currently executing a hook for.
694+ :type reltype: str
695+ :returns: iterator
696+ :rtype: types.GeneratorType
697+ :raises: KeyError, NotImplementedError
698+ """
699+ if not has_juju_version("2.4.4"):
700+ # goal-state existed in 2.4.0, but did not list individual units to
701+ # join a relation in 2.4.1 through 2.4.3. (LP: #1794739)
702+ raise NotImplementedError("goal-state relation unit count")
703+ reltype = reltype or relation_type()
704+ _goal_state = goal_state()
705+ return (key for key in _goal_state['relations'][reltype] if '/' in key)
706+
707+
708 @cached
709 def relation_for_unit(unit=None, rid=None):
710 """Get the json represenation of a unit's relation"""
711@@ -478,6 +648,24 @@ def metadata():
712 return yaml.safe_load(md)
713
714
715+def _metadata_unit(unit):
716+ """Given the name of a unit (e.g. apache2/0), get the unit charm's
717+ metadata.yaml. Very similar to metadata() but allows us to inspect
718+ other units. Unit needs to be co-located, such as a subordinate or
719+ principal/primary.
720+
721+ :returns: metadata.yaml as a python object.
722+
723+ """
724+ basedir = os.sep.join(charm_dir().split(os.sep)[:-2])
725+ unitdir = 'unit-{}'.format(unit.replace(os.sep, '-'))
726+ joineddir = os.path.join(basedir, unitdir, 'charm', 'metadata.yaml')
727+ if not os.path.exists(joineddir):
728+ return None
729+ with open(joineddir) as md:
730+ return yaml.safe_load(md)
731+
732+
733 @cached
734 def relation_types():
735 """Get a list of relation types supported by this charm"""
736@@ -602,18 +790,31 @@ def is_relation_made(relation, keys='private-address'):
737 return False
738
739
740+def _port_op(op_name, port, protocol="TCP"):
741+ """Open or close a service network port"""
742+ _args = [op_name]
743+ icmp = protocol.upper() == "ICMP"
744+ if icmp:
745+ _args.append(protocol)
746+ else:
747+ _args.append('{}/{}'.format(port, protocol))
748+ try:
749+ subprocess.check_call(_args)
750+ except subprocess.CalledProcessError:
751+ # Older Juju pre 2.3 doesn't support ICMP
752+ # so treat it as a no-op if it fails.
753+ if not icmp:
754+ raise
755+
756+
757 def open_port(port, protocol="TCP"):
758 """Open a service network port"""
759- _args = ['open-port']
760- _args.append('{}/{}'.format(port, protocol))
761- subprocess.check_call(_args)
762+ _port_op('open-port', port, protocol)
763
764
765 def close_port(port, protocol="TCP"):
766 """Close a service network port"""
767- _args = ['close-port']
768- _args.append('{}/{}'.format(port, protocol))
769- subprocess.check_call(_args)
770+ _port_op('close-port', port, protocol)
771
772
773 def open_ports(start, end, protocol="TCP"):
774@@ -630,6 +831,17 @@ def close_ports(start, end, protocol="TCP"):
775 subprocess.check_call(_args)
776
777
778+def opened_ports():
779+ """Get the opened ports
780+
781+ *Note that this will only show ports opened in a previous hook*
782+
783+ :returns: Opened ports as a list of strings: ``['8080/tcp', '8081-8083/tcp']``
784+ """
785+ _args = ['opened-ports', '--format=json']
786+ return json.loads(subprocess.check_output(_args).decode('UTF-8'))
787+
788+
789 @cached
790 def unit_get(attribute):
791 """Get the unit ID for the remote unit"""
792@@ -751,14 +963,35 @@ class Hooks(object):
793 return wrapper
794
795
796+class NoNetworkBinding(Exception):
797+ pass
798+
799+
800 def charm_dir():
801 """Return the root directory of the current charm"""
802+ d = os.environ.get('JUJU_CHARM_DIR')
803+ if d is not None:
804+ return d
805 return os.environ.get('CHARM_DIR')
806
807
808+def cmd_exists(cmd):
809+ """Return True if the specified cmd exists in the path"""
810+ return any(
811+ os.access(os.path.join(path, cmd), os.X_OK)
812+ for path in os.environ["PATH"].split(os.pathsep)
813+ )
814+
815+
816 @cached
817+@deprecate("moved to function_get()", log=log)
818 def action_get(key=None):
819- """Gets the value of an action parameter, or all key/value param pairs"""
820+ """
821+ .. deprecated:: 0.20.7
822+ Alias for :func:`function_get`.
823+
824+ Gets the value of an action parameter, or all key/value param pairs.
825+ """
826 cmd = ['action-get']
827 if key is not None:
828 cmd.append(key)
829@@ -767,52 +1000,130 @@ def action_get(key=None):
830 return action_data
831
832
833+@cached
834+def function_get(key=None):
835+ """Gets the value of an action parameter, or all key/value param pairs"""
836+ cmd = ['function-get']
837+ # Fallback for older charms.
838+ if not cmd_exists('function-get'):
839+ cmd = ['action-get']
840+
841+ if key is not None:
842+ cmd.append(key)
843+ cmd.append('--format=json')
844+ function_data = json.loads(subprocess.check_output(cmd).decode('UTF-8'))
845+ return function_data
846+
847+
848+@deprecate("moved to function_set()", log=log)
849 def action_set(values):
850- """Sets the values to be returned after the action finishes"""
851+ """
852+ .. deprecated:: 0.20.7
853+ Alias for :func:`function_set`.
854+
855+ Sets the values to be returned after the action finishes.
856+ """
857 cmd = ['action-set']
858 for k, v in list(values.items()):
859 cmd.append('{}={}'.format(k, v))
860 subprocess.check_call(cmd)
861
862
863+def function_set(values):
864+ """Sets the values to be returned after the function finishes"""
865+ cmd = ['function-set']
866+ # Fallback for older charms.
867+ if not cmd_exists('function-get'):
868+ cmd = ['action-set']
869+
870+ for k, v in list(values.items()):
871+ cmd.append('{}={}'.format(k, v))
872+ subprocess.check_call(cmd)
873+
874+
875+@deprecate("moved to function_fail()", log=log)
876 def action_fail(message):
877- """Sets the action status to failed and sets the error message.
878+ """
879+ .. deprecated:: 0.20.7
880+ Alias for :func:`function_fail`.
881+
882+ Sets the action status to failed and sets the error message.
883
884- The results set by action_set are preserved."""
885+ The results set by action_set are preserved.
886+ """
887 subprocess.check_call(['action-fail', message])
888
889
890+def function_fail(message):
891+ """Sets the function status to failed and sets the error message.
892+
893+ The results set by function_set are preserved."""
894+ cmd = ['function-fail']
895+ # Fallback for older charms.
896+ if not cmd_exists('function-fail'):
897+ cmd = ['action-fail']
898+ cmd.append(message)
899+
900+ subprocess.check_call(cmd)
901+
902+
903 def action_name():
904 """Get the name of the currently executing action."""
905 return os.environ.get('JUJU_ACTION_NAME')
906
907
908+def function_name():
909+ """Get the name of the currently executing function."""
910+ return os.environ.get('JUJU_FUNCTION_NAME') or action_name()
911+
912+
913 def action_uuid():
914 """Get the UUID of the currently executing action."""
915 return os.environ.get('JUJU_ACTION_UUID')
916
917
918+def function_id():
919+ """Get the ID of the currently executing function."""
920+ return os.environ.get('JUJU_FUNCTION_ID') or action_uuid()
921+
922+
923 def action_tag():
924 """Get the tag for the currently executing action."""
925 return os.environ.get('JUJU_ACTION_TAG')
926
927
928-def status_set(workload_state, message):
929+def function_tag():
930+ """Get the tag for the currently executing function."""
931+ return os.environ.get('JUJU_FUNCTION_TAG') or action_tag()
932+
933+
934+def status_set(workload_state, message, application=False):
935 """Set the workload state with a message
936
937 Use status-set to set the workload state with a message which is visible
938 to the user via juju status. If the status-set command is not found then
939- assume this is juju < 1.23 and juju-log the message unstead.
940+ assume this is juju < 1.23 and juju-log the message instead.
941
942- workload_state -- valid juju workload state.
943- message -- status update message
944+ workload_state -- valid juju workload state. str or WORKLOAD_STATES
945+ message -- status update message
946+ application -- Whether this is an application state set
947 """
948- valid_states = ['maintenance', 'blocked', 'waiting', 'active']
949- if workload_state not in valid_states:
950- raise ValueError(
951- '{!r} is not a valid workload state'.format(workload_state)
952- )
953- cmd = ['status-set', workload_state, message]
954+ bad_state_msg = '{!r} is not a valid workload state'
955+
956+ if isinstance(workload_state, str):
957+ try:
958+ # Convert string to enum.
959+ workload_state = WORKLOAD_STATES[workload_state.upper()]
960+ except KeyError:
961+ raise ValueError(bad_state_msg.format(workload_state))
962+
963+ if workload_state not in WORKLOAD_STATES:
964+ raise ValueError(bad_state_msg.format(workload_state))
965+
966+ cmd = ['status-set']
967+ if application:
968+ cmd.append('--application')
969+ cmd.extend([workload_state.value, message])
970 try:
971 ret = subprocess.call(cmd)
972 if ret == 0:
973@@ -820,7 +1131,7 @@ def status_set(workload_state, message):
974 except OSError as e:
975 if e.errno != errno.ENOENT:
976 raise
977- log_message = 'status-set failed: {} {}'.format(workload_state,
978+ log_message = 'status-set failed: {} {}'.format(workload_state.value,
979 message)
980 log(log_message, level='INFO')
981
982@@ -874,6 +1185,14 @@ def application_version_set(version):
983
984
985 @translate_exc(from_exc=OSError, to_exc=NotImplementedError)
986+@cached
987+def goal_state():
988+ """Juju goal state values"""
989+ cmd = ['goal-state', '--format=json']
990+ return json.loads(subprocess.check_output(cmd).decode('UTF-8'))
991+
992+
993+@translate_exc(from_exc=OSError, to_exc=NotImplementedError)
994 def is_leader():
995 """Does the current unit hold the juju leadership
996
997@@ -967,7 +1286,6 @@ def juju_version():
998 universal_newlines=True).strip()
999
1000
1001-@cached
1002 def has_juju_version(minimum_version):
1003 """Return True if the Juju version is at least the provided version"""
1004 return LooseVersion(juju_version()) >= LooseVersion(minimum_version)
1005@@ -1027,6 +1345,8 @@ def _run_atexit():
1006 @translate_exc(from_exc=OSError, to_exc=NotImplementedError)
1007 def network_get_primary_address(binding):
1008 '''
1009+ Deprecated since Juju 2.3; use network_get()
1010+
1011 Retrieve the primary network address for a named binding
1012
1013 :param binding: string. The name of a relation of extra-binding
1014@@ -1034,7 +1354,41 @@ def network_get_primary_address(binding):
1015 :raise: NotImplementedError if run on Juju < 2.0
1016 '''
1017 cmd = ['network-get', '--primary-address', binding]
1018- return subprocess.check_output(cmd).decode('UTF-8').strip()
1019+ try:
1020+ response = subprocess.check_output(
1021+ cmd,
1022+ stderr=subprocess.STDOUT).decode('UTF-8').strip()
1023+ except CalledProcessError as e:
1024+ if 'no network config found for binding' in e.output.decode('UTF-8'):
1025+ raise NoNetworkBinding("No network binding for {}"
1026+ .format(binding))
1027+ else:
1028+ raise
1029+ return response
1030+
1031+
1032+def network_get(endpoint, relation_id=None):
1033+ """
1034+ Retrieve the network details for a relation endpoint
1035+
1036+ :param endpoint: string. The name of a relation endpoint
1037+ :param relation_id: int. The ID of the relation for the current context.
1038+ :return: dict. The loaded YAML output of the network-get query.
1039+ :raise: NotImplementedError if request not supported by the Juju version.
1040+ """
1041+ if not has_juju_version('2.2'):
1042+ raise NotImplementedError(juju_version()) # earlier versions require --primary-address
1043+ if relation_id and not has_juju_version('2.3'):
1044+ raise NotImplementedError # 2.3 added the -r option
1045+
1046+ cmd = ['network-get', endpoint, '--format', 'yaml']
1047+ if relation_id:
1048+ cmd.append('-r')
1049+ cmd.append(relation_id)
1050+ response = subprocess.check_output(
1051+ cmd,
1052+ stderr=subprocess.STDOUT).decode('UTF-8').strip()
1053+ return yaml.safe_load(response)
1054
1055
1056 def add_metric(*args, **kwargs):
1057@@ -1066,3 +1420,192 @@ def meter_info():
1058 """Get the meter status information, if running in the meter-status-changed
1059 hook."""
1060 return os.environ.get('JUJU_METER_INFO')
1061+
1062+
1063+def iter_units_for_relation_name(relation_name):
1064+ """Iterate through all units in a relation
1065+
1066+ Generator that iterates through all the units in a relation and yields
1067+ a named tuple with rid and unit field names.
1068+
1069+ Usage:
1070+ data = [(u.rid, u.unit)
1071+ for u in iter_units_for_relation_name(relation_name)]
1072+
1073+ :param relation_name: string relation name
1074+ :yield: Named Tuple with rid and unit field names
1075+ """
1076+ RelatedUnit = namedtuple('RelatedUnit', 'rid, unit')
1077+ for rid in relation_ids(relation_name):
1078+ for unit in related_units(rid):
1079+ yield RelatedUnit(rid, unit)
1080+
1081+
1082+def ingress_address(rid=None, unit=None):
1083+ """
1084+ Retrieve the ingress-address from a relation when available.
1085+ Otherwise, return the private-address.
1086+
1087+ When used on the consuming side of the relation (unit is a remote
1088+ unit), the ingress-address is the IP address that this unit needs
1089+ to use to reach the provided service on the remote unit.
1090+
1091+ When used on the providing side of the relation (unit == local_unit()),
1092+ the ingress-address is the IP address that is advertised to remote
1093+ units on this relation. Remote units need to use this address to
1094+ reach the local provided service on this unit.
1095+
1096+ Note that charms may document some other method to use in
1097+ preference to the ingress_address(), such as an address provided
1098+ on a different relation attribute or a service discovery mechanism.
1099+ This allows charms to redirect inbound connections to their peers
1100+ or different applications such as load balancers.
1101+
1102+ Usage:
1103+ addresses = [ingress_address(rid=u.rid, unit=u.unit)
1104+ for u in iter_units_for_relation_name(relation_name)]
1105+
1106+ :param rid: string relation id
1107+ :param unit: string unit name
1108+ :side effect: calls relation_get
1109+ :return: string IP address
1110+ """
1111+ settings = relation_get(rid=rid, unit=unit)
1112+ return (settings.get('ingress-address') or
1113+ settings.get('private-address'))
1114+
1115+
1116+def egress_subnets(rid=None, unit=None):
1117+ """
1118+ Retrieve the egress-subnets from a relation.
1119+
1120+ This function is to be used on the providing side of the
1121+ relation, and provides the ranges of addresses that client
1122+ connections may come from. The result is uninteresting on
1123+ the consuming side of a relation (unit == local_unit()).
1124+
1125+ Returns a stable list of subnets in CIDR format.
1126+ eg. ['192.168.1.0/24', '2001::F00F/128']
1127+
1128+ If egress-subnets is not available, falls back to using the published
1129+ ingress-address, or finally private-address.
1130+
1131+ :param rid: string relation id
1132+ :param unit: string unit name
1133+ :side effect: calls relation_get
1134+ :return: list of subnets in CIDR format. eg. ['192.168.1.0/24', '2001::F00F/128']
1135+ """
1136+ def _to_range(addr):
1137+ if re.search(r'^(?:\d{1,3}\.){3}\d{1,3}$', addr) is not None:
1138+ addr += '/32'
1139+ elif ':' in addr and '/' not in addr: # IPv6
1140+ addr += '/128'
1141+ return addr
1142+
1143+ settings = relation_get(rid=rid, unit=unit)
1144+ if 'egress-subnets' in settings:
1145+ return [n.strip() for n in settings['egress-subnets'].split(',') if n.strip()]
1146+ if 'ingress-address' in settings:
1147+ return [_to_range(settings['ingress-address'])]
1148+ if 'private-address' in settings:
1149+ return [_to_range(settings['private-address'])]
1150+ return [] # Should never happen
1151+
1152+
1153+def unit_doomed(unit=None):
1154+ """Determines if the unit is being removed from the model
1155+
1156+ Requires Juju 2.4.1.
1157+
1158+ :param unit: string unit name, defaults to local_unit
1159+ :side effect: calls goal_state
1160+ :side effect: calls local_unit
1161+ :side effect: calls has_juju_version
1162+ :return: True if the unit is being removed, already gone, or never existed
1163+ """
1164+ if not has_juju_version("2.4.1"):
1165+ # We cannot risk blindly returning False for 'we don't know',
1166+ # because that could cause data loss; if call sites don't
1167+ # need an accurate answer, they likely don't need this helper
1168+ # at all.
1169+ # goal-state existed in 2.4.0, but did not handle removals
1170+ # correctly until 2.4.1.
1171+ raise NotImplementedError("is_doomed")
1172+ if unit is None:
1173+ unit = local_unit()
1174+ gs = goal_state()
1175+ units = gs.get('units', {})
1176+ if unit not in units:
1177+ return True
1178+ # I don't think 'dead' units ever show up in the goal-state, but
1179+ # check anyway in addition to 'dying'.
1180+ return units[unit]['status'] in ('dying', 'dead')
1181+
1182+
1183+def env_proxy_settings(selected_settings=None):
1184+ """Get proxy settings from process environment variables.
1185+
1186+ Get charm proxy settings from environment variables that correspond to
 1187+ juju-http-proxy, juju-https-proxy, juju-no-proxy (available as of 2.4.2, see
1188+ lp:1782236) and juju-ftp-proxy in a format suitable for passing to an
1189+ application that reacts to proxy settings passed as environment variables.
1190+ Some applications support lowercase or uppercase notation (e.g. curl), some
1191+ support only lowercase (e.g. wget), there are also subjectively rare cases
1192+ of only uppercase notation support. no_proxy CIDR and wildcard support also
1193+ varies between runtimes and applications as there is no enforced standard.
1194+
1195+ Some applications may connect to multiple destinations and expose config
1196+ options that would affect only proxy settings for a specific destination
1197+ these should be handled in charms in an application-specific manner.
1198+
1199+ :param selected_settings: format only a subset of possible settings
1200+ :type selected_settings: list
1201+ :rtype: Option(None, dict[str, str])
1202+ """
1203+ SUPPORTED_SETTINGS = {
1204+ 'http': 'HTTP_PROXY',
1205+ 'https': 'HTTPS_PROXY',
1206+ 'no_proxy': 'NO_PROXY',
1207+ 'ftp': 'FTP_PROXY'
1208+ }
1209+ if selected_settings is None:
1210+ selected_settings = SUPPORTED_SETTINGS
1211+
1212+ selected_vars = [v for k, v in SUPPORTED_SETTINGS.items()
1213+ if k in selected_settings]
1214+ proxy_settings = {}
1215+ for var in selected_vars:
1216+ var_val = os.getenv(var)
1217+ if var_val:
1218+ proxy_settings[var] = var_val
1219+ proxy_settings[var.lower()] = var_val
1220+ # Now handle juju-prefixed environment variables. The legacy vs new
1221+ # environment variable usage is mutually exclusive
1222+ charm_var_val = os.getenv('JUJU_CHARM_{}'.format(var))
1223+ if charm_var_val:
1224+ proxy_settings[var] = charm_var_val
1225+ proxy_settings[var.lower()] = charm_var_val
1226+ if 'no_proxy' in proxy_settings:
1227+ if _contains_range(proxy_settings['no_proxy']):
1228+ log(RANGE_WARNING, level=WARNING)
1229+ return proxy_settings if proxy_settings else None
1230+
1231+
1232+def _contains_range(addresses):
1233+ """Check for cidr or wildcard domain in a string.
1234+
 1235+ Given a string comprising a comma separated list of ip addresses
1236+ and domain names, determine whether the string contains IP ranges
1237+ or wildcard domains.
1238+
 1239+ :param addresses: comma separated list of domains and ip addresses.
1240+ :type addresses: str
1241+ """
1242+ return (
1243+ # Test for cidr (e.g. 10.20.20.0/24)
1244+ "/" in addresses or
1245+ # Test for wildcard domains (*.foo.com or .foo.com)
1246+ "*" in addresses or
1247+ addresses.startswith(".") or
1248+ ",." in addresses or
1249+ " ." in addresses)
1250diff --git a/charmhelpers/core/host.py b/charmhelpers/core/host.py
1251index b0043cb..b33ac90 100644
1252--- a/charmhelpers/core/host.py
1253+++ b/charmhelpers/core/host.py
1254@@ -34,21 +34,23 @@ import six
1255
1256 from contextlib import contextmanager
1257 from collections import OrderedDict
1258-from .hookenv import log
1259+from .hookenv import log, INFO, DEBUG, local_unit, charm_name
1260 from .fstab import Fstab
1261 from charmhelpers.osplatform import get_platform
1262
1263 __platform__ = get_platform()
1264 if __platform__ == "ubuntu":
1265- from charmhelpers.core.host_factory.ubuntu import (
1266+ from charmhelpers.core.host_factory.ubuntu import ( # NOQA:F401
1267 service_available,
1268 add_new_group,
1269 lsb_release,
1270 cmp_pkgrevno,
1271 CompareHostReleases,
1272+ get_distrib_codename,
1273+ arch
1274 ) # flake8: noqa -- ignore F401 for this import
1275 elif __platform__ == "centos":
1276- from charmhelpers.core.host_factory.centos import (
1277+ from charmhelpers.core.host_factory.centos import ( # NOQA:F401
1278 service_available,
1279 add_new_group,
1280 lsb_release,
1281@@ -58,6 +60,7 @@ elif __platform__ == "centos":
1282
1283 UPDATEDB_PATH = '/etc/updatedb.conf'
1284
1285+
1286 def service_start(service_name, **kwargs):
1287 """Start a system service.
1288
1289@@ -287,8 +290,8 @@ def service_running(service_name, **kwargs):
1290 for key, value in six.iteritems(kwargs):
1291 parameter = '%s=%s' % (key, value)
1292 cmd.append(parameter)
1293- output = subprocess.check_output(cmd,
1294- stderr=subprocess.STDOUT).decode('UTF-8')
1295+ output = subprocess.check_output(
1296+ cmd, stderr=subprocess.STDOUT).decode('UTF-8')
1297 except subprocess.CalledProcessError:
1298 return False
1299 else:
1300@@ -441,6 +444,51 @@ def add_user_to_group(username, group):
1301 subprocess.check_call(cmd)
1302
1303
1304+def chage(username, lastday=None, expiredate=None, inactive=None,
1305+ mindays=None, maxdays=None, root=None, warndays=None):
1306+ """Change user password expiry information
1307+
1308+ :param str username: User to update
1309+ :param str lastday: Set when password was changed in YYYY-MM-DD format
1310+ :param str expiredate: Set when user's account will no longer be
1311+ accessible in YYYY-MM-DD format.
1312+ -1 will remove an account expiration date.
1313+ :param str inactive: Set the number of days of inactivity after a password
1314+ has expired before the account is locked.
1315+ -1 will remove an account's inactivity.
1316+ :param str mindays: Set the minimum number of days between password
1317+ changes to MIN_DAYS.
1318+ 0 indicates the password can be changed anytime.
1319+ :param str maxdays: Set the maximum number of days during which a
1320+ password is valid.
1321+ -1 as MAX_DAYS will remove checking maxdays
1322+ :param str root: Apply changes in the CHROOT_DIR directory
1323+ :param str warndays: Set the number of days of warning before a password
1324+ change is required
1325+ :raises subprocess.CalledProcessError: if call to chage fails
1326+ """
1327+ cmd = ['chage']
1328+ if root:
1329+ cmd.extend(['--root', root])
1330+ if lastday:
1331+ cmd.extend(['--lastday', lastday])
1332+ if expiredate:
1333+ cmd.extend(['--expiredate', expiredate])
1334+ if inactive:
1335+ cmd.extend(['--inactive', inactive])
1336+ if mindays:
1337+ cmd.extend(['--mindays', mindays])
1338+ if maxdays:
1339+ cmd.extend(['--maxdays', maxdays])
1340+ if warndays:
1341+ cmd.extend(['--warndays', warndays])
1342+ cmd.append(username)
1343+ subprocess.check_call(cmd)
1344+
1345+
1346+remove_password_expiry = functools.partial(chage, expiredate='-1', inactive='-1', mindays='0', maxdays='-1')
1347+
1348+
1349 def rsync(from_path, to_path, flags='-r', options=None, timeout=None):
1350 """Replicate the contents of a path"""
1351 options = options or ['--delete', '--executability']
1352@@ -487,13 +535,45 @@ def mkdir(path, owner='root', group='root', perms=0o555, force=False):
1353
1354 def write_file(path, content, owner='root', group='root', perms=0o444):
1355 """Create or overwrite a file with the contents of a byte string."""
1356- log("Writing file {} {}:{} {:o}".format(path, owner, group, perms))
1357 uid = pwd.getpwnam(owner).pw_uid
1358 gid = grp.getgrnam(group).gr_gid
1359- with open(path, 'wb') as target:
1360- os.fchown(target.fileno(), uid, gid)
1361- os.fchmod(target.fileno(), perms)
1362- target.write(content)
 1363+ # let's see if we can grab the file and compare the content, to avoid doing
 1364+ # a write.
1365+ existing_content = None
1366+ existing_uid, existing_gid, existing_perms = None, None, None
1367+ try:
1368+ with open(path, 'rb') as target:
1369+ existing_content = target.read()
1370+ stat = os.stat(path)
1371+ existing_uid, existing_gid, existing_perms = (
1372+ stat.st_uid, stat.st_gid, stat.st_mode
1373+ )
1374+ except Exception:
1375+ pass
1376+ if content != existing_content:
1377+ log("Writing file {} {}:{} {:o}".format(path, owner, group, perms),
1378+ level=DEBUG)
1379+ with open(path, 'wb') as target:
1380+ os.fchown(target.fileno(), uid, gid)
1381+ os.fchmod(target.fileno(), perms)
1382+ if six.PY3 and isinstance(content, six.string_types):
1383+ content = content.encode('UTF-8')
1384+ target.write(content)
1385+ return
1386+ # the contents were the same, but we might still need to change the
1387+ # ownership or permissions.
1388+ if existing_uid != uid:
1389+ log("Changing uid on already existing content: {} -> {}"
1390+ .format(existing_uid, uid), level=DEBUG)
1391+ os.chown(path, uid, -1)
1392+ if existing_gid != gid:
1393+ log("Changing gid on already existing content: {} -> {}"
1394+ .format(existing_gid, gid), level=DEBUG)
1395+ os.chown(path, -1, gid)
1396+ if existing_perms != perms:
1397+ log("Changing permissions on existing content: {} -> {}"
1398+ .format(existing_perms, perms), level=DEBUG)
1399+ os.chmod(path, perms)
1400
1401
1402 def fstab_remove(mp):
1403@@ -758,7 +838,7 @@ def list_nics(nic_type=None):
1404 ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n')
1405 ip_output = (line.strip() for line in ip_output if line)
1406
1407- key = re.compile('^[0-9]+:\s+(.+):')
1408+ key = re.compile(r'^[0-9]+:\s+(.+):')
1409 for line in ip_output:
1410 matched = re.search(key, line)
1411 if matched:
1412@@ -903,6 +983,20 @@ def is_container():
1413
1414
1415 def add_to_updatedb_prunepath(path, updatedb_path=UPDATEDB_PATH):
 1416+ """Adds the specified path to the mlocate's updatedb.conf PRUNEPATHS list.
1417+
1418+ This method has no effect if the path specified by updatedb_path does not
1419+ exist or is not a file.
1420+
1421+ @param path: string the path to add to the updatedb.conf PRUNEPATHS value
 1422+ @param updatedb_path: the path to the updatedb.conf file
1423+ """
1424+ if not os.path.exists(updatedb_path) or os.path.isdir(updatedb_path):
1425+ # If the updatedb.conf file doesn't exist then don't attempt to update
1426+ # the file as the package providing mlocate may not be installed on
1427+ # the local system
1428+ return
1429+
1430 with open(updatedb_path, 'r+') as f_id:
1431 updatedb_text = f_id.read()
1432 output = updatedb(updatedb_text, path)
1433@@ -922,3 +1016,89 @@ def updatedb(updatedb_text, new_path):
1434 lines[i] = 'PRUNEPATHS="{}"'.format(' '.join(paths))
1435 output = "\n".join(lines)
1436 return output
1437+
1438+
1439+def modulo_distribution(modulo=3, wait=30, non_zero_wait=False):
1440+ """ Modulo distribution
1441+
1442+ This helper uses the unit number, a modulo value and a constant wait time
1443+ to produce a calculated wait time distribution. This is useful in large
1444+ scale deployments to distribute load during an expensive operation such as
1445+ service restarts.
1446+
1447+ If you have 1000 nodes that need to restart 100 at a time 1 minute at a
1448+ time:
1449+
1450+ time.wait(modulo_distribution(modulo=100, wait=60))
1451+ restart()
1452+
1453+ If you need restarts to happen serially set modulo to the exact number of
1454+ nodes and set a high constant wait time:
1455+
1456+ time.wait(modulo_distribution(modulo=10, wait=120))
1457+ restart()
1458+
1459+ @param modulo: int The modulo number creates the group distribution
1460+ @param wait: int The constant time wait value
1461+ @param non_zero_wait: boolean Override unit % modulo == 0,
1462+ return modulo * wait. Used to avoid collisions with
1463+ leader nodes which are often given priority.
1464+ @return: int Calculated time to wait for unit operation
1465+ """
1466+ unit_number = int(local_unit().split('/')[1])
1467+ calculated_wait_time = (unit_number % modulo) * wait
1468+ if non_zero_wait and calculated_wait_time == 0:
1469+ return modulo * wait
1470+ else:
1471+ return calculated_wait_time
1472+
1473+
1474+def install_ca_cert(ca_cert, name=None):
1475+ """
1476+ Install the given cert as a trusted CA.
1477+
1478+ The ``name`` is the stem of the filename where the cert is written, and if
1479+ not provided, it will default to ``juju-{charm_name}``.
1480+
1481+ If the cert is empty or None, or is unchanged, nothing is done.
1482+ """
1483+ if not ca_cert:
1484+ return
1485+ if not isinstance(ca_cert, bytes):
1486+ ca_cert = ca_cert.encode('utf8')
1487+ if not name:
1488+ name = 'juju-{}'.format(charm_name())
1489+ cert_file = '/usr/local/share/ca-certificates/{}.crt'.format(name)
1490+ new_hash = hashlib.md5(ca_cert).hexdigest()
1491+ if file_hash(cert_file) == new_hash:
1492+ return
1493+ log("Installing new CA cert at: {}".format(cert_file), level=INFO)
1494+ write_file(cert_file, ca_cert)
1495+ subprocess.check_call(['update-ca-certificates', '--fresh'])
1496+
1497+
1498+def get_system_env(key, default=None):
1499+ """Get data from system environment as represented in ``/etc/environment``.
1500+
1501+ :param key: Key to look up
1502+ :type key: str
1503+ :param default: Value to return if key is not found
1504+ :type default: any
1505+ :returns: Value for key if found or contents of default parameter
1506+ :rtype: any
1507+ :raises: subprocess.CalledProcessError
1508+ """
1509+ env_file = '/etc/environment'
1510+ # use the shell and env(1) to parse the global environments file. This is
1511+ # done to get the correct result even if the user has shell variable
1512+ # substitutions or other shell logic in that file.
1513+ output = subprocess.check_output(
1514+ ['env', '-i', '/bin/bash', '-c',
1515+ 'set -a && source {} && env'.format(env_file)],
1516+ universal_newlines=True)
1517+ for k, v in (line.split('=', 1)
1518+ for line in output.splitlines() if '=' in line):
1519+ if k == key:
1520+ return v
1521+ else:
1522+ return default
1523diff --git a/charmhelpers/core/host_factory/ubuntu.py b/charmhelpers/core/host_factory/ubuntu.py
1524index d8dc378..3edc068 100644
1525--- a/charmhelpers/core/host_factory/ubuntu.py
1526+++ b/charmhelpers/core/host_factory/ubuntu.py
1527@@ -1,5 +1,6 @@
1528 import subprocess
1529
1530+from charmhelpers.core.hookenv import cached
1531 from charmhelpers.core.strutils import BasicStringComparator
1532
1533
1534@@ -20,6 +21,11 @@ UBUNTU_RELEASES = (
1535 'yakkety',
1536 'zesty',
1537 'artful',
1538+ 'bionic',
1539+ 'cosmic',
1540+ 'disco',
1541+ 'eoan',
1542+ 'focal'
1543 )
1544
1545
1546@@ -70,6 +76,14 @@ def lsb_release():
1547 return d
1548
1549
1550+def get_distrib_codename():
1551+ """Return the codename of the distribution
1552+ :returns: The codename
1553+ :rtype: str
1554+ """
1555+ return lsb_release()['DISTRIB_CODENAME'].lower()
1556+
1557+
1558 def cmp_pkgrevno(package, revno, pkgcache=None):
1559 """Compare supplied revno with the revno of the installed package.
1560
1561@@ -81,9 +95,22 @@ def cmp_pkgrevno(package, revno, pkgcache=None):
1562 the pkgcache argument is None. Be sure to add charmhelpers.fetch if
1563 you call this function, or pass an apt_pkg.Cache() instance.
1564 """
1565- import apt_pkg
1566+ from charmhelpers.fetch import apt_pkg
1567 if not pkgcache:
1568 from charmhelpers.fetch import apt_cache
1569 pkgcache = apt_cache()
1570 pkg = pkgcache[package]
1571 return apt_pkg.version_compare(pkg.current_ver.ver_str, revno)
1572+
1573+
1574+@cached
1575+def arch():
1576+ """Return the package architecture as a string.
1577+
1578+ :returns: the architecture
1579+ :rtype: str
1580+ :raises: subprocess.CalledProcessError if dpkg command fails
1581+ """
1582+ return subprocess.check_output(
1583+ ['dpkg', '--print-architecture']
1584+ ).rstrip().decode('UTF-8')
1585diff --git a/charmhelpers/core/kernel.py b/charmhelpers/core/kernel.py
1586index 2d40452..e01f4f8 100644
1587--- a/charmhelpers/core/kernel.py
1588+++ b/charmhelpers/core/kernel.py
1589@@ -26,12 +26,12 @@ from charmhelpers.core.hookenv import (
1590
1591 __platform__ = get_platform()
1592 if __platform__ == "ubuntu":
1593- from charmhelpers.core.kernel_factory.ubuntu import (
1594+ from charmhelpers.core.kernel_factory.ubuntu import ( # NOQA:F401
1595 persistent_modprobe,
1596 update_initramfs,
1597 ) # flake8: noqa -- ignore F401 for this import
1598 elif __platform__ == "centos":
1599- from charmhelpers.core.kernel_factory.centos import (
1600+ from charmhelpers.core.kernel_factory.centos import ( # NOQA:F401
1601 persistent_modprobe,
1602 update_initramfs,
1603 ) # flake8: noqa -- ignore F401 for this import
1604diff --git a/charmhelpers/core/services/base.py b/charmhelpers/core/services/base.py
1605index ca9dc99..179ad4f 100644
1606--- a/charmhelpers/core/services/base.py
1607+++ b/charmhelpers/core/services/base.py
1608@@ -307,23 +307,34 @@ class PortManagerCallback(ManagerCallback):
1609 """
1610 def __call__(self, manager, service_name, event_name):
1611 service = manager.get_service(service_name)
1612- new_ports = service.get('ports', [])
1613+ # turn this generator into a list,
1614+ # as we'll be going over it multiple times
1615+ new_ports = list(service.get('ports', []))
1616 port_file = os.path.join(hookenv.charm_dir(), '.{}.ports'.format(service_name))
1617 if os.path.exists(port_file):
1618 with open(port_file) as fp:
1619 old_ports = fp.read().split(',')
1620 for old_port in old_ports:
1621- if bool(old_port):
1622- old_port = int(old_port)
1623- if old_port not in new_ports:
1624- hookenv.close_port(old_port)
1625+ if bool(old_port) and not self.ports_contains(old_port, new_ports):
1626+ hookenv.close_port(old_port)
1627 with open(port_file, 'w') as fp:
1628 fp.write(','.join(str(port) for port in new_ports))
1629 for port in new_ports:
1630+ # A port is either a number or 'ICMP'
1631+ protocol = 'TCP'
1632+ if str(port).upper() == 'ICMP':
1633+ protocol = 'ICMP'
1634 if event_name == 'start':
1635- hookenv.open_port(port)
1636+ hookenv.open_port(port, protocol)
1637 elif event_name == 'stop':
1638- hookenv.close_port(port)
1639+ hookenv.close_port(port, protocol)
1640+
1641+ def ports_contains(self, port, ports):
1642+ if not bool(port):
1643+ return False
1644+ if str(port).upper() != 'ICMP':
1645+ port = int(port)
1646+ return port in ports
1647
1648
1649 def service_stop(service_name):
1650diff --git a/charmhelpers/core/strutils.py b/charmhelpers/core/strutils.py
1651index 685dabd..e8df045 100644
1652--- a/charmhelpers/core/strutils.py
1653+++ b/charmhelpers/core/strutils.py
1654@@ -61,13 +61,19 @@ def bytes_from_string(value):
1655 if isinstance(value, six.string_types):
1656 value = six.text_type(value)
1657 else:
1658- msg = "Unable to interpret non-string value '%s' as boolean" % (value)
1659+ msg = "Unable to interpret non-string value '%s' as bytes" % (value)
1660 raise ValueError(msg)
1661 matches = re.match("([0-9]+)([a-zA-Z]+)", value)
1662- if not matches:
1663- msg = "Unable to interpret string value '%s' as bytes" % (value)
1664- raise ValueError(msg)
1665- return int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)])
1666+ if matches:
1667+ size = int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)])
1668+ else:
1669+ # Assume that value passed in is bytes
1670+ try:
1671+ size = int(value)
1672+ except ValueError:
1673+ msg = "Unable to interpret string value '%s' as bytes" % (value)
1674+ raise ValueError(msg)
1675+ return size
1676
1677
1678 class BasicStringComparator(object):
1679diff --git a/charmhelpers/core/sysctl.py b/charmhelpers/core/sysctl.py
1680index 6e413e3..386428d 100644
1681--- a/charmhelpers/core/sysctl.py
1682+++ b/charmhelpers/core/sysctl.py
1683@@ -17,38 +17,59 @@
1684
1685 import yaml
1686
1687-from subprocess import check_call
1688+from subprocess import check_call, CalledProcessError
1689
1690 from charmhelpers.core.hookenv import (
1691 log,
1692 DEBUG,
1693 ERROR,
1694+ WARNING,
1695 )
1696
1697+from charmhelpers.core.host import is_container
1698+
1699 __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>'
1700
1701
1702-def create(sysctl_dict, sysctl_file):
1703+def create(sysctl_dict, sysctl_file, ignore=False):
1704 """Creates a sysctl.conf file from a YAML associative array
1705
1706- :param sysctl_dict: a YAML-formatted string of sysctl options eg "{ 'kernel.max_pid': 1337 }"
1707+ :param sysctl_dict: a dict or YAML-formatted string of sysctl
1708+ options eg "{ 'kernel.max_pid': 1337 }"
1709 :type sysctl_dict: str
1710 :param sysctl_file: path to the sysctl file to be saved
1711 :type sysctl_file: str or unicode
1712+ :param ignore: If True, ignore "unknown variable" errors.
1713+ :type ignore: bool
1714 :returns: None
1715 """
1716- try:
1717- sysctl_dict_parsed = yaml.safe_load(sysctl_dict)
1718- except yaml.YAMLError:
1719- log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict),
1720- level=ERROR)
1721- return
1722+ if type(sysctl_dict) is not dict:
1723+ try:
1724+ sysctl_dict_parsed = yaml.safe_load(sysctl_dict)
1725+ except yaml.YAMLError:
1726+ log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict),
1727+ level=ERROR)
1728+ return
1729+ else:
1730+ sysctl_dict_parsed = sysctl_dict
1731
1732 with open(sysctl_file, "w") as fd:
1733 for key, value in sysctl_dict_parsed.items():
1734 fd.write("{}={}\n".format(key, value))
1735
1736- log("Updating sysctl_file: %s values: %s" % (sysctl_file, sysctl_dict_parsed),
1737+ log("Updating sysctl_file: {} values: {}".format(sysctl_file,
1738+ sysctl_dict_parsed),
1739 level=DEBUG)
1740
1741- check_call(["sysctl", "-p", sysctl_file])
1742+ call = ["sysctl", "-p", sysctl_file]
1743+ if ignore:
1744+ call.append("-e")
1745+
1746+ try:
1747+ check_call(call)
1748+ except CalledProcessError as e:
1749+ if is_container():
1750+ log("Error setting some sysctl keys in this container: {}".format(e.output),
1751+ level=WARNING)
1752+ else:
1753+ raise e
1754diff --git a/charmhelpers/core/templating.py b/charmhelpers/core/templating.py
1755index 7b801a3..9014015 100644
1756--- a/charmhelpers/core/templating.py
1757+++ b/charmhelpers/core/templating.py
1758@@ -20,7 +20,8 @@ from charmhelpers.core import hookenv
1759
1760
1761 def render(source, target, context, owner='root', group='root',
1762- perms=0o444, templates_dir=None, encoding='UTF-8', template_loader=None):
1763+ perms=0o444, templates_dir=None, encoding='UTF-8',
1764+ template_loader=None, config_template=None):
1765 """
1766 Render a template.
1767
1768@@ -32,6 +33,9 @@ def render(source, target, context, owner='root', group='root',
1769 The context should be a dict containing the values to be replaced in the
1770 template.
1771
1772+ config_template may be provided to render from a provided template instead
1773+ of loading from a file.
1774+
1775 The `owner`, `group`, and `perms` options will be passed to `write_file`.
1776
1777 If omitted, `templates_dir` defaults to the `templates` folder in the charm.
1778@@ -65,14 +69,19 @@ def render(source, target, context, owner='root', group='root',
1779 if templates_dir is None:
1780 templates_dir = os.path.join(hookenv.charm_dir(), 'templates')
1781 template_env = Environment(loader=FileSystemLoader(templates_dir))
1782- try:
1783- source = source
1784- template = template_env.get_template(source)
1785- except exceptions.TemplateNotFound as e:
1786- hookenv.log('Could not load template %s from %s.' %
1787- (source, templates_dir),
1788- level=hookenv.ERROR)
1789- raise e
1790+
1791+ # load from a string if provided explicitly
1792+ if config_template is not None:
1793+ template = template_env.from_string(config_template)
1794+ else:
1795+ try:
1796+ source = source
1797+ template = template_env.get_template(source)
1798+ except exceptions.TemplateNotFound as e:
1799+ hookenv.log('Could not load template %s from %s.' %
1800+ (source, templates_dir),
1801+ level=hookenv.ERROR)
1802+ raise e
1803 content = template.render(context)
1804 if target is not None:
1805 target_dir = os.path.dirname(target)
1806diff --git a/charmhelpers/core/unitdata.py b/charmhelpers/core/unitdata.py
1807index 54ec969..ab55432 100644
1808--- a/charmhelpers/core/unitdata.py
1809+++ b/charmhelpers/core/unitdata.py
1810@@ -166,6 +166,10 @@ class Storage(object):
1811
1812 To support dicts, lists, integer, floats, and booleans values
1813 are automatically json encoded/decoded.
1814+
1815+ Note: to facilitate unit testing, ':memory:' can be passed as the
1816+ path parameter which causes sqlite3 to only build the db in memory.
1817+ This should only be used for testing purposes.
1818 """
1819 def __init__(self, path=None):
1820 self.db_path = path
1821@@ -175,6 +179,9 @@ class Storage(object):
1822 else:
1823 self.db_path = os.path.join(
1824 os.environ.get('CHARM_DIR', ''), '.unit-state.db')
1825+ if self.db_path != ':memory:':
1826+ with open(self.db_path, 'a') as f:
1827+ os.fchmod(f.fileno(), 0o600)
1828 self.conn = sqlite3.connect('%s' % self.db_path)
1829 self.cursor = self.conn.cursor()
1830 self.revision = None
1831@@ -358,7 +365,7 @@ class Storage(object):
1832 try:
1833 yield self.revision
1834 self.revision = None
1835- except:
1836+ except Exception:
1837 self.flush(False)
1838 self.revision = None
1839 raise
1840diff --git a/charmhelpers/fetch/__init__.py b/charmhelpers/fetch/__init__.py
1841index ec5e0fe..0cc7fc8 100644
1842--- a/charmhelpers/fetch/__init__.py
1843+++ b/charmhelpers/fetch/__init__.py
1844@@ -48,6 +48,13 @@ class AptLockError(Exception):
1845 pass
1846
1847
1848+class GPGKeyError(Exception):
1849+ """Exception occurs when a GPG key cannot be fetched or used. The message
1850+ indicates what the problem is.
1851+ """
1852+ pass
1853+
1854+
1855 class BaseFetchHandler(object):
1856
1857 """Base class for FetchHandler implementations in fetch plugins"""
1858@@ -77,22 +84,27 @@ module = "charmhelpers.fetch.%s" % __platform__
1859 fetch = importlib.import_module(module)
1860
1861 filter_installed_packages = fetch.filter_installed_packages
1862-install = fetch.install
1863-upgrade = fetch.upgrade
1864-update = fetch.update
1865-purge = fetch.purge
1866+filter_missing_packages = fetch.filter_missing_packages
1867+install = fetch.apt_install
1868+upgrade = fetch.apt_upgrade
1869+update = _fetch_update = fetch.apt_update
1870+purge = fetch.apt_purge
1871 add_source = fetch.add_source
1872
1873 if __platform__ == "ubuntu":
1874 apt_cache = fetch.apt_cache
1875- apt_install = fetch.install
1876- apt_update = fetch.update
1877- apt_upgrade = fetch.upgrade
1878- apt_purge = fetch.purge
1879+ apt_install = fetch.apt_install
1880+ apt_update = fetch.apt_update
1881+ apt_upgrade = fetch.apt_upgrade
1882+ apt_purge = fetch.apt_purge
1883+ apt_autoremove = fetch.apt_autoremove
1884 apt_mark = fetch.apt_mark
1885 apt_hold = fetch.apt_hold
1886 apt_unhold = fetch.apt_unhold
1887+ import_key = fetch.import_key
1888 get_upstream_version = fetch.get_upstream_version
1889+ apt_pkg = fetch.ubuntu_apt_pkg
1890+ get_apt_dpkg_env = fetch.get_apt_dpkg_env
1891 elif __platform__ == "centos":
1892 yum_search = fetch.yum_search
1893
1894@@ -135,7 +147,7 @@ def configure_sources(update=False,
1895 for source, key in zip(sources, keys):
1896 add_source(source, key)
1897 if update:
1898- fetch.update(fatal=True)
1899+ _fetch_update(fatal=True)
1900
1901
1902 def install_remote(source, *args, **kwargs):
1903diff --git a/charmhelpers/fetch/archiveurl.py b/charmhelpers/fetch/archiveurl.py
1904index dd24f9e..d25587a 100644
1905--- a/charmhelpers/fetch/archiveurl.py
1906+++ b/charmhelpers/fetch/archiveurl.py
1907@@ -89,7 +89,7 @@ class ArchiveUrlFetchHandler(BaseFetchHandler):
1908 :param str source: URL pointing to an archive file.
1909 :param str dest: Local path location to download archive file to.
1910 """
1911- # propogate all exceptions
1912+ # propagate all exceptions
1913 # URLError, OSError, etc
1914 proto, netloc, path, params, query, fragment = urlparse(source)
1915 if proto in ('http', 'https'):
1916diff --git a/charmhelpers/fetch/bzrurl.py b/charmhelpers/fetch/bzrurl.py
1917index 07cd029..c4ab3ff 100644
1918--- a/charmhelpers/fetch/bzrurl.py
1919+++ b/charmhelpers/fetch/bzrurl.py
1920@@ -13,7 +13,7 @@
1921 # limitations under the License.
1922
1923 import os
1924-from subprocess import check_call
1925+from subprocess import STDOUT, check_output
1926 from charmhelpers.fetch import (
1927 BaseFetchHandler,
1928 UnhandledSource,
1929@@ -55,7 +55,7 @@ class BzrUrlFetchHandler(BaseFetchHandler):
1930 cmd = ['bzr', 'branch']
1931 cmd += cmd_opts
1932 cmd += [source, dest]
1933- check_call(cmd)
1934+ check_output(cmd, stderr=STDOUT)
1935
1936 def install(self, source, dest=None, revno=None):
1937 url_parts = self.parse_url(source)
1938diff --git a/charmhelpers/fetch/centos.py b/charmhelpers/fetch/centos.py
1939index 604bbfb..a91dcff 100644
1940--- a/charmhelpers/fetch/centos.py
1941+++ b/charmhelpers/fetch/centos.py
1942@@ -132,7 +132,7 @@ def add_source(source, key=None):
1943 key_file.write(key)
1944 key_file.flush()
1945 key_file.seek(0)
1946- subprocess.check_call(['rpm', '--import', key_file])
1947+ subprocess.check_call(['rpm', '--import', key_file.name])
1948 else:
1949 subprocess.check_call(['rpm', '--import', key])
1950
1951diff --git a/charmhelpers/fetch/giturl.py b/charmhelpers/fetch/giturl.py
1952index 4cf21bc..070ca9b 100644
1953--- a/charmhelpers/fetch/giturl.py
1954+++ b/charmhelpers/fetch/giturl.py
1955@@ -13,7 +13,7 @@
1956 # limitations under the License.
1957
1958 import os
1959-from subprocess import check_call, CalledProcessError
1960+from subprocess import check_output, CalledProcessError, STDOUT
1961 from charmhelpers.fetch import (
1962 BaseFetchHandler,
1963 UnhandledSource,
1964@@ -50,7 +50,7 @@ class GitUrlFetchHandler(BaseFetchHandler):
1965 cmd = ['git', 'clone', source, dest, '--branch', branch]
1966 if depth:
1967 cmd.extend(['--depth', depth])
1968- check_call(cmd)
1969+ check_output(cmd, stderr=STDOUT)
1970
1971 def install(self, source, branch="master", dest=None, depth=None):
1972 url_parts = self.parse_url(source)
1973diff --git a/charmhelpers/contrib/python/__init__.py b/charmhelpers/fetch/python/__init__.py
1974similarity index 92%
1975rename from charmhelpers/contrib/python/__init__.py
1976rename to charmhelpers/fetch/python/__init__.py
1977index d7567b8..bff99dc 100644
1978--- a/charmhelpers/contrib/python/__init__.py
1979+++ b/charmhelpers/fetch/python/__init__.py
1980@@ -1,4 +1,4 @@
1981-# Copyright 2014-2015 Canonical Limited.
1982+# Copyright 2014-2019 Canonical Limited.
1983 #
1984 # Licensed under the Apache License, Version 2.0 (the "License");
1985 # you may not use this file except in compliance with the License.
1986diff --git a/charmhelpers/fetch/python/debug.py b/charmhelpers/fetch/python/debug.py
1987new file mode 100644
1988index 0000000..757135e
1989--- /dev/null
1990+++ b/charmhelpers/fetch/python/debug.py
1991@@ -0,0 +1,54 @@
1992+#!/usr/bin/env python
1993+# coding: utf-8
1994+
1995+# Copyright 2014-2015 Canonical Limited.
1996+#
1997+# Licensed under the Apache License, Version 2.0 (the "License");
1998+# you may not use this file except in compliance with the License.
1999+# You may obtain a copy of the License at
2000+#
2001+# http://www.apache.org/licenses/LICENSE-2.0
2002+#
2003+# Unless required by applicable law or agreed to in writing, software
2004+# distributed under the License is distributed on an "AS IS" BASIS,
2005+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
2006+# See the License for the specific language governing permissions and
2007+# limitations under the License.
2008+
2009+from __future__ import print_function
2010+
2011+import atexit
2012+import sys
2013+
2014+from charmhelpers.fetch.python.rpdb import Rpdb
2015+from charmhelpers.core.hookenv import (
2016+ open_port,
2017+ close_port,
2018+ ERROR,
2019+ log
2020+)
2021+
2022+__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>"
2023+
2024+DEFAULT_ADDR = "0.0.0.0"
2025+DEFAULT_PORT = 4444
2026+
2027+
2028+def _error(message):
2029+ log(message, level=ERROR)
2030+
2031+
2032+def set_trace(addr=DEFAULT_ADDR, port=DEFAULT_PORT):
2033+ """
2034+ Set a trace point using the remote debugger
2035+ """
2036+ atexit.register(close_port, port)
2037+ try:
2038+ log("Starting a remote python debugger session on %s:%s" % (addr,
2039+ port))
2040+ open_port(port)
2041+ debugger = Rpdb(addr=addr, port=port)
2042+ debugger.set_trace(sys._getframe().f_back)
2043+ except Exception:
2044+ _error("Cannot start a remote debug session on %s:%s" % (addr,
2045+ port))
2046diff --git a/charmhelpers/contrib/python/packages.py b/charmhelpers/fetch/python/packages.py
2047similarity index 100%
2048rename from charmhelpers/contrib/python/packages.py
2049rename to charmhelpers/fetch/python/packages.py
2050diff --git a/charmhelpers/fetch/python/rpdb.py b/charmhelpers/fetch/python/rpdb.py
2051new file mode 100644
2052index 0000000..9b31610
2053--- /dev/null
2054+++ b/charmhelpers/fetch/python/rpdb.py
2055@@ -0,0 +1,56 @@
2056+# Copyright 2014-2015 Canonical Limited.
2057+#
2058+# Licensed under the Apache License, Version 2.0 (the "License");
2059+# you may not use this file except in compliance with the License.
2060+# You may obtain a copy of the License at
2061+#
2062+# http://www.apache.org/licenses/LICENSE-2.0
2063+#
2064+# Unless required by applicable law or agreed to in writing, software
2065+# distributed under the License is distributed on an "AS IS" BASIS,
2066+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
2067+# See the License for the specific language governing permissions and
2068+# limitations under the License.
2069+
2070+"""Remote Python Debugger (pdb wrapper)."""
2071+
2072+import pdb
2073+import socket
2074+import sys
2075+
2076+__author__ = "Bertrand Janin <b@janin.com>"
2077+__version__ = "0.1.3"
2078+
2079+
2080+class Rpdb(pdb.Pdb):
2081+
2082+ def __init__(self, addr="127.0.0.1", port=4444):
2083+ """Initialize the socket and initialize pdb."""
2084+
2085+ # Backup stdin and stdout before replacing them by the socket handle
2086+ self.old_stdout = sys.stdout
2087+ self.old_stdin = sys.stdin
2088+
2089+ # Open a 'reusable' socket to let the webapp reload on the same port
2090+ self.skt = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
2091+ self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True)
2092+ self.skt.bind((addr, port))
2093+ self.skt.listen(1)
2094+ (clientsocket, address) = self.skt.accept()
2095+ handle = clientsocket.makefile('rw')
2096+ pdb.Pdb.__init__(self, completekey='tab', stdin=handle, stdout=handle)
2097+ sys.stdout = sys.stdin = handle
2098+
2099+ def shutdown(self):
2100+ """Revert stdin and stdout, close the socket."""
2101+ sys.stdout = self.old_stdout
2102+ sys.stdin = self.old_stdin
2103+ self.skt.close()
2104+ self.set_continue()
2105+
2106+ def do_continue(self, arg):
2107+ """Stop all operation on ``continue``."""
2108+ self.shutdown()
2109+ return 1
2110+
2111+ do_EOF = do_quit = do_exit = do_c = do_cont = do_continue
2112diff --git a/charmhelpers/fetch/python/version.py b/charmhelpers/fetch/python/version.py
2113new file mode 100644
2114index 0000000..3eb4210
2115--- /dev/null
2116+++ b/charmhelpers/fetch/python/version.py
2117@@ -0,0 +1,32 @@
2118+#!/usr/bin/env python
2119+# coding: utf-8
2120+
2121+# Copyright 2014-2015 Canonical Limited.
2122+#
2123+# Licensed under the Apache License, Version 2.0 (the "License");
2124+# you may not use this file except in compliance with the License.
2125+# You may obtain a copy of the License at
2126+#
2127+# http://www.apache.org/licenses/LICENSE-2.0
2128+#
2129+# Unless required by applicable law or agreed to in writing, software
2130+# distributed under the License is distributed on an "AS IS" BASIS,
2131+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
2132+# See the License for the specific language governing permissions and
2133+# limitations under the License.
2134+
2135+import sys
2136+
2137+__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>"
2138+
2139+
2140+def current_version():
2141+ """Current system python version"""
2142+ return sys.version_info
2143+
2144+
2145+def current_version_string():
2146+ """Current system python version as string major.minor.micro"""
2147+ return "{0}.{1}.{2}".format(sys.version_info.major,
2148+ sys.version_info.minor,
2149+ sys.version_info.micro)
2150diff --git a/charmhelpers/fetch/snap.py b/charmhelpers/fetch/snap.py
2151index 23c707b..fc70aa9 100644
2152--- a/charmhelpers/fetch/snap.py
2153+++ b/charmhelpers/fetch/snap.py
2154@@ -18,21 +18,33 @@ If writing reactive charms, use the snap layer:
2155 https://lists.ubuntu.com/archives/snapcraft/2016-September/001114.html
2156 """
2157 import subprocess
2158-from os import environ
2159+import os
2160 from time import sleep
2161 from charmhelpers.core.hookenv import log
2162
2163 __author__ = 'Joseph Borg <joseph.borg@canonical.com>'
2164
2165-SNAP_NO_LOCK = 1 # The return code for "couldn't acquire lock" in Snap (hopefully this will be improved).
2166+# The return code for "couldn't acquire lock" in Snap
2167+# (hopefully this will be improved).
2168+SNAP_NO_LOCK = 1
2169 SNAP_NO_LOCK_RETRY_DELAY = 10 # Wait X seconds between Snap lock checks.
2170 SNAP_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times.
2171+SNAP_CHANNELS = [
2172+ 'edge',
2173+ 'beta',
2174+ 'candidate',
2175+ 'stable',
2176+]
2177
2178
2179 class CouldNotAcquireLockException(Exception):
2180 pass
2181
2182
2183+class InvalidSnapChannel(Exception):
2184+ pass
2185+
2186+
2187 def _snap_exec(commands):
2188 """
2189 Execute snap commands.
2190@@ -47,13 +59,17 @@ def _snap_exec(commands):
2191
2192 while return_code is None or return_code == SNAP_NO_LOCK:
2193 try:
2194- return_code = subprocess.check_call(['snap'] + commands, env=environ)
2195+ return_code = subprocess.check_call(['snap'] + commands,
2196+ env=os.environ)
2197 except subprocess.CalledProcessError as e:
2198 retry_count += + 1
2199 if retry_count > SNAP_NO_LOCK_RETRY_COUNT:
2200- raise CouldNotAcquireLockException('Could not aquire lock after %s attempts' % SNAP_NO_LOCK_RETRY_COUNT)
2201+ raise CouldNotAcquireLockException(
2202+ 'Could not aquire lock after {} attempts'
2203+ .format(SNAP_NO_LOCK_RETRY_COUNT))
2204 return_code = e.returncode
2205- log('Snap failed to acquire lock, trying again in %s seconds.' % SNAP_NO_LOCK_RETRY_DELAY, level='WARN')
2206+ log('Snap failed to acquire lock, trying again in {} seconds.'
2207+ .format(SNAP_NO_LOCK_RETRY_DELAY), level='WARN')
2208 sleep(SNAP_NO_LOCK_RETRY_DELAY)
2209
2210 return return_code
2211@@ -120,3 +136,15 @@ def snap_refresh(packages, *flags):
2212
2213 log(message, level='INFO')
2214 return _snap_exec(['refresh'] + flags + packages)
2215+
2216+
2217+def valid_snap_channel(channel):
2218+ """ Validate snap channel exists
2219+
2220+ :raises InvalidSnapChannel: When channel does not exist
2221+ :return: Boolean
2222+ """
2223+ if channel.lower() in SNAP_CHANNELS:
2224+ return True
2225+ else:
2226+ raise InvalidSnapChannel("Invalid Snap Channel: {}".format(channel))
2227diff --git a/charmhelpers/fetch/ubuntu.py b/charmhelpers/fetch/ubuntu.py
2228index 7bc6cc7..3ddaf0d 100644
2229--- a/charmhelpers/fetch/ubuntu.py
2230+++ b/charmhelpers/fetch/ubuntu.py
2231@@ -12,29 +12,49 @@
2232 # See the License for the specific language governing permissions and
2233 # limitations under the License.
2234
2235-import os
2236+from collections import OrderedDict
2237+import platform
2238+import re
2239 import six
2240-import time
2241 import subprocess
2242+import sys
2243+import time
2244
2245-from tempfile import NamedTemporaryFile
2246-from charmhelpers.core.host import (
2247- lsb_release
2248-)
2249-from charmhelpers.core.hookenv import log
2250-from charmhelpers.fetch import SourceConfigError
2251+from charmhelpers.core.host import get_distrib_codename, get_system_env
2252
2253+from charmhelpers.core.hookenv import (
2254+ log,
2255+ DEBUG,
2256+ WARNING,
2257+ env_proxy_settings,
2258+)
2259+from charmhelpers.fetch import SourceConfigError, GPGKeyError
2260+from charmhelpers.fetch import ubuntu_apt_pkg
2261+
2262+PROPOSED_POCKET = (
2263+ "# Proposed\n"
2264+ "deb http://archive.ubuntu.com/ubuntu {}-proposed main universe "
2265+ "multiverse restricted\n")
2266+PROPOSED_PORTS_POCKET = (
2267+ "# Proposed\n"
2268+ "deb http://ports.ubuntu.com/ubuntu-ports {}-proposed main universe "
2269+ "multiverse restricted\n")
2270+# Only supports 64bit and ppc64 at the moment.
2271+ARCH_TO_PROPOSED_POCKET = {
2272+ 'x86_64': PROPOSED_POCKET,
2273+ 'ppc64le': PROPOSED_PORTS_POCKET,
2274+ 'aarch64': PROPOSED_PORTS_POCKET,
2275+ 's390x': PROPOSED_PORTS_POCKET,
2276+}
2277+CLOUD_ARCHIVE_URL = "http://ubuntu-cloud.archive.canonical.com/ubuntu"
2278+CLOUD_ARCHIVE_KEY_ID = '5EDB1B62EC4926EA'
2279 CLOUD_ARCHIVE = """# Ubuntu Cloud Archive
2280 deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main
2281 """
2282-
2283-PROPOSED_POCKET = """# Proposed
2284-deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted
2285-"""
2286-
2287 CLOUD_ARCHIVE_POCKETS = {
2288 # Folsom
2289 'folsom': 'precise-updates/folsom',
2290+ 'folsom/updates': 'precise-updates/folsom',
2291 'precise-folsom': 'precise-updates/folsom',
2292 'precise-folsom/updates': 'precise-updates/folsom',
2293 'precise-updates/folsom': 'precise-updates/folsom',
2294@@ -43,6 +63,7 @@ CLOUD_ARCHIVE_POCKETS = {
2295 'precise-proposed/folsom': 'precise-proposed/folsom',
2296 # Grizzly
2297 'grizzly': 'precise-updates/grizzly',
2298+ 'grizzly/updates': 'precise-updates/grizzly',
2299 'precise-grizzly': 'precise-updates/grizzly',
2300 'precise-grizzly/updates': 'precise-updates/grizzly',
2301 'precise-updates/grizzly': 'precise-updates/grizzly',
2302@@ -51,6 +72,7 @@ CLOUD_ARCHIVE_POCKETS = {
2303 'precise-proposed/grizzly': 'precise-proposed/grizzly',
2304 # Havana
2305 'havana': 'precise-updates/havana',
2306+ 'havana/updates': 'precise-updates/havana',
2307 'precise-havana': 'precise-updates/havana',
2308 'precise-havana/updates': 'precise-updates/havana',
2309 'precise-updates/havana': 'precise-updates/havana',
2310@@ -59,6 +81,7 @@ CLOUD_ARCHIVE_POCKETS = {
2311 'precise-proposed/havana': 'precise-proposed/havana',
2312 # Icehouse
2313 'icehouse': 'precise-updates/icehouse',
2314+ 'icehouse/updates': 'precise-updates/icehouse',
2315 'precise-icehouse': 'precise-updates/icehouse',
2316 'precise-icehouse/updates': 'precise-updates/icehouse',
2317 'precise-updates/icehouse': 'precise-updates/icehouse',
2318@@ -67,6 +90,7 @@ CLOUD_ARCHIVE_POCKETS = {
2319 'precise-proposed/icehouse': 'precise-proposed/icehouse',
2320 # Juno
2321 'juno': 'trusty-updates/juno',
2322+ 'juno/updates': 'trusty-updates/juno',
2323 'trusty-juno': 'trusty-updates/juno',
2324 'trusty-juno/updates': 'trusty-updates/juno',
2325 'trusty-updates/juno': 'trusty-updates/juno',
2326@@ -75,6 +99,7 @@ CLOUD_ARCHIVE_POCKETS = {
2327 'trusty-proposed/juno': 'trusty-proposed/juno',
2328 # Kilo
2329 'kilo': 'trusty-updates/kilo',
2330+ 'kilo/updates': 'trusty-updates/kilo',
2331 'trusty-kilo': 'trusty-updates/kilo',
2332 'trusty-kilo/updates': 'trusty-updates/kilo',
2333 'trusty-updates/kilo': 'trusty-updates/kilo',
2334@@ -83,6 +108,7 @@ CLOUD_ARCHIVE_POCKETS = {
2335 'trusty-proposed/kilo': 'trusty-proposed/kilo',
2336 # Liberty
2337 'liberty': 'trusty-updates/liberty',
2338+ 'liberty/updates': 'trusty-updates/liberty',
2339 'trusty-liberty': 'trusty-updates/liberty',
2340 'trusty-liberty/updates': 'trusty-updates/liberty',
2341 'trusty-updates/liberty': 'trusty-updates/liberty',
2342@@ -91,6 +117,7 @@ CLOUD_ARCHIVE_POCKETS = {
2343 'trusty-proposed/liberty': 'trusty-proposed/liberty',
2344 # Mitaka
2345 'mitaka': 'trusty-updates/mitaka',
2346+ 'mitaka/updates': 'trusty-updates/mitaka',
2347 'trusty-mitaka': 'trusty-updates/mitaka',
2348 'trusty-mitaka/updates': 'trusty-updates/mitaka',
2349 'trusty-updates/mitaka': 'trusty-updates/mitaka',
2350@@ -99,6 +126,7 @@ CLOUD_ARCHIVE_POCKETS = {
2351 'trusty-proposed/mitaka': 'trusty-proposed/mitaka',
2352 # Newton
2353 'newton': 'xenial-updates/newton',
2354+ 'newton/updates': 'xenial-updates/newton',
2355 'xenial-newton': 'xenial-updates/newton',
2356 'xenial-newton/updates': 'xenial-updates/newton',
2357 'xenial-updates/newton': 'xenial-updates/newton',
2358@@ -107,12 +135,13 @@ CLOUD_ARCHIVE_POCKETS = {
2359 'xenial-proposed/newton': 'xenial-proposed/newton',
2360 # Ocata
2361 'ocata': 'xenial-updates/ocata',
2362+ 'ocata/updates': 'xenial-updates/ocata',
2363 'xenial-ocata': 'xenial-updates/ocata',
2364 'xenial-ocata/updates': 'xenial-updates/ocata',
2365 'xenial-updates/ocata': 'xenial-updates/ocata',
2366 'ocata/proposed': 'xenial-proposed/ocata',
2367 'xenial-ocata/proposed': 'xenial-proposed/ocata',
2368- 'xenial-ocata/newton': 'xenial-proposed/ocata',
2369+ 'xenial-proposed/ocata': 'xenial-proposed/ocata',
2370 # Pike
2371 'pike': 'xenial-updates/pike',
2372 'xenial-pike': 'xenial-updates/pike',
2373@@ -120,7 +149,7 @@ CLOUD_ARCHIVE_POCKETS = {
2374 'xenial-updates/pike': 'xenial-updates/pike',
2375 'pike/proposed': 'xenial-proposed/pike',
2376 'xenial-pike/proposed': 'xenial-proposed/pike',
2377- 'xenial-pike/newton': 'xenial-proposed/pike',
2378+ 'xenial-proposed/pike': 'xenial-proposed/pike',
2379 # Queens
2380 'queens': 'xenial-updates/queens',
2381 'xenial-queens': 'xenial-updates/queens',
2382@@ -128,12 +157,45 @@ CLOUD_ARCHIVE_POCKETS = {
2383 'xenial-updates/queens': 'xenial-updates/queens',
2384 'queens/proposed': 'xenial-proposed/queens',
2385 'xenial-queens/proposed': 'xenial-proposed/queens',
2386- 'xenial-queens/newton': 'xenial-proposed/queens',
2387+ 'xenial-proposed/queens': 'xenial-proposed/queens',
2388+ # Rocky
2389+ 'rocky': 'bionic-updates/rocky',
2390+ 'bionic-rocky': 'bionic-updates/rocky',
2391+ 'bionic-rocky/updates': 'bionic-updates/rocky',
2392+ 'bionic-updates/rocky': 'bionic-updates/rocky',
2393+ 'rocky/proposed': 'bionic-proposed/rocky',
2394+ 'bionic-rocky/proposed': 'bionic-proposed/rocky',
2395+ 'bionic-proposed/rocky': 'bionic-proposed/rocky',
2396+ # Stein
2397+ 'stein': 'bionic-updates/stein',
2398+ 'bionic-stein': 'bionic-updates/stein',
2399+ 'bionic-stein/updates': 'bionic-updates/stein',
2400+ 'bionic-updates/stein': 'bionic-updates/stein',
2401+ 'stein/proposed': 'bionic-proposed/stein',
2402+ 'bionic-stein/proposed': 'bionic-proposed/stein',
2403+ 'bionic-proposed/stein': 'bionic-proposed/stein',
2404+ # Train
2405+ 'train': 'bionic-updates/train',
2406+ 'bionic-train': 'bionic-updates/train',
2407+ 'bionic-train/updates': 'bionic-updates/train',
2408+ 'bionic-updates/train': 'bionic-updates/train',
2409+ 'train/proposed': 'bionic-proposed/train',
2410+ 'bionic-train/proposed': 'bionic-proposed/train',
2411+ 'bionic-proposed/train': 'bionic-proposed/train',
2412+ # Ussuri
2413+ 'ussuri': 'bionic-updates/ussuri',
2414+ 'bionic-ussuri': 'bionic-updates/ussuri',
2415+ 'bionic-ussuri/updates': 'bionic-updates/ussuri',
2416+ 'bionic-updates/ussuri': 'bionic-updates/ussuri',
2417+ 'ussuri/proposed': 'bionic-proposed/ussuri',
2418+ 'bionic-ussuri/proposed': 'bionic-proposed/ussuri',
2419+ 'bionic-proposed/ussuri': 'bionic-proposed/ussuri',
2420 }
2421
2422+
2423 APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT.
2424 CMD_RETRY_DELAY = 10 # Wait 10 seconds between command retries.
2425-CMD_RETRY_COUNT = 30 # Retry a failing fatal command X times.
2426+CMD_RETRY_COUNT = 3 # Retry a failing fatal command X times.
2427
2428
2429 def filter_installed_packages(packages):
2430@@ -151,18 +213,54 @@ def filter_installed_packages(packages):
2431 return _pkgs
2432
2433
2434-def apt_cache(in_memory=True, progress=None):
2435- """Build and return an apt cache."""
2436- from apt import apt_pkg
2437- apt_pkg.init()
2438- if in_memory:
2439- apt_pkg.config.set("Dir::Cache::pkgcache", "")
2440- apt_pkg.config.set("Dir::Cache::srcpkgcache", "")
2441- return apt_pkg.Cache(progress)
2442+def filter_missing_packages(packages):
2443+ """Return a list of packages that are installed.
2444
2445+ :param packages: list of packages to evaluate.
2446+ :returns list: Packages that are installed.
2447+ """
2448+ return list(
2449+ set(packages) -
2450+ set(filter_installed_packages(packages))
2451+ )
2452
2453-def install(packages, options=None, fatal=False):
2454- """Install one or more packages."""
2455+
2456+def apt_cache(*_, **__):
2457+ """Shim returning an object simulating the apt_pkg Cache.
2458+
2459+    :param _: Accept arguments for compatibility, not used.
2460+ :type _: any
2461+    :param __: Accept keyword arguments for compatibility, not used.
2462+ :type __: any
2463+ :returns:Object used to interrogate the system apt and dpkg databases.
2464+ :rtype:ubuntu_apt_pkg.Cache
2465+ """
2466+ if 'apt_pkg' in sys.modules:
2467+        # NOTE(fnordahl): When our consumers use the upstream ``apt_pkg`` module
2468+ # in conjunction with the apt_cache helper function, they may expect us
2469+ # to call ``apt_pkg.init()`` for them.
2470+ #
2471+ # Detect this situation, log a warning and make the call to
2472+ # ``apt_pkg.init()`` to avoid the consumer Python interpreter from
2473+ # crashing with a segmentation fault.
2474+        log('Support for use of upstream ``apt_pkg`` module in conjunction '
2475+ 'with charm-helpers is deprecated since 2019-06-25', level=WARNING)
2476+ sys.modules['apt_pkg'].init()
2477+ return ubuntu_apt_pkg.Cache()
2478+
2479+
2480+def apt_install(packages, options=None, fatal=False):
2481+ """Install one or more packages.
2482+
2483+ :param packages: Package(s) to install
2484+ :type packages: Option[str, List[str]]
2485+ :param options: Options to pass on to apt-get
2486+ :type options: Option[None, List[str]]
2487+ :param fatal: Whether the command's output should be checked and
2488+ retried.
2489+ :type fatal: bool
2490+ :raises: subprocess.CalledProcessError
2491+ """
2492 if options is None:
2493 options = ['--option=Dpkg::Options::=--force-confold']
2494
2495@@ -178,8 +276,18 @@ def install(packages, options=None, fatal=False):
2496 _run_apt_command(cmd, fatal)
2497
2498
2499-def upgrade(options=None, fatal=False, dist=False):
2500- """Upgrade all packages."""
2501+def apt_upgrade(options=None, fatal=False, dist=False):
2502+ """Upgrade all packages.
2503+
2504+ :param options: Options to pass on to apt-get
2505+ :type options: Option[None, List[str]]
2506+ :param fatal: Whether the command's output should be checked and
2507+ retried.
2508+ :type fatal: bool
2509+ :param dist: Whether ``dist-upgrade`` should be used over ``upgrade``
2510+ :type dist: bool
2511+ :raises: subprocess.CalledProcessError
2512+ """
2513 if options is None:
2514 options = ['--option=Dpkg::Options::=--force-confold']
2515
2516@@ -193,14 +301,22 @@ def upgrade(options=None, fatal=False, dist=False):
2517 _run_apt_command(cmd, fatal)
2518
2519
2520-def update(fatal=False):
2521+def apt_update(fatal=False):
2522 """Update local apt cache."""
2523 cmd = ['apt-get', 'update']
2524 _run_apt_command(cmd, fatal)
2525
2526
2527-def purge(packages, fatal=False):
2528- """Purge one or more packages."""
2529+def apt_purge(packages, fatal=False):
2530+ """Purge one or more packages.
2531+
2532+ :param packages: Package(s) to install
2533+ :type packages: Option[str, List[str]]
2534+ :param fatal: Whether the command's output should be checked and
2535+ retried.
2536+ :type fatal: bool
2537+ :raises: subprocess.CalledProcessError
2538+ """
2539 cmd = ['apt-get', '--assume-yes', 'purge']
2540 if isinstance(packages, six.string_types):
2541 cmd.append(packages)
2542@@ -210,6 +326,21 @@ def purge(packages, fatal=False):
2543 _run_apt_command(cmd, fatal)
2544
2545
2546+def apt_autoremove(purge=True, fatal=False):
2547+ """Purge one or more packages.
2548+ :param purge: Whether the ``--purge`` option should be passed on or not.
2549+ :type purge: bool
2550+ :param fatal: Whether the command's output should be checked and
2551+ retried.
2552+ :type fatal: bool
2553+ :raises: subprocess.CalledProcessError
2554+ """
2555+ cmd = ['apt-get', '--assume-yes', 'autoremove']
2556+ if purge:
2557+ cmd.append('--purge')
2558+ _run_apt_command(cmd, fatal)
2559+
2560+
2561 def apt_mark(packages, mark, fatal=False):
2562 """Flag one or more packages using apt-mark."""
2563 log("Marking {} as {}".format(packages, mark))
2564@@ -233,7 +364,159 @@ def apt_unhold(packages, fatal=False):
2565 return apt_mark(packages, 'unhold', fatal=fatal)
2566
2567
2568-def add_source(source, key=None):
2569+def import_key(key):
2570+ """Import an ASCII Armor key.
2571+
2572+ A Radix64 format keyid is also supported for backwards
2573+ compatibility. In this case Ubuntu keyserver will be
2574+ queried for a key via HTTPS by its keyid. This method
2575+    is less preferable because https proxy servers may
2576+ require traffic decryption which is equivalent to a
2577+ man-in-the-middle attack (a proxy server impersonates
2578+ keyserver TLS certificates and has to be explicitly
2579+ trusted by the system).
2580+
2581+ :param key: A GPG key in ASCII armor format,
2582+ including BEGIN and END markers or a keyid.
2583+ :type key: (bytes, str)
2584+ :raises: GPGKeyError if the key could not be imported
2585+ """
2586+ key = key.strip()
2587+ if '-' in key or '\n' in key:
2588+ # Send everything not obviously a keyid to GPG to import, as
2589+ # we trust its validation better than our own. eg. handling
2590+ # comments before the key.
2591+ log("PGP key found (looks like ASCII Armor format)", level=DEBUG)
2592+ if ('-----BEGIN PGP PUBLIC KEY BLOCK-----' in key and
2593+ '-----END PGP PUBLIC KEY BLOCK-----' in key):
2594+ log("Writing provided PGP key in the binary format", level=DEBUG)
2595+ if six.PY3:
2596+ key_bytes = key.encode('utf-8')
2597+ else:
2598+ key_bytes = key
2599+ key_name = _get_keyid_by_gpg_key(key_bytes)
2600+ key_gpg = _dearmor_gpg_key(key_bytes)
2601+ _write_apt_gpg_keyfile(key_name=key_name, key_material=key_gpg)
2602+ else:
2603+ raise GPGKeyError("ASCII armor markers missing from GPG key")
2604+ else:
2605+ log("PGP key found (looks like Radix64 format)", level=WARNING)
2606+ log("SECURELY importing PGP key from keyserver; "
2607+ "full key not provided.", level=WARNING)
2608+ # as of bionic add-apt-repository uses curl with an HTTPS keyserver URL
2609+ # to retrieve GPG keys. `apt-key adv` command is deprecated as is
2610+ # apt-key in general as noted in its manpage. See lp:1433761 for more
2611+ # history. Instead, /etc/apt/trusted.gpg.d is used directly to drop
2612+ # gpg
2613+ key_asc = _get_key_by_keyid(key)
2614+ # write the key in GPG format so that apt-key list shows it
2615+ key_gpg = _dearmor_gpg_key(key_asc)
2616+ _write_apt_gpg_keyfile(key_name=key, key_material=key_gpg)
2617+
2618+
2619+def _get_keyid_by_gpg_key(key_material):
2620+ """Get a GPG key fingerprint by GPG key material.
2621+ Gets a GPG key fingerprint (40-digit, 160-bit) by the ASCII armor-encoded
2622+ or binary GPG key material. Can be used, for example, to generate file
2623+ names for keys passed via charm options.
2624+
2625+ :param key_material: ASCII armor-encoded or binary GPG key material
2626+ :type key_material: bytes
2627+ :raises: GPGKeyError if invalid key material has been provided
2628+ :returns: A GPG key fingerprint
2629+ :rtype: str
2630+ """
2631+ # Use the same gpg command for both Xenial and Bionic
2632+ cmd = 'gpg --with-colons --with-fingerprint'
2633+ ps = subprocess.Popen(cmd.split(),
2634+ stdout=subprocess.PIPE,
2635+ stderr=subprocess.PIPE,
2636+ stdin=subprocess.PIPE)
2637+ out, err = ps.communicate(input=key_material)
2638+ if six.PY3:
2639+ out = out.decode('utf-8')
2640+ err = err.decode('utf-8')
2641+ if 'gpg: no valid OpenPGP data found.' in err:
2642+ raise GPGKeyError('Invalid GPG key material provided')
2643+ # from gnupg2 docs: fpr :: Fingerprint (fingerprint is in field 10)
2644+ return re.search(r"^fpr:{9}([0-9A-F]{40}):$", out, re.MULTILINE).group(1)
2645+
2646+
2647+def _get_key_by_keyid(keyid):
2648+ """Get a key via HTTPS from the Ubuntu keyserver.
2649+ Different key ID formats are supported by SKS keyservers (the longer ones
2650+ are more secure, see "dead beef attack" and https://evil32.com/). Since
2651+ HTTPS is used, if SSLBump-like HTTPS proxies are in place, they will
2652+ impersonate keyserver.ubuntu.com and generate a certificate with
2653+ keyserver.ubuntu.com in the CN field or in SubjAltName fields of a
2654+ certificate. If such proxy behavior is expected it is necessary to add the
2655+ CA certificate chain containing the intermediate CA of the SSLBump proxy to
2656+ every machine that this code runs on via ca-certs cloud-init directive (via
2657+ cloudinit-userdata model-config) or via other means (such as through a
2658+ custom charm option). Also note that DNS resolution for the hostname in a
2659+ URL is done at a proxy server - not at the client side.
2660+
2661+ 8-digit (32 bit) key ID
2662+ https://keyserver.ubuntu.com/pks/lookup?search=0x4652B4E6
2663+ 16-digit (64 bit) key ID
2664+ https://keyserver.ubuntu.com/pks/lookup?search=0x6E85A86E4652B4E6
2665+ 40-digit key ID:
2666+ https://keyserver.ubuntu.com/pks/lookup?search=0x35F77D63B5CEC106C577ED856E85A86E4652B4E6
2667+
2668+ :param keyid: An 8, 16 or 40 hex digit keyid to find a key for
2669+ :type keyid: (bytes, str)
2670+ :returns: A key material for the specified GPG key id
2671+ :rtype: (str, bytes)
2672+ :raises: subprocess.CalledProcessError
2673+ """
2674+ # options=mr - machine-readable output (disables html wrappers)
2675+ keyserver_url = ('https://keyserver.ubuntu.com'
2676+ '/pks/lookup?op=get&options=mr&exact=on&search=0x{}')
2677+ curl_cmd = ['curl', keyserver_url.format(keyid)]
2678+ # use proxy server settings in order to retrieve the key
2679+ return subprocess.check_output(curl_cmd,
2680+ env=env_proxy_settings(['https']))
2681+
2682+
2683+def _dearmor_gpg_key(key_asc):
2684+ """Converts a GPG key in the ASCII armor format to the binary format.
2685+
2686+ :param key_asc: A GPG key in ASCII armor format.
2687+ :type key_asc: (str, bytes)
2688+ :returns: A GPG key in binary format
2689+ :rtype: (str, bytes)
2690+ :raises: GPGKeyError
2691+ """
2692+ ps = subprocess.Popen(['gpg', '--dearmor'],
2693+ stdout=subprocess.PIPE,
2694+ stderr=subprocess.PIPE,
2695+ stdin=subprocess.PIPE)
2696+ out, err = ps.communicate(input=key_asc)
2697+ # no need to decode output as it is binary (invalid utf-8), only error
2698+ if six.PY3:
2699+ err = err.decode('utf-8')
2700+ if 'gpg: no valid OpenPGP data found.' in err:
2701+ raise GPGKeyError('Invalid GPG key material. Check your network setup'
2702+ ' (MTU, routing, DNS) and/or proxy server settings'
2703+ ' as well as destination keyserver status.')
2704+ else:
2705+ return out
2706+
2707+
2708+def _write_apt_gpg_keyfile(key_name, key_material):
2709+ """Writes GPG key material into a file at a provided path.
2710+
2711+ :param key_name: A key name to use for a key file (could be a fingerprint)
2712+ :type key_name: str
2713+ :param key_material: A GPG key material (binary)
2714+ :type key_material: (str, bytes)
2715+ """
2716+ with open('/etc/apt/trusted.gpg.d/{}.gpg'.format(key_name),
2717+ 'wb') as keyf:
2718+ keyf.write(key_material)
2719+
2720+
2721+def add_source(source, key=None, fail_invalid=False):
2722 """Add a package source to this system.
2723
2724 @param source: a URL or sources.list entry, as supported by
2725@@ -249,6 +532,33 @@ def add_source(source, key=None):
2726 such as 'cloud:icehouse'
2727 'distro' may be used as a noop
2728
2729+ Full list of source specifications supported by the function are:
2730+
2731+ 'distro': A NOP; i.e. it has no effect.
2732+ 'proposed': the proposed deb spec [2] is written to
2733+ /etc/apt/sources.list.d/proposed.list
2734+ 'distro-proposed': adds <version>-proposed to the debs [2]
2735+ 'ppa:<ppa-name>': add-apt-repository --yes <ppa_name>
2736+ 'deb <deb-spec>': add-apt-repository --yes deb <deb-spec>
2737+ 'http://....': add-apt-repository --yes http://...
2738+ 'cloud-archive:<spec>': add-apt-repository --yes cloud-archive:<spec>
2739+ 'cloud:<release>[-staging]': specify a Cloud Archive pocket <release> with
2740+ optional staging version. If staging is used then the staging PPA [2]
2741+ will be used. If staging is NOT used then the cloud archive [3] will be
2742+ added, and the 'ubuntu-cloud-keyring' package will be added for the
2743+ current distro.
2744+
2745+ Otherwise the source is not recognised and this is logged to the juju log.
2746+ However, no error is raised, unless fail_invalid is True.
2747+
2748+ [1] deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main
2749+ where {} is replaced with the derived pocket name.
2750+ [2] deb http://archive.ubuntu.com/ubuntu {}-proposed \
2751+ main universe multiverse restricted
2752+ where {} is replaced with the lsb_release codename (e.g. xenial)
2753+ [3] deb http://ubuntu-cloud.archive.canonical.com/ubuntu <pocket>
2754+ to /etc/apt/sources.list.d/cloud-archive-list
2755+
2756 @param key: A key to be added to the system's APT keyring and used
2757 to verify the signatures on packages. Ideally, this should be an
2758 ASCII format GPG public key including the block headers. A GPG key
2759@@ -256,67 +566,172 @@ def add_source(source, key=None):
2760 available to retrieve the actual public key from a public keyserver
2761 placing your Juju environment at risk. ppa and cloud archive keys
2762 are securely added automatically, so should not be provided.
2763+
2764+ @param fail_invalid: (boolean) if True, then the function raises a
2765+ SourceConfigError if there is no matching installation source.
2766+
2767+ @raises SourceConfigError() if for cloud:<pocket>, the <pocket> is not a
2768+ valid pocket in CLOUD_ARCHIVE_POCKETS
2769 """
2770+ _mapping = OrderedDict([
2771+ (r"^distro$", lambda: None), # This is a NOP
2772+ (r"^(?:proposed|distro-proposed)$", _add_proposed),
2773+ (r"^cloud-archive:(.*)$", _add_apt_repository),
2774+ (r"^((?:deb |http:|https:|ppa:).*)$", _add_apt_repository),
2775+ (r"^cloud:(.*)-(.*)\/staging$", _add_cloud_staging),
2776+ (r"^cloud:(.*)-(.*)$", _add_cloud_distro_check),
2777+ (r"^cloud:(.*)$", _add_cloud_pocket),
2778+ (r"^snap:.*-(.*)-(.*)$", _add_cloud_distro_check),
2779+ ])
2780 if source is None:
2781- log('Source is not present. Skipping')
2782- return
2783-
2784- if (source.startswith('ppa:') or
2785- source.startswith('http') or
2786- source.startswith('deb ') or
2787- source.startswith('cloud-archive:')):
2788- cmd = ['add-apt-repository', '--yes', source]
2789- _run_with_retries(cmd)
2790- elif source.startswith('cloud:'):
2791- install(filter_installed_packages(['ubuntu-cloud-keyring']),
2792- fatal=True)
2793- pocket = source.split(':')[-1]
2794- if pocket not in CLOUD_ARCHIVE_POCKETS:
2795- raise SourceConfigError(
2796- 'Unsupported cloud: source option %s' %
2797- pocket)
2798- actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket]
2799- with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt:
2800- apt.write(CLOUD_ARCHIVE.format(actual_pocket))
2801- elif source == 'proposed':
2802- release = lsb_release()['DISTRIB_CODENAME']
2803- with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt:
2804- apt.write(PROPOSED_POCKET.format(release))
2805- elif source == 'distro':
2806- pass
2807+ source = ''
2808+ for r, fn in six.iteritems(_mapping):
2809+ m = re.match(r, source)
2810+ if m:
2811+ if key:
2812+ # Import key before adding the source which depends on it,
2813+ # as refreshing packages could fail otherwise.
2814+ try:
2815+ import_key(key)
2816+ except GPGKeyError as e:
2817+ raise SourceConfigError(str(e))
2818+ # call the associated function with the captured groups
2819+ # raises SourceConfigError on error.
2820+ fn(*m.groups())
2821+ break
2822 else:
2823- log("Unknown source: {!r}".format(source))
2824-
2825- if key:
2826- if '-----BEGIN PGP PUBLIC KEY BLOCK-----' in key:
2827- with NamedTemporaryFile('w+') as key_file:
2828- key_file.write(key)
2829- key_file.flush()
2830- key_file.seek(0)
2831- subprocess.check_call(['apt-key', 'add', '-'], stdin=key_file)
2832- else:
2833- # Note that hkp: is in no way a secure protocol. Using a
2834- # GPG key id is pointless from a security POV unless you
2835- # absolutely trust your network and DNS.
2836- subprocess.check_call(['apt-key', 'adv', '--keyserver',
2837- 'hkp://keyserver.ubuntu.com:80', '--recv',
2838- key])
2839+ # nothing matched. log an error and maybe sys.exit
2840+ err = "Unknown source: {!r}".format(source)
2841+ log(err)
2842+ if fail_invalid:
2843+ raise SourceConfigError(err)
2844+
2845+
2846+def _add_proposed():
2847+ """Add the PROPOSED_POCKET as /etc/apt/sources.list.d/proposed.list
2848+
2849+ Uses get_distrib_codename to determine the correct stanza for
2850+ the deb line.
2851+
2852+ For Intel architectures PROPOSED_POCKET is used for the release, but for
2853+ other architectures PROPOSED_PORTS_POCKET is used for the release.
2854+ """
2855+ release = get_distrib_codename()
2856+ arch = platform.machine()
2857+ if arch not in six.iterkeys(ARCH_TO_PROPOSED_POCKET):
2858+ raise SourceConfigError("Arch {} not supported for (distro-)proposed"
2859+ .format(arch))
2860+ with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt:
2861+ apt.write(ARCH_TO_PROPOSED_POCKET[arch].format(release))
2862+
2863+
2864+def _add_apt_repository(spec):
2865+ """Add the spec using add_apt_repository
2866+
2867+ :param spec: the parameter to pass to add_apt_repository
2868+ :type spec: str
2869+ """
2870+ if '{series}' in spec:
2871+ series = get_distrib_codename()
2872+ spec = spec.replace('{series}', series)
2873+ # software-properties package for bionic properly reacts to proxy settings
2874+ # passed as environment variables (See lp:1433761). This is not the case
2875+ # for LTS and non-LTS releases below bionic.
2876+ _run_with_retries(['add-apt-repository', '--yes', spec],
2877+ cmd_env=env_proxy_settings(['https']))
2878+
2879+
2880+def _add_cloud_pocket(pocket):
2881+ """Add a cloud pocket as /etc/apt/sources.list.d/cloud-archive.list
2882+
2883+ Note that this overwrites the existing file if there is one.
2884+
2885+ This function also converts the simple pocket in to the actual pocket using
2886+ the CLOUD_ARCHIVE_POCKETS mapping.
2887+
2888+ :param pocket: string representing the pocket to add a deb spec for.
2889+ :raises: SourceConfigError if the cloud pocket doesn't exist or the
2890+ requested release doesn't match the current distro version.
2891+ """
2892+ apt_install(filter_installed_packages(['ubuntu-cloud-keyring']),
2893+ fatal=True)
2894+ if pocket not in CLOUD_ARCHIVE_POCKETS:
2895+ raise SourceConfigError(
2896+ 'Unsupported cloud: source option %s' %
2897+ pocket)
2898+ actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket]
2899+ with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt:
2900+ apt.write(CLOUD_ARCHIVE.format(actual_pocket))
2901+
2902+
2903+def _add_cloud_staging(cloud_archive_release, openstack_release):
2904+ """Add the cloud staging repository which is in
2905+ ppa:ubuntu-cloud-archive/<openstack_release>-staging
2906+
2907+ This function checks that the cloud_archive_release matches the current
2908+ codename for the distro that charm is being installed on.
2909+
2910+ :param cloud_archive_release: string, codename for the release.
2911+ :param openstack_release: String, codename for the openstack release.
2912+ :raises: SourceConfigError if the cloud_archive_release doesn't match the
2913+ current version of the os.
2914+ """
2915+ _verify_is_ubuntu_rel(cloud_archive_release, openstack_release)
2916+ ppa = 'ppa:ubuntu-cloud-archive/{}-staging'.format(openstack_release)
2917+ cmd = 'add-apt-repository -y {}'.format(ppa)
2918+ _run_with_retries(cmd.split(' '))
2919+
2920+
2921+def _add_cloud_distro_check(cloud_archive_release, openstack_release):
2922+ """Add the cloud pocket, but also check the cloud_archive_release against
2923+ the current distro, and use the openstack_release as the full lookup.
2924+
2925+ This just calls _add_cloud_pocket() with the openstack_release as pocket
2926+ to get the correct cloud-archive.list for dpkg to work with.
2927+
2928+ :param cloud_archive_release:String, codename for the distro release.
2929+ :param openstack_release: String, spec for the release to look up in the
2930+ CLOUD_ARCHIVE_POCKETS
2931+ :raises: SourceConfigError if this is the wrong distro, or the pocket spec
2932+ doesn't exist.
2933+ """
2934+ _verify_is_ubuntu_rel(cloud_archive_release, openstack_release)
2935+ _add_cloud_pocket("{}-{}".format(cloud_archive_release, openstack_release))
2936+
2937+
2938+def _verify_is_ubuntu_rel(release, os_release):
2939+ """Verify that the release is the same as the current ubuntu release.
2940+
2941+ :param release: String, lowercase for the release.
2942+ :param os_release: String, the os_release being asked for
2943+ :raises: SourceConfigError if the release is not the same as the ubuntu
2944+ release.
2945+ """
2946+ ubuntu_rel = get_distrib_codename()
2947+ if release != ubuntu_rel:
2948+ raise SourceConfigError(
2949+ 'Invalid Cloud Archive release specified: {}-{} on this Ubuntu '
2950+ 'version ({})'.format(release, os_release, ubuntu_rel))
2951
2952
2953 def _run_with_retries(cmd, max_retries=CMD_RETRY_COUNT, retry_exitcodes=(1,),
2954 retry_message="", cmd_env=None):
2955 """Run a command and retry until success or max_retries is reached.
2956
2957- :param: cmd: str: The apt command to run.
2958- :param: max_retries: int: The number of retries to attempt on a fatal
2959- command. Defaults to CMD_RETRY_COUNT.
2960- :param: retry_exitcodes: tuple: Optional additional exit codes to retry.
2961- Defaults to retry on exit code 1.
2962- :param: retry_message: str: Optional log prefix emitted during retries.
2963- :param: cmd_env: dict: Environment variables to add to the command run.
2964+ :param cmd: The apt command to run.
2965+ :type cmd: str
2966+ :param max_retries: The number of retries to attempt on a fatal
2967+ command. Defaults to CMD_RETRY_COUNT.
2968+ :type max_retries: int
2969+ :param retry_exitcodes: Optional additional exit codes to retry.
2970+ Defaults to retry on exit code 1.
2971+ :type retry_exitcodes: tuple
2972+ :param retry_message: Optional log prefix emitted during retries.
2973+ :type retry_message: str
2974+ :param: cmd_env: Environment variables to add to the command run.
2975+ :type cmd_env: Option[None, Dict[str, str]]
2976 """
2977-
2978- env = os.environ.copy()
2979+ env = get_apt_dpkg_env()
2980 if cmd_env:
2981 env.update(cmd_env)
2982
2983@@ -343,21 +758,18 @@ def _run_with_retries(cmd, max_retries=CMD_RETRY_COUNT, retry_exitcodes=(1,),
2984 def _run_apt_command(cmd, fatal=False):
2985 """Run an apt command with optional retries.
2986
2987- :param: fatal: bool: Whether the command's output should be checked and
2988- retried.
2989+ :param cmd: The apt command to run.
2990+ :type cmd: str
2991+ :param fatal: Whether the command's output should be checked and
2992+ retried.
2993+ :type fatal: bool
2994 """
2995- # Provide DEBIAN_FRONTEND=noninteractive if not present in the environment.
2996- cmd_env = {
2997- 'DEBIAN_FRONTEND': os.environ.get('DEBIAN_FRONTEND', 'noninteractive')}
2998-
2999 if fatal:
3000 _run_with_retries(
3001- cmd, cmd_env=cmd_env, retry_exitcodes=(1, APT_NO_LOCK,),
3002+ cmd, retry_exitcodes=(1, APT_NO_LOCK,),
3003 retry_message="Couldn't acquire DPKG lock")
3004 else:
3005- env = os.environ.copy()
3006- env.update(cmd_env)
3007- subprocess.call(cmd, env=env)
3008+ subprocess.call(cmd, env=get_apt_dpkg_env())
3009
3010
3011 def get_upstream_version(package):
3012@@ -365,11 +777,10 @@ def get_upstream_version(package):
3013
3014 @returns None (if not installed) or the upstream version
3015 """
3016- import apt_pkg
3017 cache = apt_cache()
3018 try:
3019 pkg = cache[package]
3020- except:
3021+ except Exception:
3022 # the package is unknown to the current apt cache.
3023 return None
3024
3025@@ -377,4 +788,18 @@ def get_upstream_version(package):
3026 # package is known, but no version is currently installed.
3027 return None
3028
3029- return apt_pkg.upstream_version(pkg.current_ver.ver_str)
3030+ return ubuntu_apt_pkg.upstream_version(pkg.current_ver.ver_str)
3031+
3032+
3033+def get_apt_dpkg_env():
3034+ """Get environment suitable for execution of APT and DPKG tools.
3035+
3036+ We keep this in a helper function instead of in a global constant to
3037+ avoid execution on import of the library.
3038+ :returns: Environment suitable for execution of APT and DPKG tools.
3039+ :rtype: Dict[str, str]
3040+ """
3041+ # The fallback is used in the event of ``/etc/environment`` not containing
3042+ # a valid PATH variable.
3043+ return {'DEBIAN_FRONTEND': 'noninteractive',
3044+ 'PATH': get_system_env('PATH', '/usr/sbin:/usr/bin:/sbin:/bin')}
3045diff --git a/charmhelpers/fetch/ubuntu_apt_pkg.py b/charmhelpers/fetch/ubuntu_apt_pkg.py
3046new file mode 100644
3047index 0000000..929a75d
3048--- /dev/null
3049+++ b/charmhelpers/fetch/ubuntu_apt_pkg.py
3050@@ -0,0 +1,267 @@
3051+# Copyright 2019 Canonical Ltd
3052+#
3053+# Licensed under the Apache License, Version 2.0 (the "License");
3054+# you may not use this file except in compliance with the License.
3055+# You may obtain a copy of the License at
3056+#
3057+# http://www.apache.org/licenses/LICENSE-2.0
3058+#
3059+# Unless required by applicable law or agreed to in writing, software
3060+# distributed under the License is distributed on an "AS IS" BASIS,
3061+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
3062+# See the License for the specific language governing permissions and
3063+# limitations under the License.
3064+
3065+"""Provide a subset of the ``python-apt`` module API.
3066+
3067+Data collection is done through subprocess calls to ``apt-cache`` and
3068+``dpkg-query`` commands.
3069+
3070+The main purpose for this module is to avoid dependency on the
3071+``python-apt`` python module.
3072+
3073+The indicated python module is a wrapper around the ``apt`` C++ library
3074+which is tightly connected to the version of the distribution it was
3075+shipped on. It is not developed in a backward/forward compatible manner.
3076+
3077+This in turn makes it incredibly hard to distribute as a wheel for a piece
3078+of python software that supports a span of distro releases [0][1].
3079+
3080+Upstream feedback like [2] does not give confidence in this ever changing,
3081+so with this we get rid of the dependency.
3082+
3083+0: https://github.com/juju-solutions/layer-basic/pull/135
3084+1: https://bugs.launchpad.net/charm-octavia/+bug/1824112
3085+2: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=845330#10
3086+"""
3087+
3088+import locale
3089+import os
3090+import subprocess
3091+import sys
3092+
3093+
3094+class _container(dict):
3095+ """Simple container for attributes."""
3096+ __getattr__ = dict.__getitem__
3097+ __setattr__ = dict.__setitem__
3098+
3099+
3100+class Package(_container):
3101+ """Simple container for package attributes."""
3102+
3103+
3104+class Version(_container):
3105+ """Simple container for version attributes."""
3106+
3107+
3108+class Cache(object):
3109+ """Simulation of ``apt_pkg`` Cache object."""
3110+ def __init__(self, progress=None):
3111+ pass
3112+
3113+ def __contains__(self, package):
3114+ try:
3115+ pkg = self.__getitem__(package)
3116+ return pkg is not None
3117+ except KeyError:
3118+ return False
3119+
3120+ def __getitem__(self, package):
3121+ """Get information about a package from apt and dpkg databases.
3122+
3123+ :param package: Name of package
3124+ :type package: str
3125+ :returns: Package object
3126+ :rtype: object
3127+ :raises: KeyError, subprocess.CalledProcessError
3128+ """
3129+ apt_result = self._apt_cache_show([package])[package]
3130+ apt_result['name'] = apt_result.pop('package')
3131+ pkg = Package(apt_result)
3132+ dpkg_result = self._dpkg_list([package]).get(package, {})
3133+ current_ver = None
3134+ installed_version = dpkg_result.get('version')
3135+ if installed_version:
3136+ current_ver = Version({'ver_str': installed_version})
3137+ pkg.current_ver = current_ver
3138+ pkg.architecture = dpkg_result.get('architecture')
3139+ return pkg
3140+
3141+ def _dpkg_list(self, packages):
3142+ """Get data from system dpkg database for package.
3143+
3144+ :param packages: Packages to get data from
3145+ :type packages: List[str]
3146+ :returns: Structured data about installed packages, keys like
3147+ ``dpkg-query --list``
3148+ :rtype: dict
3149+ :raises: subprocess.CalledProcessError
3150+ """
3151+ pkgs = {}
3152+ cmd = ['dpkg-query', '--list']
3153+ cmd.extend(packages)
3154+ if locale.getlocale() == (None, None):
3155+ # subprocess calls out to locale.getpreferredencoding(False) to
3156+ # determine encoding. Workaround for Trusty where the
3157+ # environment appears to not be set up correctly.
3158+ locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
3159+ try:
3160+ output = subprocess.check_output(cmd,
3161+ stderr=subprocess.STDOUT,
3162+ universal_newlines=True)
3163+ except subprocess.CalledProcessError as cp:
3164+ # ``dpkg-query`` may return error and at the same time have
3165+ # produced useful output, for example when asked for multiple
3166+ # packages where some are not installed
3167+ if cp.returncode != 1:
3168+ raise
3169+ output = cp.output
3170+ headings = []
3171+ for line in output.splitlines():
3172+ if line.startswith('||/'):
3173+ headings = line.split()
3174+ headings.pop(0)
3175+ continue
3176+ elif (line.startswith('|') or line.startswith('+') or
3177+ line.startswith('dpkg-query:')):
3178+ continue
3179+ else:
3180+ data = line.split(None, 4)
3181+ status = data.pop(0)
3182+ if status != 'ii':
3183+ continue
3184+ pkg = {}
3185+ pkg.update({k.lower(): v for k, v in zip(headings, data)})
3186+ if 'name' in pkg:
3187+ pkgs.update({pkg['name']: pkg})
3188+ return pkgs
3189+
3190+ def _apt_cache_show(self, packages):
3191+ """Get data from system apt cache for package.
3192+
3193+ :param packages: Packages to get data from
3194+ :type packages: List[str]
3195+ :returns: Structured data about package, keys like
3196+ ``apt-cache show``
3197+ :rtype: dict
3198+ :raises: subprocess.CalledProcessError
3199+ """
3200+ pkgs = {}
3201+ cmd = ['apt-cache', 'show', '--no-all-versions']
3202+ cmd.extend(packages)
3203+ if locale.getlocale() == (None, None):
3204+ # subprocess calls out to locale.getpreferredencoding(False) to
3205+ # determine encoding. Workaround for Trusty where the
3206+ # environment appears to not be set up correctly.
3207+ locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
3208+ try:
3209+ output = subprocess.check_output(cmd,
3210+ stderr=subprocess.STDOUT,
3211+ universal_newlines=True)
3212+ previous = None
3213+ pkg = {}
3214+ for line in output.splitlines():
3215+ if not line:
3216+ if 'package' in pkg:
3217+ pkgs.update({pkg['package']: pkg})
3218+ pkg = {}
3219+ continue
3220+ if line.startswith(' '):
3221+ if previous and previous in pkg:
3222+ pkg[previous] += os.linesep + line.lstrip()
3223+ continue
3224+ if ':' in line:
3225+ kv = line.split(':', 1)
3226+ key = kv[0].lower()
3227+ if key == 'n':
3228+ continue
3229+ previous = key
3230+ pkg.update({key: kv[1].lstrip()})
3231+ except subprocess.CalledProcessError as cp:
3232+ # ``apt-cache`` returns 100 if none of the packages asked for
3233+ # exist in the apt cache.
3234+ if cp.returncode != 100:
3235+ raise
3236+ return pkgs
3237+
3238+
3239+class Config(_container):
3240+ def __init__(self):
3241+ super(Config, self).__init__(self._populate())
3242+
3243+ def _populate(self):
3244+ cfgs = {}
3245+ cmd = ['apt-config', 'dump']
3246+ output = subprocess.check_output(cmd,
3247+ stderr=subprocess.STDOUT,
3248+ universal_newlines=True)
3249+ for line in output.splitlines():
3250+ if not line.startswith("CommandLine"):
3251+ k, v = line.split(" ", 1)
3252+ cfgs[k] = v.strip(";").strip("\"")
3253+
3254+ return cfgs
3255+
3256+
3257+# Backwards compatibility with old apt_pkg module
3258+sys.modules[__name__].config = Config()
3259+
3260+
3261+def init():
3262+ """Compatibility shim that does nothing."""
3263+ pass
3264+
3265+
3266+def upstream_version(version):
3267+ """Extracts upstream version from a version string.
3268+
3269+ Upstream reference: https://salsa.debian.org/apt-team/apt/blob/master/
3270+ apt-pkg/deb/debversion.cc#L259
3271+
3272+ :param version: Version string
3273+ :type version: str
3274+ :returns: Upstream version
3275+ :rtype: str
3276+ """
3277+ if version:
3278+ version = version.split(':')[-1]
3279+ version = version.split('-')[0]
3280+ return version
3281+
3282+
3283+def version_compare(a, b):
3284+ """Compare the given versions.
3285+
3286+ Call out to ``dpkg`` to make sure the code doing the comparison is
3287+ compatible with what the ``apt`` library would do. Mimic the return
3288+ values.
3289+
3290+ Upstream reference:
3291+ https://apt-team.pages.debian.net/python-apt/library/apt_pkg.html
3292+ ?highlight=version_compare#apt_pkg.version_compare
3293+
3294+ :param a: version string
3295+ :type a: str
3296+ :param b: version string
3297+ :type b: str
3298+ :returns: >0 if ``a`` is greater than ``b``, 0 if a equals b,
3299+ <0 if ``a`` is smaller than ``b``
3300+ :rtype: int
3301+ :raises: subprocess.CalledProcessError, RuntimeError
3302+ """
3303+ for op in ('gt', 1), ('eq', 0), ('lt', -1):
3304+ try:
3305+ subprocess.check_call(['dpkg', '--compare-versions',
3306+ a, op[0], b],
3307+ stderr=subprocess.STDOUT,
3308+ universal_newlines=True)
3309+ return op[1]
3310+ except subprocess.CalledProcessError as cp:
3311+ if cp.returncode == 1:
3312+ continue
3313+ raise
3314+ else:
3315+ raise RuntimeError('Unable to compare "{}" and "{}", according to '
3316+ 'our logic they are neither greater, equal nor '
3317+ 'less than each other.'.format(a, b))
3318diff --git a/charmhelpers/osplatform.py b/charmhelpers/osplatform.py
3319index d9a4d5c..78c81af 100644
3320--- a/charmhelpers/osplatform.py
3321+++ b/charmhelpers/osplatform.py
3322@@ -1,4 +1,5 @@
3323 import platform
3324+import os
3325
3326
3327 def get_platform():
3328@@ -9,9 +10,13 @@ def get_platform():
3329 This string is used to decide which platform module should be imported.
3330 """
3331 # linux_distribution is deprecated and will be removed in Python 3.7
3332- # Warings *not* disabled, as we certainly need to fix this.
3333- tuple_platform = platform.linux_distribution()
3334- current_platform = tuple_platform[0]
3335+ # Warnings *not* disabled, as we certainly need to fix this.
3336+ if hasattr(platform, 'linux_distribution'):
3337+ tuple_platform = platform.linux_distribution()
3338+ current_platform = tuple_platform[0]
3339+ else:
3340+ current_platform = _get_platform_from_fs()
3341+
3342 if "Ubuntu" in current_platform:
3343 return "ubuntu"
3344 elif "CentOS" in current_platform:
3345@@ -20,6 +25,22 @@ def get_platform():
3346 # Stock Python does not detect Ubuntu and instead returns debian.
3347 # Or at least it does in some build environments like Travis CI
3348 return "ubuntu"
3349+ elif "elementary" in current_platform:
3350+ # ElementaryOS fails to run tests locally without this.
3351+ return "ubuntu"
3352 else:
3353 raise RuntimeError("This module is not supported on {}."
3354 .format(current_platform))
3355+
3356+
3357+def _get_platform_from_fs():
3358+ """Get Platform from /etc/os-release."""
3359+ with open(os.path.join(os.sep, 'etc', 'os-release')) as fin:
3360+ content = dict(
3361+ line.split('=', 1)
3362+ for line in fin.read().splitlines()
3363+ if '=' in line
3364+ )
3365+ for k, v in content.items():
3366+ content[k] = v.strip('"')
3367+ return content["NAME"]

Subscribers

People subscribed via source and target branches

to all changes: