Merge ~xavpaice/charm-thruk-agent:update_charmhelpers into ~nagios-charmers/charm-thruk-agent:master

Proposed by Xav Paice
Status: Merged
Approved by: Wouter van Bommel
Approved revision: 66c19905c8c565dfb495a14e83a809681496ac46
Merged at revision: 3a9512547f97be9b44dacbc79f31818b441be4ec
Proposed branch: ~xavpaice/charm-thruk-agent:update_charmhelpers
Merge into: ~nagios-charmers/charm-thruk-agent:master
Diff against target: 2206 lines (+1317/-155)
23 files modified
bin/charm_helpers_sync.py (+30/-22)
hooks/actions.py (+2/-1)
hooks/charmhelpers/__init__.py (+4/-4)
hooks/charmhelpers/core/hookenv.py (+449/-27)
hooks/charmhelpers/core/host.py (+164/-11)
hooks/charmhelpers/core/host_factory/ubuntu.py (+25/-0)
hooks/charmhelpers/core/kernel.py (+2/-2)
hooks/charmhelpers/core/services/base.py (+18/-7)
hooks/charmhelpers/core/strutils.py (+11/-5)
hooks/charmhelpers/core/sysctl.py (+21/-10)
hooks/charmhelpers/core/templating.py (+18/-9)
hooks/charmhelpers/core/unitdata.py (+8/-1)
hooks/charmhelpers/fetch/__init__.py (+2/-0)
hooks/charmhelpers/fetch/archiveurl.py (+1/-1)
hooks/charmhelpers/fetch/bzrurl.py (+2/-2)
hooks/charmhelpers/fetch/giturl.py (+2/-2)
hooks/charmhelpers/fetch/python/__init__.py (+13/-0)
hooks/charmhelpers/fetch/python/debug.py (+54/-0)
hooks/charmhelpers/fetch/python/packages.py (+154/-0)
hooks/charmhelpers/fetch/python/rpdb.py (+56/-0)
hooks/charmhelpers/fetch/python/version.py (+32/-0)
hooks/charmhelpers/fetch/snap.py (+33/-5)
hooks/charmhelpers/fetch/ubuntu.py (+216/-46)
Reviewer                  Review Type    Date Requested    Status
Wouter van Bommel         community                        Approve
Canonical IS Reviewers                                     Pending
Review via email: mp+368892@code.launchpad.net

Commit message

Update charm-helpers to allow the use of the newer Juju proxy settings for apt installations.
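
As a minimal illustration (not part of the diff below), a hook can pass these Juju proxy settings through to apt tooling via the env_proxy_settings helper that this sync adds to charmhelpers.core.hookenv; the add_ppa wrapper name and the merge with os.environ are assumptions of this sketch rather than code from the charm:

    import os
    import subprocess

    from charmhelpers.core import hookenv


    def add_ppa(source):
        """Add an apt source, honouring the model's juju-https-proxy setting."""
        # env_proxy_settings() returns the JUJU_CHARM_*_PROXY (or legacy *_PROXY)
        # variables as a dict, or None when no proxy is configured.
        proxy_env = hookenv.env_proxy_settings(['https']) or {}
        env = dict(os.environ)
        env.update(proxy_env)
        subprocess.check_call(['add-apt-repository', '--yes', source], env=env)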

🤖 Canonical IS Merge Bot (canonical-is-mergebot) wrote:

This merge proposal is being monitored by mergebot. Change the status to Approved to merge.

Wouter van Bommel (woutervb) wrote:

Upgrade of charmhelpers

review: Approve
🤖 Canonical IS Merge Bot (canonical-is-mergebot) wrote:

Change successfully merged at revision 3a9512547f97be9b44dacbc79f31818b441be4ec

Preview Diff

diff --git a/bin/charm_helpers_sync.py b/bin/charm_helpers_sync.py
index f67fdb9..7c0c194 100644
--- a/bin/charm_helpers_sync.py
+++ b/bin/charm_helpers_sync.py
@@ -2,19 +2,17 @@
22
3# Copyright 2014-2015 Canonical Limited.3# Copyright 2014-2015 Canonical Limited.
4#4#
5# This file is part of charm-helpers.5# Licensed under the Apache License, Version 2.0 (the "License");
6# you may not use this file except in compliance with the License.
7# You may obtain a copy of the License at
6#8#
7# charm-helpers is free software: you can redistribute it and/or modify9# http://www.apache.org/licenses/LICENSE-2.0
8# it under the terms of the GNU Lesser General Public License version 3 as
9# published by the Free Software Foundation.
10#10#
11# charm-helpers is distributed in the hope that it will be useful,11# Unless required by applicable law or agreed to in writing, software
12# but WITHOUT ANY WARRANTY; without even the implied warranty of12# distributed under the License is distributed on an "AS IS" BASIS,
13# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the13# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14# GNU Lesser General Public License for more details.14# See the License for the specific language governing permissions and
15#15# limitations under the License.
16# You should have received a copy of the GNU Lesser General Public License
17# along with charm-helpers. If not, see <http://www.gnu.org/licenses/>.
1816
19# Authors:17# Authors:
20# Adam Gandelman <adamg@ubuntu.com>18# Adam Gandelman <adamg@ubuntu.com>
@@ -31,7 +29,7 @@ from fnmatch import fnmatch
3129
32import six30import six
3331
34CHARM_HELPERS_BRANCH = 'lp:charm-helpers'32CHARM_HELPERS_REPO = 'https://github.com/juju/charm-helpers'
3533
3634
37def parse_config(conf_file):35def parse_config(conf_file):
@@ -41,10 +39,16 @@ def parse_config(conf_file):
41 return yaml.load(open(conf_file).read())39 return yaml.load(open(conf_file).read())
4240
4341
44def clone_helpers(work_dir, branch):42def clone_helpers(work_dir, repo):
45 dest = os.path.join(work_dir, 'charm-helpers')43 dest = os.path.join(work_dir, 'charm-helpers')
46 logging.info('Checking out %s to %s.' % (branch, dest))44 logging.info('Cloning out %s to %s.' % (repo, dest))
47 cmd = ['bzr', 'checkout', '--lightweight', branch, dest]45 branch = None
46 if '@' in repo:
47 repo, branch = repo.split('@', 1)
48 cmd = ['git', 'clone', '--depth=1']
49 if branch is not None:
50 cmd += ['--branch', branch]
51 cmd += [repo, dest]
48 subprocess.check_call(cmd)52 subprocess.check_call(cmd)
49 return dest53 return dest
5054
@@ -176,6 +180,9 @@ def extract_options(inc, global_options=None):
176180
177181
178def sync_helpers(include, src, dest, options=None):182def sync_helpers(include, src, dest, options=None):
183 if os.path.exists(dest):
184 logging.debug('Removing existing directory: %s' % dest)
185 shutil.rmtree(dest)
179 if not os.path.isdir(dest):186 if not os.path.isdir(dest):
180 os.makedirs(dest)187 os.makedirs(dest)
181188
@@ -193,14 +200,15 @@ def sync_helpers(include, src, dest, options=None):
193 inc, opts = extract_options(m, global_options)200 inc, opts = extract_options(m, global_options)
194 sync(src, dest, '%s.%s' % (k, inc), opts)201 sync(src, dest, '%s.%s' % (k, inc), opts)
195202
203
196if __name__ == '__main__':204if __name__ == '__main__':
197 parser = optparse.OptionParser()205 parser = optparse.OptionParser()
198 parser.add_option('-c', '--config', action='store', dest='config',206 parser.add_option('-c', '--config', action='store', dest='config',
199 default=None, help='helper config file')207 default=None, help='helper config file')
200 parser.add_option('-D', '--debug', action='store_true', dest='debug',208 parser.add_option('-D', '--debug', action='store_true', dest='debug',
201 default=False, help='debug')209 default=False, help='debug')
202 parser.add_option('-b', '--branch', action='store', dest='branch',210 parser.add_option('-r', '--repository', action='store', dest='repo',
203 help='charm-helpers bzr branch (overrides config)')211 help='charm-helpers git repository (overrides config)')
204 parser.add_option('-d', '--destination', action='store', dest='dest_dir',212 parser.add_option('-d', '--destination', action='store', dest='dest_dir',
205 help='sync destination dir (overrides config)')213 help='sync destination dir (overrides config)')
206 (opts, args) = parser.parse_args()214 (opts, args) = parser.parse_args()
@@ -219,10 +227,10 @@ if __name__ == '__main__':
219 else:227 else:
220 config = {}228 config = {}
221229
222 if 'branch' not in config:230 if 'repo' not in config:
223 config['branch'] = CHARM_HELPERS_BRANCH231 config['repo'] = CHARM_HELPERS_REPO
224 if opts.branch:232 if opts.repo:
225 config['branch'] = opts.branch233 config['repo'] = opts.repo
226 if opts.dest_dir:234 if opts.dest_dir:
227 config['destination'] = opts.dest_dir235 config['destination'] = opts.dest_dir
228236
@@ -242,7 +250,7 @@ if __name__ == '__main__':
242 sync_options = config['options']250 sync_options = config['options']
243 tmpd = tempfile.mkdtemp()251 tmpd = tempfile.mkdtemp()
244 try:252 try:
245 checkout = clone_helpers(tmpd, config['branch'])253 checkout = clone_helpers(tmpd, config['repo'])
246 sync_helpers(config['include'], checkout, config['destination'],254 sync_helpers(config['include'], checkout, config['destination'],
247 options=sync_options)255 options=sync_options)
248 except Exception as e:256 except Exception as e:
diff --git a/hooks/actions.py b/hooks/actions.py
index a72e9a6..1593b51 100644
--- a/hooks/actions.py
+++ b/hooks/actions.py
@@ -93,7 +93,8 @@ def update_ppa(service_name):
93 prev_source = config.previous('source')93 prev_source = config.previous('source')
94 if prev_source is not None and prev_source != new_source:94 if prev_source is not None and prev_source != new_source:
95 subprocess.check_call(['add-apt-repository',95 subprocess.check_call(['add-apt-repository',
96 '--yes', '--remove', prev_source])96 '--yes', '--remove', prev_source],
97 env=hookenv.env_proxy_settings(['https']))
97 add_source(config.get('source'), config.get('key', None))98 add_source(config.get('source'), config.get('key', None))
98 apt_update(fatal=True)99 apt_update(fatal=True)
99 package_list = ["thruk", "pwgen", "apache2-utils"]100 package_list = ["thruk", "pwgen", "apache2-utils"]
diff --git a/hooks/charmhelpers/__init__.py b/hooks/charmhelpers/__init__.py
index e7aa471..61ef907 100644
--- a/hooks/charmhelpers/__init__.py
+++ b/hooks/charmhelpers/__init__.py
@@ -23,22 +23,22 @@ import subprocess
23import sys23import sys
2424
25try:25try:
26 import six # flake8: noqa26 import six # NOQA:F401
27except ImportError:27except ImportError:
28 if sys.version_info.major == 2:28 if sys.version_info.major == 2:
29 subprocess.check_call(['apt-get', 'install', '-y', 'python-six'])29 subprocess.check_call(['apt-get', 'install', '-y', 'python-six'])
30 else:30 else:
31 subprocess.check_call(['apt-get', 'install', '-y', 'python3-six'])31 subprocess.check_call(['apt-get', 'install', '-y', 'python3-six'])
32 import six # flake8: noqa32 import six # NOQA:F401
3333
34try:34try:
35 import yaml # flake8: noqa35 import yaml # NOQA:F401
36except ImportError:36except ImportError:
37 if sys.version_info.major == 2:37 if sys.version_info.major == 2:
38 subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml'])38 subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml'])
39 else:39 else:
40 subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml'])40 subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml'])
41 import yaml # flake8: noqa41 import yaml # NOQA:F401
4242
4343
44# Holds a list of mapping of mangled function names that have been deprecated44# Holds a list of mapping of mangled function names that have been deprecated
diff --git a/hooks/charmhelpers/core/hookenv.py b/hooks/charmhelpers/core/hookenv.py
index e44e22b..4744eb4 100644
--- a/hooks/charmhelpers/core/hookenv.py
+++ b/hooks/charmhelpers/core/hookenv.py
@@ -22,10 +22,12 @@ from __future__ import print_function
22import copy22import copy
23from distutils.version import LooseVersion23from distutils.version import LooseVersion
24from functools import wraps24from functools import wraps
25from collections import namedtuple
25import glob26import glob
26import os27import os
27import json28import json
28import yaml29import yaml
30import re
29import subprocess31import subprocess
30import sys32import sys
31import errno33import errno
@@ -38,12 +40,20 @@ if not six.PY3:
38else:40else:
39 from collections import UserDict41 from collections import UserDict
4042
43
41CRITICAL = "CRITICAL"44CRITICAL = "CRITICAL"
42ERROR = "ERROR"45ERROR = "ERROR"
43WARNING = "WARNING"46WARNING = "WARNING"
44INFO = "INFO"47INFO = "INFO"
45DEBUG = "DEBUG"48DEBUG = "DEBUG"
49TRACE = "TRACE"
46MARKER = object()50MARKER = object()
51SH_MAX_ARG = 131071
52
53
54RANGE_WARNING = ('Passing NO_PROXY string that includes a cidr. '
55 'This may not be compatible with software you are '
56 'running in your shell.')
4757
48cache = {}58cache = {}
4959
@@ -64,7 +74,7 @@ def cached(func):
64 @wraps(func)74 @wraps(func)
65 def wrapper(*args, **kwargs):75 def wrapper(*args, **kwargs):
66 global cache76 global cache
67 key = str((func, args, kwargs))77 key = json.dumps((func, args, kwargs), sort_keys=True, default=str)
68 try:78 try:
69 return cache[key]79 return cache[key]
70 except KeyError:80 except KeyError:
@@ -94,7 +104,7 @@ def log(message, level=None):
94 command += ['-l', level]104 command += ['-l', level]
95 if not isinstance(message, six.string_types):105 if not isinstance(message, six.string_types):
96 message = repr(message)106 message = repr(message)
97 command += [message]107 command += [message[:SH_MAX_ARG]]
98 # Missing juju-log should not cause failures in unit tests108 # Missing juju-log should not cause failures in unit tests
99 # Send log output to stderr109 # Send log output to stderr
100 try:110 try:
@@ -197,11 +207,58 @@ def remote_unit():
197 return os.environ.get('JUJU_REMOTE_UNIT', None)207 return os.environ.get('JUJU_REMOTE_UNIT', None)
198208
199209
200def service_name():210def application_name():
201 """The name service group this unit belongs to"""211 """
212 The name of the deployed application this unit belongs to.
213 """
202 return local_unit().split('/')[0]214 return local_unit().split('/')[0]
203215
204216
217def service_name():
218 """
219 .. deprecated:: 0.19.1
220 Alias for :func:`application_name`.
221 """
222 return application_name()
223
224
225def model_name():
226 """
227 Name of the model that this unit is deployed in.
228 """
229 return os.environ['JUJU_MODEL_NAME']
230
231
232def model_uuid():
233 """
234 UUID of the model that this unit is deployed in.
235 """
236 return os.environ['JUJU_MODEL_UUID']
237
238
239def principal_unit():
240 """Returns the principal unit of this unit, otherwise None"""
241 # Juju 2.2 and above provides JUJU_PRINCIPAL_UNIT
242 principal_unit = os.environ.get('JUJU_PRINCIPAL_UNIT', None)
243 # If it's empty, then this unit is the principal
244 if principal_unit == '':
245 return os.environ['JUJU_UNIT_NAME']
246 elif principal_unit is not None:
247 return principal_unit
248 # For Juju 2.1 and below, let's try work out the principle unit by
249 # the various charms' metadata.yaml.
250 for reltype in relation_types():
251 for rid in relation_ids(reltype):
252 for unit in related_units(rid):
253 md = _metadata_unit(unit)
254 if not md:
255 continue
256 subordinate = md.pop('subordinate', None)
257 if not subordinate:
258 return unit
259 return None
260
261
205@cached262@cached
206def remote_service_name(relid=None):263def remote_service_name(relid=None):
207 """The remote service name for a given relation-id (or the current relation)"""264 """The remote service name for a given relation-id (or the current relation)"""
@@ -263,7 +320,7 @@ class Config(dict):
263 self.implicit_save = True320 self.implicit_save = True
264 self._prev_dict = None321 self._prev_dict = None
265 self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME)322 self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME)
266 if os.path.exists(self.path):323 if os.path.exists(self.path) and os.stat(self.path).st_size:
267 self.load_previous()324 self.load_previous()
268 atexit(self._implicit_save)325 atexit(self._implicit_save)
269326
@@ -283,7 +340,11 @@ class Config(dict):
283 """340 """
284 self.path = path or self.path341 self.path = path or self.path
285 with open(self.path) as f:342 with open(self.path) as f:
286 self._prev_dict = json.load(f)343 try:
344 self._prev_dict = json.load(f)
345 except ValueError as e:
346 log('Unable to parse previous config data - {}'.format(str(e)),
347 level=ERROR)
287 for k, v in copy.deepcopy(self._prev_dict).items():348 for k, v in copy.deepcopy(self._prev_dict).items():
288 if k not in self:349 if k not in self:
289 self[k] = v350 self[k] = v
@@ -319,6 +380,7 @@ class Config(dict):
319380
320 """381 """
321 with open(self.path, 'w') as f:382 with open(self.path, 'w') as f:
383 os.fchmod(f.fileno(), 0o600)
322 json.dump(self, f)384 json.dump(self, f)
323385
324 def _implicit_save(self):386 def _implicit_save(self):
@@ -326,22 +388,40 @@ class Config(dict):
326 self.save()388 self.save()
327389
328390
329@cached391_cache_config = None
392
393
330def config(scope=None):394def config(scope=None):
331 """Juju charm configuration"""395 """
332 config_cmd_line = ['config-get']396 Get the juju charm configuration (scope==None) or individual key,
333 if scope is not None:397 (scope=str). The returned value is a Python data structure loaded as
334 config_cmd_line.append(scope)398 JSON from the Juju config command.
335 else:399
336 config_cmd_line.append('--all')400 :param scope: If set, return the value for the specified key.
337 config_cmd_line.append('--format=json')401 :type scope: Optional[str]
402 :returns: Either the whole config as a Config, or a key from it.
403 :rtype: Any
404 """
405 global _cache_config
406 config_cmd_line = ['config-get', '--all', '--format=json']
338 try:407 try:
339 config_data = json.loads(408 # JSON Decode Exception for Python3.5+
340 subprocess.check_output(config_cmd_line).decode('UTF-8'))409 exc_json = json.decoder.JSONDecodeError
410 except AttributeError:
411 # JSON Decode Exception for Python2.7 through Python3.4
412 exc_json = ValueError
413 try:
414 if _cache_config is None:
415 config_data = json.loads(
416 subprocess.check_output(config_cmd_line).decode('UTF-8'))
417 _cache_config = Config(config_data)
341 if scope is not None:418 if scope is not None:
342 return config_data419 return _cache_config.get(scope)
343 return Config(config_data)420 return _cache_config
344 except ValueError:421 except (exc_json, UnicodeDecodeError) as e:
422 log('Unable to parse output from config-get: config_cmd_line="{}" '
423 'message="{}"'
424 .format(config_cmd_line, str(e)), level=ERROR)
345 return None425 return None
346426
347427
@@ -435,6 +515,67 @@ def related_units(relid=None):
435 subprocess.check_output(units_cmd_line).decode('UTF-8')) or []515 subprocess.check_output(units_cmd_line).decode('UTF-8')) or []
436516
437517
518def expected_peer_units():
519 """Get a generator for units we expect to join peer relation based on
520 goal-state.
521
522 The local unit is excluded from the result to make it easy to gauge
523 completion of all peers joining the relation with existing hook tools.
524
525 Example usage:
526 log('peer {} of {} joined peer relation'
527 .format(len(related_units()),
528 len(list(expected_peer_units()))))
529
530 This function will raise NotImplementedError if used with juju versions
531 without goal-state support.
532
533 :returns: iterator
534 :rtype: types.GeneratorType
535 :raises: NotImplementedError
536 """
537 if not has_juju_version("2.4.0"):
538 # goal-state first appeared in 2.4.0.
539 raise NotImplementedError("goal-state")
540 _goal_state = goal_state()
541 return (key for key in _goal_state['units']
542 if '/' in key and key != local_unit())
543
544
545def expected_related_units(reltype=None):
546 """Get a generator for units we expect to join relation based on
547 goal-state.
548
549 Note that you can not use this function for the peer relation, take a look
550 at expected_peer_units() for that.
551
552 This function will raise KeyError if you request information for a
553 relation type for which juju goal-state does not have information. It will
554 raise NotImplementedError if used with juju versions without goal-state
555 support.
556
557 Example usage:
558 log('participant {} of {} joined relation {}'
559 .format(len(related_units()),
560 len(list(expected_related_units())),
561 relation_type()))
562
563 :param reltype: Relation type to list data for, default is to list data for
564 the realtion type we are currently executing a hook for.
565 :type reltype: str
566 :returns: iterator
567 :rtype: types.GeneratorType
568 :raises: KeyError, NotImplementedError
569 """
570 if not has_juju_version("2.4.4"):
571 # goal-state existed in 2.4.0, but did not list individual units to
572 # join a relation in 2.4.1 through 2.4.3. (LP: #1794739)
573 raise NotImplementedError("goal-state relation unit count")
574 reltype = reltype or relation_type()
575 _goal_state = goal_state()
576 return (key for key in _goal_state['relations'][reltype] if '/' in key)
577
578
438@cached579@cached
439def relation_for_unit(unit=None, rid=None):580def relation_for_unit(unit=None, rid=None):
440 """Get the json represenation of a unit's relation"""581 """Get the json represenation of a unit's relation"""
@@ -478,6 +619,24 @@ def metadata():
478 return yaml.safe_load(md)619 return yaml.safe_load(md)
479620
480621
622def _metadata_unit(unit):
623 """Given the name of a unit (e.g. apache2/0), get the unit charm's
624 metadata.yaml. Very similar to metadata() but allows us to inspect
625 other units. Unit needs to be co-located, such as a subordinate or
626 principal/primary.
627
628 :returns: metadata.yaml as a python object.
629
630 """
631 basedir = os.sep.join(charm_dir().split(os.sep)[:-2])
632 unitdir = 'unit-{}'.format(unit.replace(os.sep, '-'))
633 joineddir = os.path.join(basedir, unitdir, 'charm', 'metadata.yaml')
634 if not os.path.exists(joineddir):
635 return None
636 with open(joineddir) as md:
637 return yaml.safe_load(md)
638
639
481@cached640@cached
482def relation_types():641def relation_types():
483 """Get a list of relation types supported by this charm"""642 """Get a list of relation types supported by this charm"""
@@ -602,18 +761,31 @@ def is_relation_made(relation, keys='private-address'):
602 return False761 return False
603762
604763
764def _port_op(op_name, port, protocol="TCP"):
765 """Open or close a service network port"""
766 _args = [op_name]
767 icmp = protocol.upper() == "ICMP"
768 if icmp:
769 _args.append(protocol)
770 else:
771 _args.append('{}/{}'.format(port, protocol))
772 try:
773 subprocess.check_call(_args)
774 except subprocess.CalledProcessError:
775 # Older Juju pre 2.3 doesn't support ICMP
776 # so treat it as a no-op if it fails.
777 if not icmp:
778 raise
779
780
605def open_port(port, protocol="TCP"):781def open_port(port, protocol="TCP"):
606 """Open a service network port"""782 """Open a service network port"""
607 _args = ['open-port']783 _port_op('open-port', port, protocol)
608 _args.append('{}/{}'.format(port, protocol))
609 subprocess.check_call(_args)
610784
611785
612def close_port(port, protocol="TCP"):786def close_port(port, protocol="TCP"):
613 """Close a service network port"""787 """Close a service network port"""
614 _args = ['close-port']788 _port_op('close-port', port, protocol)
615 _args.append('{}/{}'.format(port, protocol))
616 subprocess.check_call(_args)
617789
618790
619def open_ports(start, end, protocol="TCP"):791def open_ports(start, end, protocol="TCP"):
@@ -630,6 +802,17 @@ def close_ports(start, end, protocol="TCP"):
630 subprocess.check_call(_args)802 subprocess.check_call(_args)
631803
632804
805def opened_ports():
806 """Get the opened ports
807
808 *Note that this will only show ports opened in a previous hook*
809
810 :returns: Opened ports as a list of strings: ``['8080/tcp', '8081-8083/tcp']``
811 """
812 _args = ['opened-ports', '--format=json']
813 return json.loads(subprocess.check_output(_args).decode('UTF-8'))
814
815
633@cached816@cached
634def unit_get(attribute):817def unit_get(attribute):
635 """Get the unit ID for the remote unit"""818 """Get the unit ID for the remote unit"""
@@ -751,8 +934,15 @@ class Hooks(object):
751 return wrapper934 return wrapper
752935
753936
937class NoNetworkBinding(Exception):
938 pass
939
940
754def charm_dir():941def charm_dir():
755 """Return the root directory of the current charm"""942 """Return the root directory of the current charm"""
943 d = os.environ.get('JUJU_CHARM_DIR')
944 if d is not None:
945 return d
756 return os.environ.get('CHARM_DIR')946 return os.environ.get('CHARM_DIR')
757947
758948
@@ -874,6 +1064,14 @@ def application_version_set(version):
8741064
8751065
876@translate_exc(from_exc=OSError, to_exc=NotImplementedError)1066@translate_exc(from_exc=OSError, to_exc=NotImplementedError)
1067@cached
1068def goal_state():
1069 """Juju goal state values"""
1070 cmd = ['goal-state', '--format=json']
1071 return json.loads(subprocess.check_output(cmd).decode('UTF-8'))
1072
1073
1074@translate_exc(from_exc=OSError, to_exc=NotImplementedError)
877def is_leader():1075def is_leader():
878 """Does the current unit hold the juju leadership1076 """Does the current unit hold the juju leadership
8791077
@@ -967,7 +1165,6 @@ def juju_version():
967 universal_newlines=True).strip()1165 universal_newlines=True).strip()
9681166
9691167
970@cached
971def has_juju_version(minimum_version):1168def has_juju_version(minimum_version):
972 """Return True if the Juju version is at least the provided version"""1169 """Return True if the Juju version is at least the provided version"""
973 return LooseVersion(juju_version()) >= LooseVersion(minimum_version)1170 return LooseVersion(juju_version()) >= LooseVersion(minimum_version)
@@ -1027,6 +1224,8 @@ def _run_atexit():
1027@translate_exc(from_exc=OSError, to_exc=NotImplementedError)1224@translate_exc(from_exc=OSError, to_exc=NotImplementedError)
1028def network_get_primary_address(binding):1225def network_get_primary_address(binding):
1029 '''1226 '''
1227 Deprecated since Juju 2.3; use network_get()
1228
1030 Retrieve the primary network address for a named binding1229 Retrieve the primary network address for a named binding
10311230
1032 :param binding: string. The name of a relation of extra-binding1231 :param binding: string. The name of a relation of extra-binding
@@ -1034,7 +1233,41 @@ def network_get_primary_address(binding):
1034 :raise: NotImplementedError if run on Juju < 2.01233 :raise: NotImplementedError if run on Juju < 2.0
1035 '''1234 '''
1036 cmd = ['network-get', '--primary-address', binding]1235 cmd = ['network-get', '--primary-address', binding]
1037 return subprocess.check_output(cmd).decode('UTF-8').strip()1236 try:
1237 response = subprocess.check_output(
1238 cmd,
1239 stderr=subprocess.STDOUT).decode('UTF-8').strip()
1240 except CalledProcessError as e:
1241 if 'no network config found for binding' in e.output.decode('UTF-8'):
1242 raise NoNetworkBinding("No network binding for {}"
1243 .format(binding))
1244 else:
1245 raise
1246 return response
1247
1248
1249def network_get(endpoint, relation_id=None):
1250 """
1251 Retrieve the network details for a relation endpoint
1252
1253 :param endpoint: string. The name of a relation endpoint
1254 :param relation_id: int. The ID of the relation for the current context.
1255 :return: dict. The loaded YAML output of the network-get query.
1256 :raise: NotImplementedError if request not supported by the Juju version.
1257 """
1258 if not has_juju_version('2.2'):
1259 raise NotImplementedError(juju_version()) # earlier versions require --primary-address
1260 if relation_id and not has_juju_version('2.3'):
1261 raise NotImplementedError # 2.3 added the -r option
1262
1263 cmd = ['network-get', endpoint, '--format', 'yaml']
1264 if relation_id:
1265 cmd.append('-r')
1266 cmd.append(relation_id)
1267 response = subprocess.check_output(
1268 cmd,
1269 stderr=subprocess.STDOUT).decode('UTF-8').strip()
1270 return yaml.safe_load(response)
10381271
10391272
1040def add_metric(*args, **kwargs):1273def add_metric(*args, **kwargs):
@@ -1066,3 +1299,192 @@ def meter_info():
1066 """Get the meter status information, if running in the meter-status-changed1299 """Get the meter status information, if running in the meter-status-changed
1067 hook."""1300 hook."""
1068 return os.environ.get('JUJU_METER_INFO')1301 return os.environ.get('JUJU_METER_INFO')
1302
1303
1304def iter_units_for_relation_name(relation_name):
1305 """Iterate through all units in a relation
1306
1307 Generator that iterates through all the units in a relation and yields
1308 a named tuple with rid and unit field names.
1309
1310 Usage:
1311 data = [(u.rid, u.unit)
1312 for u in iter_units_for_relation_name(relation_name)]
1313
1314 :param relation_name: string relation name
1315 :yield: Named Tuple with rid and unit field names
1316 """
1317 RelatedUnit = namedtuple('RelatedUnit', 'rid, unit')
1318 for rid in relation_ids(relation_name):
1319 for unit in related_units(rid):
1320 yield RelatedUnit(rid, unit)
1321
1322
1323def ingress_address(rid=None, unit=None):
1324 """
1325 Retrieve the ingress-address from a relation when available.
1326 Otherwise, return the private-address.
1327
1328 When used on the consuming side of the relation (unit is a remote
1329 unit), the ingress-address is the IP address that this unit needs
1330 to use to reach the provided service on the remote unit.
1331
1332 When used on the providing side of the relation (unit == local_unit()),
1333 the ingress-address is the IP address that is advertised to remote
1334 units on this relation. Remote units need to use this address to
1335 reach the local provided service on this unit.
1336
1337 Note that charms may document some other method to use in
1338 preference to the ingress_address(), such as an address provided
1339 on a different relation attribute or a service discovery mechanism.
1340 This allows charms to redirect inbound connections to their peers
1341 or different applications such as load balancers.
1342
1343 Usage:
1344 addresses = [ingress_address(rid=u.rid, unit=u.unit)
1345 for u in iter_units_for_relation_name(relation_name)]
1346
1347 :param rid: string relation id
1348 :param unit: string unit name
1349 :side effect: calls relation_get
1350 :return: string IP address
1351 """
1352 settings = relation_get(rid=rid, unit=unit)
1353 return (settings.get('ingress-address') or
1354 settings.get('private-address'))
1355
1356
1357def egress_subnets(rid=None, unit=None):
1358 """
1359 Retrieve the egress-subnets from a relation.
1360
1361 This function is to be used on the providing side of the
1362 relation, and provides the ranges of addresses that client
1363 connections may come from. The result is uninteresting on
1364 the consuming side of a relation (unit == local_unit()).
1365
1366 Returns a stable list of subnets in CIDR format.
1367 eg. ['192.168.1.0/24', '2001::F00F/128']
1368
1369 If egress-subnets is not available, falls back to using the published
1370 ingress-address, or finally private-address.
1371
1372 :param rid: string relation id
1373 :param unit: string unit name
1374 :side effect: calls relation_get
1375 :return: list of subnets in CIDR format. eg. ['192.168.1.0/24', '2001::F00F/128']
1376 """
1377 def _to_range(addr):
1378 if re.search(r'^(?:\d{1,3}\.){3}\d{1,3}$', addr) is not None:
1379 addr += '/32'
1380 elif ':' in addr and '/' not in addr: # IPv6
1381 addr += '/128'
1382 return addr
1383
1384 settings = relation_get(rid=rid, unit=unit)
1385 if 'egress-subnets' in settings:
1386 return [n.strip() for n in settings['egress-subnets'].split(',') if n.strip()]
1387 if 'ingress-address' in settings:
1388 return [_to_range(settings['ingress-address'])]
1389 if 'private-address' in settings:
1390 return [_to_range(settings['private-address'])]
1391 return [] # Should never happen
1392
1393
1394def unit_doomed(unit=None):
1395 """Determines if the unit is being removed from the model
1396
1397 Requires Juju 2.4.1.
1398
1399 :param unit: string unit name, defaults to local_unit
1400 :side effect: calls goal_state
1401 :side effect: calls local_unit
1402 :side effect: calls has_juju_version
1403 :return: True if the unit is being removed, already gone, or never existed
1404 """
1405 if not has_juju_version("2.4.1"):
1406 # We cannot risk blindly returning False for 'we don't know',
1407 # because that could cause data loss; if call sites don't
1408 # need an accurate answer, they likely don't need this helper
1409 # at all.
1410 # goal-state existed in 2.4.0, but did not handle removals
1411 # correctly until 2.4.1.
1412 raise NotImplementedError("is_doomed")
1413 if unit is None:
1414 unit = local_unit()
1415 gs = goal_state()
1416 units = gs.get('units', {})
1417 if unit not in units:
1418 return True
1419 # I don't think 'dead' units ever show up in the goal-state, but
1420 # check anyway in addition to 'dying'.
1421 return units[unit]['status'] in ('dying', 'dead')
1422
1423
1424def env_proxy_settings(selected_settings=None):
1425 """Get proxy settings from process environment variables.
1426
1427 Get charm proxy settings from environment variables that correspond to
1428 juju-http-proxy, juju-https-proxy and juju-no-proxy (available as of 2.4.2,
1429 see lp:1782236) in a format suitable for passing to an application that
1430 reacts to proxy settings passed as environment variables. Some applications
1431 support lowercase or uppercase notation (e.g. curl), some support only
1432 lowercase (e.g. wget), there are also subjectively rare cases of only
1433 uppercase notation support. no_proxy CIDR and wildcard support also varies
1434 between runtimes and applications as there is no enforced standard.
1435
1436 Some applications may connect to multiple destinations and expose config
1437 options that would affect only proxy settings for a specific destination
1438 these should be handled in charms in an application-specific manner.
1439
1440 :param selected_settings: format only a subset of possible settings
1441 :type selected_settings: list
1442 :rtype: Option(None, dict[str, str])
1443 """
1444 SUPPORTED_SETTINGS = {
1445 'http': 'HTTP_PROXY',
1446 'https': 'HTTPS_PROXY',
1447 'no_proxy': 'NO_PROXY',
1448 'ftp': 'FTP_PROXY'
1449 }
1450 if selected_settings is None:
1451 selected_settings = SUPPORTED_SETTINGS
1452
1453 selected_vars = [v for k, v in SUPPORTED_SETTINGS.items()
1454 if k in selected_settings]
1455 proxy_settings = {}
1456 for var in selected_vars:
1457 var_val = os.getenv(var)
1458 if var_val:
1459 proxy_settings[var] = var_val
1460 proxy_settings[var.lower()] = var_val
1461 # Now handle juju-prefixed environment variables. The legacy vs new
1462 # environment variable usage is mutually exclusive
1463 charm_var_val = os.getenv('JUJU_CHARM_{}'.format(var))
1464 if charm_var_val:
1465 proxy_settings[var] = charm_var_val
1466 proxy_settings[var.lower()] = charm_var_val
1467 if 'no_proxy' in proxy_settings:
1468 if _contains_range(proxy_settings['no_proxy']):
1469 log(RANGE_WARNING, level=WARNING)
1470 return proxy_settings if proxy_settings else None
1471
1472
1473def _contains_range(addresses):
1474 """Check for cidr or wildcard domain in a string.
1475
1476 Given a string comprising a comma seperated list of ip addresses
1477 and domain names, determine whether the string contains IP ranges
1478 or wildcard domains.
1479
1480 :param addresses: comma seperated list of domains and ip addresses.
1481 :type addresses: str
1482 """
1483 return (
1484 # Test for cidr (e.g. 10.20.20.0/24)
1485 "/" in addresses or
1486 # Test for wildcard domains (*.foo.com or .foo.com)
1487 "*" in addresses or
1488 addresses.startswith(".") or
1489 ",." in addresses or
1490 " ." in addresses)
diff --git a/hooks/charmhelpers/core/host.py b/hooks/charmhelpers/core/host.py
index b0043cb..32754ff 100644
--- a/hooks/charmhelpers/core/host.py
+++ b/hooks/charmhelpers/core/host.py
@@ -34,21 +34,23 @@ import six
3434
35from contextlib import contextmanager35from contextlib import contextmanager
36from collections import OrderedDict36from collections import OrderedDict
37from .hookenv import log37from .hookenv import log, INFO, DEBUG, local_unit, charm_name
38from .fstab import Fstab38from .fstab import Fstab
39from charmhelpers.osplatform import get_platform39from charmhelpers.osplatform import get_platform
4040
41__platform__ = get_platform()41__platform__ = get_platform()
42if __platform__ == "ubuntu":42if __platform__ == "ubuntu":
43 from charmhelpers.core.host_factory.ubuntu import (43 from charmhelpers.core.host_factory.ubuntu import ( # NOQA:F401
44 service_available,44 service_available,
45 add_new_group,45 add_new_group,
46 lsb_release,46 lsb_release,
47 cmp_pkgrevno,47 cmp_pkgrevno,
48 CompareHostReleases,48 CompareHostReleases,
49 get_distrib_codename,
50 arch
49 ) # flake8: noqa -- ignore F401 for this import51 ) # flake8: noqa -- ignore F401 for this import
50elif __platform__ == "centos":52elif __platform__ == "centos":
51 from charmhelpers.core.host_factory.centos import (53 from charmhelpers.core.host_factory.centos import ( # NOQA:F401
52 service_available,54 service_available,
53 add_new_group,55 add_new_group,
54 lsb_release,56 lsb_release,
@@ -58,6 +60,7 @@ elif __platform__ == "centos":
5860
59UPDATEDB_PATH = '/etc/updatedb.conf'61UPDATEDB_PATH = '/etc/updatedb.conf'
6062
63
61def service_start(service_name, **kwargs):64def service_start(service_name, **kwargs):
62 """Start a system service.65 """Start a system service.
6366
@@ -287,8 +290,8 @@ def service_running(service_name, **kwargs):
287 for key, value in six.iteritems(kwargs):290 for key, value in six.iteritems(kwargs):
288 parameter = '%s=%s' % (key, value)291 parameter = '%s=%s' % (key, value)
289 cmd.append(parameter)292 cmd.append(parameter)
290 output = subprocess.check_output(cmd,293 output = subprocess.check_output(
291 stderr=subprocess.STDOUT).decode('UTF-8')294 cmd, stderr=subprocess.STDOUT).decode('UTF-8')
292 except subprocess.CalledProcessError:295 except subprocess.CalledProcessError:
293 return False296 return False
294 else:297 else:
@@ -441,6 +444,51 @@ def add_user_to_group(username, group):
441 subprocess.check_call(cmd)444 subprocess.check_call(cmd)
442445
443446
447def chage(username, lastday=None, expiredate=None, inactive=None,
448 mindays=None, maxdays=None, root=None, warndays=None):
449 """Change user password expiry information
450
451 :param str username: User to update
452 :param str lastday: Set when password was changed in YYYY-MM-DD format
453 :param str expiredate: Set when user's account will no longer be
454 accessible in YYYY-MM-DD format.
455 -1 will remove an account expiration date.
456 :param str inactive: Set the number of days of inactivity after a password
457 has expired before the account is locked.
458 -1 will remove an account's inactivity.
459 :param str mindays: Set the minimum number of days between password
460 changes to MIN_DAYS.
461 0 indicates the password can be changed anytime.
462 :param str maxdays: Set the maximum number of days during which a
463 password is valid.
464 -1 as MAX_DAYS will remove checking maxdays
465 :param str root: Apply changes in the CHROOT_DIR directory
466 :param str warndays: Set the number of days of warning before a password
467 change is required
468 :raises subprocess.CalledProcessError: if call to chage fails
469 """
470 cmd = ['chage']
471 if root:
472 cmd.extend(['--root', root])
473 if lastday:
474 cmd.extend(['--lastday', lastday])
475 if expiredate:
476 cmd.extend(['--expiredate', expiredate])
477 if inactive:
478 cmd.extend(['--inactive', inactive])
479 if mindays:
480 cmd.extend(['--mindays', mindays])
481 if maxdays:
482 cmd.extend(['--maxdays', maxdays])
483 if warndays:
484 cmd.extend(['--warndays', warndays])
485 cmd.append(username)
486 subprocess.check_call(cmd)
487
488
489remove_password_expiry = functools.partial(chage, expiredate='-1', inactive='-1', mindays='0', maxdays='-1')
490
491
444def rsync(from_path, to_path, flags='-r', options=None, timeout=None):492def rsync(from_path, to_path, flags='-r', options=None, timeout=None):
445 """Replicate the contents of a path"""493 """Replicate the contents of a path"""
446 options = options or ['--delete', '--executability']494 options = options or ['--delete', '--executability']
@@ -487,13 +535,45 @@ def mkdir(path, owner='root', group='root', perms=0o555, force=False):
487535
488def write_file(path, content, owner='root', group='root', perms=0o444):536def write_file(path, content, owner='root', group='root', perms=0o444):
489 """Create or overwrite a file with the contents of a byte string."""537 """Create or overwrite a file with the contents of a byte string."""
490 log("Writing file {} {}:{} {:o}".format(path, owner, group, perms))
491 uid = pwd.getpwnam(owner).pw_uid538 uid = pwd.getpwnam(owner).pw_uid
492 gid = grp.getgrnam(group).gr_gid539 gid = grp.getgrnam(group).gr_gid
493 with open(path, 'wb') as target:540 # lets see if we can grab the file and compare the context, to avoid doing
494 os.fchown(target.fileno(), uid, gid)541 # a write.
495 os.fchmod(target.fileno(), perms)542 existing_content = None
496 target.write(content)543 existing_uid, existing_gid, existing_perms = None, None, None
544 try:
545 with open(path, 'rb') as target:
546 existing_content = target.read()
547 stat = os.stat(path)
548 existing_uid, existing_gid, existing_perms = (
549 stat.st_uid, stat.st_gid, stat.st_mode
550 )
551 except Exception:
552 pass
553 if content != existing_content:
554 log("Writing file {} {}:{} {:o}".format(path, owner, group, perms),
555 level=DEBUG)
556 with open(path, 'wb') as target:
557 os.fchown(target.fileno(), uid, gid)
558 os.fchmod(target.fileno(), perms)
559 if six.PY3 and isinstance(content, six.string_types):
560 content = content.encode('UTF-8')
561 target.write(content)
562 return
563 # the contents were the same, but we might still need to change the
564 # ownership or permissions.
565 if existing_uid != uid:
566 log("Changing uid on already existing content: {} -> {}"
567 .format(existing_uid, uid), level=DEBUG)
568 os.chown(path, uid, -1)
569 if existing_gid != gid:
570 log("Changing gid on already existing content: {} -> {}"
571 .format(existing_gid, gid), level=DEBUG)
572 os.chown(path, -1, gid)
573 if existing_perms != perms:
574 log("Changing permissions on existing content: {} -> {}"
575 .format(existing_perms, perms), level=DEBUG)
576 os.chmod(path, perms)
497577
498578
499def fstab_remove(mp):579def fstab_remove(mp):
@@ -758,7 +838,7 @@ def list_nics(nic_type=None):
758 ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n')838 ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n')
759 ip_output = (line.strip() for line in ip_output if line)839 ip_output = (line.strip() for line in ip_output if line)
760840
761 key = re.compile('^[0-9]+:\s+(.+):')841 key = re.compile(r'^[0-9]+:\s+(.+):')
762 for line in ip_output:842 for line in ip_output:
763 matched = re.search(key, line)843 matched = re.search(key, line)
764 if matched:844 if matched:
@@ -903,6 +983,20 @@ def is_container():
903983
904984
905def add_to_updatedb_prunepath(path, updatedb_path=UPDATEDB_PATH):985def add_to_updatedb_prunepath(path, updatedb_path=UPDATEDB_PATH):
986 """Adds the specified path to the mlocate's udpatedb.conf PRUNEPATH list.
987
988 This method has no effect if the path specified by updatedb_path does not
989 exist or is not a file.
990
991 @param path: string the path to add to the updatedb.conf PRUNEPATHS value
992 @param updatedb_path: the path the updatedb.conf file
993 """
994 if not os.path.exists(updatedb_path) or os.path.isdir(updatedb_path):
995 # If the updatedb.conf file doesn't exist then don't attempt to update
996 # the file as the package providing mlocate may not be installed on
997 # the local system
998 return
999
906 with open(updatedb_path, 'r+') as f_id:1000 with open(updatedb_path, 'r+') as f_id:
907 updatedb_text = f_id.read()1001 updatedb_text = f_id.read()
908 output = updatedb(updatedb_text, path)1002 output = updatedb(updatedb_text, path)
@@ -922,3 +1016,62 @@ def updatedb(updatedb_text, new_path):
922 lines[i] = 'PRUNEPATHS="{}"'.format(' '.join(paths))1016 lines[i] = 'PRUNEPATHS="{}"'.format(' '.join(paths))
923 output = "\n".join(lines)1017 output = "\n".join(lines)
924 return output1018 return output
1019
1020
1021def modulo_distribution(modulo=3, wait=30, non_zero_wait=False):
1022 """ Modulo distribution
1023
1024 This helper uses the unit number, a modulo value and a constant wait time
1025 to produce a calculated wait time distribution. This is useful in large
1026 scale deployments to distribute load during an expensive operation such as
1027 service restarts.
1028
1029 If you have 1000 nodes that need to restart 100 at a time 1 minute at a
1030 time:
1031
1032 time.wait(modulo_distribution(modulo=100, wait=60))
1033 restart()
1034
1035 If you need restarts to happen serially set modulo to the exact number of
1036 nodes and set a high constant wait time:
1037
1038 time.wait(modulo_distribution(modulo=10, wait=120))
1039 restart()
1040
1041 @param modulo: int The modulo number creates the group distribution
1042 @param wait: int The constant time wait value
1043 @param non_zero_wait: boolean Override unit % modulo == 0,
1044 return modulo * wait. Used to avoid collisions with
1045 leader nodes which are often given priority.
1046 @return: int Calculated time to wait for unit operation
1047 """
1048 unit_number = int(local_unit().split('/')[1])
1049 calculated_wait_time = (unit_number % modulo) * wait
1050 if non_zero_wait and calculated_wait_time == 0:
1051 return modulo * wait
1052 else:
1053 return calculated_wait_time
1054
1055
1056def install_ca_cert(ca_cert, name=None):
1057 """
1058 Install the given cert as a trusted CA.
1059
1060 The ``name`` is the stem of the filename where the cert is written, and if
1061 not provided, it will default to ``juju-{charm_name}``.
1062
1063 If the cert is empty or None, or is unchanged, nothing is done.
1064 """
1065 if not ca_cert:
1066 return
1067 if not isinstance(ca_cert, bytes):
1068 ca_cert = ca_cert.encode('utf8')
1069 if not name:
1070 name = 'juju-{}'.format(charm_name())
1071 cert_file = '/usr/local/share/ca-certificates/{}.crt'.format(name)
1072 new_hash = hashlib.md5(ca_cert).hexdigest()
1073 if file_hash(cert_file) == new_hash:
1074 return
1075 log("Installing new CA cert at: {}".format(cert_file), level=INFO)
1076 write_file(cert_file, ca_cert)
1077 subprocess.check_call(['update-ca-certificates', '--fresh'])
diff --git a/hooks/charmhelpers/core/host_factory/ubuntu.py b/hooks/charmhelpers/core/host_factory/ubuntu.py
index d8dc378..0ee2b66 100644
--- a/hooks/charmhelpers/core/host_factory/ubuntu.py
+++ b/hooks/charmhelpers/core/host_factory/ubuntu.py
@@ -1,5 +1,6 @@
1import subprocess1import subprocess
22
3from charmhelpers.core.hookenv import cached
3from charmhelpers.core.strutils import BasicStringComparator4from charmhelpers.core.strutils import BasicStringComparator
45
56
@@ -20,6 +21,9 @@ UBUNTU_RELEASES = (
20 'yakkety',21 'yakkety',
21 'zesty',22 'zesty',
22 'artful',23 'artful',
24 'bionic',
25 'cosmic',
26 'disco',
23)27)
2428
2529
@@ -70,6 +74,14 @@ def lsb_release():
70 return d74 return d
7175
7276
77def get_distrib_codename():
78 """Return the codename of the distribution
79 :returns: The codename
80 :rtype: str
81 """
82 return lsb_release()['DISTRIB_CODENAME'].lower()
83
84
73def cmp_pkgrevno(package, revno, pkgcache=None):85def cmp_pkgrevno(package, revno, pkgcache=None):
74 """Compare supplied revno with the revno of the installed package.86 """Compare supplied revno with the revno of the installed package.
7587
@@ -87,3 +99,16 @@ def cmp_pkgrevno(package, revno, pkgcache=None):
87 pkgcache = apt_cache()99 pkgcache = apt_cache()
88 pkg = pkgcache[package]100 pkg = pkgcache[package]
89 return apt_pkg.version_compare(pkg.current_ver.ver_str, revno)101 return apt_pkg.version_compare(pkg.current_ver.ver_str, revno)
102
103
104@cached
105def arch():
106 """Return the package architecture as a string.
107
108 :returns: the architecture
109 :rtype: str
110 :raises: subprocess.CalledProcessError if dpkg command fails
111 """
112 return subprocess.check_output(
113 ['dpkg', '--print-architecture']
114 ).rstrip().decode('UTF-8')
diff --git a/hooks/charmhelpers/core/kernel.py b/hooks/charmhelpers/core/kernel.py
index 2d40452..e01f4f8 100644
--- a/hooks/charmhelpers/core/kernel.py
+++ b/hooks/charmhelpers/core/kernel.py
@@ -26,12 +26,12 @@ from charmhelpers.core.hookenv import (
2626
27__platform__ = get_platform()27__platform__ = get_platform()
28if __platform__ == "ubuntu":28if __platform__ == "ubuntu":
29 from charmhelpers.core.kernel_factory.ubuntu import (29 from charmhelpers.core.kernel_factory.ubuntu import ( # NOQA:F401
30 persistent_modprobe,30 persistent_modprobe,
31 update_initramfs,31 update_initramfs,
32 ) # flake8: noqa -- ignore F401 for this import32 ) # flake8: noqa -- ignore F401 for this import
33elif __platform__ == "centos":33elif __platform__ == "centos":
34 from charmhelpers.core.kernel_factory.centos import (34 from charmhelpers.core.kernel_factory.centos import ( # NOQA:F401
35 persistent_modprobe,35 persistent_modprobe,
36 update_initramfs,36 update_initramfs,
37 ) # flake8: noqa -- ignore F401 for this import37 ) # flake8: noqa -- ignore F401 for this import
diff --git a/hooks/charmhelpers/core/services/base.py b/hooks/charmhelpers/core/services/base.py
index ca9dc99..179ad4f 100644
--- a/hooks/charmhelpers/core/services/base.py
+++ b/hooks/charmhelpers/core/services/base.py
@@ -307,23 +307,34 @@ class PortManagerCallback(ManagerCallback):
307 """307 """
308 def __call__(self, manager, service_name, event_name):308 def __call__(self, manager, service_name, event_name):
309 service = manager.get_service(service_name)309 service = manager.get_service(service_name)
310 new_ports = service.get('ports', [])310 # turn this generator into a list,
311 # as we'll be going over it multiple times
312 new_ports = list(service.get('ports', []))
311 port_file = os.path.join(hookenv.charm_dir(), '.{}.ports'.format(service_name))313 port_file = os.path.join(hookenv.charm_dir(), '.{}.ports'.format(service_name))
312 if os.path.exists(port_file):314 if os.path.exists(port_file):
313 with open(port_file) as fp:315 with open(port_file) as fp:
314 old_ports = fp.read().split(',')316 old_ports = fp.read().split(',')
315 for old_port in old_ports:317 for old_port in old_ports:
316 if bool(old_port):318 if bool(old_port) and not self.ports_contains(old_port, new_ports):
317 old_port = int(old_port)319 hookenv.close_port(old_port)
318 if old_port not in new_ports:
319 hookenv.close_port(old_port)
320 with open(port_file, 'w') as fp:320 with open(port_file, 'w') as fp:
321 fp.write(','.join(str(port) for port in new_ports))321 fp.write(','.join(str(port) for port in new_ports))
322 for port in new_ports:322 for port in new_ports:
323 # A port is either a number or 'ICMP'
324 protocol = 'TCP'
325 if str(port).upper() == 'ICMP':
326 protocol = 'ICMP'
323 if event_name == 'start':327 if event_name == 'start':
324 hookenv.open_port(port)328 hookenv.open_port(port, protocol)
325 elif event_name == 'stop':329 elif event_name == 'stop':
326 hookenv.close_port(port)330 hookenv.close_port(port, protocol)
331
332 def ports_contains(self, port, ports):
333 if not bool(port):
334 return False
335 if str(port).upper() != 'ICMP':
336 port = int(port)
337 return port in ports
327338
328339
329def service_stop(service_name):340def service_stop(service_name):
diff --git a/hooks/charmhelpers/core/strutils.py b/hooks/charmhelpers/core/strutils.py
index 685dabd..e8df045 100644
--- a/hooks/charmhelpers/core/strutils.py
+++ b/hooks/charmhelpers/core/strutils.py
@@ -61,13 +61,19 @@ def bytes_from_string(value):
61 if isinstance(value, six.string_types):61 if isinstance(value, six.string_types):
62 value = six.text_type(value)62 value = six.text_type(value)
63 else:63 else:
64 msg = "Unable to interpret non-string value '%s' as boolean" % (value)64 msg = "Unable to interpret non-string value '%s' as bytes" % (value)
65 raise ValueError(msg)65 raise ValueError(msg)
66 matches = re.match("([0-9]+)([a-zA-Z]+)", value)66 matches = re.match("([0-9]+)([a-zA-Z]+)", value)
67 if not matches:67 if matches:
68 msg = "Unable to interpret string value '%s' as bytes" % (value)68 size = int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)])
69 raise ValueError(msg)69 else:
70 return int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)])70 # Assume that value passed in is bytes
71 try:
72 size = int(value)
73 except ValueError:
74 msg = "Unable to interpret string value '%s' as bytes" % (value)
75 raise ValueError(msg)
76 return size
7177
7278
73class BasicStringComparator(object):79class BasicStringComparator(object):
diff --git a/hooks/charmhelpers/core/sysctl.py b/hooks/charmhelpers/core/sysctl.py
index 6e413e3..f1f4a28 100644
--- a/hooks/charmhelpers/core/sysctl.py
+++ b/hooks/charmhelpers/core/sysctl.py
@@ -28,27 +28,38 @@ from charmhelpers.core.hookenv import (
28__author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>'28__author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>'
2929
3030
31def create(sysctl_dict, sysctl_file):31def create(sysctl_dict, sysctl_file, ignore=False):
32 """Creates a sysctl.conf file from a YAML associative array32 """Creates a sysctl.conf file from a YAML associative array
3333
34 :param sysctl_dict: a YAML-formatted string of sysctl options eg "{ 'kernel.max_pid': 1337 }"34 :param sysctl_dict: a dict or YAML-formatted string of sysctl
35 options eg "{ 'kernel.max_pid': 1337 }"
35 :type sysctl_dict: str36 :type sysctl_dict: str
36 :param sysctl_file: path to the sysctl file to be saved37 :param sysctl_file: path to the sysctl file to be saved
37 :type sysctl_file: str or unicode38 :type sysctl_file: str or unicode
39 :param ignore: If True, ignore "unknown variable" errors.
40 :type ignore: bool
38 :returns: None41 :returns: None
39 """42 """
40 try:43 if type(sysctl_dict) is not dict:
41 sysctl_dict_parsed = yaml.safe_load(sysctl_dict)44 try:
42 except yaml.YAMLError:45 sysctl_dict_parsed = yaml.safe_load(sysctl_dict)
43 log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict),46 except yaml.YAMLError:
44 level=ERROR)47 log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict),
45 return48 level=ERROR)
49 return
50 else:
51 sysctl_dict_parsed = sysctl_dict
4652
47 with open(sysctl_file, "w") as fd:53 with open(sysctl_file, "w") as fd:
48 for key, value in sysctl_dict_parsed.items():54 for key, value in sysctl_dict_parsed.items():
49 fd.write("{}={}\n".format(key, value))55 fd.write("{}={}\n".format(key, value))
5056
51 log("Updating sysctl_file: %s values: %s" % (sysctl_file, sysctl_dict_parsed),57 log("Updating sysctl_file: {} values: {}".format(sysctl_file,
58 sysctl_dict_parsed),
52 level=DEBUG)59 level=DEBUG)
5360
54 check_call(["sysctl", "-p", sysctl_file])61 call = ["sysctl", "-p", sysctl_file]
62 if ignore:
63 call.append("-e")
64
65 check_call(call)
diff --git a/hooks/charmhelpers/core/templating.py b/hooks/charmhelpers/core/templating.py
index 7b801a3..9014015 100644
--- a/hooks/charmhelpers/core/templating.py
+++ b/hooks/charmhelpers/core/templating.py
@@ -20,7 +20,8 @@ from charmhelpers.core import hookenv
2020
2121
22def render(source, target, context, owner='root', group='root',22def render(source, target, context, owner='root', group='root',
23 perms=0o444, templates_dir=None, encoding='UTF-8', template_loader=None):23 perms=0o444, templates_dir=None, encoding='UTF-8',
24 template_loader=None, config_template=None):
24 """25 """
25 Render a template.26 Render a template.
2627
@@ -32,6 +33,9 @@ def render(source, target, context, owner='root', group='root',
32 The context should be a dict containing the values to be replaced in the33 The context should be a dict containing the values to be replaced in the
33 template.34 template.
3435
36 config_template may be provided to render from a provided template instead
37 of loading from a file.
38
35 The `owner`, `group`, and `perms` options will be passed to `write_file`.39 The `owner`, `group`, and `perms` options will be passed to `write_file`.
3640
37 If omitted, `templates_dir` defaults to the `templates` folder in the charm.41 If omitted, `templates_dir` defaults to the `templates` folder in the charm.
@@ -65,14 +69,19 @@ def render(source, target, context, owner='root', group='root',
65 if templates_dir is None:69 if templates_dir is None:
66 templates_dir = os.path.join(hookenv.charm_dir(), 'templates')70 templates_dir = os.path.join(hookenv.charm_dir(), 'templates')
67 template_env = Environment(loader=FileSystemLoader(templates_dir))71 template_env = Environment(loader=FileSystemLoader(templates_dir))
68 try:72
69 source = source73 # load from a string if provided explicitly
70 template = template_env.get_template(source)74 if config_template is not None:
71 except exceptions.TemplateNotFound as e:75 template = template_env.from_string(config_template)
72 hookenv.log('Could not load template %s from %s.' %76 else:
73 (source, templates_dir),77 try:
74 level=hookenv.ERROR)78 source = source
75 raise e79 template = template_env.get_template(source)
80 except exceptions.TemplateNotFound as e:
81 hookenv.log('Could not load template %s from %s.' %
82 (source, templates_dir),
83 level=hookenv.ERROR)
84 raise e
76 content = template.render(context)85 content = template.render(context)
77 if target is not None:86 if target is not None:
78 target_dir = os.path.dirname(target)87 target_dir = os.path.dirname(target)
diff --git a/hooks/charmhelpers/core/unitdata.py b/hooks/charmhelpers/core/unitdata.py
index 54ec969..ab55432 100644
--- a/hooks/charmhelpers/core/unitdata.py
+++ b/hooks/charmhelpers/core/unitdata.py
@@ -166,6 +166,10 @@ class Storage(object):
166166
167 To support dicts, lists, integer, floats, and booleans values167 To support dicts, lists, integer, floats, and booleans values
168 are automatically json encoded/decoded.168 are automatically json encoded/decoded.
169
170 Note: to facilitate unit testing, ':memory:' can be passed as the
171 path parameter which causes sqlite3 to only build the db in memory.
172 This should only be used for testing purposes.
169 """173 """
170 def __init__(self, path=None):174 def __init__(self, path=None):
171 self.db_path = path175 self.db_path = path
@@ -175,6 +179,9 @@ class Storage(object):
175 else:179 else:
176 self.db_path = os.path.join(180 self.db_path = os.path.join(
177 os.environ.get('CHARM_DIR', ''), '.unit-state.db')181 os.environ.get('CHARM_DIR', ''), '.unit-state.db')
182 if self.db_path != ':memory:':
183 with open(self.db_path, 'a') as f:
184 os.fchmod(f.fileno(), 0o600)
178 self.conn = sqlite3.connect('%s' % self.db_path)185 self.conn = sqlite3.connect('%s' % self.db_path)
179 self.cursor = self.conn.cursor()186 self.cursor = self.conn.cursor()
180 self.revision = None187 self.revision = None
@@ -358,7 +365,7 @@ class Storage(object):
358 try:365 try:
359 yield self.revision366 yield self.revision
360 self.revision = None367 self.revision = None
361 except:368 except Exception:
362 self.flush(False)369 self.flush(False)
363 self.revision = None370 self.revision = None
364 raise371 raise
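
Two small behaviour changes land in unitdata above: on-disk state files are now chmod'ed 0600, and passing ':memory:' as the path keeps the sqlite database entirely in RAM, which is handy for unit tests. Illustrative use of the in-memory form (key and value are placeholders):

    from charmhelpers.core.unitdata import Storage

    store = Storage(':memory:')            # nothing is written to .unit-state.db
    store.set('thruk.version', '2.30')
    assert store.get('thruk.version') == '2.30'
    store.close()
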
diff --git a/hooks/charmhelpers/fetch/__init__.py b/hooks/charmhelpers/fetch/__init__.py
index 480a627..8572d34 100644
--- a/hooks/charmhelpers/fetch/__init__.py
+++ b/hooks/charmhelpers/fetch/__init__.py
@@ -84,6 +84,7 @@ module = "charmhelpers.fetch.%s" % __platform__
84fetch = importlib.import_module(module)84fetch = importlib.import_module(module)
8585
86filter_installed_packages = fetch.filter_installed_packages86filter_installed_packages = fetch.filter_installed_packages
87filter_missing_packages = fetch.filter_missing_packages
87install = fetch.apt_install88install = fetch.apt_install
88upgrade = fetch.apt_upgrade89upgrade = fetch.apt_upgrade
89update = _fetch_update = fetch.apt_update90update = _fetch_update = fetch.apt_update
@@ -96,6 +97,7 @@ if __platform__ == "ubuntu":
96 apt_update = fetch.apt_update97 apt_update = fetch.apt_update
97 apt_upgrade = fetch.apt_upgrade98 apt_upgrade = fetch.apt_upgrade
98 apt_purge = fetch.apt_purge99 apt_purge = fetch.apt_purge
100 apt_autoremove = fetch.apt_autoremove
99 apt_mark = fetch.apt_mark101 apt_mark = fetch.apt_mark
100 apt_hold = fetch.apt_hold102 apt_hold = fetch.apt_hold
101 apt_unhold = fetch.apt_unhold103 apt_unhold = fetch.apt_unhold
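
The fetch API now also exposes filter_missing_packages and apt_autoremove on Ubuntu. Despite its name, filter_missing_packages() returns the subset of the given packages that is currently installed, which pairs naturally with a purge. A short sketch; the package names are examples only:

    from charmhelpers.fetch import (
        apt_autoremove,
        apt_purge,
        filter_missing_packages,
    )

    installed = filter_missing_packages(['xinetd', 'nagios-nrpe-server'])
    if installed:
        apt_purge(installed, fatal=True)
        apt_autoremove(purge=True, fatal=True)   # clean up now-unneeded dependencies
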
diff --git a/hooks/charmhelpers/fetch/archiveurl.py b/hooks/charmhelpers/fetch/archiveurl.py
index dd24f9e..d25587a 100644
--- a/hooks/charmhelpers/fetch/archiveurl.py
+++ b/hooks/charmhelpers/fetch/archiveurl.py
@@ -89,7 +89,7 @@ class ArchiveUrlFetchHandler(BaseFetchHandler):
89 :param str source: URL pointing to an archive file.89 :param str source: URL pointing to an archive file.
90 :param str dest: Local path location to download archive file to.90 :param str dest: Local path location to download archive file to.
91 """91 """
92 # propogate all exceptions92 # propagate all exceptions
93 # URLError, OSError, etc93 # URLError, OSError, etc
94 proto, netloc, path, params, query, fragment = urlparse(source)94 proto, netloc, path, params, query, fragment = urlparse(source)
95 if proto in ('http', 'https'):95 if proto in ('http', 'https'):
diff --git a/hooks/charmhelpers/fetch/bzrurl.py b/hooks/charmhelpers/fetch/bzrurl.py
index 07cd029..c4ab3ff 100644
--- a/hooks/charmhelpers/fetch/bzrurl.py
+++ b/hooks/charmhelpers/fetch/bzrurl.py
@@ -13,7 +13,7 @@
13# limitations under the License.13# limitations under the License.
1414
15import os15import os
16from subprocess import check_call16from subprocess import STDOUT, check_output
17from charmhelpers.fetch import (17from charmhelpers.fetch import (
18 BaseFetchHandler,18 BaseFetchHandler,
19 UnhandledSource,19 UnhandledSource,
@@ -55,7 +55,7 @@ class BzrUrlFetchHandler(BaseFetchHandler):
55 cmd = ['bzr', 'branch']55 cmd = ['bzr', 'branch']
56 cmd += cmd_opts56 cmd += cmd_opts
57 cmd += [source, dest]57 cmd += [source, dest]
58 check_call(cmd)58 check_output(cmd, stderr=STDOUT)
5959
60 def install(self, source, dest=None, revno=None):60 def install(self, source, dest=None, revno=None):
61 url_parts = self.parse_url(source)61 url_parts = self.parse_url(source)
diff --git a/hooks/charmhelpers/fetch/giturl.py b/hooks/charmhelpers/fetch/giturl.py
index 4cf21bc..070ca9b 100644
--- a/hooks/charmhelpers/fetch/giturl.py
+++ b/hooks/charmhelpers/fetch/giturl.py
@@ -13,7 +13,7 @@
13# limitations under the License.13# limitations under the License.
1414
15import os15import os
16from subprocess import check_call, CalledProcessError16from subprocess import check_output, CalledProcessError, STDOUT
17from charmhelpers.fetch import (17from charmhelpers.fetch import (
18 BaseFetchHandler,18 BaseFetchHandler,
19 UnhandledSource,19 UnhandledSource,
@@ -50,7 +50,7 @@ class GitUrlFetchHandler(BaseFetchHandler):
50 cmd = ['git', 'clone', source, dest, '--branch', branch]50 cmd = ['git', 'clone', source, dest, '--branch', branch]
51 if depth:51 if depth:
52 cmd.extend(['--depth', depth])52 cmd.extend(['--depth', depth])
53 check_call(cmd)53 check_output(cmd, stderr=STDOUT)
5454
55 def install(self, source, branch="master", dest=None, depth=None):55 def install(self, source, branch="master", dest=None, depth=None):
56 url_parts = self.parse_url(source)56 url_parts = self.parse_url(source)
diff --git a/hooks/charmhelpers/fetch/python/__init__.py b/hooks/charmhelpers/fetch/python/__init__.py
57new file mode 10064457new file mode 100644
index 0000000..bff99dc
--- /dev/null
+++ b/hooks/charmhelpers/fetch/python/__init__.py
@@ -0,0 +1,13 @@
1# Copyright 2014-2019 Canonical Limited.
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
diff --git a/hooks/charmhelpers/fetch/python/debug.py b/hooks/charmhelpers/fetch/python/debug.py
0new file mode 10064414new file mode 100644
index 0000000..757135e
--- /dev/null
+++ b/hooks/charmhelpers/fetch/python/debug.py
@@ -0,0 +1,54 @@
1#!/usr/bin/env python
2# coding: utf-8
3
4# Copyright 2014-2015 Canonical Limited.
5#
6# Licensed under the Apache License, Version 2.0 (the "License");
7# you may not use this file except in compliance with the License.
8# You may obtain a copy of the License at
9#
10# http://www.apache.org/licenses/LICENSE-2.0
11#
12# Unless required by applicable law or agreed to in writing, software
13# distributed under the License is distributed on an "AS IS" BASIS,
14# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15# See the License for the specific language governing permissions and
16# limitations under the License.
17
18from __future__ import print_function
19
20import atexit
21import sys
22
23from charmhelpers.fetch.python.rpdb import Rpdb
24from charmhelpers.core.hookenv import (
25 open_port,
26 close_port,
27 ERROR,
28 log
29)
30
31__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>"
32
33DEFAULT_ADDR = "0.0.0.0"
34DEFAULT_PORT = 4444
35
36
37def _error(message):
38 log(message, level=ERROR)
39
40
41def set_trace(addr=DEFAULT_ADDR, port=DEFAULT_PORT):
42 """
43 Set a trace point using the remote debugger
44 """
45 atexit.register(close_port, port)
46 try:
47 log("Starting a remote python debugger session on %s:%s" % (addr,
48 port))
49 open_port(port)
50 debugger = Rpdb(addr=addr, port=port)
51 debugger.set_trace(sys._getframe().f_back)
52 except Exception:
53 _error("Cannot start a remote debug session on %s:%s" % (addr,
54 port))
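
debug.set_trace() opens the given port on the unit, starts an Rpdb session bound to it, and registers close_port() to run at exit; execution blocks until a client attaches. Illustrative only, with a hypothetical hook name and the default port:

    from charmhelpers.fetch.python.debug import set_trace

    def config_changed():
        # Attach from another host with e.g.: nc <unit-address> 4444
        set_trace(addr='0.0.0.0', port=4444)
        # hook logic continues once the remote pdb session ends
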
diff --git a/hooks/charmhelpers/fetch/python/packages.py b/hooks/charmhelpers/fetch/python/packages.py
0new file mode 10064455new file mode 100644
index 0000000..6e95028
--- /dev/null
+++ b/hooks/charmhelpers/fetch/python/packages.py
@@ -0,0 +1,154 @@
1#!/usr/bin/env python
2# coding: utf-8
3
4# Copyright 2014-2015 Canonical Limited.
5#
6# Licensed under the Apache License, Version 2.0 (the "License");
7# you may not use this file except in compliance with the License.
8# You may obtain a copy of the License at
9#
10# http://www.apache.org/licenses/LICENSE-2.0
11#
12# Unless required by applicable law or agreed to in writing, software
13# distributed under the License is distributed on an "AS IS" BASIS,
14# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15# See the License for the specific language governing permissions and
16# limitations under the License.
17
18import os
19import six
20import subprocess
21import sys
22
23from charmhelpers.fetch import apt_install, apt_update
24from charmhelpers.core.hookenv import charm_dir, log
25
26__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>"
27
28
29def pip_execute(*args, **kwargs):
30 """Overriden pip_execute() to stop sys.path being changed.
31
32 The act of importing main from the pip module seems to cause add wheels
33 from the /usr/share/python-wheels which are installed by various tools.
34 This function ensures that sys.path remains the same after the call is
35 executed.
36 """
37 try:
38 _path = sys.path
39 try:
40 from pip import main as _pip_execute
41 except ImportError:
42 apt_update()
43 if six.PY2:
44 apt_install('python-pip')
45 else:
46 apt_install('python3-pip')
47 from pip import main as _pip_execute
48 _pip_execute(*args, **kwargs)
49 finally:
50 sys.path = _path
51
52
53def parse_options(given, available):
54 """Given a set of options, check if available"""
55 for key, value in sorted(given.items()):
56 if not value:
57 continue
58 if key in available:
59 yield "--{0}={1}".format(key, value)
60
61
62def pip_install_requirements(requirements, constraints=None, **options):
63 """Install a requirements file.
64
65 :param constraints: Path to pip constraints file.
66 http://pip.readthedocs.org/en/stable/user_guide/#constraints-files
67 """
68 command = ["install"]
69
70 available_options = ('proxy', 'src', 'log', )
71 for option in parse_options(options, available_options):
72 command.append(option)
73
74 command.append("-r {0}".format(requirements))
75 if constraints:
76 command.append("-c {0}".format(constraints))
77 log("Installing from file: {} with constraints {} "
78 "and options: {}".format(requirements, constraints, command))
79 else:
80 log("Installing from file: {} with options: {}".format(requirements,
81 command))
82 pip_execute(command)
83
84
85def pip_install(package, fatal=False, upgrade=False, venv=None,
86 constraints=None, **options):
87 """Install a python package"""
88 if venv:
89 venv_python = os.path.join(venv, 'bin/pip')
90 command = [venv_python, "install"]
91 else:
92 command = ["install"]
93
94 available_options = ('proxy', 'src', 'log', 'index-url', )
95 for option in parse_options(options, available_options):
96 command.append(option)
97
98 if upgrade:
99 command.append('--upgrade')
100
101 if constraints:
102 command.extend(['-c', constraints])
103
104 if isinstance(package, list):
105 command.extend(package)
106 else:
107 command.append(package)
108
109 log("Installing {} package with options: {}".format(package,
110 command))
111 if venv:
112 subprocess.check_call(command)
113 else:
114 pip_execute(command)
115
116
117def pip_uninstall(package, **options):
118 """Uninstall a python package"""
119 command = ["uninstall", "-q", "-y"]
120
121 available_options = ('proxy', 'log', )
122 for option in parse_options(options, available_options):
123 command.append(option)
124
125 if isinstance(package, list):
126 command.extend(package)
127 else:
128 command.append(package)
129
130 log("Uninstalling {} package with options: {}".format(package,
131 command))
132 pip_execute(command)
133
134
135def pip_list():
136 """Returns the list of current python installed packages
137 """
138 return pip_execute(["list"])
139
140
141def pip_create_virtualenv(path=None):
142 """Create an isolated Python environment."""
143 if six.PY2:
144 apt_install('python-virtualenv')
145 else:
146 apt_install('python3-virtualenv')
147
148 if path:
149 venv_path = path
150 else:
151 venv_path = os.path.join(charm_dir(), 'venv')
152
153 if not os.path.exists(venv_path):
154 subprocess.check_call(['virtualenv', venv_path])
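
The vendored pip helpers above translate keyword options into pip command-line flags via parse_options() and can target a virtualenv instead of the system interpreter. A minimal sketch; the venv path and proxy URL are assumptions, not values from this charm:

    from charmhelpers.fetch.python.packages import (
        pip_create_virtualenv,
        pip_install,
    )

    pip_create_virtualenv('/srv/thruk/venv')     # created only if it does not exist yet
    pip_install(
        ['requests', 'pyyaml'],
        venv='/srv/thruk/venv',
        upgrade=True,
        proxy='http://squid.internal:3128',      # becomes "--proxy=..." on the pip command line
    )
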
diff --git a/hooks/charmhelpers/fetch/python/rpdb.py b/hooks/charmhelpers/fetch/python/rpdb.py
0new file mode 100644155new file mode 100644
index 0000000..9b31610
--- /dev/null
+++ b/hooks/charmhelpers/fetch/python/rpdb.py
@@ -0,0 +1,56 @@
1# Copyright 2014-2015 Canonical Limited.
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14
15"""Remote Python Debugger (pdb wrapper)."""
16
17import pdb
18import socket
19import sys
20
21__author__ = "Bertrand Janin <b@janin.com>"
22__version__ = "0.1.3"
23
24
25class Rpdb(pdb.Pdb):
26
27 def __init__(self, addr="127.0.0.1", port=4444):
28 """Initialize the socket and initialize pdb."""
29
30 # Backup stdin and stdout before replacing them by the socket handle
31 self.old_stdout = sys.stdout
32 self.old_stdin = sys.stdin
33
34 # Open a 'reusable' socket to let the webapp reload on the same port
35 self.skt = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
36 self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True)
37 self.skt.bind((addr, port))
38 self.skt.listen(1)
39 (clientsocket, address) = self.skt.accept()
40 handle = clientsocket.makefile('rw')
41 pdb.Pdb.__init__(self, completekey='tab', stdin=handle, stdout=handle)
42 sys.stdout = sys.stdin = handle
43
44 def shutdown(self):
45 """Revert stdin and stdout, close the socket."""
46 sys.stdout = self.old_stdout
47 sys.stdin = self.old_stdin
48 self.skt.close()
49 self.set_continue()
50
51 def do_continue(self, arg):
52 """Stop all operation on ``continue``."""
53 self.shutdown()
54 return 1
55
56 do_EOF = do_quit = do_exit = do_c = do_cont = do_continue
diff --git a/hooks/charmhelpers/fetch/python/version.py b/hooks/charmhelpers/fetch/python/version.py
0new file mode 10064457new file mode 100644
index 0000000..3eb4210
--- /dev/null
+++ b/hooks/charmhelpers/fetch/python/version.py
@@ -0,0 +1,32 @@
1#!/usr/bin/env python
2# coding: utf-8
3
4# Copyright 2014-2015 Canonical Limited.
5#
6# Licensed under the Apache License, Version 2.0 (the "License");
7# you may not use this file except in compliance with the License.
8# You may obtain a copy of the License at
9#
10# http://www.apache.org/licenses/LICENSE-2.0
11#
12# Unless required by applicable law or agreed to in writing, software
13# distributed under the License is distributed on an "AS IS" BASIS,
14# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15# See the License for the specific language governing permissions and
16# limitations under the License.
17
18import sys
19
20__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>"
21
22
23def current_version():
24 """Current system python version"""
25 return sys.version_info
26
27
28def current_version_string():
29 """Current system python version as string major.minor.micro"""
30 return "{0}.{1}.{2}".format(sys.version_info.major,
31 sys.version_info.minor,
32 sys.version_info.micro)
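
For completeness, the small version helper simply reports which interpreter the hooks run under:

    from charmhelpers.fetch.python.version import current_version_string

    print(current_version_string())   # e.g. "3.6.9" on a bionic unit
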
diff --git a/hooks/charmhelpers/fetch/snap.py b/hooks/charmhelpers/fetch/snap.py
index 23c707b..395836c 100644
--- a/hooks/charmhelpers/fetch/snap.py
+++ b/hooks/charmhelpers/fetch/snap.py
@@ -18,21 +18,33 @@ If writing reactive charms, use the snap layer:
18https://lists.ubuntu.com/archives/snapcraft/2016-September/001114.html18https://lists.ubuntu.com/archives/snapcraft/2016-September/001114.html
19"""19"""
20import subprocess20import subprocess
21from os import environ21import os
22from time import sleep22from time import sleep
23from charmhelpers.core.hookenv import log23from charmhelpers.core.hookenv import log
2424
25__author__ = 'Joseph Borg <joseph.borg@canonical.com>'25__author__ = 'Joseph Borg <joseph.borg@canonical.com>'
2626
27SNAP_NO_LOCK = 1 # The return code for "couldn't acquire lock" in Snap (hopefully this will be improved).27# The return code for "couldn't acquire lock" in Snap
28# (hopefully this will be improved).
29SNAP_NO_LOCK = 1
28SNAP_NO_LOCK_RETRY_DELAY = 10 # Wait X seconds between Snap lock checks.30SNAP_NO_LOCK_RETRY_DELAY = 10 # Wait X seconds between Snap lock checks.
29SNAP_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times.31SNAP_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times.
32SNAP_CHANNELS = [
33 'edge',
34 'beta',
35 'candidate',
36 'stable',
37]
3038
3139
32class CouldNotAcquireLockException(Exception):40class CouldNotAcquireLockException(Exception):
33 pass41 pass
3442
3543
44class InvalidSnapChannel(Exception):
45 pass
46
47
36def _snap_exec(commands):48def _snap_exec(commands):
37 """49 """
38 Execute snap commands.50 Execute snap commands.
@@ -47,13 +59,17 @@ def _snap_exec(commands):
4759
48 while return_code is None or return_code == SNAP_NO_LOCK:60 while return_code is None or return_code == SNAP_NO_LOCK:
49 try:61 try:
50 return_code = subprocess.check_call(['snap'] + commands, env=environ)62 return_code = subprocess.check_call(['snap'] + commands,
63 env=os.environ)
51 except subprocess.CalledProcessError as e:64 except subprocess.CalledProcessError as e:
52 retry_count += + 165 retry_count += + 1
53 if retry_count > SNAP_NO_LOCK_RETRY_COUNT:66 if retry_count > SNAP_NO_LOCK_RETRY_COUNT:
54 raise CouldNotAcquireLockException('Could not aquire lock after %s attempts' % SNAP_NO_LOCK_RETRY_COUNT)67 raise CouldNotAcquireLockException(
68 'Could not aquire lock after {} attempts'
69 .format(SNAP_NO_LOCK_RETRY_COUNT))
55 return_code = e.returncode70 return_code = e.returncode
56 log('Snap failed to acquire lock, trying again in %s seconds.' % SNAP_NO_LOCK_RETRY_DELAY, level='WARN')71 log('Snap failed to acquire lock, trying again in {} seconds.'
72 .format(SNAP_NO_LOCK_RETRY_DELAY, level='WARN'))
57 sleep(SNAP_NO_LOCK_RETRY_DELAY)73 sleep(SNAP_NO_LOCK_RETRY_DELAY)
5874
59 return return_code75 return return_code
@@ -120,3 +136,15 @@ def snap_refresh(packages, *flags):
120136
121 log(message, level='INFO')137 log(message, level='INFO')
122 return _snap_exec(['refresh'] + flags + packages)138 return _snap_exec(['refresh'] + flags + packages)
139
140
141def valid_snap_channel(channel):
142 """ Validate snap channel exists
143
144 :raises InvalidSnapChannel: When channel does not exist
145 :return: Boolean
146 """
147 if channel.lower() in SNAP_CHANNELS:
148 return True
149 else:
150 raise InvalidSnapChannel("Invalid Snap Channel: {}".format(channel))
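
valid_snap_channel() guards against typos in channel names before a snap operation, and _snap_exec() keeps retrying (up to SNAP_NO_LOCK_RETRY_COUNT times, SNAP_NO_LOCK_RETRY_DELAY seconds apart) while another process holds the snap lock. A hedged example; the snap name is a placeholder:

    from charmhelpers.fetch.snap import snap_install, valid_snap_channel

    channel = 'candidate'
    if valid_snap_channel(channel):              # raises InvalidSnapChannel for anything else
        snap_install('canonical-livepatch', '--channel={}'.format(channel))
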
diff --git a/hooks/charmhelpers/fetch/ubuntu.py b/hooks/charmhelpers/fetch/ubuntu.py
index 57b5fb6..24c76e3 100644
--- a/hooks/charmhelpers/fetch/ubuntu.py
+++ b/hooks/charmhelpers/fetch/ubuntu.py
@@ -19,14 +19,14 @@ import re
19import six19import six
20import time20import time
21import subprocess21import subprocess
22from tempfile import NamedTemporaryFile
2322
24from charmhelpers.core.host import (23from charmhelpers.core.host import get_distrib_codename
25 lsb_release24
26)
27from charmhelpers.core.hookenv import (25from charmhelpers.core.hookenv import (
28 log,26 log,
29 DEBUG,27 DEBUG,
28 WARNING,
29 env_proxy_settings,
30)30)
31from charmhelpers.fetch import SourceConfigError, GPGKeyError31from charmhelpers.fetch import SourceConfigError, GPGKeyError
3232
@@ -43,6 +43,7 @@ ARCH_TO_PROPOSED_POCKET = {
43 'x86_64': PROPOSED_POCKET,43 'x86_64': PROPOSED_POCKET,
44 'ppc64le': PROPOSED_PORTS_POCKET,44 'ppc64le': PROPOSED_PORTS_POCKET,
45 'aarch64': PROPOSED_PORTS_POCKET,45 'aarch64': PROPOSED_PORTS_POCKET,
46 's390x': PROPOSED_PORTS_POCKET,
46}47}
47CLOUD_ARCHIVE_URL = "http://ubuntu-cloud.archive.canonical.com/ubuntu"48CLOUD_ARCHIVE_URL = "http://ubuntu-cloud.archive.canonical.com/ubuntu"
48CLOUD_ARCHIVE_KEY_ID = '5EDB1B62EC4926EA'49CLOUD_ARCHIVE_KEY_ID = '5EDB1B62EC4926EA'
@@ -139,7 +140,7 @@ CLOUD_ARCHIVE_POCKETS = {
139 'xenial-updates/ocata': 'xenial-updates/ocata',140 'xenial-updates/ocata': 'xenial-updates/ocata',
140 'ocata/proposed': 'xenial-proposed/ocata',141 'ocata/proposed': 'xenial-proposed/ocata',
141 'xenial-ocata/proposed': 'xenial-proposed/ocata',142 'xenial-ocata/proposed': 'xenial-proposed/ocata',
142 'xenial-ocata/newton': 'xenial-proposed/ocata',143 'xenial-proposed/ocata': 'xenial-proposed/ocata',
143 # Pike144 # Pike
144 'pike': 'xenial-updates/pike',145 'pike': 'xenial-updates/pike',
145 'xenial-pike': 'xenial-updates/pike',146 'xenial-pike': 'xenial-updates/pike',
@@ -147,7 +148,7 @@ CLOUD_ARCHIVE_POCKETS = {
147 'xenial-updates/pike': 'xenial-updates/pike',148 'xenial-updates/pike': 'xenial-updates/pike',
148 'pike/proposed': 'xenial-proposed/pike',149 'pike/proposed': 'xenial-proposed/pike',
149 'xenial-pike/proposed': 'xenial-proposed/pike',150 'xenial-pike/proposed': 'xenial-proposed/pike',
150 'xenial-pike/newton': 'xenial-proposed/pike',151 'xenial-proposed/pike': 'xenial-proposed/pike',
151 # Queens152 # Queens
152 'queens': 'xenial-updates/queens',153 'queens': 'xenial-updates/queens',
153 'xenial-queens': 'xenial-updates/queens',154 'xenial-queens': 'xenial-updates/queens',
@@ -155,13 +156,37 @@ CLOUD_ARCHIVE_POCKETS = {
155 'xenial-updates/queens': 'xenial-updates/queens',156 'xenial-updates/queens': 'xenial-updates/queens',
156 'queens/proposed': 'xenial-proposed/queens',157 'queens/proposed': 'xenial-proposed/queens',
157 'xenial-queens/proposed': 'xenial-proposed/queens',158 'xenial-queens/proposed': 'xenial-proposed/queens',
158 'xenial-queens/newton': 'xenial-proposed/queens',159 'xenial-proposed/queens': 'xenial-proposed/queens',
160 # Rocky
161 'rocky': 'bionic-updates/rocky',
162 'bionic-rocky': 'bionic-updates/rocky',
163 'bionic-rocky/updates': 'bionic-updates/rocky',
164 'bionic-updates/rocky': 'bionic-updates/rocky',
165 'rocky/proposed': 'bionic-proposed/rocky',
166 'bionic-rocky/proposed': 'bionic-proposed/rocky',
167 'bionic-proposed/rocky': 'bionic-proposed/rocky',
168 # Stein
169 'stein': 'bionic-updates/stein',
170 'bionic-stein': 'bionic-updates/stein',
171 'bionic-stein/updates': 'bionic-updates/stein',
172 'bionic-updates/stein': 'bionic-updates/stein',
173 'stein/proposed': 'bionic-proposed/stein',
174 'bionic-stein/proposed': 'bionic-proposed/stein',
175 'bionic-proposed/stein': 'bionic-proposed/stein',
176 # Train
177 'train': 'bionic-updates/train',
178 'bionic-train': 'bionic-updates/train',
179 'bionic-train/updates': 'bionic-updates/train',
180 'bionic-updates/train': 'bionic-updates/train',
181 'train/proposed': 'bionic-proposed/train',
182 'bionic-train/proposed': 'bionic-proposed/train',
183 'bionic-proposed/train': 'bionic-proposed/train',
159}184}
160185
161186
162APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT.187APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT.
163CMD_RETRY_DELAY = 10 # Wait 10 seconds between command retries.188CMD_RETRY_DELAY = 10 # Wait 10 seconds between command retries.
164CMD_RETRY_COUNT = 30 # Retry a failing fatal command X times.189CMD_RETRY_COUNT = 3 # Retry a failing fatal command X times.
165190
166191
167def filter_installed_packages(packages):192def filter_installed_packages(packages):
@@ -179,6 +204,18 @@ def filter_installed_packages(packages):
179 return _pkgs204 return _pkgs
180205
181206
207def filter_missing_packages(packages):
208 """Return a list of packages that are installed.
209
210 :param packages: list of packages to evaluate.
211 :returns list: Packages that are installed.
212 """
213 return list(
214 set(packages) -
215 set(filter_installed_packages(packages))
216 )
217
218
182def apt_cache(in_memory=True, progress=None):219def apt_cache(in_memory=True, progress=None):
183 """Build and return an apt cache."""220 """Build and return an apt cache."""
184 from apt import apt_pkg221 from apt import apt_pkg
@@ -238,6 +275,14 @@ def apt_purge(packages, fatal=False):
238 _run_apt_command(cmd, fatal)275 _run_apt_command(cmd, fatal)
239276
240277
278def apt_autoremove(purge=True, fatal=False):
279 """Purge one or more packages."""
280 cmd = ['apt-get', '--assume-yes', 'autoremove']
281 if purge:
282 cmd.append('--purge')
283 _run_apt_command(cmd, fatal)
284
285
241def apt_mark(packages, mark, fatal=False):286def apt_mark(packages, mark, fatal=False):
242 """Flag one or more packages using apt-mark."""287 """Flag one or more packages using apt-mark."""
243 log("Marking {} as {}".format(packages, mark))288 log("Marking {} as {}".format(packages, mark))
@@ -261,42 +306,156 @@ def apt_unhold(packages, fatal=False):
261 return apt_mark(packages, 'unhold', fatal=fatal)306 return apt_mark(packages, 'unhold', fatal=fatal)
262307
263308
264def import_key(keyid):309def import_key(key):
265 """Import a key in either ASCII Armor or Radix64 format.310 """Import an ASCII Armor key.
266311
267 `keyid` is either the keyid to fetch from a PGP server, or312 A Radix64 format keyid is also supported for backwards
268 the key in ASCII armor foramt.313 compatibility. In this case Ubuntu keyserver will be
314 queried for a key via HTTPS by its keyid. This method
315 is less preferrable because https proxy servers may
316 require traffic decryption which is equivalent to a
317 man-in-the-middle attack (a proxy server impersonates
318 keyserver TLS certificates and has to be explicitly
319 trusted by the system).
269320
270 :param keyid: String of key (or key id).321 :param key: A GPG key in ASCII armor format,
322 including BEGIN and END markers or a keyid.
323 :type key: (bytes, str)
271 :raises: GPGKeyError if the key could not be imported324 :raises: GPGKeyError if the key could not be imported
272 """325 """
273 key = keyid.strip()326 key = key.strip()
274 if (key.startswith('-----BEGIN PGP PUBLIC KEY BLOCK-----') and327 if '-' in key or '\n' in key:
275 key.endswith('-----END PGP PUBLIC KEY BLOCK-----')):328 # Send everything not obviously a keyid to GPG to import, as
329 # we trust its validation better than our own. eg. handling
330 # comments before the key.
276 log("PGP key found (looks like ASCII Armor format)", level=DEBUG)331 log("PGP key found (looks like ASCII Armor format)", level=DEBUG)
277 log("Importing ASCII Armor PGP key", level=DEBUG)332 if ('-----BEGIN PGP PUBLIC KEY BLOCK-----' in key and
278 with NamedTemporaryFile() as keyfile:333 '-----END PGP PUBLIC KEY BLOCK-----' in key):
279 with open(keyfile.name, 'w') as fd:334 log("Writing provided PGP key in the binary format", level=DEBUG)
280 fd.write(key)335 if six.PY3:
281 fd.write("\n")336 key_bytes = key.encode('utf-8')
282 cmd = ['apt-key', 'add', keyfile.name]337 else:
283 try:338 key_bytes = key
284 subprocess.check_call(cmd)339 key_name = _get_keyid_by_gpg_key(key_bytes)
285 except subprocess.CalledProcessError:340 key_gpg = _dearmor_gpg_key(key_bytes)
286 error = "Error importing PGP key '{}'".format(key)341 _write_apt_gpg_keyfile(key_name=key_name, key_material=key_gpg)
287 log(error)342 else:
288 raise GPGKeyError(error)343 raise GPGKeyError("ASCII armor markers missing from GPG key")
289 else:344 else:
290 log("PGP key found (looks like Radix64 format)", level=DEBUG)345 log("PGP key found (looks like Radix64 format)", level=WARNING)
291 log("Importing PGP key from keyserver", level=DEBUG)346 log("SECURELY importing PGP key from keyserver; "
292 cmd = ['apt-key', 'adv', '--keyserver',347 "full key not provided.", level=WARNING)
293 'hkp://keyserver.ubuntu.com:80', '--recv-keys', key]348 # as of bionic add-apt-repository uses curl with an HTTPS keyserver URL
294 try:349 # to retrieve GPG keys. `apt-key adv` command is deprecated as is
295 subprocess.check_call(cmd)350 # apt-key in general as noted in its manpage. See lp:1433761 for more
296 except subprocess.CalledProcessError:351 # history. Instead, /etc/apt/trusted.gpg.d is used directly to drop
297 error = "Error importing PGP key '{}'".format(key)352 # gpg
298 log(error)353 key_asc = _get_key_by_keyid(key)
299 raise GPGKeyError(error)354 # write the key in GPG format so that apt-key list shows it
355 key_gpg = _dearmor_gpg_key(key_asc)
356 _write_apt_gpg_keyfile(key_name=key, key_material=key_gpg)
357
358
359def _get_keyid_by_gpg_key(key_material):
360 """Get a GPG key fingerprint by GPG key material.
361 Gets a GPG key fingerprint (40-digit, 160-bit) by the ASCII armor-encoded
362 or binary GPG key material. Can be used, for example, to generate file
363 names for keys passed via charm options.
364
365 :param key_material: ASCII armor-encoded or binary GPG key material
366 :type key_material: bytes
367 :raises: GPGKeyError if invalid key material has been provided
368 :returns: A GPG key fingerprint
369 :rtype: str
370 """
371 # Use the same gpg command for both Xenial and Bionic
372 cmd = 'gpg --with-colons --with-fingerprint'
373 ps = subprocess.Popen(cmd.split(),
374 stdout=subprocess.PIPE,
375 stderr=subprocess.PIPE,
376 stdin=subprocess.PIPE)
377 out, err = ps.communicate(input=key_material)
378 if six.PY3:
379 out = out.decode('utf-8')
380 err = err.decode('utf-8')
381 if 'gpg: no valid OpenPGP data found.' in err:
382 raise GPGKeyError('Invalid GPG key material provided')
383 # from gnupg2 docs: fpr :: Fingerprint (fingerprint is in field 10)
384 return re.search(r"^fpr:{9}([0-9A-F]{40}):$", out, re.MULTILINE).group(1)
385
386
387def _get_key_by_keyid(keyid):
388 """Get a key via HTTPS from the Ubuntu keyserver.
389 Different key ID formats are supported by SKS keyservers (the longer ones
390 are more secure, see "dead beef attack" and https://evil32.com/). Since
391 HTTPS is used, if SSLBump-like HTTPS proxies are in place, they will
392 impersonate keyserver.ubuntu.com and generate a certificate with
393 keyserver.ubuntu.com in the CN field or in SubjAltName fields of a
394 certificate. If such proxy behavior is expected it is necessary to add the
395 CA certificate chain containing the intermediate CA of the SSLBump proxy to
396 every machine that this code runs on via ca-certs cloud-init directive (via
397 cloudinit-userdata model-config) or via other means (such as through a
398 custom charm option). Also note that DNS resolution for the hostname in a
399 URL is done at a proxy server - not at the client side.
400
401 8-digit (32 bit) key ID
402 https://keyserver.ubuntu.com/pks/lookup?search=0x4652B4E6
403 16-digit (64 bit) key ID
404 https://keyserver.ubuntu.com/pks/lookup?search=0x6E85A86E4652B4E6
405 40-digit key ID:
406 https://keyserver.ubuntu.com/pks/lookup?search=0x35F77D63B5CEC106C577ED856E85A86E4652B4E6
407
408 :param keyid: An 8, 16 or 40 hex digit keyid to find a key for
409 :type keyid: (bytes, str)
410 :returns: A key material for the specified GPG key id
411 :rtype: (str, bytes)
412 :raises: subprocess.CalledProcessError
413 """
414 # options=mr - machine-readable output (disables html wrappers)
415 keyserver_url = ('https://keyserver.ubuntu.com'
416 '/pks/lookup?op=get&options=mr&exact=on&search=0x{}')
417 curl_cmd = ['curl', keyserver_url.format(keyid)]
418 # use proxy server settings in order to retrieve the key
419 return subprocess.check_output(curl_cmd,
420 env=env_proxy_settings(['https']))
421
422
423def _dearmor_gpg_key(key_asc):
424 """Converts a GPG key in the ASCII armor format to the binary format.
425
426 :param key_asc: A GPG key in ASCII armor format.
427 :type key_asc: (str, bytes)
428 :returns: A GPG key in binary format
429 :rtype: (str, bytes)
430 :raises: GPGKeyError
431 """
432 ps = subprocess.Popen(['gpg', '--dearmor'],
433 stdout=subprocess.PIPE,
434 stderr=subprocess.PIPE,
435 stdin=subprocess.PIPE)
436 out, err = ps.communicate(input=key_asc)
437 # no need to decode output as it is binary (invalid utf-8), only error
438 if six.PY3:
439 err = err.decode('utf-8')
440 if 'gpg: no valid OpenPGP data found.' in err:
441 raise GPGKeyError('Invalid GPG key material. Check your network setup'
442 ' (MTU, routing, DNS) and/or proxy server settings'
443 ' as well as destination keyserver status.')
444 else:
445 return out
446
447
448def _write_apt_gpg_keyfile(key_name, key_material):
449 """Writes GPG key material into a file at a provided path.
450
451 :param key_name: A key name to use for a key file (could be a fingerprint)
452 :type key_name: str
453 :param key_material: A GPG key material (binary)
454 :type key_material: (str, bytes)
455 """
456 with open('/etc/apt/trusted.gpg.d/{}.gpg'.format(key_name),
457 'wb') as keyf:
458 keyf.write(key_material)
300459
301460
302def add_source(source, key=None, fail_invalid=False):461def add_source(source, key=None, fail_invalid=False):
@@ -364,20 +523,23 @@ def add_source(source, key=None, fail_invalid=False):
364 (r"^cloud:(.*)-(.*)\/staging$", _add_cloud_staging),523 (r"^cloud:(.*)-(.*)\/staging$", _add_cloud_staging),
365 (r"^cloud:(.*)-(.*)$", _add_cloud_distro_check),524 (r"^cloud:(.*)-(.*)$", _add_cloud_distro_check),
366 (r"^cloud:(.*)$", _add_cloud_pocket),525 (r"^cloud:(.*)$", _add_cloud_pocket),
526 (r"^snap:.*-(.*)-(.*)$", _add_cloud_distro_check),
367 ])527 ])
368 if source is None:528 if source is None:
369 source = ''529 source = ''
370 for r, fn in six.iteritems(_mapping):530 for r, fn in six.iteritems(_mapping):
371 m = re.match(r, source)531 m = re.match(r, source)
372 if m:532 if m:
373 # call the assoicated function with the captured groups
374 # raises SourceConfigError on error.
375 fn(*m.groups())
376 if key:533 if key:
534 # Import key before adding the source which depends on it,
535 # as refreshing packages could fail otherwise.
377 try:536 try:
378 import_key(key)537 import_key(key)
379 except GPGKeyError as e:538 except GPGKeyError as e:
380 raise SourceConfigError(str(e))539 raise SourceConfigError(str(e))
540 # call the associated function with the captured groups
541 # raises SourceConfigError on error.
542 fn(*m.groups())
381 break543 break
382 else:544 else:
383 # nothing matched. log an error and maybe sys.exit545 # nothing matched. log an error and maybe sys.exit
@@ -390,13 +552,13 @@ def add_source(source, key=None, fail_invalid=False):
390def _add_proposed():552def _add_proposed():
391 """Add the PROPOSED_POCKET as /etc/apt/source.list.d/proposed.list553 """Add the PROPOSED_POCKET as /etc/apt/source.list.d/proposed.list
392554
393 Uses lsb_release()['DISTRIB_CODENAME'] to determine the correct staza for555 Uses get_distrib_codename to determine the correct stanza for
394 the deb line.556 the deb line.
395557
396 For intel architecutres PROPOSED_POCKET is used for the release, but for558 For intel architecutres PROPOSED_POCKET is used for the release, but for
397 other architectures PROPOSED_PORTS_POCKET is used for the release.559 other architectures PROPOSED_PORTS_POCKET is used for the release.
398 """560 """
399 release = lsb_release()['DISTRIB_CODENAME']561 release = get_distrib_codename()
400 arch = platform.machine()562 arch = platform.machine()
401 if arch not in six.iterkeys(ARCH_TO_PROPOSED_POCKET):563 if arch not in six.iterkeys(ARCH_TO_PROPOSED_POCKET):
402 raise SourceConfigError("Arch {} not supported for (distro-)proposed"564 raise SourceConfigError("Arch {} not supported for (distro-)proposed"
@@ -409,8 +571,16 @@ def _add_apt_repository(spec):
409 """Add the spec using add_apt_repository571 """Add the spec using add_apt_repository
410572
411 :param spec: the parameter to pass to add_apt_repository573 :param spec: the parameter to pass to add_apt_repository
574 :type spec: str
412 """575 """
413 _run_with_retries(['add-apt-repository', '--yes', spec])576 if '{series}' in spec:
577 series = get_distrib_codename()
578 spec = spec.replace('{series}', series)
579 # software-properties package for bionic properly reacts to proxy settings
580 # passed as environment variables (See lp:1433761). This is not the case
581 # LTS and non-LTS releases below bionic.
582 _run_with_retries(['add-apt-repository', '--yes', spec],
583 cmd_env=env_proxy_settings(['https']))
414584
415585
416def _add_cloud_pocket(pocket):586def _add_cloud_pocket(pocket):
@@ -479,7 +649,7 @@ def _verify_is_ubuntu_rel(release, os_release):
479 :raises: SourceConfigError if the release is not the same as the ubuntu649 :raises: SourceConfigError if the release is not the same as the ubuntu
480 release.650 release.
481 """651 """
482 ubuntu_rel = lsb_release()['DISTRIB_CODENAME']652 ubuntu_rel = get_distrib_codename()
483 if release != ubuntu_rel:653 if release != ubuntu_rel:
484 raise SourceConfigError(654 raise SourceConfigError(
485 'Invalid Cloud Archive release specified: {}-{} on this Ubuntu'655 'Invalid Cloud Archive release specified: {}-{} on this Ubuntu'
@@ -557,7 +727,7 @@ def get_upstream_version(package):
557 cache = apt_cache()727 cache = apt_cache()
558 try:728 try:
559 pkg = cache[package]729 pkg = cache[package]
560 except:730 except Exception:
561 # the package is unknown to the current apt cache.731 # the package is unknown to the current apt cache.
562 return None732 return None
563733
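
Taken together with the hookenv changes earlier in the diff, this is what lets the charm honour the newer Juju model proxy settings for apt work: add_source() now resolves a '{series}' placeholder, imports the GPG key (full ASCII-armor material, or a keyid fetched from keyserver.ubuntu.com over HTTPS) before the source is added, and passes env_proxy_settings(['https']) to the underlying commands. A sketch of typical use; the PPA line is a placeholder and the keyid mirrors the docstring example above:

    from charmhelpers.fetch import add_source, apt_update, apt_install

    add_source(
        'deb http://ppa.launchpad.net/thruk/ppa/ubuntu {series} main',  # '{series}' expands to the host codename
        key='4652B4E6',   # keyid is retrieved via HTTPS using the model's proxy settings
    )
    apt_update(fatal=True)
    apt_install(['thruk'], fatal=True)
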
