Merge lp:~verterok/charms/xenial/conn-check/focal into lp:~ubuntuone-hackers/charms/xenial/conn-check/focal

Proposed by Guillermo Gonzalez
Status: Merged
Approved by: Guillermo Gonzalez
Approved revision: 62
Merged at revision: 58
Proposed branch: lp:~verterok/charms/xenial/conn-check/focal
Merge into: lp:~ubuntuone-hackers/charms/xenial/conn-check/focal
Diff against target: 5838 lines (+3744/-563)
33 files modified
hooks/charmhelpers/__init__.py (+67/-19)
hooks/charmhelpers/contrib/ansible/__init__.py (+153/-89)
hooks/charmhelpers/contrib/charmsupport/nrpe.py (+187/-31)
hooks/charmhelpers/contrib/templating/contexts.py (+7/-7)
hooks/charmhelpers/core/decorators.py (+38/-0)
hooks/charmhelpers/core/hookenv.py (+658/-59)
hooks/charmhelpers/core/host.py (+655/-102)
hooks/charmhelpers/core/host_factory/centos.py (+16/-0)
hooks/charmhelpers/core/host_factory/ubuntu.py (+73/-5)
hooks/charmhelpers/core/kernel.py (+2/-2)
hooks/charmhelpers/core/services/base.py (+22/-10)
hooks/charmhelpers/core/services/helpers.py (+2/-2)
hooks/charmhelpers/core/strutils.py (+75/-14)
hooks/charmhelpers/core/sysctl.py (+32/-11)
hooks/charmhelpers/core/templating.py (+21/-17)
hooks/charmhelpers/core/unitdata.py (+17/-9)
hooks/charmhelpers/fetch/__init__.py (+29/-18)
hooks/charmhelpers/fetch/archiveurl.py (+35/-27)
hooks/charmhelpers/fetch/bzrurl.py (+2/-2)
hooks/charmhelpers/fetch/centos.py (+4/-5)
hooks/charmhelpers/fetch/giturl.py (+2/-2)
hooks/charmhelpers/fetch/python/__init__.py (+13/-0)
hooks/charmhelpers/fetch/python/debug.py (+52/-0)
hooks/charmhelpers/fetch/python/packages.py (+148/-0)
hooks/charmhelpers/fetch/python/rpdb.py (+56/-0)
hooks/charmhelpers/fetch/python/version.py (+32/-0)
hooks/charmhelpers/fetch/snap.py (+150/-0)
hooks/charmhelpers/fetch/ubuntu.py (+822/-125)
hooks/charmhelpers/fetch/ubuntu_apt_pkg.py (+327/-0)
hooks/charmhelpers/osplatform.py (+32/-2)
hooks/hooks.py (+1/-1)
playbook.yaml (+13/-3)
roles/nrpe-external-master/tasks/main.yaml (+1/-1)
To merge this branch: bzr merge lp:~verterok/charms/xenial/conn-check/focal
Reviewer: John Paraskevopoulos
Status: Approve
Review via email: mp+445757@code.launchpad.net

Commit message

update charm to work in focal

Description of the change

Most of the changes come from an automated pull of a newer charm-helpers with Python 3.8 support. Please check the individual commits for easier reviewing.
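
For reviewers skimming the sync: the bulk of the charmhelpers diff below is mechanical Python 2/3 cleanup (six removed, str-only type checks, yaml.safe_load). The following is only an illustrative, self-contained sketch of that pattern under assumed helper names; it is not code added by this branch:

    # Hypothetical helpers illustrating the py2->py3 pattern seen in the
    # charmhelpers sync (e.g. contexts.py and hookenv.py in the diff below).
    import yaml

    def normalise_log_message(message):
        # newer charmhelpers checks isinstance(message, str) instead of six.string_types
        if not isinstance(message, str):
            message = repr(message)
        return message

    def load_existing_vars(path):
        # newer charmhelpers loads YAML with yaml.safe_load instead of yaml.load
        with open(path) as existing_vars_file:
            return yaml.safe_load(existing_vars_file.read())

    print(normalise_log_message({'unit': 'conn-check/0'}))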

Revision history for this message
John Paraskevopoulos (quantifics) wrote:

lgtm thanks!

review: Approve
62. By Guillermo Gonzalez

use xenial for all distribution release comparisons

Preview Diff

=== modified file 'hooks/charmhelpers/__init__.py'
--- hooks/charmhelpers/__init__.py 2016-12-20 14:35:00 +0000
+++ hooks/charmhelpers/__init__.py 2023-06-30 13:58:42 +0000
@@ -14,23 +14,71 @@
 
 # Bootstrap charm-helpers, installing its dependencies if necessary using
 # only standard libraries.
+import functools
+import inspect
 import subprocess
-import sys
+
 
 try:
-    import six  # flake8: noqa
+    import yaml  # NOQA:F401
 except ImportError:
-    if sys.version_info.major == 2:
-        subprocess.check_call(['apt-get', 'install', '-y', 'python-six'])
-    else:
-        subprocess.check_call(['apt-get', 'install', '-y', 'python3-six'])
-    import six  # flake8: noqa
-
-try:
-    import yaml  # flake8: noqa
-except ImportError:
-    if sys.version_info.major == 2:
-        subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml'])
-    else:
-        subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml'])
-    import yaml  # flake8: noqa
+    subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml'])
+    import yaml  # NOQA:F401
+
+
+# Holds a list of mapping of mangled function names that have been deprecated
+# using the @deprecate decorator below. This is so that the warning is only
+# printed once for each usage of the function.
+__deprecated_functions = {}
+
+
+def deprecate(warning, date=None, log=None):
+    """Add a deprecation warning the first time the function is used.
+
+    The date which is a string in semi-ISO8660 format indicates the year-month
+    that the function is officially going to be removed.
+
+    usage:
+
+    @deprecate('use core/fetch/add_source() instead', '2017-04')
+    def contributed_add_source_thing(...):
+        ...
+
+    And it then prints to the log ONCE that the function is deprecated.
+    The reason for passing the logging function (log) is so that hookenv.log
+    can be used for a charm if needed.
+
+    :param warning: String to indicate what is to be used instead.
+    :param date: Optional string in YYYY-MM format to indicate when the
+                 function will definitely (probably) be removed.
+    :param log: The log function to call in order to log. If None, logs to
+                stdout
+    """
+    def wrap(f):
+
+        @functools.wraps(f)
+        def wrapped_f(*args, **kwargs):
+            try:
+                module = inspect.getmodule(f)
+                file = inspect.getsourcefile(f)
+                lines = inspect.getsourcelines(f)
+                f_name = "{}-{}-{}..{}-{}".format(
+                    module.__name__, file, lines[0], lines[-1], f.__name__)
+            except (IOError, TypeError):
+                # assume it was local, so just use the name of the function
+                f_name = f.__name__
+            if f_name not in __deprecated_functions:
+                __deprecated_functions[f_name] = True
+                s = "DEPRECATION WARNING: Function {} is being removed".format(
+                    f.__name__)
+                if date:
+                    s = "{} on/around {}".format(s, date)
+                if warning:
+                    s = "{} : {}".format(s, warning)
+                if log:
+                    log(s)
+                else:
+                    print(s)
+            return f(*args, **kwargs)
+        return wrapped_f
+    return wrap
=== modified file 'hooks/charmhelpers/contrib/ansible/__init__.py'
--- hooks/charmhelpers/contrib/ansible/__init__.py 2016-12-20 14:35:00 +0000
+++ hooks/charmhelpers/contrib/ansible/__init__.py 2023-06-30 13:58:42 +0000
@@ -16,90 +16,107 @@
 #
 # Authors:
 #  Charm Helpers Developers <juju@lists.ubuntu.com>
-"""Charm Helpers ansible - declare the state of your machines.
-
-This helper enables you to declare your machine state, rather than
-program it procedurally (and have to test each change to your procedures).
-Your install hook can be as simple as::
+"""
+The ansible package enables you to easily use the configuration management
+tool `Ansible`_ to setup and configure your charm. All of your charm
+configuration options and relation-data are available as regular Ansible
+variables which can be used in your playbooks and templates.
 
-    {{{
-    import charmhelpers.contrib.ansible
-
-
+.. _Ansible: https://www.ansible.com/
+
+Usage
+=====
+
+Here is an example directory structure for a charm to get you started::
+
+    charm-ansible-example/
+    |-- ansible
+    |   |-- playbook.yaml
+    |   `-- templates
+    |       `-- example.j2
+    |-- config.yaml
+    |-- copyright
+    |-- icon.svg
+    |-- layer.yaml
+    |-- metadata.yaml
+    |-- reactive
+    |   `-- example.py
+    |-- README.md
+
+Running a playbook called ``playbook.yaml`` when the ``install`` hook is run
+can be as simple as::
+
+    from charmhelpers.contrib import ansible
+    from charms.reactive import hook
+
+    @hook('install')
     def install():
-        charmhelpers.contrib.ansible.install_ansible_support()
-        charmhelpers.contrib.ansible.apply_playbook('playbooks/install.yaml')
-    }}}
-
-and won't need to change (nor will its tests) when you change the machine
-state.
-
-All of your juju config and relation-data are available as template
-variables within your playbooks and templates. An install playbook looks
-something like::
-
-    {{{
+        ansible.install_ansible_support()
+        ansible.apply_playbook('ansible/playbook.yaml')
+
+Here is an example playbook that uses the ``template`` module to template the
+file ``example.j2`` to the charm host and then uses the ``debug`` module to
+print out all the host and Juju variables that you can use in your playbooks.
+Note that you must target ``localhost`` as the playbook is run locally on the
+charm host::
+
     ---
     - hosts: localhost
-      user: root
-
       tasks:
-        - name: Add private repositories.
+        - name: Template a file
           template:
-            src: ../templates/private-repositories.list.jinja2
-            dest: /etc/apt/sources.list.d/private.list
+            src: templates/example.j2
+            dest: /tmp/example.j2
 
-        - name: Update the cache.
-          apt: update_cache=yes
-
-        - name: Install dependencies.
-          apt: pkg={{ item }}
-          with_items:
-            - python-mimeparse
-            - python-webob
-            - sunburnt
-
-        - name: Setup groups.
-          group: name={{ item.name }} gid={{ item.gid }}
-          with_items:
-            - { name: 'deploy_user', gid: 1800 }
-            - { name: 'service_user', gid: 1500 }
-
-        ...
-    }}}
-
-Read more online about `playbooks`_ and standard ansible `modules`_.
-
-.. _playbooks: http://www.ansibleworks.com/docs/playbooks.html
-.. _modules: http://www.ansibleworks.com/docs/modules.html
-
-A further feature os the ansible hooks is to provide a light weight "action"
+        - name: Print all variables available to Ansible
+          debug:
+            var: vars
+
+Read more online about `playbooks`_ and standard Ansible `modules`_.
+
+.. _playbooks: https://docs.ansible.com/ansible/latest/user_guide/playbooks.html
+.. _modules: https://docs.ansible.com/ansible/latest/user_guide/modules.html
+
+A further feature of the Ansible hooks is to provide a light weight "action"
 scripting tool. This is a decorator that you apply to a function, and that
-function can now receive cli args, and can pass extra args to the playbook.
-
-e.g.
-
-
-@hooks.action()
-def some_action(amount, force="False"):
-    "Usage: some-action AMOUNT [force=True]"  # <-- shown on error
-    # process the arguments
-    # do some calls
-    # return extra-vars to be passed to ansible-playbook
-    return {
-        'amount': int(amount),
-        'type': force,
-    }
+function can now receive cli args, and can pass extra args to the playbook::
+
+    @hooks.action()
+    def some_action(amount, force="False"):
+        "Usage: some-action AMOUNT [force=True]"  # <-- shown on error
+        # process the arguments
+        # do some calls
+        # return extra-vars to be passed to ansible-playbook
+        return {
+            'amount': int(amount),
+            'type': force,
+        }
 
 You can now create a symlink to hooks.py that can be invoked like a hook, but
-with cli params:
+with cli params::
 
-# link actions/some-action to hooks/hooks.py
+    # link actions/some-action to hooks/hooks.py
 
-actions/some-action amount=10 force=true
+    actions/some-action amount=10 force=true
+
+Install Ansible via pip
+=======================
+
+If you want to install a specific version of Ansible via pip instead of
+``install_ansible_support`` which uses APT, consider using the layer options
+of `layer-basic`_ to install Ansible in a virtualenv::
+
+    options:
+      basic:
+        python_packages: ['ansible==2.9.0']
+        include_system_packages: true
+        use_venv: true
+
+.. _layer-basic: https://charmsreactive.readthedocs.io/en/latest/layer-basic.html#layer-configuration
 
 """
 import os
+import json
 import stat
 import subprocess
 import functools
@@ -117,27 +134,63 @@
 ansible_vars_path = '/etc/ansible/host_vars/localhost'
 
 
-def install_ansible_support(from_ppa=True, ppa_location='ppa:rquillo/ansible'):
-    """Installs the ansible package.
+def install_ansible_support(from_ppa=True, ppa_location='ppa:ansible/ansible'):
+    """Installs Ansible via APT.
 
-    By default it is installed from the `PPA`_ linked from
-    the ansible `website`_ or from a ppa specified by a charm config..
+    By default this installs Ansible from the `PPA`_ linked from
+    the Ansible `website`_ or from a PPA set in ``ppa_location``.
 
-    .. _PPA: https://launchpad.net/~rquillo/+archive/ansible
+    .. _PPA: https://launchpad.net/~ansible/+archive/ubuntu/ansible
     .. _website: http://docs.ansible.com/intro_installation.html#latest-releases-via-apt-ubuntu
 
-    If from_ppa is empty, you must ensure that the package is available
-    from a configured repository.
+    If ``from_ppa`` is ``False``, then Ansible will be installed from
+    Ubuntu's Universe repositories.
     """
     if from_ppa:
         charmhelpers.fetch.add_source(ppa_location)
         charmhelpers.fetch.apt_update(fatal=True)
     charmhelpers.fetch.apt_install('ansible')
     with open(ansible_hosts_path, 'w+') as hosts_file:
-        hosts_file.write('localhost ansible_connection=local')
+        hosts_file.write('localhost ansible_connection=local ansible_remote_tmp=/root/.ansible/tmp')
 
 
 def apply_playbook(playbook, tags=None, extra_vars=None):
+    """Run a playbook.
+
+    This helper runs a playbook with juju state variables as context,
+    therefore variables set in application config can be used directly.
+    List of tags (--tags) and dictionary with extra_vars (--extra-vars)
+    can be passed as additional parameters.
+
+    Read more about playbook `_variables`_ online.
+
+    .. _variables: https://docs.ansible.com/ansible/latest/user_guide/playbooks_variables.html
+
+    Example::
+
+        # Run ansible/playbook.yaml with tag install and pass extra
+        # variables var_a and var_b
+        apply_playbook(
+            playbook='ansible/playbook.yaml',
+            tags=['install'],
+            extra_vars={'var_a': 'val_a', 'var_b': 'val_b'}
+        )
+
+        # Run ansible/playbook.yaml with tag config and extra variable nested,
+        # which is passed as json and can be used as dictionary in playbook
+        apply_playbook(
+            playbook='ansible/playbook.yaml',
+            tags=['config'],
+            extra_vars={'nested': {'a': 'value1', 'b': 'value2'}}
+        )
+
+        # Custom config file can be passed within extra_vars
+        apply_playbook(
+            playbook='ansible/playbook.yaml',
+            extra_vars="@some_file.json"
+        )
+
+    """
     tags = tags or []
     tags = ",".join(tags)
     charmhelpers.contrib.templating.contexts.juju_state_to_yaml(
@@ -146,9 +199,13 @@
 
     # we want ansible's log output to be unbuffered
     env = os.environ.copy()
+    proxy_settings = charmhelpers.core.hookenv.env_proxy_settings()
+    if proxy_settings:
+        env.update(proxy_settings)
     env['PYTHONUNBUFFERED'] = "1"
     call = [
         'ansible-playbook',
+        '-vvv',
         '-c',
         'local',
         playbook,
@@ -156,9 +213,17 @@
     if tags:
         call.extend(['--tags', '{}'.format(tags)])
     if extra_vars:
-        extra = ["%s=%s" % (k, v) for k, v in extra_vars.items()]
-        call.extend(['--extra-vars', " ".join(extra)])
-    subprocess.check_call(call, env=env)
+        call.extend(['--extra-vars', json.dumps(extra_vars)])
+    try:
+        subprocess.check_output(call, env=env)
+    except subprocess.CalledProcessError as e:
+        err_msg = e.output.decode().strip()
+        charmhelpers.core.hookenv.log("Ansible playbook failed with "
+                                      "{}".format(e),
                                      level="ERROR")
+        charmhelpers.core.hookenv.log("Stdout: {}".format(err_msg),
+                                      level="ERROR")
+        raise e
 
 
 class AnsibleHooks(charmhelpers.core.hookenv.Hooks):
@@ -170,7 +235,7 @@
 
     Example::
 
-        hooks = AnsibleHooks(playbook_path='playbooks/my_machine_state.yaml')
+        hooks = AnsibleHooks(playbook_path='ansible/my_machine_state.yaml')
 
         # All the tasks within my_machine_state.yaml tagged with 'install'
         # will be run automatically after do_custom_work()
@@ -188,13 +253,12 @@
         # the hooks which are handled by ansible-only and they'll be registered
         # for you:
         # hooks = AnsibleHooks(
-        #     'playbooks/my_machine_state.yaml',
+        #     'ansible/my_machine_state.yaml',
         #     default_hooks=['config-changed', 'start', 'stop'])
 
         if __name__ == "__main__":
             # execute a hook based on the name the program is called by
             hooks.execute(sys.argv)
-
     """
 
     def __init__(self, playbook_path, default_hooks=None):
=== modified file 'hooks/charmhelpers/contrib/charmsupport/nrpe.py'
--- hooks/charmhelpers/contrib/charmsupport/nrpe.py 2016-12-20 14:35:00 +0000
+++ hooks/charmhelpers/contrib/charmsupport/nrpe.py 2023-06-30 13:58:42 +0000
@@ -1,4 +1,4 @@
-# Copyright 2014-2015 Canonical Limited.
+# Copyright 2012-2021 Canonical Limited.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,25 +13,29 @@
 # limitations under the License.
 
 """Compatibility with the nrpe-external-master charm"""
-# Copyright 2012 Canonical Ltd.
 #
 # Authors:
 #  Matthew Wedgwood <matthew.wedgwood@canonical.com>
 
-import subprocess
-import pwd
+import glob
 import grp
+import json
 import os
-import glob
-import shutil
+import pwd
 import re
 import shlex
+import shutil
+import subprocess
 import yaml
 
 from charmhelpers.core.hookenv import (
+    application_name,
     config,
+    ERROR,
+    hook_name,
     local_unit,
     log,
+    relation_get,
     relation_ids,
     relation_set,
     relations_of_type,
@@ -125,7 +129,7 @@
 
 
 class Check(object):
-    shortname_re = '[A-Za-z0-9-_]+$'
+    shortname_re = '[A-Za-z0-9-_.@]+$'
     service_template = ("""
 #---------------------------------------------------
 # This file is Juju managed
@@ -137,10 +141,11 @@
                         """{description}
     check_command                   check_nrpe!{command}
    servicegroups                   {nagios_servicegroup}
+{service_config_overrides}
 }}
 """)
 
-    def __init__(self, shortname, description, check_cmd):
+    def __init__(self, shortname, description, check_cmd, max_check_attempts=None):
         super(Check, self).__init__()
         # XXX: could be better to calculate this from the service name
         if not re.match(self.shortname_re, shortname):
@@ -153,6 +158,7 @@
         # The default is: illegal_object_name_chars=`~!$%^&*"|'<>?,()=
         self.description = description
         self.check_cmd = self._locate_cmd(check_cmd)
+        self.max_check_attempts = max_check_attempts
 
     def _get_check_filename(self):
         return os.path.join(NRPE.nrpe_confdir, '{}.cfg'.format(self.command))
@@ -171,7 +177,8 @@
             if os.path.exists(os.path.join(path, parts[0])):
                 command = os.path.join(path, parts[0])
                 if len(parts) > 1:
-                    command += " " + " ".join(parts[1:])
+                    safe_args = [shlex.quote(arg) for arg in parts[1:]]
+                    command += " " + " ".join(safe_args)
                 return command
         log('Check command not found: {}'.format(parts[0]))
         return ''
@@ -193,6 +200,13 @@
         nrpe_check_file = self._get_check_filename()
         with open(nrpe_check_file, 'w') as nrpe_check_config:
             nrpe_check_config.write("# check {}\n".format(self.shortname))
+            if nagios_servicegroups:
+                nrpe_check_config.write(
+                    "# The following header was added automatically by juju\n")
+                nrpe_check_config.write(
+                    "# Modifying it will affect nagios monitoring and alerting\n")
+                nrpe_check_config.write(
+                    "# servicegroups: {}\n".format(nagios_servicegroups))
             nrpe_check_config.write("command[{}]={}\n".format(
                 self.command, self.check_cmd))
 
@@ -207,12 +221,19 @@
                              nagios_servicegroups):
         self._remove_service_files()
 
+        if self.max_check_attempts:
+            service_config_overrides = '    max_check_attempts  {}'.format(
+                self.max_check_attempts
+            )  # Note indentation is here rather than in the template to avoid trailing spaces
+        else:
+            service_config_overrides = ''  # empty string to avoid printing 'None'
         templ_vars = {
             'nagios_hostname': hostname,
             'nagios_servicegroup': nagios_servicegroups,
             'description': self.description,
             'shortname': self.shortname,
             'command': self.command,
+            'service_config_overrides': service_config_overrides,
         }
         nrpe_service_text = Check.service_template.format(**templ_vars)
         nrpe_service_file = self._get_service_filename(hostname)
@@ -227,6 +248,7 @@
     nagios_logdir = '/var/log/nagios'
     nagios_exportdir = '/var/lib/nagios/export'
     nrpe_confdir = '/etc/nagios/nrpe.d'
+    homedir = '/var/lib/nagios'  # home dir provided by nagios-nrpe-server
 
     def __init__(self, hostname=None, primary=True):
         super(NRPE, self).__init__()
@@ -251,11 +273,28 @@
         relation = relation_ids('nrpe-external-master')
         if relation:
             log("Setting charm primary status {}".format(primary))
-            for rid in relation_ids('nrpe-external-master'):
+            for rid in relation:
                 relation_set(relation_id=rid, relation_settings={'primary': self.primary})
+        self.remove_check_queue = set()
+
+    @classmethod
+    def does_nrpe_conf_dir_exist(cls):
+        """Return True if th nrpe_confdif directory exists."""
+        return os.path.isdir(cls.nrpe_confdir)
 
     def add_check(self, *args, **kwargs):
+        shortname = None
+        if kwargs.get('shortname') is None:
+            if len(args) > 0:
+                shortname = args[0]
+        else:
+            shortname = kwargs['shortname']
+
         self.checks.append(Check(*args, **kwargs))
+        try:
+            self.remove_check_queue.remove(shortname)
+        except KeyError:
+            pass
 
     def remove_check(self, *args, **kwargs):
         if kwargs.get('shortname') is None:
@@ -272,12 +311,13 @@
 
         check = Check(*args, **kwargs)
         check.remove(self.hostname)
+        self.remove_check_queue.add(kwargs['shortname'])
 
     def write(self):
         try:
             nagios_uid = pwd.getpwnam('nagios').pw_uid
             nagios_gid = grp.getgrnam('nagios').gr_gid
-        except:
+        except Exception:
             log("Nagios user not set up, nrpe checks not updated")
             return
 
@@ -287,19 +327,50 @@
 
         nrpe_monitors = {}
         monitors = {"monitors": {"remote": {"nrpe": nrpe_monitors}}}
+
+        # check that the charm can write to the conf dir. If not, then nagios
+        # probably isn't installed, and we can defer.
+        if not self.does_nrpe_conf_dir_exist():
+            return
+
         for nrpecheck in self.checks:
             nrpecheck.write(self.nagios_context, self.hostname,
                             self.nagios_servicegroups)
             nrpe_monitors[nrpecheck.shortname] = {
                 "command": nrpecheck.command,
             }
+            # If we were passed max_check_attempts, add that to the relation data
+            if nrpecheck.max_check_attempts is not None:
+                nrpe_monitors[nrpecheck.shortname]['max_check_attempts'] = nrpecheck.max_check_attempts
 
-        service('restart', 'nagios-nrpe-server')
+        # update-status hooks are configured to firing every 5 minutes by
+        # default. When nagios-nrpe-server is restarted, the nagios server
+        # reports checks failing causing unnecessary alerts. Let's not restart
+        # on update-status hooks.
+        if not hook_name() == 'update-status':
+            service('restart', 'nagios-nrpe-server')
 
         monitor_ids = relation_ids("local-monitors") + \
             relation_ids("nrpe-external-master")
         for rid in monitor_ids:
-            relation_set(relation_id=rid, monitors=yaml.dump(monitors))
+            reldata = relation_get(unit=local_unit(), rid=rid)
+            if 'monitors' in reldata:
+                # update the existing set of monitors with the new data
+                old_monitors = yaml.safe_load(reldata['monitors'])
+                old_nrpe_monitors = old_monitors['monitors']['remote']['nrpe']
+                # remove keys that are in the remove_check_queue
+                old_nrpe_monitors = {k: v for k, v in old_nrpe_monitors.items()
+                                     if k not in self.remove_check_queue}
+                # update/add nrpe_monitors
+                old_nrpe_monitors.update(nrpe_monitors)
+                old_monitors['monitors']['remote']['nrpe'] = old_nrpe_monitors
+                # write back to the relation
+                relation_set(relation_id=rid, monitors=yaml.dump(old_monitors))
+            else:
+                # write a brand new set of monitors, as no existing ones.
+                relation_set(relation_id=rid, monitors=yaml.dump(monitors))
+
+        self.remove_check_queue.clear()
 
 
 def get_nagios_hostcontext(relation_name='nrpe-external-master'):
@@ -338,14 +409,29 @@
     return unit
 
 
-def add_init_service_checks(nrpe, services, unit_name):
+def add_init_service_checks(nrpe, services, unit_name, immediate_check=True):
     """
     Add checks for each service in list
 
     :param NRPE nrpe: NRPE object to add check to
     :param list services: List of services to check
     :param str unit_name: Unit name to use in check description
+    :param bool immediate_check: For sysv init, run the service check immediately
     """
+    # check_haproxy is redundant in the presence of check_crm. See LP Bug#1880601 for details.
+    # just remove check_haproxy if haproxy is added as a lsb resource in hacluster.
+    for rid in relation_ids("ha"):
+        ha_resources = relation_get("json_resources", rid=rid, unit=local_unit())
+        if ha_resources:
+            try:
+                ha_resources_parsed = json.loads(ha_resources)
+            except ValueError as e:
+                log('Could not parse JSON from ha resources. {}'.format(e), level=ERROR)
+                raise
+            if "lsb:haproxy" in ha_resources_parsed.values():
+                if "haproxy" in services:
+                    log("removed check_haproxy. This service will be monitored by check_crm")
+                    services.remove("haproxy")
     for svc in services:
         # Don't add a check for these services from neutron-gateway
        if svc in ['ext-port', 'os-charm-phy-nic-mtu']:
@@ -354,7 +440,7 @@
         upstart_init = '/etc/init/%s.conf' % svc
         sysv_init = '/etc/init.d/%s' % svc
 
-        if host.init_is_systemd():
+        if host.init_is_systemd(service_name=svc):
             nrpe.add_check(
                 shortname=svc,
                 description='process check {%s}' % unit_name,
@@ -368,33 +454,53 @@
             )
         elif os.path.exists(sysv_init):
             cronpath = '/etc/cron.d/nagios-service-check-%s' % svc
-            cron_file = ('*/5 * * * * root '
-                         '/usr/local/lib/nagios/plugins/check_exit_status.pl '
-                         '-s /etc/init.d/%s status > '
-                         '/var/lib/nagios/service-check-%s.txt\n' % (svc,
-                                                                     svc)
-                         )
+            checkpath = '%s/service-check-%s.txt' % (nrpe.homedir, svc)
+            croncmd = (
+                '/usr/local/lib/nagios/plugins/check_exit_status.pl '
+                '-e -s /etc/init.d/%s status' % svc
+            )
+            cron_file = '*/5 * * * * root %s > %s\n' % (croncmd, checkpath)
             f = open(cronpath, 'w')
             f.write(cron_file)
             f.close()
             nrpe.add_check(
                 shortname=svc,
-                description='process check {%s}' % unit_name,
-                check_cmd='check_status_file.py -f '
-                          '/var/lib/nagios/service-check-%s.txt' % svc,
+                description='service check {%s}' % unit_name,
+                check_cmd='check_status_file.py -f %s' % checkpath,
             )
-
-
-def copy_nrpe_checks():
+            # if /var/lib/nagios doesn't exist open(checkpath, 'w') will fail
+            # (LP: #1670223).
+            if immediate_check and os.path.isdir(nrpe.homedir):
+                f = open(checkpath, 'w')
+                subprocess.call(
+                    croncmd.split(),
+                    stdout=f,
+                    stderr=subprocess.STDOUT
+                )
+                f.close()
+                os.chmod(checkpath, 0o644)
+
+
+def copy_nrpe_checks(nrpe_files_dir=None):
     """
     Copy the nrpe checks into place
 
     """
     NAGIOS_PLUGINS = '/usr/local/lib/nagios/plugins'
-    nrpe_files_dir = os.path.join(os.getenv('CHARM_DIR'), 'hooks',
-                                  'charmhelpers', 'contrib', 'openstack',
-                                  'files')
-
+    if nrpe_files_dir is None:
+        # determine if "charmhelpers" is in CHARMDIR or CHARMDIR/hooks
+        for segment in ['.', 'hooks']:
+            nrpe_files_dir = os.path.abspath(os.path.join(
+                os.getenv('CHARM_DIR'),
+                segment,
+                'charmhelpers',
+                'contrib',
+                'openstack',
+                'files'))
+            if os.path.isdir(nrpe_files_dir):
+                break
+        else:
+            raise RuntimeError("Couldn't find charmhelpers directory")
     if not os.path.exists(NAGIOS_PLUGINS):
         os.makedirs(NAGIOS_PLUGINS)
     for fname in glob.glob(os.path.join(nrpe_files_dir, "check_*")):
@@ -418,3 +524,53 @@
         shortname='haproxy_queue',
         description='Check HAProxy queue depth {%s}' % unit_name,
         check_cmd='check_haproxy_queue_depth.sh')
+
+
+def remove_deprecated_check(nrpe, deprecated_services):
+    """
+    Remove checks for deprecated services in list
+
+    :param nrpe: NRPE object to remove check from
+    :type nrpe: NRPE
+    :param deprecated_services: List of deprecated services that are removed
+    :type deprecated_services: list
+    """
+    for dep_svc in deprecated_services:
+        log('Deprecated service: {}'.format(dep_svc))
+        nrpe.remove_check(shortname=dep_svc)
+
+
+def add_deferred_restarts_check(nrpe):
+    """
+    Add NRPE check for services with deferred restarts.
+
+    :param NRPE nrpe: NRPE object to add check to
+    """
+    unit_name = local_unit().replace('/', '-')
+    shortname = unit_name + '_deferred_restarts'
+    check_cmd = 'check_deferred_restarts.py --application {}'.format(
+        application_name())
+
+    log('Adding deferred restarts nrpe check: {}'.format(shortname))
+    nrpe.add_check(
+        shortname=shortname,
+        description='Check deferred service restarts {}'.format(unit_name),
+        check_cmd=check_cmd)
+
+
+def remove_deferred_restarts_check(nrpe):
+    """
+    Remove NRPE check for services with deferred service restarts.
+
+    :param NRPE nrpe: NRPE object to remove check from
+    """
+    unit_name = local_unit().replace('/', '-')
+    shortname = unit_name + '_deferred_restarts'
+    check_cmd = 'check_deferred_restarts.py --application {}'.format(
+        application_name())
+
+    log('Removing deferred restarts nrpe check: {}'.format(shortname))
+    nrpe.remove_check(
+        shortname=shortname,
+        description='Check deferred service restarts {}'.format(unit_name),
+        check_cmd=check_cmd)
=== modified file 'hooks/charmhelpers/contrib/templating/contexts.py'
--- hooks/charmhelpers/contrib/templating/contexts.py 2016-12-20 14:35:00 +0000
+++ hooks/charmhelpers/contrib/templating/contexts.py 2023-06-30 13:58:42 +0000
@@ -20,8 +20,6 @@
 import os
 import yaml
 
-import six
-
 import charmhelpers.core.hookenv
 
 
@@ -93,7 +91,8 @@
     By default, hyphens are allowed in keys as this is supported
     by yaml, but for tools like ansible, hyphens are not valid [1].
 
-    [1] http://www.ansibleworks.com/docs/playbooks_variables.html#what-makes-a-valid-variable-name
+    [1] http://www.ansibleworks.com/docs/playbooks_variables.html
+    #what-makes-a-valid-variable-name
     """
     config = charmhelpers.core.hookenv.config()
 
@@ -101,16 +100,17 @@
     # file resources etc.
     config['charm_dir'] = charm_dir
     config['local_unit'] = charmhelpers.core.hookenv.local_unit()
-    config['unit_private_address'] = charmhelpers.core.hookenv.unit_private_ip()
+    config['unit_private_address'] = (
+        charmhelpers.core.hookenv.unit_private_ip())
     config['unit_public_address'] = charmhelpers.core.hookenv.unit_get(
         'public-address'
     )
 
     # Don't use non-standard tags for unicode which will not
     # work when salt uses yaml.load_safe.
-    yaml.add_representer(six.text_type,
+    yaml.add_representer(str,
                          lambda dumper, value: dumper.represent_scalar(
-                             six.u('tag:yaml.org,2002:str'), value))
+                             'tag:yaml.org,2002:str', value))
 
     yaml_dir = os.path.dirname(yaml_path)
     if not os.path.exists(yaml_dir):
@@ -118,7 +118,7 @@
 
     if os.path.exists(yaml_path):
         with open(yaml_path, "r") as existing_vars_file:
-            existing_vars = yaml.load(existing_vars_file.read())
+            existing_vars = yaml.safe_load(existing_vars_file.read())
     else:
         with open(yaml_path, "w+"):
             pass
=== modified file 'hooks/charmhelpers/core/decorators.py'
--- hooks/charmhelpers/core/decorators.py 2016-12-20 14:35:00 +0000
+++ hooks/charmhelpers/core/decorators.py 2023-06-30 13:58:42 +0000
@@ -53,3 +53,41 @@
         return _retry_on_exception_inner_2
 
     return _retry_on_exception_inner_1
+
+
+def retry_on_predicate(num_retries, predicate_fun, base_delay=0):
+    """Retry based on return value
+
+    The return value of the decorated function is passed to the given predicate_fun. If the
+    result of the predicate is False, retry the decorated function up to num_retries times
+
+    An exponential backoff up to base_delay^num_retries seconds can be introduced by setting
+    base_delay to a nonzero value. The default is to run with a zero (i.e. no) delay
+
+    :param num_retries: Max. number of retries to perform
+    :type num_retries: int
+    :param predicate_fun: Predicate function to determine if a retry is necessary
+    :type predicate_fun: callable
+    :param base_delay: Starting value in seconds for exponential delay, defaults to 0 (no delay)
+    :type base_delay: float
+    """
+    def _retry_on_pred_inner_1(f):
+        def _retry_on_pred_inner_2(*args, **kwargs):
+            retries = num_retries
+            multiplier = 1
+            delay = base_delay
+            while True:
+                result = f(*args, **kwargs)
+                if predicate_fun(result) or retries <= 0:
+                    return result
+                delay *= multiplier
+                multiplier += 1
+                log("Result {}, retrying '{}' {} more times (delay={})".format(
+                    result, f.__name__, retries, delay), level=INFO)
+                retries -= 1
+                if delay:
+                    time.sleep(delay)
+
+        return _retry_on_pred_inner_2
+
+    return _retry_on_pred_inner_1
=== modified file 'hooks/charmhelpers/core/hookenv.py'
--- hooks/charmhelpers/core/hookenv.py 2016-12-20 14:35:00 +0000
+++ hooks/charmhelpers/core/hookenv.py 2023-06-30 13:58:42 +0000
@@ -1,4 +1,4 @@
1# Copyright 2014-2015 Canonical Limited.1# Copyright 2013-2021 Canonical Limited.
2#2#
3# Licensed under the Apache License, Version 2.0 (the "License");3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.4# you may not use this file except in compliance with the License.
@@ -13,37 +13,50 @@
13# limitations under the License.13# limitations under the License.
1414
15"Interactions with the Juju environment"15"Interactions with the Juju environment"
16# Copyright 2013 Canonical Ltd.
17#16#
18# Authors:17# Authors:
19# Charm Helpers Developers <juju@lists.ubuntu.com>18# Charm Helpers Developers <juju@lists.ubuntu.com>
2019
21from __future__ import print_function
22import copy20import copy
23from distutils.version import LooseVersion21from distutils.version import LooseVersion
22from enum import Enum
24from functools import wraps23from functools import wraps
24from collections import namedtuple, UserDict
25import glob25import glob
26import os26import os
27import json27import json
28import yaml28import yaml
29import re
29import subprocess30import subprocess
30import sys31import sys
31import errno32import errno
32import tempfile33import tempfile
33from subprocess import CalledProcessError34from subprocess import CalledProcessError
3435
35import six36from charmhelpers import deprecate
36if not six.PY3:37
37 from UserDict import UserDict
38else:
39 from collections import UserDict
4038
41CRITICAL = "CRITICAL"39CRITICAL = "CRITICAL"
42ERROR = "ERROR"40ERROR = "ERROR"
43WARNING = "WARNING"41WARNING = "WARNING"
44INFO = "INFO"42INFO = "INFO"
45DEBUG = "DEBUG"43DEBUG = "DEBUG"
44TRACE = "TRACE"
46MARKER = object()45MARKER = object()
46SH_MAX_ARG = 131071
47
48
49RANGE_WARNING = ('Passing NO_PROXY string that includes a cidr. '
50 'This may not be compatible with software you are '
51 'running in your shell.')
52
53
54class WORKLOAD_STATES(Enum):
55 ACTIVE = 'active'
56 BLOCKED = 'blocked'
57 MAINTENANCE = 'maintenance'
58 WAITING = 'waiting'
59
4760
48cache = {}61cache = {}
4962
@@ -64,7 +77,7 @@
64 @wraps(func)77 @wraps(func)
65 def wrapper(*args, **kwargs):78 def wrapper(*args, **kwargs):
66 global cache79 global cache
67 key = str((func, args, kwargs))80 key = json.dumps((func, args, kwargs), sort_keys=True, default=str)
68 try:81 try:
69 return cache[key]82 return cache[key]
70 except KeyError:83 except KeyError:
@@ -92,9 +105,9 @@
92 command = ['juju-log']105 command = ['juju-log']
93 if level:106 if level:
94 command += ['-l', level]107 command += ['-l', level]
95 if not isinstance(message, six.string_types):108 if not isinstance(message, str):
96 message = repr(message)109 message = repr(message)
97 command += [message]110 command += [message[:SH_MAX_ARG]]
98 # Missing juju-log should not cause failures in unit tests111 # Missing juju-log should not cause failures in unit tests
99 # Send log output to stderr112 # Send log output to stderr
100 try:113 try:
@@ -109,6 +122,24 @@
109 raise122 raise
110123
111124
125def function_log(message):
126 """Write a function progress message"""
127 command = ['function-log']
128 if not isinstance(message, str):
129 message = repr(message)
130 command += [message[:SH_MAX_ARG]]
131 # Missing function-log should not cause failures in unit tests
132 # Send function_log output to stderr
133 try:
134 subprocess.call(command)
135 except OSError as e:
136 if e.errno == errno.ENOENT:
137 message = "function-log: {}".format(message)
138 print(message, file=sys.stderr)
139 else:
140 raise
141
142
112class Serializable(UserDict):143class Serializable(UserDict):
113 """Wrapper, an object that can be serialized to yaml or json"""144 """Wrapper, an object that can be serialized to yaml or json"""
114145
@@ -187,6 +218,17 @@
187 raise ValueError('Must specify neither or both of relation_name and service_or_unit')218 raise ValueError('Must specify neither or both of relation_name and service_or_unit')
188219
189220
221def departing_unit():
222 """The departing unit for the current relation hook.
223
224 Available since juju 2.8.
225
226 :returns: the departing unit, or None if the information isn't available.
227 :rtype: Optional[str]
228 """
229 return os.environ.get('JUJU_DEPARTING_UNIT', None)
230
231
190def local_unit():232def local_unit():
191 """Local unit ID"""233 """Local unit ID"""
192 return os.environ['JUJU_UNIT_NAME']234 return os.environ['JUJU_UNIT_NAME']
@@ -197,9 +239,56 @@
197 return os.environ.get('JUJU_REMOTE_UNIT', None)239 return os.environ.get('JUJU_REMOTE_UNIT', None)
198240
199241
242def application_name():
243 """
244 The name of the deployed application this unit belongs to.
245 """
246 return local_unit().split('/')[0]
247
248
200def service_name():249def service_name():
201 """The name service group this unit belongs to"""250 """
202 return local_unit().split('/')[0]251 .. deprecated:: 0.19.1
252 Alias for :func:`application_name`.
253 """
254 return application_name()
255
256
257def model_name():
258 """
259 Name of the model that this unit is deployed in.
260 """
261 return os.environ['JUJU_MODEL_NAME']
262
263
264def model_uuid():
265 """
266 UUID of the model that this unit is deployed in.
267 """
268 return os.environ['JUJU_MODEL_UUID']
269
270
271def principal_unit():
272 """Returns the principal unit of this unit, otherwise None"""
273 # Juju 2.2 and above provides JUJU_PRINCIPAL_UNIT
274 principal_unit = os.environ.get('JUJU_PRINCIPAL_UNIT', None)
275 # If it's empty, then this unit is the principal
276 if principal_unit == '':
277 return os.environ['JUJU_UNIT_NAME']
278 elif principal_unit is not None:
279 return principal_unit
280 # For Juju 2.1 and below, let's try work out the principle unit by
281 # the various charms' metadata.yaml.
282 for reltype in relation_types():
283 for rid in relation_ids(reltype):
284 for unit in related_units(rid):
285 md = _metadata_unit(unit)
286 if not md:
287 continue
288 subordinate = md.pop('subordinate', None)
289 if not subordinate:
290 return unit
291 return None
203292
204293
205@cached294@cached
@@ -263,7 +352,7 @@
263 self.implicit_save = True352 self.implicit_save = True
264 self._prev_dict = None353 self._prev_dict = None
265 self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME)354 self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME)
266 if os.path.exists(self.path):355 if os.path.exists(self.path) and os.stat(self.path).st_size:
267 self.load_previous()356 self.load_previous()
268 atexit(self._implicit_save)357 atexit(self._implicit_save)
269358
@@ -283,7 +372,13 @@
283 """372 """
284 self.path = path or self.path373 self.path = path or self.path
285 with open(self.path) as f:374 with open(self.path) as f:
286 self._prev_dict = json.load(f)375 try:
376 self._prev_dict = json.load(f)
377 except ValueError as e:
378 log('Found but was unable to parse previous config data, '
379 'ignoring which will report all values as changed - {}'
380 .format(str(e)), level=ERROR)
381 return
287 for k, v in copy.deepcopy(self._prev_dict).items():382 for k, v in copy.deepcopy(self._prev_dict).items():
288 if k not in self:383 if k not in self:
289 self[k] = v384 self[k] = v
@@ -319,6 +414,7 @@
319414
320 """415 """
321 with open(self.path, 'w') as f:416 with open(self.path, 'w') as f:
417 os.fchmod(f.fileno(), 0o600)
322 json.dump(self, f)418 json.dump(self, f)
323419
324 def _implicit_save(self):420 def _implicit_save(self):
@@ -326,35 +422,52 @@
326 self.save()422 self.save()
327423
328424
329@cached425_cache_config = None
426
427
330def config(scope=None):428def config(scope=None):
331 """Juju charm configuration"""429 """
332 config_cmd_line = ['config-get']430 Get the juju charm configuration (scope==None) or individual key,
333 if scope is not None:431 (scope=str). The returned value is a Python data structure loaded as
334 config_cmd_line.append(scope)432 JSON from the Juju config command.
335 else:433
336 config_cmd_line.append('--all')434 :param scope: If set, return the value for the specified key.
337 config_cmd_line.append('--format=json')435 :type scope: Optional[str]
436 :returns: Either the whole config as a Config, or a key from it.
437 :rtype: Any
438 """
439 global _cache_config
440 config_cmd_line = ['config-get', '--all', '--format=json']
338 try:441 try:
339 config_data = json.loads(442 if _cache_config is None:
340 subprocess.check_output(config_cmd_line).decode('UTF-8'))443 config_data = json.loads(
444 subprocess.check_output(config_cmd_line).decode('UTF-8'))
445 _cache_config = Config(config_data)
341 if scope is not None:446 if scope is not None:
342 return config_data447 return _cache_config.get(scope)
343 return Config(config_data)448 return _cache_config
344 except ValueError:449 except (json.decoder.JSONDecodeError, UnicodeDecodeError) as e:
450 log('Unable to parse output from config-get: config_cmd_line="{}" '
451 'message="{}"'
452 .format(config_cmd_line, str(e)), level=ERROR)
345 return None453 return None
346454
347455
348@cached456@cached
349def relation_get(attribute=None, unit=None, rid=None):457def relation_get(attribute=None, unit=None, rid=None, app=None):
350 """Get relation information"""458 """Get relation information"""
351 _args = ['relation-get', '--format=json']459 _args = ['relation-get', '--format=json']
460 if app is not None:
461 if unit is not None:
462 raise ValueError("Cannot use both 'unit' and 'app'")
463 _args.append('--app')
352 if rid:464 if rid:
353 _args.append('-r')465 _args.append('-r')
354 _args.append(rid)466 _args.append(rid)
355 _args.append(attribute or '-')467 _args.append(attribute or '-')
356 if unit:468 # unit or application name
357 _args.append(unit)469 if unit or app:
470 _args.append(unit or app)
358 try:471 try:
359 return json.loads(subprocess.check_output(_args).decode('UTF-8'))472 return json.loads(subprocess.check_output(_args).decode('UTF-8'))
360 except ValueError:473 except ValueError:
@@ -365,12 +478,28 @@
365 raise478 raise
366479
367480
368def relation_set(relation_id=None, relation_settings=None, **kwargs):481@cached
482def _relation_set_accepts_file():
483 """Return True if the juju relation-set command accepts a file.
484
485 Cache the result as it won't change during the execution of a hook, and
486 thus we can make relation_set() more efficient by only checking for the
487 first relation_set() call.
488
489 :returns: True if relation_set accepts a file.
490 :rtype: bool
491 :raises: subprocess.CalledProcessError if the check fails.
492 """
493 return "--file" in subprocess.check_output(
494 ["relation-set", "--help"], universal_newlines=True)
495
496
497def relation_set(relation_id=None, relation_settings=None, app=False, **kwargs):
369 """Set relation information for the current unit"""498 """Set relation information for the current unit"""
370 relation_settings = relation_settings if relation_settings else {}499 relation_settings = relation_settings if relation_settings else {}
371 relation_cmd_line = ['relation-set']500 relation_cmd_line = ['relation-set']
372 accepts_file = "--file" in subprocess.check_output(501 if app:
373 relation_cmd_line + ["--help"], universal_newlines=True)502 relation_cmd_line.append('--app')
374 if relation_id is not None:503 if relation_id is not None:
375 relation_cmd_line.extend(('-r', relation_id))504 relation_cmd_line.extend(('-r', relation_id))
376 settings = relation_settings.copy()505 settings = relation_settings.copy()
@@ -380,7 +509,7 @@
380 # sites pass in things like dicts or numbers.509 # sites pass in things like dicts or numbers.
381 if value is not None:510 if value is not None:
382 settings[key] = "{}".format(value)511 settings[key] = "{}".format(value)
383 if accepts_file:512 if _relation_set_accepts_file():
384 # --file was introduced in Juju 1.23.2. Use it by default if513 # --file was introduced in Juju 1.23.2. Use it by default if
385 # available, since otherwise we'll break if the relation data is514 # available, since otherwise we'll break if the relation data is
386 # too big. Ideally we should tell relation-set to read the data from515 # too big. Ideally we should tell relation-set to read the data from
@@ -435,9 +564,70 @@
435 subprocess.check_output(units_cmd_line).decode('UTF-8')) or []564 subprocess.check_output(units_cmd_line).decode('UTF-8')) or []
436565
437566
567def expected_peer_units():
568 """Get a generator for units we expect to join peer relation based on
569 goal-state.
570
571 The local unit is excluded from the result to make it easy to gauge
572 completion of all peers joining the relation with existing hook tools.
573
574 Example usage:
575 log('peer {} of {} joined peer relation'
576 .format(len(related_units()),
577 len(list(expected_peer_units()))))
578
579 This function will raise NotImplementedError if used with juju versions
580 without goal-state support.
581
582 :returns: iterator
583 :rtype: types.GeneratorType
584 :raises: NotImplementedError
585 """
586 if not has_juju_version("2.4.0"):
587 # goal-state first appeared in 2.4.0.
588 raise NotImplementedError("goal-state")
589 _goal_state = goal_state()
590 return (key for key in _goal_state['units']
591 if '/' in key and key != local_unit())
592
593
594def expected_related_units(reltype=None):
595 """Get a generator for units we expect to join relation based on
596 goal-state.
597
598 Note that you can not use this function for the peer relation, take a look
599 at expected_peer_units() for that.
600
601 This function will raise KeyError if you request information for a
602 relation type for which juju goal-state does not have information. It will
603 raise NotImplementedError if used with juju versions without goal-state
604 support.
605
606 Example usage:
607 log('participant {} of {} joined relation {}'
608 .format(len(related_units()),
609 len(list(expected_related_units())),
610 relation_type()))
611
612 :param reltype: Relation type to list data for, default is to list data for
613 the relation type we are currently executing a hook for.
614 :type reltype: str
615 :returns: iterator
616 :rtype: types.GeneratorType
617 :raises: KeyError, NotImplementedError
618 """
619 if not has_juju_version("2.4.4"):
620 # goal-state existed in 2.4.0, but did not list individual units to
621 # join a relation in 2.4.1 through 2.4.3. (LP: #1794739)
622 raise NotImplementedError("goal-state relation unit count")
623 reltype = reltype or relation_type()
624 _goal_state = goal_state()
625 return (key for key in _goal_state['relations'][reltype] if '/' in key)
626
627
438@cached628@cached
439def relation_for_unit(unit=None, rid=None):629def relation_for_unit(unit=None, rid=None):
440 """Get the json represenation of a unit's relation"""630 """Get the json representation of a unit's relation"""
441 unit = unit or remote_unit()631 unit = unit or remote_unit()
442 relation = relation_get(unit=unit, rid=rid)632 relation = relation_get(unit=unit, rid=rid)
443 for key in relation:633 for key in relation:
@@ -478,6 +668,24 @@
478 return yaml.safe_load(md)668 return yaml.safe_load(md)
479669
480670
671def _metadata_unit(unit):
672 """Given the name of a unit (e.g. apache2/0), get the unit charm's
673 metadata.yaml. Very similar to metadata() but allows us to inspect
674 other units. Unit needs to be co-located, such as a subordinate or
675 principal/primary.
676
677 :returns: metadata.yaml as a python object.
678
679 """
680 basedir = os.sep.join(charm_dir().split(os.sep)[:-2])
681 unitdir = 'unit-{}'.format(unit.replace(os.sep, '-'))
682 joineddir = os.path.join(basedir, unitdir, 'charm', 'metadata.yaml')
683 if not os.path.exists(joineddir):
684 return None
685 with open(joineddir) as md:
686 return yaml.safe_load(md)
687
688
481@cached689@cached
482def relation_types():690def relation_types():
483 """Get a list of relation types supported by this charm"""691 """Get a list of relation types supported by this charm"""
@@ -602,18 +810,31 @@
602 return False810 return False
603811
604812
813def _port_op(op_name, port, protocol="TCP"):
814 """Open or close a service network port"""
815 _args = [op_name]
816 icmp = protocol.upper() == "ICMP"
817 if icmp:
818 _args.append(protocol)
819 else:
820 _args.append('{}/{}'.format(port, protocol))
821 try:
822 subprocess.check_call(_args)
823 except subprocess.CalledProcessError:
824 # Older Juju pre 2.3 doesn't support ICMP
825 # so treat it as a no-op if it fails.
826 if not icmp:
827 raise
828
829
605def open_port(port, protocol="TCP"):830def open_port(port, protocol="TCP"):
606 """Open a service network port"""831 """Open a service network port"""
607 _args = ['open-port']832 _port_op('open-port', port, protocol)
608 _args.append('{}/{}'.format(port, protocol))
609 subprocess.check_call(_args)
610833
611834
612def close_port(port, protocol="TCP"):835def close_port(port, protocol="TCP"):
613 """Close a service network port"""836 """Close a service network port"""
614 _args = ['close-port']837 _port_op('close-port', port, protocol)
615 _args.append('{}/{}'.format(port, protocol))
616 subprocess.check_call(_args)
617838
618839
619def open_ports(start, end, protocol="TCP"):840def open_ports(start, end, protocol="TCP"):
@@ -630,6 +851,17 @@
630 subprocess.check_call(_args)851 subprocess.check_call(_args)
631852
632853
854def opened_ports():
855 """Get the opened ports
856
857 *Note that this will only show ports opened in a previous hook*
858
859 :returns: Opened ports as a list of strings: ``['8080/tcp', '8081-8083/tcp']``
860 """
861 _args = ['opened-ports', '--format=json']
862 return json.loads(subprocess.check_output(_args).decode('UTF-8'))
863
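For illustration only (the helper names below are hypothetical, not part of the diff): opened_ports() hands back the hook tool's JSON list unmodified, so a charm wanting numeric ranges has to split entries such as '8080/tcp' or '8081-8083/tcp' itself. A minimal sketch:

    def _parse_port_entry(entry):
        # '8080/tcp' -> (8080, 8080, 'tcp'); '8081-8083/tcp' -> (8081, 8083, 'tcp')
        ports, protocol = entry.split('/')
        if '-' in ports:
            start, end = (int(p) for p in ports.split('-'))
        else:
            start = end = int(ports)
        return start, end, protocol

    already_open = [_parse_port_entry(e) for e in opened_ports()]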
864
633@cached865@cached
634def unit_get(attribute):866def unit_get(attribute):
635 """Get the unit ID for the remote unit"""867 """Get the unit ID for the remote unit"""
@@ -751,14 +983,29 @@
751 return wrapper983 return wrapper
752984
753985
986class NoNetworkBinding(Exception):
987 pass
988
989
754def charm_dir():990def charm_dir():
755 """Return the root directory of the current charm"""991 """Return the root directory of the current charm"""
992 d = os.environ.get('JUJU_CHARM_DIR')
993 if d is not None:
994 return d
756 return os.environ.get('CHARM_DIR')995 return os.environ.get('CHARM_DIR')
757996
758997
998def cmd_exists(cmd):
999 """Return True if the specified cmd exists in the path"""
1000 return any(
1001 os.access(os.path.join(path, cmd), os.X_OK)
1002 for path in os.environ["PATH"].split(os.pathsep)
1003 )
1004
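A small sketch of the pattern cmd_exists() enables, the same fallback the function_* wrappers below rely on (the tool names are real Juju hook tools; the surrounding line is illustrative):

    # Prefer the newer hook tool when it is on PATH, otherwise fall back.
    cmd = ['function-get'] if cmd_exists('function-get') else ['action-get']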
1005
759@cached1006@cached
760def action_get(key=None):1007def action_get(key=None):
761 """Gets the value of an action parameter, or all key/value param pairs"""1008 """Gets the value of an action parameter, or all key/value param pairs."""
762 cmd = ['action-get']1009 cmd = ['action-get']
763 if key is not None:1010 if key is not None:
764 cmd.append(key)1011 cmd.append(key)
@@ -767,52 +1014,132 @@
767 return action_data1014 return action_data
7681015
7691016
1017@cached
1018@deprecate("moved to action_get()", log=log)
1019def function_get(key=None):
1020 """
1021 .. deprecated::
1022 Gets the value of an action parameter, or all key/value param pairs.
1023 """
1024 cmd = ['function-get']
1025 # Fallback for older charms.
1026 if not cmd_exists('function-get'):
1027 cmd = ['action-get']
1028
1029 if key is not None:
1030 cmd.append(key)
1031 cmd.append('--format=json')
1032 function_data = json.loads(subprocess.check_output(cmd).decode('UTF-8'))
1033 return function_data
1034
1035
770def action_set(values):1036def action_set(values):
771 """Sets the values to be returned after the action finishes"""1037 """Sets the values to be returned after the action finishes."""
772 cmd = ['action-set']1038 cmd = ['action-set']
773 for k, v in list(values.items()):1039 for k, v in list(values.items()):
774 cmd.append('{}={}'.format(k, v))1040 cmd.append('{}={}'.format(k, v))
775 subprocess.check_call(cmd)1041 subprocess.check_call(cmd)
7761042
7771043
1044@deprecate("moved to action_set()", log=log)
1045def function_set(values):
1046 """
1047 .. deprecated::
1048 Sets the values to be returned after the function finishes.
1049 """
1050 cmd = ['function-set']
1051 # Fallback for older charms.
1052 if not cmd_exists('function-get'):
1053 cmd = ['action-set']
1054
1055 for k, v in list(values.items()):
1056 cmd.append('{}={}'.format(k, v))
1057 subprocess.check_call(cmd)
1058
1059
778def action_fail(message):1060def action_fail(message):
779 """Sets the action status to failed and sets the error message.1061 """
1062 Sets the action status to failed and sets the error message.
7801063
781 The results set by action_set are preserved."""1064 The results set by action_set are preserved.
1065 """
782 subprocess.check_call(['action-fail', message])1066 subprocess.check_call(['action-fail', message])
7831067
7841068
1069@deprecate("moved to action_fail()", log=log)
1070def function_fail(message):
1071 """
1072 .. deprecated::
1073 Sets the function status to failed and sets the error message.
1074
1075 The results set by function_set are preserved.
1076 """
1077 cmd = ['function-fail']
1078 # Fallback for older charms.
1079 if not cmd_exists('function-fail'):
1080 cmd = ['action-fail']
1081 cmd.append(message)
1082
1083 subprocess.check_call(cmd)
1084
1085
785def action_name():1086def action_name():
786 """Get the name of the currently executing action."""1087 """Get the name of the currently executing action."""
787 return os.environ.get('JUJU_ACTION_NAME')1088 return os.environ.get('JUJU_ACTION_NAME')
7881089
7891090
1091def function_name():
1092 """Get the name of the currently executing function."""
1093 return os.environ.get('JUJU_FUNCTION_NAME') or action_name()
1094
1095
790def action_uuid():1096def action_uuid():
791 """Get the UUID of the currently executing action."""1097 """Get the UUID of the currently executing action."""
792 return os.environ.get('JUJU_ACTION_UUID')1098 return os.environ.get('JUJU_ACTION_UUID')
7931099
7941100
1101def function_id():
1102 """Get the ID of the currently executing function."""
1103 return os.environ.get('JUJU_FUNCTION_ID') or action_uuid()
1104
1105
795def action_tag():1106def action_tag():
796 """Get the tag for the currently executing action."""1107 """Get the tag for the currently executing action."""
797 return os.environ.get('JUJU_ACTION_TAG')1108 return os.environ.get('JUJU_ACTION_TAG')
7981109
7991110
800def status_set(workload_state, message):1111def function_tag():
1112 """Get the tag for the currently executing function."""
1113 return os.environ.get('JUJU_FUNCTION_TAG') or action_tag()
1114
1115
1116def status_set(workload_state, message, application=False):
801 """Set the workload state with a message1117 """Set the workload state with a message
8021118
803 Use status-set to set the workload state with a message which is visible1119 Use status-set to set the workload state with a message which is visible
804 to the user via juju status. If the status-set command is not found then1120 to the user via juju status. If the status-set command is not found then
805 assume this is juju < 1.23 and juju-log the message unstead.1121 assume this is juju < 1.23 and juju-log the message instead.
8061122
807 workload_state -- valid juju workload state.1123 workload_state -- valid juju workload state. str or WORKLOAD_STATES
808 message -- status update message1124 message -- status update message
1125 application -- Whether this is an application state set
809 """1126 """
810 valid_states = ['maintenance', 'blocked', 'waiting', 'active']1127 bad_state_msg = '{!r} is not a valid workload state'
811 if workload_state not in valid_states:1128
812 raise ValueError(1129 if isinstance(workload_state, str):
813 '{!r} is not a valid workload state'.format(workload_state)1130 try:
814 )1131 # Convert string to enum.
815 cmd = ['status-set', workload_state, message]1132 workload_state = WORKLOAD_STATES[workload_state.upper()]
1133 except KeyError:
1134 raise ValueError(bad_state_msg.format(workload_state))
1135
1136 if workload_state not in WORKLOAD_STATES:
1137 raise ValueError(bad_state_msg.format(workload_state))
1138
1139 cmd = ['status-set']
1140 if application:
1141 cmd.append('--application')
1142 cmd.extend([workload_state.value, message])
816 try:1143 try:
817 ret = subprocess.call(cmd)1144 ret = subprocess.call(cmd)
818 if ret == 0:1145 if ret == 0:
@@ -820,7 +1147,7 @@
820 except OSError as e:1147 except OSError as e:
821 if e.errno != errno.ENOENT:1148 if e.errno != errno.ENOENT:
822 raise1149 raise
823 log_message = 'status-set failed: {} {}'.format(workload_state,1150 log_message = 'status-set failed: {} {}'.format(workload_state.value,
824 message)1151 message)
825 log(log_message, level='INFO')1152 log(log_message, level='INFO')
8261153
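A hedged usage sketch of the reworked status_set(); it assumes the WORKLOAD_STATES enum referenced above exposes members such as ACTIVE and BLOCKED, and the messages are made up:

    status_set('active', 'conn-check configured')          # plain string, upper-cased into the enum
    status_set(WORKLOAD_STATES.BLOCKED, 'missing config')  # enum member passed directly
    # Application-level status; typically only honoured when run by the leader unit.
    status_set('active', 'all units ready', application=True)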
@@ -874,6 +1201,14 @@
8741201
8751202
876@translate_exc(from_exc=OSError, to_exc=NotImplementedError)1203@translate_exc(from_exc=OSError, to_exc=NotImplementedError)
1204@cached
1205def goal_state():
1206 """Juju goal state values"""
1207 cmd = ['goal-state', '--format=json']
1208 return json.loads(subprocess.check_output(cmd).decode('UTF-8'))
1209
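Illustrative only: the structure assumed below is inferred from how this module itself consumes goal_state() (a 'units' map with per-unit 'status', and a 'relations' map keyed by relation type), not from a documented schema.

    gs = goal_state()
    for unit, info in gs.get('units', {}).items():
        log('{} is {}'.format(unit, info.get('status')))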
1210
1211@translate_exc(from_exc=OSError, to_exc=NotImplementedError)
877def is_leader():1212def is_leader():
878 """Does the current unit hold the juju leadership1213 """Does the current unit hold the juju leadership
8791214
@@ -967,7 +1302,6 @@
967 universal_newlines=True).strip()1302 universal_newlines=True).strip()
9681303
9691304
970@cached
971def has_juju_version(minimum_version):1305def has_juju_version(minimum_version):
972 """Return True if the Juju version is at least the provided version"""1306 """Return True if the Juju version is at least the provided version"""
973 return LooseVersion(juju_version()) >= LooseVersion(minimum_version)1307 return LooseVersion(juju_version()) >= LooseVersion(minimum_version)
@@ -1027,6 +1361,8 @@
1027@translate_exc(from_exc=OSError, to_exc=NotImplementedError)1361@translate_exc(from_exc=OSError, to_exc=NotImplementedError)
1028def network_get_primary_address(binding):1362def network_get_primary_address(binding):
1029 '''1363 '''
1364 Deprecated since Juju 2.3; use network_get()
1365
1030 Retrieve the primary network address for a named binding1366 Retrieve the primary network address for a named binding
10311367
1032 :param binding: string. The name of a relation of extra-binding1368 :param binding: string. The name of a relation of extra-binding
@@ -1034,4 +1370,267 @@
1034 :raise: NotImplementedError if run on Juju < 2.01370 :raise: NotImplementedError if run on Juju < 2.0
1035 '''1371 '''
1036 cmd = ['network-get', '--primary-address', binding]1372 cmd = ['network-get', '--primary-address', binding]
1037 return subprocess.check_output(cmd).decode('UTF-8').strip()1373 try:
1374 response = subprocess.check_output(
1375 cmd,
1376 stderr=subprocess.STDOUT).decode('UTF-8').strip()
1377 except CalledProcessError as e:
1378 if 'no network config found for binding' in e.output.decode('UTF-8'):
1379 raise NoNetworkBinding("No network binding for {}"
1380 .format(binding))
1381 else:
1382 raise
1383 return response
1384
1385
1386def network_get(endpoint, relation_id=None):
1387 """
1388 Retrieve the network details for a relation endpoint
1389
1390 :param endpoint: string. The name of a relation endpoint
1391 :param relation_id: int. The ID of the relation for the current context.
1392 :return: dict. The loaded YAML output of the network-get query.
1393 :raise: NotImplementedError if request not supported by the Juju version.
1394 """
1395 if not has_juju_version('2.2'):
1396 raise NotImplementedError(juju_version()) # earlier versions require --primary-address
1397 if relation_id and not has_juju_version('2.3'):
1398 raise NotImplementedError # 2.3 added the -r option
1399
1400 cmd = ['network-get', endpoint, '--format', 'yaml']
1401 if relation_id:
1402 cmd.append('-r')
1403 cmd.append(relation_id)
1404 response = subprocess.check_output(
1405 cmd,
1406 stderr=subprocess.STDOUT).decode('UTF-8').strip()
1407 return yaml.safe_load(response)
1408
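A usage sketch, assuming a Juju >= 2.3 controller; the 'ingress-addresses' key is what recent Juju versions emit rather than something this helper guarantees, and the 'website' endpoint is hypothetical:

    try:
        details = network_get('website', relation_id=relation_id())
        addresses = details.get('ingress-addresses', [])
    except NotImplementedError:
        # Controller too old for network-get; fall back to the unit address.
        addresses = [unit_get('private-address')]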
1409
1410def add_metric(*args, **kwargs):
1411 """Add metric values. Values may be expressed with keyword arguments. For
1412 metric names containing dashes, these may be expressed as one or more
1413 'key=value' positional arguments. May only be called from the collect-metrics
1414 hook."""
1415 _args = ['add-metric']
1416 _kvpairs = []
1417 _kvpairs.extend(args)
1418 _kvpairs.extend(['{}={}'.format(k, v) for k, v in kwargs.items()])
1419 _args.extend(sorted(_kvpairs))
1420 try:
1421 subprocess.check_call(_args)
1422 return
1423 except EnvironmentError as e:
1424 if e.errno != errno.ENOENT:
1425 raise
1426 log_message = 'add-metric failed: {}'.format(' '.join(_kvpairs))
1427 log(log_message, level='INFO')
1428
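Sketch only: add-metric is valid solely inside the collect-metrics hook, and the metric names are hypothetical (they would also need to be declared in the charm's metrics.yaml):

    add_metric(users=42)          # keyword form
    add_metric('cache-hits=17')   # positional 'key=value' form, needed for dashed names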
1429
1430def meter_status():
1431 """Get the meter status, if running in the meter-status-changed hook."""
1432 return os.environ.get('JUJU_METER_STATUS')
1433
1434
1435def meter_info():
1436 """Get the meter status information, if running in the meter-status-changed
1437 hook."""
1438 return os.environ.get('JUJU_METER_INFO')
1439
1440
1441def iter_units_for_relation_name(relation_name):
1442 """Iterate through all units in a relation
1443
1444 Generator that iterates through all the units in a relation and yields
1445 a named tuple with rid and unit field names.
1446
1447 Usage:
1448 data = [(u.rid, u.unit)
1449 for u in iter_units_for_relation_name(relation_name)]
1450
1451 :param relation_name: string relation name
1452 :yield: Named Tuple with rid and unit field names
1453 """
1454 RelatedUnit = namedtuple('RelatedUnit', 'rid, unit')
1455 for rid in relation_ids(relation_name):
1456 for unit in related_units(rid):
1457 yield RelatedUnit(rid, unit)
1458
1459
1460def ingress_address(rid=None, unit=None):
1461 """
1462 Retrieve the ingress-address from a relation when available.
1463 Otherwise, return the private-address.
1464
1465 When used on the consuming side of the relation (unit is a remote
1466 unit), the ingress-address is the IP address that this unit needs
1467 to use to reach the provided service on the remote unit.
1468
1469 When used on the providing side of the relation (unit == local_unit()),
1470 the ingress-address is the IP address that is advertised to remote
1471 units on this relation. Remote units need to use this address to
1472 reach the local provided service on this unit.
1473
1474 Note that charms may document some other method to use in
1475 preference to the ingress_address(), such as an address provided
1476 on a different relation attribute or a service discovery mechanism.
1477 This allows charms to redirect inbound connections to their peers
1478 or different applications such as load balancers.
1479
1480 Usage:
1481 addresses = [ingress_address(rid=u.rid, unit=u.unit)
1482 for u in iter_units_for_relation_name(relation_name)]
1483
1484 :param rid: string relation id
1485 :param unit: string unit name
1486 :side effect: calls relation_get
1487 :return: string IP address
1488 """
1489 settings = relation_get(rid=rid, unit=unit)
1490 return (settings.get('ingress-address') or
1491 settings.get('private-address'))
1492
1493
1494def egress_subnets(rid=None, unit=None):
1495 """
1496 Retrieve the egress-subnets from a relation.
1497
1498 This function is to be used on the providing side of the
1499 relation, and provides the ranges of addresses that client
1500 connections may come from. The result is uninteresting on
1501 the consuming side of a relation (unit == local_unit()).
1502
1503 Returns a stable list of subnets in CIDR format.
1504 eg. ['192.168.1.0/24', '2001::F00F/128']
1505
1506 If egress-subnets is not available, falls back to using the published
1507 ingress-address, or finally private-address.
1508
1509 :param rid: string relation id
1510 :param unit: string unit name
1511 :side effect: calls relation_get
1512 :return: list of subnets in CIDR format. eg. ['192.168.1.0/24', '2001::F00F/128']
1513 """
1514 def _to_range(addr):
1515 if re.search(r'^(?:\d{1,3}\.){3}\d{1,3}$', addr) is not None:
1516 addr += '/32'
1517 elif ':' in addr and '/' not in addr: # IPv6
1518 addr += '/128'
1519 return addr
1520
1521 settings = relation_get(rid=rid, unit=unit)
1522 if 'egress-subnets' in settings:
1523 return [n.strip() for n in settings['egress-subnets'].split(',') if n.strip()]
1524 if 'ingress-address' in settings:
1525 return [_to_range(settings['ingress-address'])]
1526 if 'private-address' in settings:
1527 return [_to_range(settings['private-address'])]
1528 return [] # Should never happen
1529
1530
1531def unit_doomed(unit=None):
1532 """Determines if the unit is being removed from the model
1533
1534 Requires Juju 2.4.1.
1535
1536 :param unit: string unit name, defaults to local_unit
1537 :side effect: calls goal_state
1538 :side effect: calls local_unit
1539 :side effect: calls has_juju_version
1540 :return: True if the unit is being removed, already gone, or never existed
1541 """
1542 if not has_juju_version("2.4.1"):
1543 # We cannot risk blindly returning False for 'we don't know',
1544 # because that could cause data loss; if call sites don't
1545 # need an accurate answer, they likely don't need this helper
1546 # at all.
1547 # goal-state existed in 2.4.0, but did not handle removals
1548 # correctly until 2.4.1.
1549 raise NotImplementedError("is_doomed")
1550 if unit is None:
1551 unit = local_unit()
1552 gs = goal_state()
1553 units = gs.get('units', {})
1554 if unit not in units:
1555 return True
1556 # I don't think 'dead' units ever show up in the goal-state, but
1557 # check anyway in addition to 'dying'.
1558 return units[unit]['status'] in ('dying', 'dead')
1559
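An illustrative sketch of how a teardown hook might use unit_doomed(); it assumes a Juju >= 2.4.1 controller, and on older controllers simply errs on the side of keeping data:

    try:
        doomed = unit_doomed()
    except NotImplementedError:
        doomed = False  # pre-2.4.1: no reliable answer, keep the data
    if doomed:
        log('unit is going away; skipping final data resync', level='INFO')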
1560
1561def env_proxy_settings(selected_settings=None):
1562 """Get proxy settings from process environment variables.
1563
1564 Get charm proxy settings from environment variables that correspond to
1565 juju-http-proxy, juju-https-proxy, juju-no-proxy (available as of 2.4.2, see
1566 lp:1782236) and juju-ftp-proxy in a format suitable for passing to an
1567 application that reacts to proxy settings passed as environment variables.
1568 Some applications support lowercase or uppercase notation (e.g. curl), some
1569 support only lowercase (e.g. wget), there are also subjectively rare cases
1570 of only uppercase notation support. no_proxy CIDR and wildcard support also
1571 varies between runtimes and applications as there is no enforced standard.
1572
1573 Some applications may connect to multiple destinations and expose config
1574 options that would affect only proxy settings for a specific destination;
1575 these should be handled in charms in an application-specific manner.
1576
1577 :param selected_settings: format only a subset of possible settings
1578 :type selected_settings: list
1579 :rtype: Option(None, dict[str, str])
1580 """
1581 SUPPORTED_SETTINGS = {
1582 'http': 'HTTP_PROXY',
1583 'https': 'HTTPS_PROXY',
1584 'no_proxy': 'NO_PROXY',
1585 'ftp': 'FTP_PROXY'
1586 }
1587 if selected_settings is None:
1588 selected_settings = SUPPORTED_SETTINGS
1589
1590 selected_vars = [v for k, v in SUPPORTED_SETTINGS.items()
1591 if k in selected_settings]
1592 proxy_settings = {}
1593 for var in selected_vars:
1594 var_val = os.getenv(var)
1595 if var_val:
1596 proxy_settings[var] = var_val
1597 proxy_settings[var.lower()] = var_val
1598 # Now handle juju-prefixed environment variables. The legacy vs new
1599 # environment variable usage is mutually exclusive
1600 charm_var_val = os.getenv('JUJU_CHARM_{}'.format(var))
1601 if charm_var_val:
1602 proxy_settings[var] = charm_var_val
1603 proxy_settings[var.lower()] = charm_var_val
1604 if 'no_proxy' in proxy_settings:
1605 if _contains_range(proxy_settings['no_proxy']):
1606 log(RANGE_WARNING, level=WARNING)
1607 return proxy_settings if proxy_settings else None
1608
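A minimal sketch of forwarding the model's proxy configuration to a child process; the URL is hypothetical and the helper only returns whichever variables are actually set:

    import os
    import subprocess

    env = dict(os.environ)
    env.update(env_proxy_settings(['http', 'https']) or {})
    subprocess.check_call(
        ['curl', '-sSfO', 'https://example.com/conn-check.tar.gz'],  # hypothetical download
        env=env)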
1609
1610def _contains_range(addresses):
1611 """Check for cidr or wildcard domain in a string.
1612
1613 Given a string comprising a comma separated list of ip addresses
1614 and domain names, determine whether the string contains IP ranges
1615 or wildcard domains.
1616
1617 :param addresses: comma separated list of domains and ip addresses.
1618 :type addresses: str
1619 """
1620 return (
1621 # Test for cidr (e.g. 10.20.20.0/24)
1622 "/" in addresses or
1623 # Test for wildcard domains (*.foo.com or .foo.com)
1624 "*" in addresses or
1625 addresses.startswith(".") or
1626 ",." in addresses or
1627 " ." in addresses)
1628
1629
1630def is_subordinate():
1631 """Check whether charm is subordinate in unit metadata.
1632
1633 :returns: True if unit is subordinate, False otherwise.
1634 :rtype: bool
1635 """
1636 return metadata().get('subordinate') is True
10381637
=== modified file 'hooks/charmhelpers/core/host.py'
--- hooks/charmhelpers/core/host.py 2017-01-16 16:28:40 +0000
+++ hooks/charmhelpers/core/host.py 2023-06-30 13:58:42 +0000
@@ -1,4 +1,4 @@
1# Copyright 2014-2015 Canonical Limited.1# Copyright 2014-2021 Canonical Limited.
2#2#
3# Licensed under the Apache License, Version 2.0 (the "License");3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.4# you may not use this file except in compliance with the License.
@@ -19,6 +19,7 @@
19# Nick Moffitt <nick.moffitt@canonical.com>19# Nick Moffitt <nick.moffitt@canonical.com>
20# Matthew Wedgwood <matthew.wedgwood@canonical.com>20# Matthew Wedgwood <matthew.wedgwood@canonical.com>
2121
22import errno
22import os23import os
23import re24import re
24import pwd25import pwd
@@ -30,66 +31,199 @@
30import hashlib31import hashlib
31import functools32import functools
32import itertools33import itertools
33import six
3434
35from contextlib import contextmanager35from contextlib import contextmanager
36from collections import OrderedDict36from collections import OrderedDict, defaultdict
37from .hookenv import log37from .hookenv import log, INFO, DEBUG, local_unit, charm_name
38from .fstab import Fstab38from .fstab import Fstab
39from charmhelpers.osplatform import get_platform39from charmhelpers.osplatform import get_platform
4040
41__platform__ = get_platform()41__platform__ = get_platform()
42if __platform__ == "ubuntu":42if __platform__ == "ubuntu":
43 from charmhelpers.core.host_factory.ubuntu import (43 from charmhelpers.core.host_factory.ubuntu import ( # NOQA:F401
44 service_available,44 service_available,
45 add_new_group,45 add_new_group,
46 lsb_release,46 lsb_release,
47 cmp_pkgrevno,47 cmp_pkgrevno,
48 CompareHostReleases,
49 get_distrib_codename,
50 arch
48 ) # flake8: noqa -- ignore F401 for this import51 ) # flake8: noqa -- ignore F401 for this import
49elif __platform__ == "centos":52elif __platform__ == "centos":
50 from charmhelpers.core.host_factory.centos import (53 from charmhelpers.core.host_factory.centos import ( # NOQA:F401
51 service_available,54 service_available,
52 add_new_group,55 add_new_group,
53 lsb_release,56 lsb_release,
54 cmp_pkgrevno,57 cmp_pkgrevno,
58 CompareHostReleases,
55 ) # flake8: noqa -- ignore F401 for this import59 ) # flake8: noqa -- ignore F401 for this import
5660
5761UPDATEDB_PATH = '/etc/updatedb.conf'
58def service_start(service_name):62CA_CERT_DIR = '/usr/local/share/ca-certificates'
59 """Start a system service"""63
60 return service('start', service_name)64
6165def service_start(service_name, **kwargs):
6266 """Start a system service.
63def service_stop(service_name):67
64 """Stop a system service"""68 The specified service name is managed via the system level init system.
65 return service('stop', service_name)69 Some init systems (e.g. upstart) require that additional arguments be
6670 provided in order to directly control service instances whereas other init
6771 systems allow for addressing instances of a service directly by name (e.g.
68def service_restart(service_name):72 systemd).
69 """Restart a system service"""73
74 The kwargs allow for the additional parameters to be passed to underlying
75 init systems for those systems which require/allow for them. For example,
76 the ceph-osd upstart script requires the id parameter to be passed along
77 in order to identify which running daemon should be reloaded. The follow-
78 ing example stops the ceph-osd service for instance id=4:
79
80 service_stop('ceph-osd', id=4)
81
82 :param service_name: the name of the service to stop
83 :param **kwargs: additional parameters to pass to the init system when
84 managing services. These will be passed as key=value
85 parameters to the init system's commandline. kwargs
86 are ignored for systemd enabled systems.
87 """
88 return service('start', service_name, **kwargs)
89
90
91def service_stop(service_name, **kwargs):
92 """Stop a system service.
93
94 The specified service name is managed via the system level init system.
95 Some init systems (e.g. upstart) require that additional arguments be
96 provided in order to directly control service instances whereas other init
97 systems allow for addressing instances of a service directly by name (e.g.
98 systemd).
99
100 The kwargs allow for the additional parameters to be passed to underlying
101 init systems for those systems which require/allow for them. For example,
102 the ceph-osd upstart script requires the id parameter to be passed along
103 in order to identify which running daemon should be stopped. The follow-
104 ing example stops the ceph-osd service for instance id=4:
105
106 service_stop('ceph-osd', id=4)
107
108 :param service_name: the name of the service to stop
109 :param **kwargs: additional parameters to pass to the init system when
110 managing services. These will be passed as key=value
111 parameters to the init system's commandline. kwargs
112 are ignored for systemd enabled systems.
113 """
114 return service('stop', service_name, **kwargs)
115
116
117def service_enable(service_name, **kwargs):
118 """Enable a system service.
119
120 The specified service name is managed via the system level init system.
121 Some init systems (e.g. upstart) require that additional arguments be
122 provided in order to directly control service instances whereas other init
123 systems allow for addressing instances of a service directly by name (e.g.
124 systemd).
125
126 The kwargs allow for the additional parameters to be passed to underlying
127 init systems for those systems which require/allow for them. For example,
128 the ceph-osd upstart script requires the id parameter to be passed along
129 in order to identify which running daemon should be enabled. The follow-
130 ing example enables the ceph-osd service for instance id=4:
131
132 service_enable('ceph-osd', id=4)
133
134 :param service_name: the name of the service to enable
135 :param **kwargs: additional parameters to pass to the init system when
136 managing services. These will be passed as key=value
137 parameters to the init system's commandline. kwargs
138 are ignored for init systems not allowing additional
139 parameters via the commandline (systemd).
140 """
141 return service('enable', service_name, **kwargs)
142
143
144def service_restart(service_name, **kwargs):
145 """Restart a system service.
146
147 The specified service name is managed via the system level init system.
148 Some init systems (e.g. upstart) require that additional arguments be
149 provided in order to directly control service instances whereas other init
150 systems allow for addressing instances of a service directly by name (e.g.
151 systemd).
152
153 The kwargs allow for the additional parameters to be passed to underlying
154 init systems for those systems which require/allow for them. For example,
155 the ceph-osd upstart script requires the id parameter to be passed along
156 in order to identify which running daemon should be restarted. The follow-
157 ing example restarts the ceph-osd service for instance id=4:
158
159 service_restart('ceph-osd', id=4)
160
161 :param service_name: the name of the service to restart
162 :param **kwargs: additional parameters to pass to the init system when
163 managing services. These will be passed as key=value
164 parameters to the init system's commandline. kwargs
165 are ignored for init systems not allowing additional
166 parameters via the commandline (systemd).
167 """
70 return service('restart', service_name)168 return service('restart', service_name)
71169
72170
73def service_reload(service_name, restart_on_failure=False):171def service_reload(service_name, restart_on_failure=False, **kwargs):
74 """Reload a system service, optionally falling back to restart if172 """Reload a system service, optionally falling back to restart if
75 reload fails"""173 reload fails.
76 service_result = service('reload', service_name)174
175 The specified service name is managed via the system level init system.
176 Some init systems (e.g. upstart) require that additional arguments be
177 provided in order to directly control service instances whereas other init
178 systems allow for addressing instances of a service directly by name (e.g.
179 systemd).
180
181 The kwargs allow for the additional parameters to be passed to underlying
182 init systems for those systems which require/allow for them. For example,
183 the ceph-osd upstart script requires the id parameter to be passed along
184 in order to identify which running daemon should be reloaded. The follow-
185 ing example restarts the ceph-osd service for instance id=4:
186
187 service_reload('ceph-osd', id=4)
188
189 :param service_name: the name of the service to reload
190 :param restart_on_failure: boolean indicating whether to fallback to a
191 restart if the reload fails.
192 :param **kwargs: additional parameters to pass to the init system when
193 managing services. These will be passed as key=value
194 parameters to the init system's commandline. kwargs
195 are ignored for init systems not allowing additional
196 parameters via the commandline (systemd).
197 """
198 service_result = service('reload', service_name, **kwargs)
77 if not service_result and restart_on_failure:199 if not service_result and restart_on_failure:
78 service_result = service('restart', service_name)200 service_result = service('restart', service_name, **kwargs)
79 return service_result201 return service_result
80202
81203
82def service_pause(service_name, init_dir="/etc/init", initd_dir="/etc/init.d"):204def service_pause(service_name, init_dir="/etc/init", initd_dir="/etc/init.d",
205 **kwargs):
83 """Pause a system service.206 """Pause a system service.
84207
85 Stop it, and prevent it from starting again at boot."""208 Stop it, and prevent it from starting again at boot.
209
210 :param service_name: the name of the service to pause
211 :param init_dir: path to the upstart init directory
212 :param initd_dir: path to the sysv init directory
213 :param **kwargs: additional parameters to pass to the init system when
214 managing services. These will be passed as key=value
215 parameters to the init system's commandline. kwargs
216 are ignored for init systems which do not support
217 key=value arguments via the commandline.
218 """
86 stopped = True219 stopped = True
87 if service_running(service_name):220 if service_running(service_name, **kwargs):
88 stopped = service_stop(service_name)221 stopped = service_stop(service_name, **kwargs)
89 upstart_file = os.path.join(init_dir, "{}.conf".format(service_name))222 upstart_file = os.path.join(init_dir, "{}.conf".format(service_name))
90 sysv_file = os.path.join(initd_dir, service_name)223 sysv_file = os.path.join(initd_dir, service_name)
91 if init_is_systemd():224 if init_is_systemd(service_name=service_name):
92 service('disable', service_name)225 service('disable', service_name)
226 service('mask', service_name)
93 elif os.path.exists(upstart_file):227 elif os.path.exists(upstart_file):
94 override_path = os.path.join(228 override_path = os.path.join(
95 init_dir, '{}.override'.format(service_name))229 init_dir, '{}.override'.format(service_name))
@@ -106,13 +240,23 @@
106240
107241
108def service_resume(service_name, init_dir="/etc/init",242def service_resume(service_name, init_dir="/etc/init",
109 initd_dir="/etc/init.d"):243 initd_dir="/etc/init.d", **kwargs):
110 """Resume a system service.244 """Resume a system service.
111245
112 Reenable starting again at boot. Start the service"""246 Re-enable starting again at boot. Start the service.
247
248 :param service_name: the name of the service to resume
249 :param init_dir: the path to the init dir
250 :param initd_dir: the path to the initd dir
251 :param **kwargs: additional parameters to pass to the init system when
252 managing services. These will be passed as key=value
253 parameters to the init system's commandline. kwargs
254 are ignored for systemd enabled systems.
255 """
113 upstart_file = os.path.join(init_dir, "{}.conf".format(service_name))256 upstart_file = os.path.join(init_dir, "{}.conf".format(service_name))
114 sysv_file = os.path.join(initd_dir, service_name)257 sysv_file = os.path.join(initd_dir, service_name)
115 if init_is_systemd():258 if init_is_systemd(service_name=service_name):
259 service('unmask', service_name)
116 service('enable', service_name)260 service('enable', service_name)
117 elif os.path.exists(upstart_file):261 elif os.path.exists(upstart_file):
118 override_path = os.path.join(262 override_path = os.path.join(
@@ -126,19 +270,30 @@
126 "Unable to detect {0} as SystemD, Upstart {1} or"270 "Unable to detect {0} as SystemD, Upstart {1} or"
127 " SysV {2}".format(271 " SysV {2}".format(
128 service_name, upstart_file, sysv_file))272 service_name, upstart_file, sysv_file))
273 started = service_running(service_name, **kwargs)
129274
130 started = service_running(service_name)
131 if not started:275 if not started:
132 started = service_start(service_name)276 started = service_start(service_name, **kwargs)
133 return started277 return started
134278
135279
136def service(action, service_name):280def service(action, service_name=None, **kwargs):
137 """Control a system service"""281 """Control a system service.
138 if init_is_systemd():282
139 cmd = ['systemctl', action, service_name]283 :param action: the action to take on the service
284 :param service_name: the name of the service to perform th action on
285 :param service_name: the name of the service to perform the action on
286 the form of key=value.
287 """
288 if init_is_systemd(service_name=service_name):
289 cmd = ['systemctl', action]
290 if service_name is not None:
291 cmd.append(service_name)
140 else:292 else:
141 cmd = ['service', service_name, action]293 cmd = ['service', service_name, action]
294 for key, value in kwargs.items():
295 parameter = '%s=%s' % (key, value)
296 cmd.append(parameter)
142 return subprocess.call(cmd) == 0297 return subprocess.call(cmd) == 0
143298
144299
@@ -146,16 +301,27 @@
146_INIT_D_CONF = "/etc/init.d/{}"301_INIT_D_CONF = "/etc/init.d/{}"
147302
148303
149def service_running(service_name):304def service_running(service_name, **kwargs):
150 """Determine whether a system service is running"""305 """Determine whether a system service is running.
151 if init_is_systemd():306
307 :param service_name: the name of the service
308 :param **kwargs: additional args to pass to the service command. This is
309 used to pass additional key=value arguments to the
310 service command line for managing specific instance
311 units (e.g. service ceph-osd status id=2). The kwargs
312 are ignored in systemd services.
313 """
314 if init_is_systemd(service_name=service_name):
152 return service('is-active', service_name)315 return service('is-active', service_name)
153 else:316 else:
154 if os.path.exists(_UPSTART_CONF.format(service_name)):317 if os.path.exists(_UPSTART_CONF.format(service_name)):
155 try:318 try:
319 cmd = ['status', service_name]
320 for key, value in kwargs.items():
321 parameter = '%s=%s' % (key, value)
322 cmd.append(parameter)
156 output = subprocess.check_output(323 output = subprocess.check_output(
157 ['status', service_name],324 cmd, stderr=subprocess.STDOUT).decode('UTF-8')
158 stderr=subprocess.STDOUT).decode('UTF-8')
159 except subprocess.CalledProcessError:325 except subprocess.CalledProcessError:
160 return False326 return False
161 else:327 else:
@@ -175,8 +341,16 @@
175SYSTEMD_SYSTEM = '/run/systemd/system'341SYSTEMD_SYSTEM = '/run/systemd/system'
176342
177343
178def init_is_systemd():344def init_is_systemd(service_name=None):
179 """Return True if the host system uses systemd, False otherwise."""345 """
346 Returns whether the host uses systemd for the specified service.
347
348 @param Optional[str] service_name: specific name of service
349 """
350 if str(service_name).startswith("snap."):
351 return True
352 if lsb_release()['DISTRIB_CODENAME'] == 'trusty':
353 return False
180 return os.path.isdir(SYSTEMD_SYSTEM)354 return os.path.isdir(SYSTEMD_SYSTEM)
181355
182356
@@ -306,6 +480,51 @@
306 subprocess.check_call(cmd)480 subprocess.check_call(cmd)
307481
308482
483def chage(username, lastday=None, expiredate=None, inactive=None,
484 mindays=None, maxdays=None, root=None, warndays=None):
485 """Change user password expiry information
486
487 :param str username: User to update
488 :param str lastday: Set when password was changed in YYYY-MM-DD format
489 :param str expiredate: Set when user's account will no longer be
490 accessible in YYYY-MM-DD format.
491 -1 will remove an account expiration date.
492 :param str inactive: Set the number of days of inactivity after a password
493 has expired before the account is locked.
494 -1 will remove an account's inactivity.
495 :param str mindays: Set the minimum number of days between password
496 changes to MIN_DAYS.
497 0 indicates the password can be changed anytime.
498 :param str maxdays: Set the maximum number of days during which a
499 password is valid.
500 -1 as MAX_DAYS will remove checking maxdays
501 :param str root: Apply changes in the CHROOT_DIR directory
502 :param str warndays: Set the number of days of warning before a password
503 change is required
504 :raises subprocess.CalledProcessError: if call to chage fails
505 """
506 cmd = ['chage']
507 if root:
508 cmd.extend(['--root', root])
509 if lastday:
510 cmd.extend(['--lastday', lastday])
511 if expiredate:
512 cmd.extend(['--expiredate', expiredate])
513 if inactive:
514 cmd.extend(['--inactive', inactive])
515 if mindays:
516 cmd.extend(['--mindays', mindays])
517 if maxdays:
518 cmd.extend(['--maxdays', maxdays])
519 if warndays:
520 cmd.extend(['--warndays', warndays])
521 cmd.append(username)
522 subprocess.check_call(cmd)
523
524
525remove_password_expiry = functools.partial(chage, expiredate='-1', inactive='-1', mindays='0', maxdays='-1')
526
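Usage sketch (the account name is hypothetical and the call needs root): remove_password_expiry is just the partial above, equivalent to running chage --expiredate -1 --inactive -1 --mindays 0 --maxdays -1 <user>.

    remove_password_expiry('conn-check')
    # Or set an explicit expiry date instead (YYYY-MM-DD, as documented above):
    chage('conn-check', expiredate='2024-01-01')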
527
309def rsync(from_path, to_path, flags='-r', options=None, timeout=None):528def rsync(from_path, to_path, flags='-r', options=None, timeout=None):
310 """Replicate the contents of a path"""529 """Replicate the contents of a path"""
311 options = options or ['--delete', '--executability']530 options = options or ['--delete', '--executability']
@@ -352,13 +571,45 @@
352571
353def write_file(path, content, owner='root', group='root', perms=0o444):572def write_file(path, content, owner='root', group='root', perms=0o444):
354 """Create or overwrite a file with the contents of a byte string."""573 """Create or overwrite a file with the contents of a byte string."""
355 log("Writing file {} {}:{} {:o}".format(path, owner, group, perms))
356 uid = pwd.getpwnam(owner).pw_uid574 uid = pwd.getpwnam(owner).pw_uid
357 gid = grp.getgrnam(group).gr_gid575 gid = grp.getgrnam(group).gr_gid
358 with open(path, 'wb') as target:576 # let's see if we can grab the file and compare the content, to avoid doing
359 os.fchown(target.fileno(), uid, gid)577 # a write.
360 os.fchmod(target.fileno(), perms)578 existing_content = None
361 target.write(content)579 existing_uid, existing_gid, existing_perms = None, None, None
580 try:
581 with open(path, 'rb') as target:
582 existing_content = target.read()
583 stat = os.stat(path)
584 existing_uid, existing_gid, existing_perms = (
585 stat.st_uid, stat.st_gid, stat.st_mode
586 )
587 except Exception:
588 pass
589 if content != existing_content:
590 log("Writing file {} {}:{} {:o}".format(path, owner, group, perms),
591 level=DEBUG)
592 with open(path, 'wb') as target:
593 os.fchown(target.fileno(), uid, gid)
594 os.fchmod(target.fileno(), perms)
595 if isinstance(content, str):
596 content = content.encode('UTF-8')
597 target.write(content)
598 return
599 # the contents were the same, but we might still need to change the
600 # ownership or permissions.
601 if existing_uid != uid:
602 log("Changing uid on already existing content: {} -> {}"
603 .format(existing_uid, uid), level=DEBUG)
604 os.chown(path, uid, -1)
605 if existing_gid != gid:
606 log("Changing gid on already existing content: {} -> {}"
607 .format(existing_gid, gid), level=DEBUG)
608 os.chown(path, -1, gid)
609 if existing_perms != perms:
610 log("Changing permissions on existing content: {} -> {}"
611 .format(existing_perms, perms), level=DEBUG)
612 os.chmod(path, perms)
362613
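A usage sketch of the now content-aware write_file(); the path and payload are hypothetical. Passing bytes that already match what is on disk skips the rewrite and only corrects ownership or permissions if they drifted (str payloads are encoded to UTF-8 at write time, but the pre-write comparison is against the raw on-disk bytes):

    write_file('/etc/conn-check/config.yaml',   # hypothetical target
               b'interval: 60\n',               # unchanged bytes => no rewrite, no log spam
               owner='root', group='root', perms=0o644)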
363614
364def fstab_remove(mp):615def fstab_remove(mp):
@@ -456,7 +707,7 @@
456707
457 :param str checksum: Value of the checksum used to validate the file.708 :param str checksum: Value of the checksum used to validate the file.
458 :param str hash_type: Hash algorithm used to generate `checksum`.709 :param str hash_type: Hash algorithm used to generate `checksum`.
459 Can be any hash alrgorithm supported by :mod:`hashlib`,710 Can be any hash algorithm supported by :mod:`hashlib`,
460 such as md5, sha1, sha256, sha512, etc.711 such as md5, sha1, sha256, sha512, etc.
461 :raises ChecksumError: If the file fails the checksum712 :raises ChecksumError: If the file fails the checksum
462713
@@ -471,78 +722,227 @@
471 pass722 pass
472723
473724
474def restart_on_change(restart_map, stopstart=False, restart_functions=None):725class restart_on_change(object):
475 """Restart services based on configuration files changing726 """Decorator and context manager to handle restarts.
476727
477 This function is used a decorator, for example::728 Usage:
478729
479 @restart_on_change({730 @restart_on_change(restart_map, ...)
480 '/etc/ceph/ceph.conf': [ 'cinder-api', 'cinder-volume' ]731 def function_that_might_trigger_a_restart(...)
481 '/etc/apache/sites-enabled/*': [ 'apache2' ]732 ...
482 })733
483 def config_changed():734 Or:
484 pass # your code here735
485736 with restart_on_change(restart_map, ...):
486 In this example, the cinder-api and cinder-volume services737 do_stuff_that_might_trigger_a_restart()
487 would be restarted if /etc/ceph/ceph.conf is changed by the738 ...
488 ceph_client_changed function. The apache2 service would be
489 restarted if any file matching the pattern got changed, created
490 or removed. Standard wildcards are supported, see documentation
491 for the 'glob' module for more information.
492
493 @param restart_map: {path_file_name: [service_name, ...]
494 @param stopstart: DEFAULT false; whether to stop, start OR restart
495 @param restart_functions: nonstandard functions to use to restart services
496 {svc: func, ...}
497 @returns result from decorated function
498 """739 """
499 def wrap(f):740
741 def __init__(self, restart_map, stopstart=False, restart_functions=None,
742 can_restart_now_f=None, post_svc_restart_f=None,
743 pre_restarts_wait_f=None):
744 """
745 :param restart_map: {file: [service, ...]}
746 :type restart_map: Dict[str, List[str,]]
747 :param stopstart: whether to stop, start or restart a service
748 :type stopstart: boolean
749 :param restart_functions: nonstandard functions to use to restart
750 services {svc: func, ...}
751 :type restart_functions: Dict[str, Callable[[str], None]]
752 :param can_restart_now_f: A function used to check if the restart is
753 permitted.
754 :type can_restart_now_f: Callable[[str, List[str]], boolean]
755 :param post_svc_restart_f: A function run after a service has
756 restarted.
757 :type post_svc_restart_f: Callable[[str], None]
758 :param pre_restarts_wait_f: A function called before any restarts.
759 :type pre_restarts_wait_f: Callable[None, None]
760 """
761 self.restart_map = restart_map
762 self.stopstart = stopstart
763 self.restart_functions = restart_functions
764 self.can_restart_now_f = can_restart_now_f
765 self.post_svc_restart_f = post_svc_restart_f
766 self.pre_restarts_wait_f = pre_restarts_wait_f
767
768 def __call__(self, f):
769 """Work like a decorator.
770
771 Returns a wrapped function that performs the restart if triggered.
772
773 :param f: The function that is being wrapped.
774 :type f: Callable[[Any], Any]
775 :returns: the wrapped function
776 :rtype: Callable[[Any], Any]
777 """
500 @functools.wraps(f)778 @functools.wraps(f)
501 def wrapped_f(*args, **kwargs):779 def wrapped_f(*args, **kwargs):
502 return restart_on_change_helper(780 return restart_on_change_helper(
503 (lambda: f(*args, **kwargs)), restart_map, stopstart,781 (lambda: f(*args, **kwargs)),
504 restart_functions)782 self.restart_map,
783 stopstart=self.stopstart,
784 restart_functions=self.restart_functions,
785 can_restart_now_f=self.can_restart_now_f,
786 post_svc_restart_f=self.post_svc_restart_f,
787 pre_restarts_wait_f=self.pre_restarts_wait_f)
505 return wrapped_f788 return wrapped_f
506 return wrap789
790 def __enter__(self):
791 """Enter the runtime context related to this object. """
792 self.checksums = _pre_restart_on_change_helper(self.restart_map)
793
794 def __exit__(self, exc_type, exc_val, exc_tb):
795 """Exit the runtime context related to this object.
796
797 The parameters describe the exception that caused the context to be
798 exited. If the context was exited without an exception, all three
799 arguments will be None.
800 """
801 if exc_type is None:
802 _post_restart_on_change_helper(
803 self.checksums,
804 self.restart_map,
805 stopstart=self.stopstart,
806 restart_functions=self.restart_functions,
807 can_restart_now_f=self.can_restart_now_f,
808 post_svc_restart_f=self.post_svc_restart_f,
809 pre_restarts_wait_f=self.pre_restarts_wait_f)
810 # All is good, so return False; any exceptions will propagate.
811 return False
507812
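Both forms side by side, as a sketch; the paths, service names and the render_config()/write_nrpe_check() helpers are made up for illustration:

    @restart_on_change({'/etc/conn-check/config.yaml': ['conn-check']})
    def config_changed():
        render_config()   # hypothetical helper that rewrites the watched file

    with restart_on_change({'/etc/nagios/nrpe.d/check_conn_check.cfg':
                            ['nagios-nrpe-server']}):
        write_nrpe_check()   # hypothetical; only a changed checksum triggers the restart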
508813
509def restart_on_change_helper(lambda_f, restart_map, stopstart=False,814def restart_on_change_helper(lambda_f, restart_map, stopstart=False,
510 restart_functions=None):815 restart_functions=None,
816 can_restart_now_f=None,
817 post_svc_restart_f=None,
818 pre_restarts_wait_f=None):
511 """Helper function to perform the restart_on_change function.819 """Helper function to perform the restart_on_change function.
512820
513 This is provided for decorators to restart services if files described821 This is provided for decorators to restart services if files described
514 in the restart_map have changed after an invocation of lambda_f().822 in the restart_map have changed after an invocation of lambda_f().
515823
516 @param lambda_f: function to call.824 This function allows for a number of helper functions to be passed.
517 @param restart_map: {file: [service, ...]}825
518 @param stopstart: whether to stop, start or restart a service826 `restart_functions` is a map with a service as the key and the
519 @param restart_functions: nonstandard functions to use to restart services827 corresponding value being the function to call to restart the service. For
520 {svc: func, ...}828 example if `restart_functions={'some-service': my_restart_func}` then
521 @returns result of lambda_f()829 `my_restart_func` should be a function which takes one argument which is the
830 service name to be restarted.
831
832 `can_restart_now_f` is a function which checks that a restart is permitted.
833 It should return a bool which indicates if a restart is allowed and should
834 take a service name (str) and a list of changed files (List[str]) as
835 arguments.
836
837 `post_svc_restart_f` is a function which runs after a service has been
838 restarted. It takes the service name that was restarted as an argument.
839
840 `pre_restarts_wait_f` is a function which is called before any restarts
841 occur. The use case for this is an application which wants to try and
842 stagger restarts between units.
843
844 :param lambda_f: function to call.
845 :type lambda_f: Callable[[], ANY]
846 :param restart_map: {file: [service, ...]}
847 :type restart_map: Dict[str, List[str,]]
848 :param stopstart: whether to stop, start or restart a service
849 :type stopstart: boolean
850 :param restart_functions: nonstandard functions to use to restart services
851 {svc: func, ...}
852 :type restart_functions: Dict[str, Callable[[str], None]]
853 :param can_restart_now_f: A function used to check if the restart is
854 permitted.
855 :type can_restart_now_f: Callable[[str, List[str]], boolean]
856 :param post_svc_restart_f: A function run after a service has
857 restarted.
858 :type post_svc_restart_f: Callable[[str], None]
859 :param pre_restarts_wait_f: A function called before any restarts.
860 :type pre_restarts_wait_f: Callable[None, None]
861 :returns: result of lambda_f()
862 :rtype: ANY
863 """
864 checksums = _pre_restart_on_change_helper(restart_map)
865 r = lambda_f()
866 _post_restart_on_change_helper(checksums,
867 restart_map,
868 stopstart,
869 restart_functions,
870 can_restart_now_f,
871 post_svc_restart_f,
872 pre_restarts_wait_f)
873 return r
874
875
876def _pre_restart_on_change_helper(restart_map):
877 """Take a snapshot of file hashes.
878
879 :param restart_map: {file: [service, ...]}
880 :type restart_map: Dict[str, List[str,]]
881 :returns: Dictionary of file paths and the files checksum.
882 :rtype: Dict[str, str]
883 """
884 return {path: path_hash(path) for path in restart_map}
885
886
887def _post_restart_on_change_helper(checksums,
888 restart_map,
889 stopstart=False,
890 restart_functions=None,
891 can_restart_now_f=None,
892 post_svc_restart_f=None,
893 pre_restarts_wait_f=None):
894 """Check whether files have changed.
895
896 :param checksums: Dictionary of file paths and the files checksum.
897 :type checksums: Dict[str, str]
898 :param restart_map: {file: [service, ...]}
899 :type restart_map: Dict[str, List[str,]]
900 :param stopstart: whether to stop, start or restart a service
901 :type stopstart: boolean
902 :param restart_functions: nonstandard functions to use to restart services
903 {svc: func, ...}
904 :type restart_functions: Dict[str, Callable[[str], None]]
905 :param can_restart_now_f: A function used to check if the restart is
906 permitted.
907 :type can_restart_now_f: Callable[[str, List[str]], boolean]
908 :param post_svc_restart_f: A function run after a service has
909 restarted.
910 :type post_svc_restart_f: Callable[[str], None]
911 :param pre_restarts_wait_f: A function called before any restarts.
912 :type pre_restarts_wait_f: Callable[None, None]
522 """913 """
523 if restart_functions is None:914 if restart_functions is None:
524 restart_functions = {}915 restart_functions = {}
525 checksums = {path: path_hash(path) for path in restart_map}916 changed_files = defaultdict(list)
526 r = lambda_f()917 restarts = []
527 # create a list of lists of the services to restart918 # create a list of lists of the services to restart
528 restarts = [restart_map[path]919 for path, services in restart_map.items():
529 for path in restart_map920 if path_hash(path) != checksums[path]:
530 if path_hash(path) != checksums[path]]921 restarts.append(services)
922 for svc in services:
923 changed_files[svc].append(path)
531 # create a flat list of ordered services without duplicates from lists924 # create a flat list of ordered services without duplicates from lists
532 services_list = list(OrderedDict.fromkeys(itertools.chain(*restarts)))925 services_list = list(OrderedDict.fromkeys(itertools.chain(*restarts)))
533 if services_list:926 if services_list:
927 if pre_restarts_wait_f:
928 pre_restarts_wait_f()
534 actions = ('stop', 'start') if stopstart else ('restart',)929 actions = ('stop', 'start') if stopstart else ('restart',)
535 for service_name in services_list:930 for service_name in services_list:
931 if can_restart_now_f:
932 if not can_restart_now_f(service_name,
933 changed_files[service_name]):
934 continue
536 if service_name in restart_functions:935 if service_name in restart_functions:
537 restart_functions[service_name](service_name)936 restart_functions[service_name](service_name)
538 else:937 else:
539 for action in actions:938 for action in actions:
540 service(action, service_name)939 service(action, service_name)
541 return r940 if post_svc_restart_f:
941 post_svc_restart_f(service_name)
542942
543943
544def pwgen(length=None):944def pwgen(length=None):
545 """Generate a random pasword."""945 """Generate a random password."""
546 if length is None:946 if length is None:
547 # A random length is ok to use a weak PRNG947 # A random length is ok to use a weak PRNG
548 length = random.choice(range(35, 45))948 length = random.choice(range(35, 45))
@@ -554,7 +954,7 @@
554 random_generator = random.SystemRandom()954 random_generator = random.SystemRandom()
555 random_chars = [955 random_chars = [
556 random_generator.choice(alphanumeric_chars) for _ in range(length)]956 random_generator.choice(alphanumeric_chars) for _ in range(length)]
557 return(''.join(random_chars))957 return ''.join(random_chars)
558958
559959
560def is_phy_iface(interface):960def is_phy_iface(interface):
@@ -595,7 +995,7 @@
595995
596def list_nics(nic_type=None):996def list_nics(nic_type=None):
597 """Return a list of nics of given type(s)"""997 """Return a list of nics of given type(s)"""
598 if isinstance(nic_type, six.string_types):998 if isinstance(nic_type, str):
599 int_types = [nic_type]999 int_types = [nic_type]
600 else:1000 else:
601 int_types = nic_type1001 int_types = nic_type
@@ -604,7 +1004,8 @@
604 if nic_type:1004 if nic_type:
605 for int_type in int_types:1005 for int_type in int_types:
606 cmd = ['ip', 'addr', 'show', 'label', int_type + '*']1006 cmd = ['ip', 'addr', 'show', 'label', int_type + '*']
607 ip_output = subprocess.check_output(cmd).decode('UTF-8')1007 ip_output = subprocess.check_output(
1008 cmd).decode('UTF-8', errors='replace')
608 ip_output = ip_output.split('\n')1009 ip_output = ip_output.split('\n')
609 ip_output = (line for line in ip_output if line)1010 ip_output = (line for line in ip_output if line)
610 for line in ip_output:1011 for line in ip_output:
@@ -620,10 +1021,11 @@
620 interfaces.append(iface)1021 interfaces.append(iface)
621 else:1022 else:
622 cmd = ['ip', 'a']1023 cmd = ['ip', 'a']
623 ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n')1024 ip_output = subprocess.check_output(
1025 cmd).decode('UTF-8', errors='replace').split('\n')
624 ip_output = (line.strip() for line in ip_output if line)1026 ip_output = (line.strip() for line in ip_output if line)
6251027
626 key = re.compile('^[0-9]+:\s+(.+):')1028 key = re.compile(r'^[0-9]+:\s+(.+):')
627 for line in ip_output:1029 for line in ip_output:
628 matched = re.search(key, line)1030 matched = re.search(key, line)
629 if matched:1031 if matched:
@@ -644,7 +1046,8 @@
644def get_nic_mtu(nic):1046def get_nic_mtu(nic):
645 """Return the Maximum Transmission Unit (MTU) for a network interface."""1047 """Return the Maximum Transmission Unit (MTU) for a network interface."""
646 cmd = ['ip', 'addr', 'show', nic]1048 cmd = ['ip', 'addr', 'show', nic]
647 ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n')1049 ip_output = subprocess.check_output(
1050 cmd).decode('UTF-8', errors='replace').split('\n')
648 mtu = ""1051 mtu = ""
649 for line in ip_output:1052 for line in ip_output:
650 words = line.split()1053 words = line.split()
@@ -656,7 +1059,7 @@
656def get_nic_hwaddr(nic):1059def get_nic_hwaddr(nic):
657 """Return the Media Access Control (MAC) for a network interface."""1060 """Return the Media Access Control (MAC) for a network interface."""
658 cmd = ['ip', '-o', '-0', 'addr', 'show', nic]1061 cmd = ['ip', '-o', '-0', 'addr', 'show', nic]
659 ip_output = subprocess.check_output(cmd).decode('UTF-8')1062 ip_output = subprocess.check_output(cmd).decode('UTF-8', errors='replace')
660 hwaddr = ""1063 hwaddr = ""
661 words = ip_output.split()1064 words = ip_output.split()
662 if 'link/ether' in words:1065 if 'link/ether' in words:
@@ -668,7 +1071,7 @@
668def chdir(directory):1071def chdir(directory):
669 """Change the current working directory to a different directory for a code1072 """Change the current working directory to a different directory for a code
670 block and return the previous directory after the block exits. Useful to1073 block and return the previous directory after the block exits. Useful to
671 run commands from a specificed directory.1074 run commands from a specified directory.
6721075
673 :param str directory: The directory path to change to for this context.1076 :param str directory: The directory path to change to for this context.
674 """1077 """
@@ -703,9 +1106,12 @@
703 for root, dirs, files in os.walk(path, followlinks=follow_links):1106 for root, dirs, files in os.walk(path, followlinks=follow_links):
704 for name in dirs + files:1107 for name in dirs + files:
705 full = os.path.join(root, name)1108 full = os.path.join(root, name)
706 broken_symlink = os.path.lexists(full) and not os.path.exists(full)1109 try:
707 if not broken_symlink:
708 chown(full, uid, gid)1110 chown(full, uid, gid)
1111 except (IOError, OSError) as e:
1112 # Intended to ignore "file not found".
1113 if e.errno == errno.ENOENT:
1114 pass
7091115
7101116
711def lchownr(path, owner, group):1117def lchownr(path, owner, group):
@@ -720,6 +1126,20 @@
720 chownr(path, owner, group, follow_links=False)1126 chownr(path, owner, group, follow_links=False)
7211127
7221128
1129def owner(path):
1130 """Returns a tuple containing the username & groupname owning the path.
1131
1132 :param str path: the string path to retrieve the ownership
1133 :return tuple(str, str): A (username, groupname) tuple containing the
1134 name of the user and group owning the path.
1135 :raises OSError: if the specified path does not exist
1136 """
1137 stat = os.stat(path)
1138 username = pwd.getpwuid(stat.st_uid)[0]
1139 groupname = grp.getgrgid(stat.st_gid)[0]
1140 return username, groupname
1141
1142
723def get_total_ram():1143def get_total_ram():
724 """The total amount of system RAM in bytes.1144 """The total amount of system RAM in bytes.
7251145
@@ -751,3 +1171,136 @@
751 else:1171 else:
752 # Detect using upstart container file marker1172 # Detect using upstart container file marker
753 return os.path.exists(UPSTART_CONTAINER_TYPE)1173 return os.path.exists(UPSTART_CONTAINER_TYPE)
1174
1175
1176def add_to_updatedb_prunepath(path, updatedb_path=UPDATEDB_PATH):
1177 """Adds the specified path to the mlocate's udpatedb.conf PRUNEPATH list.
1178
1179 This method has no effect if the path specified by updatedb_path does not
1180 exist or is not a file.
1181
1182 @param path: string the path to add to the updatedb.conf PRUNEPATHS value
1183 @param updatedb_path: the path to the updatedb.conf file
1184 """
1185 if not os.path.exists(updatedb_path) or os.path.isdir(updatedb_path):
1186 # If the updatedb.conf file doesn't exist then don't attempt to update
1187 # the file as the package providing mlocate may not be installed on
1188 # the local system
1189 return
1190
1191 with open(updatedb_path, 'r+') as f_id:
1192 updatedb_text = f_id.read()
1193 output = updatedb(updatedb_text, path)
1194 f_id.seek(0)
1195 f_id.write(output)
1196 f_id.truncate()
1197
1198
1199def updatedb(updatedb_text, new_path):
1200 lines = [line for line in updatedb_text.split("\n")]
1201 for i, line in enumerate(lines):
1202 if line.startswith("PRUNEPATHS="):
1203 paths_line = line.split("=")[1].replace('"', '')
1204 paths = paths_line.split(" ")
1205 if new_path not in paths:
1206 paths.append(new_path)
1207 lines[i] = 'PRUNEPATHS="{}"'.format(' '.join(paths))
1208 output = "\n".join(lines)
1209 return output
1210
1211
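For illustration, a minimal sketch of the new PRUNEPATHS helpers (the charm data path below is hypothetical):

    from charmhelpers.core.host import add_to_updatedb_prunepath, updatedb

    # Keep a charm-managed data directory out of mlocate's index; this is a
    # no-op when the updatedb.conf file is absent (mlocate not installed).
    add_to_updatedb_prunepath('/srv/conn-check/data')

    # The pure helper can be exercised directly, e.g. in tests:
    conf = 'PRUNEPATHS="/tmp /var/spool"\nPRUNEFS="NFS"\n'
    updatedb(conf, '/srv/conn-check/data')
    # the PRUNEPATHS line becomes: PRUNEPATHS="/tmp /var/spool /srv/conn-check/data"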
1212def modulo_distribution(modulo=3, wait=30, non_zero_wait=False):
1213 """ Modulo distribution
1214
1215 This helper uses the unit number, a modulo value and a constant wait time
1216 to produce a calculated wait time distribution. This is useful in large
1217 scale deployments to distribute load during an expensive operation such as
1218 service restarts.
1219
1220 If you have 1000 nodes that need to restart 100 at a time 1 minute at a
1221 time:
1222
1223 time.wait(modulo_distribution(modulo=100, wait=60))
1224 restart()
1225
1226 If you need restarts to happen serially set modulo to the exact number of
1227 nodes and set a high constant wait time:
1228
1229 time.wait(modulo_distribution(modulo=10, wait=120))
1230 restart()
1231
1232 @param modulo: int The modulo number creates the group distribution
1233 @param wait: int The constant time wait value
1234 @param non_zero_wait: boolean Override unit % modulo == 0,
1235 return modulo * wait. Used to avoid collisions with
1236 leader nodes which are often given priority.
1237 @return: int Calculated time to wait for unit operation
1238 """
1239 unit_number = int(local_unit().split('/')[1])
1240 calculated_wait_time = (unit_number % modulo) * wait
1241 if non_zero_wait and calculated_wait_time == 0:
1242 return modulo * wait
1243 else:
1244 return calculated_wait_time
1245
1246
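A hedged usage sketch for modulo_distribution(), using time.sleep() for the wait step; the restart action is a stand-in:

    import time
    from charmhelpers.core.host import modulo_distribution

    def restart_my_service():
        """Hypothetical restart action for this unit's workload."""

    # At most one unit in every group of five restarts per 30-second window;
    # units where unit-number % 5 == 0 wait a full extra cycle instead of 0s.
    time.sleep(modulo_distribution(modulo=5, wait=30, non_zero_wait=True))
    restart_my_service()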
1247def ca_cert_absolute_path(basename_without_extension):
1248 """Returns absolute path to CA certificate.
1249
1250 :param basename_without_extension: Filename without extension
1251 :type basename_without_extension: str
1252 :returns: Absolute full path
1253 :rtype: str
1254 """
1255 return '{}/{}.crt'.format(CA_CERT_DIR, basename_without_extension)
1256
1257
1258def install_ca_cert(ca_cert, name=None):
1259 """
1260 Install the given cert as a trusted CA.
1261
1262 The ``name`` is the stem of the filename where the cert is written, and if
1263 not provided, it will default to ``juju-{charm_name}``.
1264
1265 If the cert is empty or None, or is unchanged, nothing is done.
1266 """
1267 if not ca_cert:
1268 return
1269 if not isinstance(ca_cert, bytes):
1270 ca_cert = ca_cert.encode('utf8')
1271 if not name:
1272 name = 'juju-{}'.format(charm_name())
1273 cert_file = ca_cert_absolute_path(name)
1274 new_hash = hashlib.md5(ca_cert).hexdigest()
1275 if file_hash(cert_file) == new_hash:
1276 return
1277 log("Installing new CA cert at: {}".format(cert_file), level=INFO)
1278 write_file(cert_file, ca_cert)
1279 subprocess.check_call(['update-ca-certificates', '--fresh'])
1280
1281
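A sketch of the new CA helper as it might be called from a hook; the config option name is hypothetical:

    from charmhelpers.core import hookenv
    from charmhelpers.core.host import install_ca_cert

    # 'ssl_ca' is a hypothetical charm config option holding a PEM certificate.
    ca_pem = hookenv.config().get('ssl_ca')
    # Does nothing when the value is empty or the on-disk cert is unchanged.
    install_ca_cert(ca_pem, name='juju-conn-check-internal-ca')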
1282def get_system_env(key, default=None):
1283 """Get data from system environment as represented in ``/etc/environment``.
1284
1285 :param key: Key to look up
1286 :type key: str
1287 :param default: Value to return if key is not found
1288 :type default: any
1289 :returns: Value for key if found or contents of default parameter
1290 :rtype: any
1291 :raises: subprocess.CalledProcessError
1292 """
1293 env_file = '/etc/environment'
1294 # use the shell and env(1) to parse the global environments file. This is
1295 # done to get the correct result even if the user has shell variable
1296 # substitutions or other shell logic in that file.
1297 output = subprocess.check_output(
1298 ['env', '-i', '/bin/bash', '-c',
1299 'set -a && source {} && env'.format(env_file)],
1300 universal_newlines=True)
1301 for k, v in (line.split('=', 1)
1302 for line in output.splitlines() if '=' in line):
1303 if k == key:
1304 return v
1305 else:
1306 return default
7541307
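A short sketch of get_system_env(); it shells out so that variable substitutions inside /etc/environment are honoured ('http_proxy' is only an example key):

    from charmhelpers.core.host import get_system_env

    proxy = get_system_env('http_proxy', default='')
    path = get_system_env('PATH')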
=== modified file 'hooks/charmhelpers/core/host_factory/centos.py'
--- hooks/charmhelpers/core/host_factory/centos.py 2016-12-20 20:15:28 +0000
+++ hooks/charmhelpers/core/host_factory/centos.py 2023-06-30 13:58:42 +0000
@@ -2,6 +2,22 @@
2import yum2import yum
3import os3import os
44
5from charmhelpers.core.strutils import BasicStringComparator
6
7
8class CompareHostReleases(BasicStringComparator):
9 """Provide comparisons of Host releases.
10
11 Use in the form of
12
13 if CompareHostReleases(release) > 'trusty':
14 # do something with mitaka
15 """
16
17 def __init__(self, item):
18 raise NotImplementedError(
19 "CompareHostReleases() is not implemented for CentOS")
20
521
6def service_available(service_name):22def service_available(service_name):
7 # """Determine whether a system service is available."""23 # """Determine whether a system service is available."""
824
=== modified file 'hooks/charmhelpers/core/host_factory/ubuntu.py'
--- hooks/charmhelpers/core/host_factory/ubuntu.py 2016-12-20 20:15:28 +0000
+++ hooks/charmhelpers/core/host_factory/ubuntu.py 2023-06-30 13:58:42 +0000
@@ -1,5 +1,50 @@
1import subprocess1import subprocess
22
3from charmhelpers.core.hookenv import cached
4from charmhelpers.core.strutils import BasicStringComparator
5
6
7UBUNTU_RELEASES = (
8 'lucid',
9 'maverick',
10 'natty',
11 'oneiric',
12 'precise',
13 'quantal',
14 'raring',
15 'saucy',
16 'trusty',
17 'utopic',
18 'vivid',
19 'wily',
20 'xenial',
21 'yakkety',
22 'zesty',
23 'artful',
24 'bionic',
25 'cosmic',
26 'disco',
27 'eoan',
28 'focal',
29 'groovy',
30 'hirsute',
31 'impish',
32 'jammy',
33 'kinetic',
34 'lunar',
35)
36
37
38class CompareHostReleases(BasicStringComparator):
39 """Provide comparisons of Ubuntu releases.
40
41 Use in the form of
42
43 if CompareHostReleases(release) > 'trusty':
44 # do something with mitaka
45 """
46 _list = UBUNTU_RELEASES
47
348
4def service_available(service_name):49def service_available(service_name):
5 """Determine whether a system service is available"""50 """Determine whether a system service is available"""
@@ -37,6 +82,14 @@
37 return d82 return d
3883
3984
85def get_distrib_codename():
86 """Return the codename of the distribution
87 :returns: The codename
88 :rtype: str
89 """
90 return lsb_release()['DISTRIB_CODENAME'].lower()
91
92
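A sketch of the release comparator and codename helper; comparing against 'xenial' is illustrative:

    from charmhelpers.core.host_factory.ubuntu import (
        CompareHostReleases,
        get_distrib_codename,
    )

    release = get_distrib_codename()            # e.g. 'focal'
    if CompareHostReleases(release) >= 'xenial':
        # xenial, bionic, focal, ... all take this branch
        use_systemd_units = True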
40def cmp_pkgrevno(package, revno, pkgcache=None):93def cmp_pkgrevno(package, revno, pkgcache=None):
41 """Compare supplied revno with the revno of the installed package.94 """Compare supplied revno with the revno of the installed package.
4295
@@ -48,9 +101,24 @@
48 the pkgcache argument is None. Be sure to add charmhelpers.fetch if101 the pkgcache argument is None. Be sure to add charmhelpers.fetch if
49 you call this function, or pass an apt_pkg.Cache() instance.102 you call this function, or pass an apt_pkg.Cache() instance.
50 """103 """
51 import apt_pkg104 from charmhelpers.fetch import apt_pkg, get_installed_version
52 if not pkgcache:105 if not pkgcache:
53 from charmhelpers.fetch import apt_cache106 current_ver = get_installed_version(package)
54 pkgcache = apt_cache()107 else:
55 pkg = pkgcache[package]108 pkg = pkgcache[package]
56 return apt_pkg.version_compare(pkg.current_ver.ver_str, revno)109 current_ver = pkg.current_ver
110
111 return apt_pkg.version_compare(current_ver.ver_str, revno)
112
113
114@cached
115def arch():
116 """Return the package architecture as a string.
117
118 :returns: the architecture
119 :rtype: str
120 :raises: subprocess.CalledProcessError if dpkg command fails
121 """
122 return subprocess.check_output(
123 ['dpkg', '--print-architecture']
124 ).rstrip().decode('UTF-8')
57125
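A short sketch of the reworked cmp_pkgrevno() and the new arch() helper ('openssl' and the version string are only examples):

    from charmhelpers.core.host_factory.ubuntu import arch, cmp_pkgrevno

    # Positive when the installed package is newer than the given version,
    # zero when equal, negative when older.
    if cmp_pkgrevno('openssl', '1.1.1') >= 0:
        modern_tls = True

    machine_arch = arch()    # e.g. 'amd64'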
=== modified file 'hooks/charmhelpers/core/kernel.py'
--- hooks/charmhelpers/core/kernel.py 2016-12-20 20:15:28 +0000
+++ hooks/charmhelpers/core/kernel.py 2023-06-30 13:58:42 +0000
@@ -26,12 +26,12 @@
2626
27__platform__ = get_platform()27__platform__ = get_platform()
28if __platform__ == "ubuntu":28if __platform__ == "ubuntu":
29 from charmhelpers.core.kernel_factory.ubuntu import (29 from charmhelpers.core.kernel_factory.ubuntu import ( # NOQA:F401
30 persistent_modprobe,30 persistent_modprobe,
31 update_initramfs,31 update_initramfs,
32 ) # flake8: noqa -- ignore F401 for this import32 ) # flake8: noqa -- ignore F401 for this import
33elif __platform__ == "centos":33elif __platform__ == "centos":
34 from charmhelpers.core.kernel_factory.centos import (34 from charmhelpers.core.kernel_factory.centos import ( # NOQA:F401
35 persistent_modprobe,35 persistent_modprobe,
36 update_initramfs,36 update_initramfs,
37 ) # flake8: noqa -- ignore F401 for this import37 ) # flake8: noqa -- ignore F401 for this import
3838
=== modified file 'hooks/charmhelpers/core/services/base.py'
--- hooks/charmhelpers/core/services/base.py 2016-12-20 14:35:00 +0000
+++ hooks/charmhelpers/core/services/base.py 2023-06-30 13:58:42 +0000
@@ -14,8 +14,9 @@
1414
15import os15import os
16import json16import json
17from inspect import getargspec17import inspect
18from collections import Iterable, OrderedDict18from collections import OrderedDict
19from collections.abc import Iterable
1920
20from charmhelpers.core import host21from charmhelpers.core import host
21from charmhelpers.core import hookenv22from charmhelpers.core import hookenv
@@ -169,7 +170,7 @@
169 if not units:170 if not units:
170 continue171 continue
171 remote_service = units[0].split('/')[0]172 remote_service = units[0].split('/')[0]
172 argspec = getargspec(provider.provide_data)173 argspec = inspect.getfullargspec(provider.provide_data)
173 if len(argspec.args) > 1:174 if len(argspec.args) > 1:
174 data = provider.provide_data(remote_service, service_ready)175 data = provider.provide_data(remote_service, service_ready)
175 else:176 else:
@@ -307,23 +308,34 @@
307 """308 """
308 def __call__(self, manager, service_name, event_name):309 def __call__(self, manager, service_name, event_name):
309 service = manager.get_service(service_name)310 service = manager.get_service(service_name)
310 new_ports = service.get('ports', [])311 # turn this generator into a list,
312 # as we'll be going over it multiple times
313 new_ports = list(service.get('ports', []))
311 port_file = os.path.join(hookenv.charm_dir(), '.{}.ports'.format(service_name))314 port_file = os.path.join(hookenv.charm_dir(), '.{}.ports'.format(service_name))
312 if os.path.exists(port_file):315 if os.path.exists(port_file):
313 with open(port_file) as fp:316 with open(port_file) as fp:
314 old_ports = fp.read().split(',')317 old_ports = fp.read().split(',')
315 for old_port in old_ports:318 for old_port in old_ports:
316 if bool(old_port):319 if bool(old_port) and not self.ports_contains(old_port, new_ports):
317 old_port = int(old_port)320 hookenv.close_port(old_port)
318 if old_port not in new_ports:
319 hookenv.close_port(old_port)
320 with open(port_file, 'w') as fp:321 with open(port_file, 'w') as fp:
321 fp.write(','.join(str(port) for port in new_ports))322 fp.write(','.join(str(port) for port in new_ports))
322 for port in new_ports:323 for port in new_ports:
324 # A port is either a number or 'ICMP'
325 protocol = 'TCP'
326 if str(port).upper() == 'ICMP':
327 protocol = 'ICMP'
323 if event_name == 'start':328 if event_name == 'start':
324 hookenv.open_port(port)329 hookenv.open_port(port, protocol)
325 elif event_name == 'stop':330 elif event_name == 'stop':
326 hookenv.close_port(port)331 hookenv.close_port(port, protocol)
332
333 def ports_contains(self, port, ports):
334 if not bool(port):
335 return False
336 if str(port).upper() != 'ICMP':
337 port = int(port)
338 return port in ports
327339
328340
329def service_stop(service_name):341def service_stop(service_name):
330342
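The port handling above now accepts an 'ICMP' entry alongside numeric ports. A hedged sketch of a services-framework definition using it (the workload name is hypothetical):

    from charmhelpers.core.services.base import ServiceManager

    manager = ServiceManager([
        {
            'service': 'conn-check-exporter',   # hypothetical workload name
            'ports': [9100, 'ICMP'],            # opened on start, closed on stop
        },
    ])
    manager.manage()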
=== modified file 'hooks/charmhelpers/core/services/helpers.py'
--- hooks/charmhelpers/core/services/helpers.py 2016-12-20 14:35:00 +0000
+++ hooks/charmhelpers/core/services/helpers.py 2023-06-30 13:58:42 +0000
@@ -179,7 +179,7 @@
179 self.required_options = args179 self.required_options = args
180 self['config'] = hookenv.config()180 self['config'] = hookenv.config()
181 with open(os.path.join(hookenv.charm_dir(), 'config.yaml')) as fp:181 with open(os.path.join(hookenv.charm_dir(), 'config.yaml')) as fp:
182 self.config = yaml.load(fp).get('options', {})182 self.config = yaml.safe_load(fp).get('options', {})
183183
184 def __bool__(self):184 def __bool__(self):
185 for option in self.required_options:185 for option in self.required_options:
@@ -227,7 +227,7 @@
227 if not os.path.isabs(file_name):227 if not os.path.isabs(file_name):
228 file_name = os.path.join(hookenv.charm_dir(), file_name)228 file_name = os.path.join(hookenv.charm_dir(), file_name)
229 with open(file_name, 'r') as file_stream:229 with open(file_name, 'r') as file_stream:
230 data = yaml.load(file_stream)230 data = yaml.safe_load(file_stream)
231 if not data:231 if not data:
232 raise OSError("%s is empty" % file_name)232 raise OSError("%s is empty" % file_name)
233 return data233 return data
234234
=== modified file 'hooks/charmhelpers/core/strutils.py'
--- hooks/charmhelpers/core/strutils.py 2016-12-20 14:35:00 +0000
+++ hooks/charmhelpers/core/strutils.py 2023-06-30 13:58:42 +0000
@@ -15,26 +15,28 @@
15# See the License for the specific language governing permissions and15# See the License for the specific language governing permissions and
16# limitations under the License.16# limitations under the License.
1717
18import six
19import re18import re
2019
2120TRUTHY_STRINGS = {'y', 'yes', 'true', 't', 'on'}
22def bool_from_string(value):21FALSEY_STRINGS = {'n', 'no', 'false', 'f', 'off'}
22
23
24def bool_from_string(value, truthy_strings=TRUTHY_STRINGS, falsey_strings=FALSEY_STRINGS, assume_false=False):
23 """Interpret string value as boolean.25 """Interpret string value as boolean.
2426
25 Returns True if value translates to True otherwise False.27 Returns True if value translates to True otherwise False.
26 """28 """
27 if isinstance(value, six.string_types):29 if isinstance(value, str):
28 value = six.text_type(value)30 value = str(value)
29 else:31 else:
30 msg = "Unable to interpret non-string value '%s' as boolean" % (value)32 msg = "Unable to interpret non-string value '%s' as boolean" % (value)
31 raise ValueError(msg)33 raise ValueError(msg)
3234
33 value = value.strip().lower()35 value = value.strip().lower()
3436
35 if value in ['y', 'yes', 'true', 't', 'on']:37 if value in truthy_strings:
36 return True38 return True
37 elif value in ['n', 'no', 'false', 'f', 'off']:39 elif value in falsey_strings or assume_false:
38 return False40 return False
3941
40 msg = "Unable to interpret string value '%s' as boolean" % (value)42 msg = "Unable to interpret string value '%s' as boolean" % (value)
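A sketch of the extended bool_from_string() signature:

    from charmhelpers.core.strutils import bool_from_string

    bool_from_string('Yes')      # True
    bool_from_string('off')      # False
    # Custom vocabulary plus a lenient fallback for anything unrecognised:
    bool_from_string('enabled', truthy_strings={'enabled'}, assume_false=True)   # True
    bool_from_string('whatever', assume_false=True)                              # False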
@@ -58,13 +60,72 @@
58 'P': 5,60 'P': 5,
59 'PB': 5,61 'PB': 5,
60 }62 }
61 if isinstance(value, six.string_types):63 if isinstance(value, str):
62 value = six.text_type(value)64 value = str(value)
63 else:65 else:
64 msg = "Unable to interpret non-string value '%s' as boolean" % (value)66 msg = "Unable to interpret non-string value '%s' as bytes" % (value)
65 raise ValueError(msg)67 raise ValueError(msg)
66 matches = re.match("([0-9]+)([a-zA-Z]+)", value)68 matches = re.match("([0-9]+)([a-zA-Z]+)", value)
67 if not matches:69 if matches:
68 msg = "Unable to interpret string value '%s' as bytes" % (value)70 size = int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)])
69 raise ValueError(msg)71 else:
70 return int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)])72 # Assume that value passed in is bytes
73 try:
74 size = int(value)
75 except ValueError:
76 msg = "Unable to interpret string value '%s' as bytes" % (value)
77 raise ValueError(msg)
78 return size
79
80
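Assuming this hunk is strutils' byte-size parser (bytes_from_string), a brief sketch of the new behaviour:

    from charmhelpers.core.strutils import bytes_from_string

    bytes_from_string('10M')     # 10485760
    bytes_from_string('1G')      # 1073741824
    bytes_from_string('4096')    # 4096 -- a bare integer is now taken as bytes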
81class BasicStringComparator(object):
82 """Provides a class that will compare strings from an iterator type object.
83 Used to provide > and < comparisons on strings that may not necessarily be
84 alphanumerically ordered. e.g. OpenStack or Ubuntu releases AFTER the
85 z-wrap.
86 """
87
88 _list = None
89
90 def __init__(self, item):
91 if self._list is None:
92 raise Exception("Must define the _list in the class definition!")
93 try:
94 self.index = self._list.index(item)
95 except Exception:
96 raise KeyError("Item '{}' is not in list '{}'"
97 .format(item, self._list))
98
99 def __eq__(self, other):
100 assert isinstance(other, str) or isinstance(other, self.__class__)
101 return self.index == self._list.index(other)
102
103 def __ne__(self, other):
104 return not self.__eq__(other)
105
106 def __lt__(self, other):
107 assert isinstance(other, str) or isinstance(other, self.__class__)
108 return self.index < self._list.index(other)
109
110 def __ge__(self, other):
111 return not self.__lt__(other)
112
113 def __gt__(self, other):
114 assert isinstance(other, str) or isinstance(other, self.__class__)
115 return self.index > self._list.index(other)
116
117 def __le__(self, other):
118 return not self.__gt__(other)
119
120 def __str__(self):
121 """Always give back the item at the index so it can be used in
122 comparisons like:
123
124 s_mitaka = CompareOpenStack('mitaka')
125 s_newton = CompareOpenStack('newton')
126
127 assert s_newton > s_mitaka
128
129 @returns: <string>
130 """
131 return self._list[self.index]
71132
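A sketch of subclassing the new BasicStringComparator for an arbitrary ordering (the phase names are made up):

    from charmhelpers.core.strutils import BasicStringComparator

    class ComparePhases(BasicStringComparator):
        _list = ('alpha', 'beta', 'stable')

    assert ComparePhases('beta') > 'alpha'
    assert ComparePhases('beta') <= 'stable'
    assert str(ComparePhases('stable')) == 'stable'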
=== modified file 'hooks/charmhelpers/core/sysctl.py'
--- hooks/charmhelpers/core/sysctl.py 2016-12-20 14:35:00 +0000
+++ hooks/charmhelpers/core/sysctl.py 2023-06-30 13:58:42 +0000
@@ -17,38 +17,59 @@
1717
18import yaml18import yaml
1919
20from subprocess import check_call20from subprocess import check_call, CalledProcessError
2121
22from charmhelpers.core.hookenv import (22from charmhelpers.core.hookenv import (
23 log,23 log,
24 DEBUG,24 DEBUG,
25 ERROR,25 ERROR,
26 WARNING,
26)27)
2728
29from charmhelpers.core.host import is_container
30
28__author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>'31__author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>'
2932
3033
31def create(sysctl_dict, sysctl_file):34def create(sysctl_dict, sysctl_file, ignore=False):
32 """Creates a sysctl.conf file from a YAML associative array35 """Creates a sysctl.conf file from a YAML associative array
3336
34 :param sysctl_dict: a YAML-formatted string of sysctl options eg "{ 'kernel.max_pid': 1337 }"37 :param sysctl_dict: a dict or YAML-formatted string of sysctl
38 options eg "{ 'kernel.max_pid': 1337 }"
35 :type sysctl_dict: str39 :type sysctl_dict: str
36 :param sysctl_file: path to the sysctl file to be saved40 :param sysctl_file: path to the sysctl file to be saved
37 :type sysctl_file: str or unicode41 :type sysctl_file: str or unicode
42 :param ignore: If True, ignore "unknown variable" errors.
43 :type ignore: bool
38 :returns: None44 :returns: None
39 """45 """
40 try:46 if type(sysctl_dict) is not dict:
41 sysctl_dict_parsed = yaml.safe_load(sysctl_dict)47 try:
42 except yaml.YAMLError:48 sysctl_dict_parsed = yaml.safe_load(sysctl_dict)
43 log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict),49 except yaml.YAMLError:
44 level=ERROR)50 log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict),
45 return51 level=ERROR)
52 return
53 else:
54 sysctl_dict_parsed = sysctl_dict
4655
47 with open(sysctl_file, "w") as fd:56 with open(sysctl_file, "w") as fd:
48 for key, value in sysctl_dict_parsed.items():57 for key, value in sysctl_dict_parsed.items():
49 fd.write("{}={}\n".format(key, value))58 fd.write("{}={}\n".format(key, value))
5059
51 log("Updating sysctl_file: %s values: %s" % (sysctl_file, sysctl_dict_parsed),60 log("Updating sysctl_file: {} values: {}".format(sysctl_file,
61 sysctl_dict_parsed),
52 level=DEBUG)62 level=DEBUG)
5363
54 check_call(["sysctl", "-p", sysctl_file])64 call = ["sysctl", "-p", sysctl_file]
65 if ignore:
66 call.append("-e")
67
68 try:
69 check_call(call)
70 except CalledProcessError as e:
71 if is_container():
72 log("Error setting some sysctl keys in this container: {}".format(e.output),
73 level=WARNING)
74 else:
75 raise e
5576
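A hedged sketch of the updated sysctl helper, which now takes a dict directly and can skip unknown keys (handy inside containers); the drop-in path is illustrative:

    from charmhelpers.core.sysctl import create as sysctl_create

    sysctl_create(
        {'net.core.somaxconn': 4096, 'vm.swappiness': 1},
        '/etc/sysctl.d/50-conn-check.conf',
        ignore=True,    # appends -e so unknown variables do not fail the hook
    )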
=== modified file 'hooks/charmhelpers/core/templating.py'
--- hooks/charmhelpers/core/templating.py 2016-12-20 14:35:00 +0000
+++ hooks/charmhelpers/core/templating.py 2023-06-30 13:58:42 +0000
@@ -13,14 +13,14 @@
13# limitations under the License.13# limitations under the License.
1414
15import os15import os
16import sys
1716
18from charmhelpers.core import host17from charmhelpers.core import host
19from charmhelpers.core import hookenv18from charmhelpers.core import hookenv
2019
2120
22def render(source, target, context, owner='root', group='root',21def render(source, target, context, owner='root', group='root',
23 perms=0o444, templates_dir=None, encoding='UTF-8', template_loader=None):22 perms=0o444, templates_dir=None, encoding='UTF-8',
23 template_loader=None, config_template=None):
24 """24 """
25 Render a template.25 Render a template.
2626
@@ -32,6 +32,9 @@
32 The context should be a dict containing the values to be replaced in the32 The context should be a dict containing the values to be replaced in the
33 template.33 template.
3434
35 config_template may be provided to render from a provided template instead
36 of loading from a file.
37
35 The `owner`, `group`, and `perms` options will be passed to `write_file`.38 The `owner`, `group`, and `perms` options will be passed to `write_file`.
3639
37 If omitted, `templates_dir` defaults to the `templates` folder in the charm.40 If omitted, `templates_dir` defaults to the `templates` folder in the charm.
@@ -39,9 +42,8 @@
39 The rendered template will be written to the file as well as being returned42 The rendered template will be written to the file as well as being returned
40 as a string.43 as a string.
4144
42 Note: Using this requires python-jinja2 or python3-jinja2; if it is not45 Note: Using this requires python3-jinja2; if it is not installed, calling
43 installed, calling this will attempt to use charmhelpers.fetch.apt_install46 this will attempt to use charmhelpers.fetch.apt_install to install it.
44 to install it.
45 """47 """
46 try:48 try:
47 from jinja2 import FileSystemLoader, Environment, exceptions49 from jinja2 import FileSystemLoader, Environment, exceptions
@@ -53,10 +55,7 @@
53 'charmhelpers.fetch to install it',55 'charmhelpers.fetch to install it',
54 level=hookenv.ERROR)56 level=hookenv.ERROR)
55 raise57 raise
56 if sys.version_info.major == 2:58 apt_install('python3-jinja2', fatal=True)
57 apt_install('python-jinja2', fatal=True)
58 else:
59 apt_install('python3-jinja2', fatal=True)
60 from jinja2 import FileSystemLoader, Environment, exceptions59 from jinja2 import FileSystemLoader, Environment, exceptions
6160
62 if template_loader:61 if template_loader:
@@ -65,14 +64,19 @@
65 if templates_dir is None:64 if templates_dir is None:
66 templates_dir = os.path.join(hookenv.charm_dir(), 'templates')65 templates_dir = os.path.join(hookenv.charm_dir(), 'templates')
67 template_env = Environment(loader=FileSystemLoader(templates_dir))66 template_env = Environment(loader=FileSystemLoader(templates_dir))
68 try:67
69 source = source68 # load from a string if provided explicitly
70 template = template_env.get_template(source)69 if config_template is not None:
71 except exceptions.TemplateNotFound as e:70 template = template_env.from_string(config_template)
72 hookenv.log('Could not load template %s from %s.' %71 else:
73 (source, templates_dir),72 try:
74 level=hookenv.ERROR)73 source = source
75 raise e74 template = template_env.get_template(source)
75 except exceptions.TemplateNotFound as e:
76 hookenv.log('Could not load template %s from %s.' %
77 (source, templates_dir),
78 level=hookenv.ERROR)
79 raise e
76 content = template.render(context)80 content = template.render(context)
77 if target is not None:81 if target is not None:
78 target_dir = os.path.dirname(target)82 target_dir = os.path.dirname(target)
7983
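A sketch of render() with the new config_template argument, which renders an inline Jinja2 string instead of loading templates/<source>; the target path and context are illustrative:

    from charmhelpers.core.templating import render

    render(
        source=None,
        target='/etc/conn-check/extra.yaml',
        context={'timeout': 10},
        config_template='timeout: {{ timeout }}\n',
        perms=0o640,
    )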
=== modified file 'hooks/charmhelpers/core/unitdata.py'
--- hooks/charmhelpers/core/unitdata.py 2016-12-20 14:35:00 +0000
+++ hooks/charmhelpers/core/unitdata.py 2023-06-30 13:58:42 +0000
@@ -1,7 +1,7 @@
1#!/usr/bin/env python1#!/usr/bin/env python
2# -*- coding: utf-8 -*-2# -*- coding: utf-8 -*-
3#3#
4# Copyright 2014-2015 Canonical Limited.4# Copyright 2014-2021 Canonical Limited.
5#5#
6# Licensed under the Apache License, Version 2.0 (the "License");6# Licensed under the Apache License, Version 2.0 (the "License");
7# you may not use this file except in compliance with the License.7# you may not use this file except in compliance with the License.
@@ -61,7 +61,7 @@
61 'previous value', prev,61 'previous value', prev,
62 'current value', cur)62 'current value', cur)
6363
64 # Get some unit specific bookeeping64 # Get some unit specific bookkeeping
65 if not db.get('pkg_key'):65 if not db.get('pkg_key'):
66 key = urllib.urlopen('https://example.com/pkg_key').read()66 key = urllib.urlopen('https://example.com/pkg_key').read()
67 db.set('pkg_key', key)67 db.set('pkg_key', key)
@@ -166,15 +166,23 @@
166166
167 To support dicts, lists, integer, floats, and booleans values167 To support dicts, lists, integer, floats, and booleans values
168 are automatically json encoded/decoded.168 are automatically json encoded/decoded.
169
170 Note: to facilitate unit testing, ':memory:' can be passed as the
171 path parameter which causes sqlite3 to only build the db in memory.
172 This should only be used for testing purposes.
169 """173 """
170 def __init__(self, path=None):174 def __init__(self, path=None, keep_revisions=False):
171 self.db_path = path175 self.db_path = path
176 self.keep_revisions = keep_revisions
172 if path is None:177 if path is None:
173 if 'UNIT_STATE_DB' in os.environ:178 if 'UNIT_STATE_DB' in os.environ:
174 self.db_path = os.environ['UNIT_STATE_DB']179 self.db_path = os.environ['UNIT_STATE_DB']
175 else:180 else:
176 self.db_path = os.path.join(181 self.db_path = os.path.join(
177 os.environ.get('CHARM_DIR', ''), '.unit-state.db')182 os.environ.get('CHARM_DIR', ''), '.unit-state.db')
183 if self.db_path != ':memory:':
184 with open(self.db_path, 'a') as f:
185 os.fchmod(f.fileno(), 0o600)
178 self.conn = sqlite3.connect('%s' % self.db_path)186 self.conn = sqlite3.connect('%s' % self.db_path)
179 self.cursor = self.conn.cursor()187 self.cursor = self.conn.cursor()
180 self.revision = None188 self.revision = None
@@ -235,7 +243,7 @@
235 Remove a key from the database entirely.243 Remove a key from the database entirely.
236 """244 """
237 self.cursor.execute('delete from kv where key=?', [key])245 self.cursor.execute('delete from kv where key=?', [key])
238 if self.revision and self.cursor.rowcount:246 if self.keep_revisions and self.revision and self.cursor.rowcount:
239 self.cursor.execute(247 self.cursor.execute(
240 'insert into kv_revisions values (?, ?, ?)',248 'insert into kv_revisions values (?, ?, ?)',
241 [key, self.revision, json.dumps('DELETED')])249 [key, self.revision, json.dumps('DELETED')])
@@ -252,14 +260,14 @@
252 if keys is not None:260 if keys is not None:
253 keys = ['%s%s' % (prefix, key) for key in keys]261 keys = ['%s%s' % (prefix, key) for key in keys]
254 self.cursor.execute('delete from kv where key in (%s)' % ','.join(['?'] * len(keys)), keys)262 self.cursor.execute('delete from kv where key in (%s)' % ','.join(['?'] * len(keys)), keys)
255 if self.revision and self.cursor.rowcount:263 if self.keep_revisions and self.revision and self.cursor.rowcount:
256 self.cursor.execute(264 self.cursor.execute(
257 'insert into kv_revisions values %s' % ','.join(['(?, ?, ?)'] * len(keys)),265 'insert into kv_revisions values %s' % ','.join(['(?, ?, ?)'] * len(keys)),
258 list(itertools.chain.from_iterable((key, self.revision, json.dumps('DELETED')) for key in keys)))266 list(itertools.chain.from_iterable((key, self.revision, json.dumps('DELETED')) for key in keys)))
259 else:267 else:
260 self.cursor.execute('delete from kv where key like ?',268 self.cursor.execute('delete from kv where key like ?',
261 ['%s%%' % prefix])269 ['%s%%' % prefix])
262 if self.revision and self.cursor.rowcount:270 if self.keep_revisions and self.revision and self.cursor.rowcount:
263 self.cursor.execute(271 self.cursor.execute(
264 'insert into kv_revisions values (?, ?, ?)',272 'insert into kv_revisions values (?, ?, ?)',
265 ['%s%%' % prefix, self.revision, json.dumps('DELETED')])273 ['%s%%' % prefix, self.revision, json.dumps('DELETED')])
@@ -292,7 +300,7 @@
292 where key = ?''', [serialized, key])300 where key = ?''', [serialized, key])
293301
294 # Save302 # Save
295 if not self.revision:303 if (not self.keep_revisions) or (not self.revision):
296 return value304 return value
297305
298 self.cursor.execute(306 self.cursor.execute(
@@ -358,7 +366,7 @@
358 try:366 try:
359 yield self.revision367 yield self.revision
360 self.revision = None368 self.revision = None
361 except:369 except Exception:
362 self.flush(False)370 self.flush(False)
363 self.revision = None371 self.revision = None
364 raise372 raise
@@ -442,7 +450,7 @@
442 'previous value', prev,450 'previous value', prev,
443 'current value', cur)451 'current value', cur)
444452
445 # Get some unit specific bookeeping453 # Get some unit specific bookkeeping
446 if not db.get('pkg_key'):454 if not db.get('pkg_key'):
447 key = urllib.urlopen('https://example.com/pkg_key').read()455 key = urllib.urlopen('https://example.com/pkg_key').read()
448 db.set('pkg_key', key)456 db.set('pkg_key', key)
449457
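A sketch of the unitdata changes, ':memory:' storage for tests plus opt-in revision history; the key is hypothetical:

    from charmhelpers.core import unitdata

    db = unitdata.Storage(path=':memory:', keep_revisions=True)
    db.set('nagios_context', 'production')
    assert db.get('nagios_context') == 'production'
    db.unset('nagios_context')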
=== modified file 'hooks/charmhelpers/fetch/__init__.py'
--- hooks/charmhelpers/fetch/__init__.py 2016-12-20 14:35:00 +0000
+++ hooks/charmhelpers/fetch/__init__.py 2023-06-30 13:58:42 +0000
@@ -1,4 +1,4 @@
1# Copyright 2014-2015 Canonical Limited.1# Copyright 2014-2021 Canonical Limited.
2#2#
3# Licensed under the Apache License, Version 2.0 (the "License");3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.4# you may not use this file except in compliance with the License.
@@ -20,11 +20,7 @@
20 log,20 log,
21)21)
2222
23import six23from urllib.parse import urlparse, urlunparse
24if six.PY3:
25 from urllib.parse import urlparse, urlunparse
26else:
27 from urlparse import urlparse, urlunparse
2824
2925
30# The order of this list is very important. Handlers should be listed in from26# The order of this list is very important. Handlers should be listed in from
@@ -48,6 +44,13 @@
48 pass44 pass
4945
5046
47class GPGKeyError(Exception):
48 """Exception occurs when a GPG key cannot be fetched or used. The message
49 indicates what the problem is.
50 """
51 pass
52
53
51class BaseFetchHandler(object):54class BaseFetchHandler(object):
5255
53 """Base class for FetchHandler implementations in fetch plugins"""56 """Base class for FetchHandler implementations in fetch plugins"""
@@ -77,22 +80,30 @@
77fetch = importlib.import_module(module)80fetch = importlib.import_module(module)
7881
79filter_installed_packages = fetch.filter_installed_packages82filter_installed_packages = fetch.filter_installed_packages
80install = fetch.install83filter_missing_packages = fetch.filter_missing_packages
81upgrade = fetch.upgrade84install = fetch.apt_install
82update = fetch.update85upgrade = fetch.apt_upgrade
83purge = fetch.purge86update = _fetch_update = fetch.apt_update
87purge = fetch.apt_purge
84add_source = fetch.add_source88add_source = fetch.add_source
8589
86if __platform__ == "ubuntu":90if __platform__ == "ubuntu":
87 apt_cache = fetch.apt_cache91 apt_cache = fetch.apt_cache
88 apt_install = fetch.install92 apt_install = fetch.apt_install
89 apt_update = fetch.update93 apt_update = fetch.apt_update
90 apt_upgrade = fetch.upgrade94 apt_upgrade = fetch.apt_upgrade
91 apt_purge = fetch.purge95 apt_purge = fetch.apt_purge
96 apt_autoremove = fetch.apt_autoremove
92 apt_mark = fetch.apt_mark97 apt_mark = fetch.apt_mark
93 apt_hold = fetch.apt_hold98 apt_hold = fetch.apt_hold
94 apt_unhold = fetch.apt_unhold99 apt_unhold = fetch.apt_unhold
100 import_key = fetch.import_key
95 get_upstream_version = fetch.get_upstream_version101 get_upstream_version = fetch.get_upstream_version
102 apt_pkg = fetch.ubuntu_apt_pkg
103 get_apt_dpkg_env = fetch.get_apt_dpkg_env
104 get_installed_version = fetch.get_installed_version
105 OPENSTACK_RELEASES = fetch.OPENSTACK_RELEASES
106 UBUNTU_OPENSTACK_RELEASE = fetch.UBUNTU_OPENSTACK_RELEASE
96elif __platform__ == "centos":107elif __platform__ == "centos":
97 yum_search = fetch.yum_search108 yum_search = fetch.yum_search
98109
@@ -119,14 +130,14 @@
119 sources = safe_load((config(sources_var) or '').strip()) or []130 sources = safe_load((config(sources_var) or '').strip()) or []
120 keys = safe_load((config(keys_var) or '').strip()) or None131 keys = safe_load((config(keys_var) or '').strip()) or None
121132
122 if isinstance(sources, six.string_types):133 if isinstance(sources, str):
123 sources = [sources]134 sources = [sources]
124135
125 if keys is None:136 if keys is None:
126 for source in sources:137 for source in sources:
127 add_source(source, None)138 add_source(source, None)
128 else:139 else:
129 if isinstance(keys, six.string_types):140 if isinstance(keys, str):
130 keys = [keys]141 keys = [keys]
131142
132 if len(sources) != len(keys):143 if len(sources) != len(keys):
@@ -135,7 +146,7 @@
135 for source, key in zip(sources, keys):146 for source, key in zip(sources, keys):
136 add_source(source, key)147 add_source(source, key)
137 if update:148 if update:
138 fetch.update(fatal=True)149 _fetch_update(fatal=True)
139150
140151
141def install_remote(source, *args, **kwargs):152def install_remote(source, *args, **kwargs):
@@ -190,7 +201,7 @@
190 classname)201 classname)
191 plugin_list.append(handler_class())202 plugin_list.append(handler_class())
192 except NotImplementedError:203 except NotImplementedError:
193 # Skip missing plugins so that they can be ommitted from204 # Skip missing plugins so that they can be omitted from
194 # installation if desired205 # installation if desired
195 log("FetchHandler {} not found, skipping plugin".format(206 log("FetchHandler {} not found, skipping plugin".format(
196 handler_name))207 handler_name))
197208
=== modified file 'hooks/charmhelpers/fetch/archiveurl.py'
--- hooks/charmhelpers/fetch/archiveurl.py 2016-12-20 14:35:00 +0000
+++ hooks/charmhelpers/fetch/archiveurl.py 2023-06-30 13:58:42 +0000
@@ -12,6 +12,7 @@
12# See the License for the specific language governing permissions and12# See the License for the specific language governing permissions and
13# limitations under the License.13# limitations under the License.
1414
15import contextlib
15import os16import os
16import hashlib17import hashlib
17import re18import re
@@ -24,28 +25,21 @@
24 get_archive_handler,25 get_archive_handler,
25 extract,26 extract,
26)27)
28from charmhelpers.core.hookenv import (
29 env_proxy_settings,
30)
27from charmhelpers.core.host import mkdir, check_hash31from charmhelpers.core.host import mkdir, check_hash
2832
29import six33from urllib.request import (
30if six.PY3:34 build_opener, install_opener, urlopen, urlretrieve,
31 from urllib.request import (35 HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler,
32 build_opener, install_opener, urlopen, urlretrieve,36 ProxyHandler
33 HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler,37)
34 )38from urllib.parse import urlparse, urlunparse, parse_qs
35 from urllib.parse import urlparse, urlunparse, parse_qs39from urllib.error import URLError
36 from urllib.error import URLError
37else:
38 from urllib import urlretrieve
39 from urllib2 import (
40 build_opener, install_opener, urlopen,
41 HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler,
42 URLError
43 )
44 from urlparse import urlparse, urlunparse, parse_qs
4540
4641
47def splituser(host):42def splituser(host):
48 '''urllib.splituser(), but six's support of this seems broken'''
49 _userprog = re.compile('^(.*)@(.*)$')43 _userprog = re.compile('^(.*)@(.*)$')
50 match = _userprog.match(host)44 match = _userprog.match(host)
51 if match:45 if match:
@@ -54,7 +48,6 @@
5448
5549
56def splitpasswd(user):50def splitpasswd(user):
57 '''urllib.splitpasswd(), but six's support of this is missing'''
58 _passwdprog = re.compile('^([^:]*):(.*)$', re.S)51 _passwdprog = re.compile('^([^:]*):(.*)$', re.S)
59 match = _passwdprog.match(user)52 match = _passwdprog.match(user)
60 if match:53 if match:
@@ -62,6 +55,20 @@
62 return user, None55 return user, None
6356
6457
58@contextlib.contextmanager
59def proxy_env():
60 """
61 Creates a context which temporarily modifies the proxy settings in os.environ.
62 """
63 restore = {**os.environ} # Copy the current os.environ
64 juju_proxies = env_proxy_settings() or {}
65 os.environ.update(**juju_proxies) # Insert or Update the os.environ
66 yield os.environ
67 for key in juju_proxies:
68 del os.environ[key] # remove any keys which were added or updated
69 os.environ.update(**restore) # restore any original values
70
71
65class ArchiveUrlFetchHandler(BaseFetchHandler):72class ArchiveUrlFetchHandler(BaseFetchHandler):
66 """73 """
67 Handler to download archive files from arbitrary URLs.74 Handler to download archive files from arbitrary URLs.
@@ -89,9 +96,10 @@
89 :param str source: URL pointing to an archive file.96 :param str source: URL pointing to an archive file.
90 :param str dest: Local path location to download archive file to.97 :param str dest: Local path location to download archive file to.
91 """98 """
92 # propogate all exceptions99 # propagate all exceptions
93 # URLError, OSError, etc100 # URLError, OSError, etc
94 proto, netloc, path, params, query, fragment = urlparse(source)101 proto, netloc, path, params, query, fragment = urlparse(source)
102 handlers = []
95 if proto in ('http', 'https'):103 if proto in ('http', 'https'):
96 auth, barehost = splituser(netloc)104 auth, barehost = splituser(netloc)
97 if auth is not None:105 if auth is not None:
@@ -101,10 +109,13 @@
101 # Realm is set to None in add_password to force the username and password109 # Realm is set to None in add_password to force the username and password
102 # to be used whatever the realm110 # to be used whatever the realm
103 passman.add_password(None, source, username, password)111 passman.add_password(None, source, username, password)
104 authhandler = HTTPBasicAuthHandler(passman)112 handlers.append(HTTPBasicAuthHandler(passman))
105 opener = build_opener(authhandler)113
106 install_opener(opener)114 with proxy_env():
107 response = urlopen(source)115 handlers.append(ProxyHandler())
116 opener = build_opener(*handlers)
117 install_opener(opener)
118 response = urlopen(source)
108 try:119 try:
109 with open(dest, 'wb') as dest_file:120 with open(dest, 'wb') as dest_file:
110 dest_file.write(response.read())121 dest_file.write(response.read())
@@ -150,10 +161,7 @@
150 raise UnhandledSource(e.strerror)161 raise UnhandledSource(e.strerror)
151 options = parse_qs(url_parts.fragment)162 options = parse_qs(url_parts.fragment)
152 for key, value in options.items():163 for key, value in options.items():
153 if not six.PY3:164 algorithms = hashlib.algorithms_available
154 algorithms = hashlib.algorithms
155 else:
156 algorithms = hashlib.algorithms_available
157 if key in algorithms:165 if key in algorithms:
158 if len(value) != 1:166 if len(value) != 1:
159 raise TypeError(167 raise TypeError(
160168
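A sketch of the archive fetcher, whose download step now also picks up Juju model proxy settings via proxy_env(); the URL is illustrative:

    from charmhelpers.fetch.archiveurl import ArchiveUrlFetchHandler

    handler = ArchiveUrlFetchHandler()
    # Basic-auth credentials embedded in the URL and any configured proxies
    # are handled transparently.
    handler.download('https://user:secret@example.com/plugins.tar.gz',
                     '/tmp/plugins.tar.gz')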
=== modified file 'hooks/charmhelpers/fetch/bzrurl.py'
--- hooks/charmhelpers/fetch/bzrurl.py 2016-12-20 14:35:00 +0000
+++ hooks/charmhelpers/fetch/bzrurl.py 2023-06-30 13:58:42 +0000
@@ -13,7 +13,7 @@
13# limitations under the License.13# limitations under the License.
1414
15import os15import os
16from subprocess import check_call16from subprocess import STDOUT, check_output
17from charmhelpers.fetch import (17from charmhelpers.fetch import (
18 BaseFetchHandler,18 BaseFetchHandler,
19 UnhandledSource,19 UnhandledSource,
@@ -55,7 +55,7 @@
55 cmd = ['bzr', 'branch']55 cmd = ['bzr', 'branch']
56 cmd += cmd_opts56 cmd += cmd_opts
57 cmd += [source, dest]57 cmd += [source, dest]
58 check_call(cmd)58 check_output(cmd, stderr=STDOUT)
5959
60 def install(self, source, dest=None, revno=None):60 def install(self, source, dest=None, revno=None):
61 url_parts = self.parse_url(source)61 url_parts = self.parse_url(source)
6262
=== modified file 'hooks/charmhelpers/fetch/centos.py'
--- hooks/charmhelpers/fetch/centos.py 2016-12-20 20:15:28 +0000
+++ hooks/charmhelpers/fetch/centos.py 2023-06-30 13:58:42 +0000
@@ -15,7 +15,6 @@
15import subprocess15import subprocess
16import os16import os
17import time17import time
18import six
19import yum18import yum
2019
21from tempfile import NamedTemporaryFile20from tempfile import NamedTemporaryFile
@@ -42,7 +41,7 @@
42 if options is not None:41 if options is not None:
43 cmd.extend(options)42 cmd.extend(options)
44 cmd.append('install')43 cmd.append('install')
45 if isinstance(packages, six.string_types):44 if isinstance(packages, str):
46 cmd.append(packages)45 cmd.append(packages)
47 else:46 else:
48 cmd.extend(packages)47 cmd.extend(packages)
@@ -71,7 +70,7 @@
71def purge(packages, fatal=False):70def purge(packages, fatal=False):
72 """Purge one or more packages."""71 """Purge one or more packages."""
73 cmd = ['yum', '--assumeyes', 'remove']72 cmd = ['yum', '--assumeyes', 'remove']
74 if isinstance(packages, six.string_types):73 if isinstance(packages, str):
75 cmd.append(packages)74 cmd.append(packages)
76 else:75 else:
77 cmd.extend(packages)76 cmd.extend(packages)
@@ -83,7 +82,7 @@
83 """Search for a package."""82 """Search for a package."""
84 output = {}83 output = {}
85 cmd = ['yum', 'search']84 cmd = ['yum', 'search']
86 if isinstance(packages, six.string_types):85 if isinstance(packages, str):
87 cmd.append(packages)86 cmd.append(packages)
88 else:87 else:
89 cmd.extend(packages)88 cmd.extend(packages)
@@ -132,7 +131,7 @@
132 key_file.write(key)131 key_file.write(key)
133 key_file.flush()132 key_file.flush()
134 key_file.seek(0)133 key_file.seek(0)
135 subprocess.check_call(['rpm', '--import', key_file])134 subprocess.check_call(['rpm', '--import', key_file.name])
136 else:135 else:
137 subprocess.check_call(['rpm', '--import', key])136 subprocess.check_call(['rpm', '--import', key])
138137
139138
=== modified file 'hooks/charmhelpers/fetch/giturl.py'
--- hooks/charmhelpers/fetch/giturl.py 2016-12-20 14:35:00 +0000
+++ hooks/charmhelpers/fetch/giturl.py 2023-06-30 13:58:42 +0000
@@ -13,7 +13,7 @@
13# limitations under the License.13# limitations under the License.
1414
15import os15import os
16from subprocess import check_call, CalledProcessError16from subprocess import check_output, CalledProcessError, STDOUT
17from charmhelpers.fetch import (17from charmhelpers.fetch import (
18 BaseFetchHandler,18 BaseFetchHandler,
19 UnhandledSource,19 UnhandledSource,
@@ -50,7 +50,7 @@
50 cmd = ['git', 'clone', source, dest, '--branch', branch]50 cmd = ['git', 'clone', source, dest, '--branch', branch]
51 if depth:51 if depth:
52 cmd.extend(['--depth', depth])52 cmd.extend(['--depth', depth])
53 check_call(cmd)53 check_output(cmd, stderr=STDOUT)
5454
55 def install(self, source, branch="master", dest=None, depth=None):55 def install(self, source, branch="master", dest=None, depth=None):
56 url_parts = self.parse_url(source)56 url_parts = self.parse_url(source)
5757
=== added directory 'hooks/charmhelpers/fetch/python'
=== added file 'hooks/charmhelpers/fetch/python/__init__.py'
--- hooks/charmhelpers/fetch/python/__init__.py 1970-01-01 00:00:00 +0000
+++ hooks/charmhelpers/fetch/python/__init__.py 2023-06-30 13:58:42 +0000
@@ -0,0 +1,13 @@
1# Copyright 2014-2019 Canonical Limited.
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
014
=== added file 'hooks/charmhelpers/fetch/python/debug.py'
--- hooks/charmhelpers/fetch/python/debug.py 1970-01-01 00:00:00 +0000
+++ hooks/charmhelpers/fetch/python/debug.py 2023-06-30 13:58:42 +0000
@@ -0,0 +1,52 @@
1#!/usr/bin/env python
2# coding: utf-8
3
4# Copyright 2014-2015 Canonical Limited.
5#
6# Licensed under the Apache License, Version 2.0 (the "License");
7# you may not use this file except in compliance with the License.
8# You may obtain a copy of the License at
9#
10# http://www.apache.org/licenses/LICENSE-2.0
11#
12# Unless required by applicable law or agreed to in writing, software
13# distributed under the License is distributed on an "AS IS" BASIS,
14# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15# See the License for the specific language governing permissions and
16# limitations under the License.
17
18import atexit
19import sys
20
21from charmhelpers.fetch.python.rpdb import Rpdb
22from charmhelpers.core.hookenv import (
23 open_port,
24 close_port,
25 ERROR,
26 log
27)
28
29__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>"
30
31DEFAULT_ADDR = "0.0.0.0"
32DEFAULT_PORT = 4444
33
34
35def _error(message):
36 log(message, level=ERROR)
37
38
39def set_trace(addr=DEFAULT_ADDR, port=DEFAULT_PORT):
40 """
41 Set a trace point using the remote debugger
42 """
43 atexit.register(close_port, port)
44 try:
45 log("Starting a remote python debugger session on %s:%s" % (addr,
46 port))
47 open_port(port)
48 debugger = Rpdb(addr=addr, port=port)
49 debugger.set_trace(sys._getframe().f_back)
50 except Exception:
51 _error("Cannot start a remote debug session on %s:%s" % (addr,
52 port))
053
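A hedged sketch of the remote debugger: call set_trace() inside a hook, then attach to the opened port from another terminal (for example with nc or telnet):

    from charmhelpers.fetch.python.debug import set_trace

    def config_changed():
        # Blocks until a client connects on 0.0.0.0:4444; drive pdb over the
        # socket, e.g. `nc <unit-ip> 4444`.
        set_trace(port=4444)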
=== added file 'hooks/charmhelpers/fetch/python/packages.py'
--- hooks/charmhelpers/fetch/python/packages.py 1970-01-01 00:00:00 +0000
+++ hooks/charmhelpers/fetch/python/packages.py 2023-06-30 13:58:42 +0000
@@ -0,0 +1,148 @@
1#!/usr/bin/env python
2# coding: utf-8
3
4# Copyright 2014-2021 Canonical Limited.
5#
6# Licensed under the Apache License, Version 2.0 (the "License");
7# you may not use this file except in compliance with the License.
8# You may obtain a copy of the License at
9#
10# http://www.apache.org/licenses/LICENSE-2.0
11#
12# Unless required by applicable law or agreed to in writing, software
13# distributed under the License is distributed on an "AS IS" BASIS,
14# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15# See the License for the specific language governing permissions and
16# limitations under the License.
17
18import os
19import subprocess
20import sys
21
22from charmhelpers.fetch import apt_install, apt_update
23from charmhelpers.core.hookenv import charm_dir, log
24
25__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>"
26
27
28def pip_execute(*args, **kwargs):
29 """Overridden pip_execute() to stop sys.path being changed.
30
31 The act of importing main from the pip module seems to cause add wheels
32 from the /usr/share/python-wheels which are installed by various tools.
33 This function ensures that sys.path remains the same after the call is
34 executed.
35 """
36 try:
37 _path = sys.path
38 try:
39 from pip import main as _pip_execute
40 except ImportError:
41 apt_update()
42 apt_install('python3-pip')
43 from pip import main as _pip_execute
44 _pip_execute(*args, **kwargs)
45 finally:
46 sys.path = _path
47
48
49def parse_options(given, available):
50 """Given a set of options, check if available"""
51 for key, value in sorted(given.items()):
52 if not value:
53 continue
54 if key in available:
55 yield "--{0}={1}".format(key, value)
56
57
58def pip_install_requirements(requirements, constraints=None, **options):
59 """Install a requirements file.
60
61 :param constraints: Path to pip constraints file.
62 http://pip.readthedocs.org/en/stable/user_guide/#constraints-files
63 """
64 command = ["install"]
65
66 available_options = ('proxy', 'src', 'log', )
67 for option in parse_options(options, available_options):
68 command.append(option)
69
70 command.append("-r {0}".format(requirements))
71 if constraints:
72 command.append("-c {0}".format(constraints))
73 log("Installing from file: {} with constraints {} "
74 "and options: {}".format(requirements, constraints, command))
75 else:
76 log("Installing from file: {} with options: {}".format(requirements,
77 command))
78 pip_execute(command)
79
80
81def pip_install(package, fatal=False, upgrade=False, venv=None,
82 constraints=None, **options):
83 """Install a python package"""
84 if venv:
85 venv_python = os.path.join(venv, 'bin/pip')
86 command = [venv_python, "install"]
87 else:
88 command = ["install"]
89
90 available_options = ('proxy', 'src', 'log', 'index-url', )
91 for option in parse_options(options, available_options):
92 command.append(option)
93
94 if upgrade:
95 command.append('--upgrade')
96
97 if constraints:
98 command.extend(['-c', constraints])
99
100 if isinstance(package, list):
101 command.extend(package)
102 else:
103 command.append(package)
104
105 log("Installing {} package with options: {}".format(package,
106 command))
107 if venv:
108 subprocess.check_call(command)
109 else:
110 pip_execute(command)
111
112
113def pip_uninstall(package, **options):
114 """Uninstall a python package"""
115 command = ["uninstall", "-q", "-y"]
116
117 available_options = ('proxy', 'log', )
118 for option in parse_options(options, available_options):
119 command.append(option)
120
121 if isinstance(package, list):
122 command.extend(package)
123 else:
124 command.append(package)
125
126 log("Uninstalling {} package with options: {}".format(package,
127 command))
128 pip_execute(command)
129
130
131def pip_list():
132 """Returns the list of current python installed packages
133 """
134 return pip_execute(["list"])
135
136
137def pip_create_virtualenv(path=None):
138 """Create an isolated Python environment."""
139 apt_install(['python3-virtualenv', 'virtualenv'])
140 extra_flags = ['--python=python3']
141
142 if path:
143 venv_path = path
144 else:
145 venv_path = os.path.join(charm_dir(), 'venv')
146
147 if not os.path.exists(venv_path):
148 subprocess.check_call(['virtualenv', venv_path] + extra_flags)
0149
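A sketch of the python3-era pip helpers added here; package names and the venv path are illustrative:

    from charmhelpers.fetch.python.packages import (
        pip_create_virtualenv,
        pip_install,
    )

    pip_create_virtualenv('/srv/conn-check/venv')
    pip_install(['conn-check', 'conn-check-configs'],
                venv='/srv/conn-check/venv',
                upgrade=True)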
=== added file 'hooks/charmhelpers/fetch/python/rpdb.py'
--- hooks/charmhelpers/fetch/python/rpdb.py 1970-01-01 00:00:00 +0000
+++ hooks/charmhelpers/fetch/python/rpdb.py 2023-06-30 13:58:42 +0000
@@ -0,0 +1,56 @@
1# Copyright 2014-2015 Canonical Limited.
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14
15"""Remote Python Debugger (pdb wrapper)."""
16
17import pdb
18import socket
19import sys
20
21__author__ = "Bertrand Janin <b@janin.com>"
22__version__ = "0.1.3"
23
24
25class Rpdb(pdb.Pdb):
26
27 def __init__(self, addr="127.0.0.1", port=4444):
28 """Initialize the socket and initialize pdb."""
29
30 # Backup stdin and stdout before replacing them by the socket handle
31 self.old_stdout = sys.stdout
32 self.old_stdin = sys.stdin
33
34 # Open a 'reusable' socket to let the webapp reload on the same port
35 self.skt = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
36 self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True)
37 self.skt.bind((addr, port))
38 self.skt.listen(1)
39 (clientsocket, address) = self.skt.accept()
40 handle = clientsocket.makefile('rw')
41 pdb.Pdb.__init__(self, completekey='tab', stdin=handle, stdout=handle)
42 sys.stdout = sys.stdin = handle
43
44 def shutdown(self):
45 """Revert stdin and stdout, close the socket."""
46 sys.stdout = self.old_stdout
47 sys.stdin = self.old_stdin
48 self.skt.close()
49 self.set_continue()
50
51 def do_continue(self, arg):
52 """Stop all operation on ``continue``."""
53 self.shutdown()
54 return 1
55
56 do_EOF = do_quit = do_exit = do_c = do_cont = do_continue
057
=== added file 'hooks/charmhelpers/fetch/python/version.py'
--- hooks/charmhelpers/fetch/python/version.py 1970-01-01 00:00:00 +0000
+++ hooks/charmhelpers/fetch/python/version.py 2023-06-30 13:58:42 +0000
@@ -0,0 +1,32 @@
1#!/usr/bin/env python
2# coding: utf-8
3
4# Copyright 2014-2015 Canonical Limited.
5#
6# Licensed under the Apache License, Version 2.0 (the "License");
7# you may not use this file except in compliance with the License.
8# You may obtain a copy of the License at
9#
10# http://www.apache.org/licenses/LICENSE-2.0
11#
12# Unless required by applicable law or agreed to in writing, software
13# distributed under the License is distributed on an "AS IS" BASIS,
14# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15# See the License for the specific language governing permissions and
16# limitations under the License.
17
18import sys
19
20__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>"
21
22
23def current_version():
24 """Current system python version"""
25 return sys.version_info
26
27
28def current_version_string():
29 """Current system python version as string major.minor.micro"""
30 return "{0}.{1}.{2}".format(sys.version_info.major,
31 sys.version_info.minor,
32 sys.version_info.micro)
033
=== added file 'hooks/charmhelpers/fetch/snap.py'
--- hooks/charmhelpers/fetch/snap.py 1970-01-01 00:00:00 +0000
+++ hooks/charmhelpers/fetch/snap.py 2023-06-30 13:58:42 +0000
@@ -0,0 +1,150 @@
1# Copyright 2014-2021 Canonical Limited.
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14"""
15Charm helpers snap for classic charms.
16
17If writing reactive charms, use the snap layer:
18https://lists.ubuntu.com/archives/snapcraft/2016-September/001114.html
19"""
20import subprocess
21import os
22from time import sleep
23from charmhelpers.core.hookenv import log
24
25__author__ = 'Joseph Borg <joseph.borg@canonical.com>'
26
27# The return code for "couldn't acquire lock" in Snap
28# (hopefully this will be improved).
29SNAP_NO_LOCK = 1
30SNAP_NO_LOCK_RETRY_DELAY = 10 # Wait X seconds between Snap lock checks.
31SNAP_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times.
32SNAP_CHANNELS = [
33 'edge',
34 'beta',
35 'candidate',
36 'stable',
37]
38
39
40class CouldNotAcquireLockException(Exception):
41 pass
42
43
44class InvalidSnapChannel(Exception):
45 pass
46
47
48def _snap_exec(commands):
49 """
50 Execute snap commands.
51
52 :param commands: List commands
53 :return: Integer exit code
54 """
55 assert type(commands) == list
56
57 retry_count = 0
58 return_code = None
59
60 while return_code is None or return_code == SNAP_NO_LOCK:
61 try:
62 return_code = subprocess.check_call(['snap'] + commands,
63 env=os.environ)
64 except subprocess.CalledProcessError as e:
65 retry_count += 1

66 if retry_count > SNAP_NO_LOCK_RETRY_COUNT:
67 raise CouldNotAcquireLockException(
68 'Could not acquire lock after {} attempts'
69 .format(SNAP_NO_LOCK_RETRY_COUNT))
70 return_code = e.returncode
71 log('Snap failed to acquire lock, trying again in {} seconds.'
72 .format(SNAP_NO_LOCK_RETRY_DELAY), level='WARN')
73 sleep(SNAP_NO_LOCK_RETRY_DELAY)
74
75 return return_code
76
77
78def snap_install(packages, *flags):
79 """
80 Install a snap package.
81
82 :param packages: String or List String package name
83 :param flags: List String flags to pass to install command
84 :return: Integer return code from snap
85 """
86 if type(packages) is not list:
87 packages = [packages]
88
89 flags = list(flags)
90
91 message = 'Installing snap(s) "%s"' % ', '.join(packages)
92 if flags:
93 message += ' with option(s) "%s"' % ', '.join(flags)
94
95 log(message, level='INFO')
96 return _snap_exec(['install'] + flags + packages)
97
98
99def snap_remove(packages, *flags):
100 """
101 Remove a snap package.
102
103 :param packages: String or List String package name
104 :param flags: List String flags to pass to remove command
105 :return: Integer return code from snap
106 """
107 if type(packages) is not list:
108 packages = [packages]
109
110 flags = list(flags)
111
112 message = 'Removing snap(s) "%s"' % ', '.join(packages)
113 if flags:
114 message += ' with options "%s"' % ', '.join(flags)
115
116 log(message, level='INFO')
117 return _snap_exec(['remove'] + flags + packages)
118
119
120def snap_refresh(packages, *flags):
121 """
122 Refresh / Update snap package.
123
124 :param packages: String or List String package name
125 :param flags: List String flags to pass to refresh command
126 :return: Integer return code from snap
127 """
128 if type(packages) is not list:
129 packages = [packages]
130
131 flags = list(flags)
132
133 message = 'Refreshing snap(s) "%s"' % ', '.join(packages)
134 if flags:
135 message += ' with options "%s"' % ', '.join(flags)
136
137 log(message, level='INFO')
138 return _snap_exec(['refresh'] + flags + packages)
139
140
141def valid_snap_channel(channel):
142 """ Validate snap channel exists
143
144 :raises InvalidSnapChannel: When channel does not exist
145 :return: Boolean
146 """
147 if channel.lower() in SNAP_CHANNELS:
148 return True
149 else:
150 raise InvalidSnapChannel("Invalid Snap Channel: {}".format(channel))
0151
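For context, a short sketch of how a classic charm could drive the snap helpers added above; the snap name and channel are placeholders rather than anything this charm installs:

    # Placeholder snap and channel, shown only to illustrate the helpers above.
    from charmhelpers.fetch.snap import snap_install, valid_snap_channel

    channel = 'stable'
    valid_snap_channel(channel)  # raises InvalidSnapChannel for anything else
    # Flags are passed straight through to "snap install"; _snap_exec retries
    # up to SNAP_NO_LOCK_RETRY_COUNT times while the snap lock is held.
    snap_install('hello-world', '--channel={}'.format(channel))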
=== modified file 'hooks/charmhelpers/fetch/ubuntu.py'
--- hooks/charmhelpers/fetch/ubuntu.py 2016-12-20 20:15:28 +0000
+++ hooks/charmhelpers/fetch/ubuntu.py 2023-06-30 13:58:42 +0000
@@ -1,4 +1,4 @@
1# Copyright 2014-2015 Canonical Limited.1# Copyright 2014-2021 Canonical Limited.
2#2#
3# Licensed under the Apache License, Version 2.0 (the "License");3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.4# you may not use this file except in compliance with the License.
@@ -12,29 +12,49 @@
12# See the License for the specific language governing permissions and12# See the License for the specific language governing permissions and
13# limitations under the License.13# limitations under the License.
1414
15import os15from collections import OrderedDict
16import six16import platform
17import re
18import subprocess
19import sys
17import time20import time
18import subprocess21
1922from charmhelpers import deprecate
20from tempfile import NamedTemporaryFile23from charmhelpers.core.host import get_distrib_codename, get_system_env
21from charmhelpers.core.host import (24
22 lsb_release25from charmhelpers.core.hookenv import (
26 log,
27 DEBUG,
28 WARNING,
29 env_proxy_settings,
23)30)
24from charmhelpers.core.hookenv import log31from charmhelpers.fetch import SourceConfigError, GPGKeyError
25from charmhelpers.fetch import SourceConfigError32from charmhelpers.fetch import ubuntu_apt_pkg
2633
34PROPOSED_POCKET = (
35 "# Proposed\n"
36 "deb http://archive.ubuntu.com/ubuntu {}-proposed main universe "
37 "multiverse restricted\n")
38PROPOSED_PORTS_POCKET = (
39 "# Proposed\n"
40 "deb http://ports.ubuntu.com/ubuntu-ports {}-proposed main universe "
41 "multiverse restricted\n")
42# Only supports x86_64, ppc64le, aarch64 and s390x at the moment.
43ARCH_TO_PROPOSED_POCKET = {
44 'x86_64': PROPOSED_POCKET,
45 'ppc64le': PROPOSED_PORTS_POCKET,
46 'aarch64': PROPOSED_PORTS_POCKET,
47 's390x': PROPOSED_PORTS_POCKET,
48}
49CLOUD_ARCHIVE_URL = "http://ubuntu-cloud.archive.canonical.com/ubuntu"
50CLOUD_ARCHIVE_KEY_ID = '5EDB1B62EC4926EA'
27CLOUD_ARCHIVE = """# Ubuntu Cloud Archive51CLOUD_ARCHIVE = """# Ubuntu Cloud Archive
28deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main52deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main
29"""53"""
30
31PROPOSED_POCKET = """# Proposed
32deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted
33"""
34
35CLOUD_ARCHIVE_POCKETS = {54CLOUD_ARCHIVE_POCKETS = {
36 # Folsom55 # Folsom
37 'folsom': 'precise-updates/folsom',56 'folsom': 'precise-updates/folsom',
57 'folsom/updates': 'precise-updates/folsom',
38 'precise-folsom': 'precise-updates/folsom',58 'precise-folsom': 'precise-updates/folsom',
39 'precise-folsom/updates': 'precise-updates/folsom',59 'precise-folsom/updates': 'precise-updates/folsom',
40 'precise-updates/folsom': 'precise-updates/folsom',60 'precise-updates/folsom': 'precise-updates/folsom',
@@ -43,6 +63,7 @@
43 'precise-proposed/folsom': 'precise-proposed/folsom',63 'precise-proposed/folsom': 'precise-proposed/folsom',
44 # Grizzly64 # Grizzly
45 'grizzly': 'precise-updates/grizzly',65 'grizzly': 'precise-updates/grizzly',
66 'grizzly/updates': 'precise-updates/grizzly',
46 'precise-grizzly': 'precise-updates/grizzly',67 'precise-grizzly': 'precise-updates/grizzly',
47 'precise-grizzly/updates': 'precise-updates/grizzly',68 'precise-grizzly/updates': 'precise-updates/grizzly',
48 'precise-updates/grizzly': 'precise-updates/grizzly',69 'precise-updates/grizzly': 'precise-updates/grizzly',
@@ -51,6 +72,7 @@
51 'precise-proposed/grizzly': 'precise-proposed/grizzly',72 'precise-proposed/grizzly': 'precise-proposed/grizzly',
52 # Havana73 # Havana
53 'havana': 'precise-updates/havana',74 'havana': 'precise-updates/havana',
75 'havana/updates': 'precise-updates/havana',
54 'precise-havana': 'precise-updates/havana',76 'precise-havana': 'precise-updates/havana',
55 'precise-havana/updates': 'precise-updates/havana',77 'precise-havana/updates': 'precise-updates/havana',
56 'precise-updates/havana': 'precise-updates/havana',78 'precise-updates/havana': 'precise-updates/havana',
@@ -59,6 +81,7 @@
59 'precise-proposed/havana': 'precise-proposed/havana',81 'precise-proposed/havana': 'precise-proposed/havana',
60 # Icehouse82 # Icehouse
61 'icehouse': 'precise-updates/icehouse',83 'icehouse': 'precise-updates/icehouse',
84 'icehouse/updates': 'precise-updates/icehouse',
62 'precise-icehouse': 'precise-updates/icehouse',85 'precise-icehouse': 'precise-updates/icehouse',
63 'precise-icehouse/updates': 'precise-updates/icehouse',86 'precise-icehouse/updates': 'precise-updates/icehouse',
64 'precise-updates/icehouse': 'precise-updates/icehouse',87 'precise-updates/icehouse': 'precise-updates/icehouse',
@@ -67,6 +90,7 @@
67 'precise-proposed/icehouse': 'precise-proposed/icehouse',90 'precise-proposed/icehouse': 'precise-proposed/icehouse',
68 # Juno91 # Juno
69 'juno': 'trusty-updates/juno',92 'juno': 'trusty-updates/juno',
93 'juno/updates': 'trusty-updates/juno',
70 'trusty-juno': 'trusty-updates/juno',94 'trusty-juno': 'trusty-updates/juno',
71 'trusty-juno/updates': 'trusty-updates/juno',95 'trusty-juno/updates': 'trusty-updates/juno',
72 'trusty-updates/juno': 'trusty-updates/juno',96 'trusty-updates/juno': 'trusty-updates/juno',
@@ -75,6 +99,7 @@
75 'trusty-proposed/juno': 'trusty-proposed/juno',99 'trusty-proposed/juno': 'trusty-proposed/juno',
76 # Kilo100 # Kilo
77 'kilo': 'trusty-updates/kilo',101 'kilo': 'trusty-updates/kilo',
102 'kilo/updates': 'trusty-updates/kilo',
78 'trusty-kilo': 'trusty-updates/kilo',103 'trusty-kilo': 'trusty-updates/kilo',
79 'trusty-kilo/updates': 'trusty-updates/kilo',104 'trusty-kilo/updates': 'trusty-updates/kilo',
80 'trusty-updates/kilo': 'trusty-updates/kilo',105 'trusty-updates/kilo': 'trusty-updates/kilo',
@@ -83,6 +108,7 @@
83 'trusty-proposed/kilo': 'trusty-proposed/kilo',108 'trusty-proposed/kilo': 'trusty-proposed/kilo',
84 # Liberty109 # Liberty
85 'liberty': 'trusty-updates/liberty',110 'liberty': 'trusty-updates/liberty',
111 'liberty/updates': 'trusty-updates/liberty',
86 'trusty-liberty': 'trusty-updates/liberty',112 'trusty-liberty': 'trusty-updates/liberty',
87 'trusty-liberty/updates': 'trusty-updates/liberty',113 'trusty-liberty/updates': 'trusty-updates/liberty',
88 'trusty-updates/liberty': 'trusty-updates/liberty',114 'trusty-updates/liberty': 'trusty-updates/liberty',
@@ -91,6 +117,7 @@
91 'trusty-proposed/liberty': 'trusty-proposed/liberty',117 'trusty-proposed/liberty': 'trusty-proposed/liberty',
92 # Mitaka118 # Mitaka
93 'mitaka': 'trusty-updates/mitaka',119 'mitaka': 'trusty-updates/mitaka',
120 'mitaka/updates': 'trusty-updates/mitaka',
94 'trusty-mitaka': 'trusty-updates/mitaka',121 'trusty-mitaka': 'trusty-updates/mitaka',
95 'trusty-mitaka/updates': 'trusty-updates/mitaka',122 'trusty-mitaka/updates': 'trusty-updates/mitaka',
96 'trusty-updates/mitaka': 'trusty-updates/mitaka',123 'trusty-updates/mitaka': 'trusty-updates/mitaka',
@@ -99,6 +126,7 @@
99 'trusty-proposed/mitaka': 'trusty-proposed/mitaka',126 'trusty-proposed/mitaka': 'trusty-proposed/mitaka',
100 # Newton127 # Newton
101 'newton': 'xenial-updates/newton',128 'newton': 'xenial-updates/newton',
129 'newton/updates': 'xenial-updates/newton',
102 'xenial-newton': 'xenial-updates/newton',130 'xenial-newton': 'xenial-updates/newton',
103 'xenial-newton/updates': 'xenial-updates/newton',131 'xenial-newton/updates': 'xenial-updates/newton',
104 'xenial-updates/newton': 'xenial-updates/newton',132 'xenial-updates/newton': 'xenial-updates/newton',
@@ -107,17 +135,175 @@
107 'xenial-proposed/newton': 'xenial-proposed/newton',135 'xenial-proposed/newton': 'xenial-proposed/newton',
108 # Ocata136 # Ocata
109 'ocata': 'xenial-updates/ocata',137 'ocata': 'xenial-updates/ocata',
138 'ocata/updates': 'xenial-updates/ocata',
110 'xenial-ocata': 'xenial-updates/ocata',139 'xenial-ocata': 'xenial-updates/ocata',
111 'xenial-ocata/updates': 'xenial-updates/ocata',140 'xenial-ocata/updates': 'xenial-updates/ocata',
112 'xenial-updates/ocata': 'xenial-updates/ocata',141 'xenial-updates/ocata': 'xenial-updates/ocata',
113 'ocata/proposed': 'xenial-proposed/ocata',142 'ocata/proposed': 'xenial-proposed/ocata',
114 'xenial-ocata/proposed': 'xenial-proposed/ocata',143 'xenial-ocata/proposed': 'xenial-proposed/ocata',
115 'xenial-ocata/newton': 'xenial-proposed/ocata',144 'xenial-proposed/ocata': 'xenial-proposed/ocata',
145 # Pike
146 'pike': 'xenial-updates/pike',
147 'xenial-pike': 'xenial-updates/pike',
148 'xenial-pike/updates': 'xenial-updates/pike',
149 'xenial-updates/pike': 'xenial-updates/pike',
150 'pike/proposed': 'xenial-proposed/pike',
151 'xenial-pike/proposed': 'xenial-proposed/pike',
152 'xenial-proposed/pike': 'xenial-proposed/pike',
153 # Queens
154 'queens': 'xenial-updates/queens',
155 'xenial-queens': 'xenial-updates/queens',
156 'xenial-queens/updates': 'xenial-updates/queens',
157 'xenial-updates/queens': 'xenial-updates/queens',
158 'queens/proposed': 'xenial-proposed/queens',
159 'xenial-queens/proposed': 'xenial-proposed/queens',
160 'xenial-proposed/queens': 'xenial-proposed/queens',
161 # Rocky
162 'rocky': 'bionic-updates/rocky',
163 'bionic-rocky': 'bionic-updates/rocky',
164 'bionic-rocky/updates': 'bionic-updates/rocky',
165 'bionic-updates/rocky': 'bionic-updates/rocky',
166 'rocky/proposed': 'bionic-proposed/rocky',
167 'bionic-rocky/proposed': 'bionic-proposed/rocky',
168 'bionic-proposed/rocky': 'bionic-proposed/rocky',
169 # Stein
170 'stein': 'bionic-updates/stein',
171 'bionic-stein': 'bionic-updates/stein',
172 'bionic-stein/updates': 'bionic-updates/stein',
173 'bionic-updates/stein': 'bionic-updates/stein',
174 'stein/proposed': 'bionic-proposed/stein',
175 'bionic-stein/proposed': 'bionic-proposed/stein',
176 'bionic-proposed/stein': 'bionic-proposed/stein',
177 # Train
178 'train': 'bionic-updates/train',
179 'bionic-train': 'bionic-updates/train',
180 'bionic-train/updates': 'bionic-updates/train',
181 'bionic-updates/train': 'bionic-updates/train',
182 'train/proposed': 'bionic-proposed/train',
183 'bionic-train/proposed': 'bionic-proposed/train',
184 'bionic-proposed/train': 'bionic-proposed/train',
185 # Ussuri
186 'ussuri': 'bionic-updates/ussuri',
187 'bionic-ussuri': 'bionic-updates/ussuri',
188 'bionic-ussuri/updates': 'bionic-updates/ussuri',
189 'bionic-updates/ussuri': 'bionic-updates/ussuri',
190 'ussuri/proposed': 'bionic-proposed/ussuri',
191 'bionic-ussuri/proposed': 'bionic-proposed/ussuri',
192 'bionic-proposed/ussuri': 'bionic-proposed/ussuri',
193 # Victoria
194 'victoria': 'focal-updates/victoria',
195 'focal-victoria': 'focal-updates/victoria',
196 'focal-victoria/updates': 'focal-updates/victoria',
197 'focal-updates/victoria': 'focal-updates/victoria',
198 'victoria/proposed': 'focal-proposed/victoria',
199 'focal-victoria/proposed': 'focal-proposed/victoria',
200 'focal-proposed/victoria': 'focal-proposed/victoria',
201 # Wallaby
202 'wallaby': 'focal-updates/wallaby',
203 'focal-wallaby': 'focal-updates/wallaby',
204 'focal-wallaby/updates': 'focal-updates/wallaby',
205 'focal-updates/wallaby': 'focal-updates/wallaby',
206 'wallaby/proposed': 'focal-proposed/wallaby',
207 'focal-wallaby/proposed': 'focal-proposed/wallaby',
208 'focal-proposed/wallaby': 'focal-proposed/wallaby',
209 # Xena
210 'xena': 'focal-updates/xena',
211 'focal-xena': 'focal-updates/xena',
212 'focal-xena/updates': 'focal-updates/xena',
213 'focal-updates/xena': 'focal-updates/xena',
214 'xena/proposed': 'focal-proposed/xena',
215 'focal-xena/proposed': 'focal-proposed/xena',
216 'focal-proposed/xena': 'focal-proposed/xena',
217 # Yoga
218 'yoga': 'focal-updates/yoga',
219 'focal-yoga': 'focal-updates/yoga',
220 'focal-yoga/updates': 'focal-updates/yoga',
221 'focal-updates/yoga': 'focal-updates/yoga',
222 'yoga/proposed': 'focal-proposed/yoga',
223 'focal-yoga/proposed': 'focal-proposed/yoga',
224 'focal-proposed/yoga': 'focal-proposed/yoga',
225 # Zed
226 'zed': 'jammy-updates/zed',
227 'jammy-zed': 'jammy-updates/zed',
228 'jammy-zed/updates': 'jammy-updates/zed',
229 'jammy-updates/zed': 'jammy-updates/zed',
230 'zed/proposed': 'jammy-proposed/zed',
231 'jammy-zed/proposed': 'jammy-proposed/zed',
232 'jammy-proposed/zed': 'jammy-proposed/zed',
233 # antelope
234 'antelope': 'jammy-updates/antelope',
235 'jammy-antelope': 'jammy-updates/antelope',
236 'jammy-antelope/updates': 'jammy-updates/antelope',
237 'jammy-updates/antelope': 'jammy-updates/antelope',
238 'antelope/proposed': 'jammy-proposed/antelope',
239 'jammy-antelope/proposed': 'jammy-proposed/antelope',
240 'jammy-proposed/antelope': 'jammy-proposed/antelope',
241
242 # OVN
243 'focal-ovn-22.03': 'focal-updates/ovn-22.03',
244 'focal-ovn-22.03/proposed': 'focal-proposed/ovn-22.03',
116}245}
117246
247
248OPENSTACK_RELEASES = (
249 'diablo',
250 'essex',
251 'folsom',
252 'grizzly',
253 'havana',
254 'icehouse',
255 'juno',
256 'kilo',
257 'liberty',
258 'mitaka',
259 'newton',
260 'ocata',
261 'pike',
262 'queens',
263 'rocky',
264 'stein',
265 'train',
266 'ussuri',
267 'victoria',
268 'wallaby',
269 'xena',
270 'yoga',
271 'zed',
272 'antelope',
273)
274
275
276UBUNTU_OPENSTACK_RELEASE = OrderedDict([
277 ('oneiric', 'diablo'),
278 ('precise', 'essex'),
279 ('quantal', 'folsom'),
280 ('raring', 'grizzly'),
281 ('saucy', 'havana'),
282 ('trusty', 'icehouse'),
283 ('utopic', 'juno'),
284 ('vivid', 'kilo'),
285 ('wily', 'liberty'),
286 ('xenial', 'mitaka'),
287 ('yakkety', 'newton'),
288 ('zesty', 'ocata'),
289 ('artful', 'pike'),
290 ('bionic', 'queens'),
291 ('cosmic', 'rocky'),
292 ('disco', 'stein'),
293 ('eoan', 'train'),
294 ('focal', 'ussuri'),
295 ('groovy', 'victoria'),
296 ('hirsute', 'wallaby'),
297 ('impish', 'xena'),
298 ('jammy', 'yoga'),
299 ('kinetic', 'zed'),
300 ('lunar', 'antelope'),
301])
302
303
118APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT.304APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT.
119APT_NO_LOCK_RETRY_DELAY = 10 # Wait 10 seconds between apt lock checks.305CMD_RETRY_DELAY = 10 # Wait 10 seconds between command retries.
120APT_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times.306CMD_RETRY_COUNT = 10 # Retry a failing fatal command X times.
121307
122308
123def filter_installed_packages(packages):309def filter_installed_packages(packages):
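To make the new lookup tables above concrete: on a focal unit a Cloud Archive source spec resolves like this (illustrative lookups only; add_source() further below performs the same resolution with validation):

    # Illustrative lookups against CLOUD_ARCHIVE_POCKETS / UBUNTU_OPENSTACK_RELEASE.
    from charmhelpers.fetch.ubuntu import (
        CLOUD_ARCHIVE_POCKETS,
        UBUNTU_OPENSTACK_RELEASE,
    )

    CLOUD_ARCHIVE_POCKETS['victoria']            # 'focal-updates/victoria'
    CLOUD_ARCHIVE_POCKETS['victoria/proposed']   # 'focal-proposed/victoria'
    UBUNTU_OPENSTACK_RELEASE['focal']            # 'ussuri' (the series default)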
@@ -135,35 +321,93 @@
135 return _pkgs321 return _pkgs
136322
137323
138def apt_cache(in_memory=True, progress=None):324def filter_missing_packages(packages):
139 """Build and return an apt cache."""325 """Return a list of packages that are installed.
140 from apt import apt_pkg326
141 apt_pkg.init()327 :param packages: list of packages to evaluate.
142 if in_memory:328 :returns list: Packages that are installed.
143 apt_pkg.config.set("Dir::Cache::pkgcache", "")329 """
144 apt_pkg.config.set("Dir::Cache::srcpkgcache", "")330 return list(
145 return apt_pkg.Cache(progress)331 set(packages) -
146332 set(filter_installed_packages(packages))
147333 )
148def install(packages, options=None, fatal=False):334
149 """Install one or more packages."""335
336def apt_cache(*_, **__):
337 """Shim returning an object simulating the apt_pkg Cache.
338
339 :param _: Accept arguments for compatibility, not used.
340 :type _: any
341 :param __: Accept keyword arguments for compatibility, not used.
342 :type __: any
343 :returns: Object used to interrogate the system apt and dpkg databases.
344 :rtype: ubuntu_apt_pkg.Cache
345 """
346 if 'apt_pkg' in sys.modules:
347 # NOTE(fnordahl): When our consumer use the upstream ``apt_pkg`` module
348 # in conjunction with the apt_cache helper function, they may expect us
349 # to call ``apt_pkg.init()`` for them.
350 #
351 # Detect this situation, log a warning and make the call to
352 # ``apt_pkg.init()`` to keep the consumer's Python interpreter from
353 # crashing with a segmentation fault.
354 @deprecate(
355 'Support for use of upstream ``apt_pkg`` module in conjunction '
356 'with charm-helpers is deprecated since 2019-06-25',
357 date=None, log=lambda x: log(x, level=WARNING))
358 def one_shot_log():
359 pass
360
361 one_shot_log()
362 sys.modules['apt_pkg'].init()
363 return ubuntu_apt_pkg.Cache()
364
365
366def apt_install(packages, options=None, fatal=False, quiet=False):
367 """Install one or more packages.
368
369 :param packages: Package(s) to install
370 :type packages: Option[str, List[str]]
371 :param options: Options to pass on to apt-get
372 :type options: Option[None, List[str]]
373 :param fatal: Whether the command's output should be checked and
374 retried.
375 :type fatal: bool
376 :param quiet: if True, suppress log messages to stdout/stderr (default: False)
377 :type quiet: bool
378 :raises: subprocess.CalledProcessError
379 """
380 if not packages:
381 log("Nothing to install", level=DEBUG)
382 return
150 if options is None:383 if options is None:
151 options = ['--option=Dpkg::Options::=--force-confold']384 options = ['--option=Dpkg::Options::=--force-confold']
152385
153 cmd = ['apt-get', '--assume-yes']386 cmd = ['apt-get', '--assume-yes']
154 cmd.extend(options)387 cmd.extend(options)
155 cmd.append('install')388 cmd.append('install')
156 if isinstance(packages, six.string_types):389 if isinstance(packages, str):
157 cmd.append(packages)390 cmd.append(packages)
158 else:391 else:
159 cmd.extend(packages)392 cmd.extend(packages)
160 log("Installing {} with options: {}".format(packages,393 if not quiet:
161 options))394 log("Installing {} with options: {}"
162 _run_apt_command(cmd, fatal)395 .format(packages, options))
163396 _run_apt_command(cmd, fatal, quiet=quiet)
164397
165def upgrade(options=None, fatal=False, dist=False):398
166 """Upgrade all packages."""399def apt_upgrade(options=None, fatal=False, dist=False):
400 """Upgrade all packages.
401
402 :param options: Options to pass on to apt-get
403 :type options: Option[None, List[str]]
404 :param fatal: Whether the command's output should be checked and
405 retried.
406 :type fatal: bool
407 :param dist: Whether ``dist-upgrade`` should be used over ``upgrade``
408 :type dist: bool
409 :raises: subprocess.CalledProcessError
410 """
167 if options is None:411 if options is None:
168 options = ['--option=Dpkg::Options::=--force-confold']412 options = ['--option=Dpkg::Options::=--force-confold']
169413
@@ -177,16 +421,24 @@
177 _run_apt_command(cmd, fatal)421 _run_apt_command(cmd, fatal)
178422
179423
180def update(fatal=False):424def apt_update(fatal=False):
181 """Update local apt cache."""425 """Update local apt cache."""
182 cmd = ['apt-get', 'update']426 cmd = ['apt-get', 'update']
183 _run_apt_command(cmd, fatal)427 _run_apt_command(cmd, fatal)
184428
185429
186def purge(packages, fatal=False):430def apt_purge(packages, fatal=False):
187 """Purge one or more packages."""431 """Purge one or more packages.
432
433 :param packages: Package(s) to purge
434 :type packages: Option[str, List[str]]
435 :param fatal: Whether the command's output should be checked and
436 retried.
437 :type fatal: bool
438 :raises: subprocess.CalledProcessError
439 """
188 cmd = ['apt-get', '--assume-yes', 'purge']440 cmd = ['apt-get', '--assume-yes', 'purge']
189 if isinstance(packages, six.string_types):441 if isinstance(packages, str):
190 cmd.append(packages)442 cmd.append(packages)
191 else:443 else:
192 cmd.extend(packages)444 cmd.extend(packages)
@@ -194,11 +446,26 @@
194 _run_apt_command(cmd, fatal)446 _run_apt_command(cmd, fatal)
195447
196448
449def apt_autoremove(purge=True, fatal=False):
450 """Remove packages that are no longer required (apt-get autoremove).
451 :param purge: Whether the ``--purge`` option should be passed on or not.
452 :type purge: bool
453 :param fatal: Whether the command's output should be checked and
454 retried.
455 :type fatal: bool
456 :raises: subprocess.CalledProcessError
457 """
458 cmd = ['apt-get', '--assume-yes', 'autoremove']
459 if purge:
460 cmd.append('--purge')
461 _run_apt_command(cmd, fatal)
462
463
197def apt_mark(packages, mark, fatal=False):464def apt_mark(packages, mark, fatal=False):
198 """Flag one or more packages using apt-mark."""465 """Flag one or more packages using apt-mark."""
199 log("Marking {} as {}".format(packages, mark))466 log("Marking {} as {}".format(packages, mark))
200 cmd = ['apt-mark', mark]467 cmd = ['apt-mark', mark]
201 if isinstance(packages, six.string_types):468 if isinstance(packages, str):
202 cmd.append(packages)469 cmd.append(packages)
203 else:470 else:
204 cmd.extend(packages)471 cmd.extend(packages)
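Taken together, the renamed helpers above (apt_update, apt_install, apt_purge, apt_autoremove, apt_mark) are typically called from a hook in this order; the package name below is a placeholder:

    # Placeholder package name, illustrating the renamed apt_* helpers above.
    from charmhelpers.fetch.ubuntu import apt_update, apt_install, apt_mark

    apt_update(fatal=True)
    apt_install(['nagios-nrpe-server'], fatal=True)
    apt_mark('nagios-nrpe-server', 'hold')  # pin the package via apt-mark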
@@ -217,7 +484,154 @@
217 return apt_mark(packages, 'unhold', fatal=fatal)484 return apt_mark(packages, 'unhold', fatal=fatal)
218485
219486
220def add_source(source, key=None):487def import_key(key):
488 """Import an ASCII Armor key.
489
490 A Radix64 format keyid is also supported for backwards
491 compatibility. In this case Ubuntu keyserver will be
492 queried for a key via HTTPS by its keyid. This method
493 is less preferable because https proxy servers may
494 require traffic decryption which is equivalent to a
495 man-in-the-middle attack (a proxy server impersonates
496 keyserver TLS certificates and has to be explicitly
497 trusted by the system).
498
499 :param key: A GPG key in ASCII armor format,
500 including BEGIN and END markers or a keyid.
501 :type key: (bytes, str)
502 :raises: GPGKeyError if the key could not be imported
503 """
504 key = key.strip()
505 if '-' in key or '\n' in key:
506 # Send everything not obviously a keyid to GPG to import, as
507 # we trust its validation better than our own. eg. handling
508 # comments before the key.
509 log("PGP key found (looks like ASCII Armor format)", level=DEBUG)
510 if ('-----BEGIN PGP PUBLIC KEY BLOCK-----' in key and
511 '-----END PGP PUBLIC KEY BLOCK-----' in key):
512 log("Writing provided PGP key in the binary format", level=DEBUG)
513 key_bytes = key.encode('utf-8')
514 key_name = _get_keyid_by_gpg_key(key_bytes)
515 key_gpg = _dearmor_gpg_key(key_bytes)
516 _write_apt_gpg_keyfile(key_name=key_name, key_material=key_gpg)
517 else:
518 raise GPGKeyError("ASCII armor markers missing from GPG key")
519 else:
520 log("PGP key found (looks like Radix64 format)", level=WARNING)
521 log("SECURELY importing PGP key from keyserver; "
522 "full key not provided.", level=WARNING)
523 # as of bionic add-apt-repository uses curl with an HTTPS keyserver URL
524 # to retrieve GPG keys. `apt-key adv` command is deprecated as is
525 # apt-key in general as noted in its manpage. See lp:1433761 for more
526 # history. Instead, /etc/apt/trusted.gpg.d is used directly to drop
527 # gpg key files.
528 key_asc = _get_key_by_keyid(key)
529 # write the key in GPG format so that apt-key list shows it
530 key_gpg = _dearmor_gpg_key(key_asc)
531 _write_apt_gpg_keyfile(key_name=key, key_material=key_gpg)
532
533
534def _get_keyid_by_gpg_key(key_material):
535 """Get a GPG key fingerprint by GPG key material.
536 Gets a GPG key fingerprint (40-digit, 160-bit) by the ASCII armor-encoded
537 or binary GPG key material. Can be used, for example, to generate file
538 names for keys passed via charm options.
539
540 :param key_material: ASCII armor-encoded or binary GPG key material
541 :type key_material: bytes
542 :raises: GPGKeyError if invalid key material has been provided
543 :returns: A GPG key fingerprint
544 :rtype: str
545 """
546 # Use the same gpg command for both Xenial and Bionic
547 cmd = 'gpg --with-colons --with-fingerprint'
548 ps = subprocess.Popen(cmd.split(),
549 stdout=subprocess.PIPE,
550 stderr=subprocess.PIPE,
551 stdin=subprocess.PIPE)
552 out, err = ps.communicate(input=key_material)
553 out = out.decode('utf-8')
554 err = err.decode('utf-8')
555 if 'gpg: no valid OpenPGP data found.' in err:
556 raise GPGKeyError('Invalid GPG key material provided')
557 # from gnupg2 docs: fpr :: Fingerprint (fingerprint is in field 10)
558 return re.search(r"^fpr:{9}([0-9A-F]{40}):$", out, re.MULTILINE).group(1)
559
560
561def _get_key_by_keyid(keyid):
562 """Get a key via HTTPS from the Ubuntu keyserver.
563 Different key ID formats are supported by SKS keyservers (the longer ones
564 are more secure, see "dead beef attack" and https://evil32.com/). Since
565 HTTPS is used, if SSLBump-like HTTPS proxies are in place, they will
566 impersonate keyserver.ubuntu.com and generate a certificate with
567 keyserver.ubuntu.com in the CN field or in SubjAltName fields of a
568 certificate. If such proxy behavior is expected it is necessary to add the
569 CA certificate chain containing the intermediate CA of the SSLBump proxy to
570 every machine that this code runs on via ca-certs cloud-init directive (via
571 cloudinit-userdata model-config) or via other means (such as through a
572 custom charm option). Also note that DNS resolution for the hostname in a
573 URL is done at a proxy server - not at the client side.
574
575 8-digit (32 bit) key ID
576 https://keyserver.ubuntu.com/pks/lookup?search=0x4652B4E6
577 16-digit (64 bit) key ID
578 https://keyserver.ubuntu.com/pks/lookup?search=0x6E85A86E4652B4E6
579 40-digit key ID:
580 https://keyserver.ubuntu.com/pks/lookup?search=0x35F77D63B5CEC106C577ED856E85A86E4652B4E6
581
582 :param keyid: An 8, 16 or 40 hex digit keyid to find a key for
583 :type keyid: (bytes, str)
584 :returns: A key material for the specified GPG key id
585 :rtype: (str, bytes)
586 :raises: subprocess.CalledProcessError
587 """
588 # options=mr - machine-readable output (disables html wrappers)
589 keyserver_url = ('https://keyserver.ubuntu.com'
590 '/pks/lookup?op=get&options=mr&exact=on&search=0x{}')
591 curl_cmd = ['curl', keyserver_url.format(keyid)]
592 # use proxy server settings in order to retrieve the key
593 return subprocess.check_output(curl_cmd,
594 env=env_proxy_settings(['https', 'no_proxy']))
595
596
597def _dearmor_gpg_key(key_asc):
598 """Converts a GPG key in the ASCII armor format to the binary format.
599
600 :param key_asc: A GPG key in ASCII armor format.
601 :type key_asc: (str, bytes)
602 :returns: A GPG key in binary format
603 :rtype: (str, bytes)
604 :raises: GPGKeyError
605 """
606 ps = subprocess.Popen(['gpg', '--dearmor'],
607 stdout=subprocess.PIPE,
608 stderr=subprocess.PIPE,
609 stdin=subprocess.PIPE)
610 out, err = ps.communicate(input=key_asc)
611 # no need to decode output as it is binary (invalid utf-8), only error
612 err = err.decode('utf-8')
613 if 'gpg: no valid OpenPGP data found.' in err:
614 raise GPGKeyError('Invalid GPG key material. Check your network setup'
615 ' (MTU, routing, DNS) and/or proxy server settings'
616 ' as well as destination keyserver status.')
617 else:
618 return out
619
620
621def _write_apt_gpg_keyfile(key_name, key_material):
622 """Writes GPG key material into a file at a provided path.
623
624 :param key_name: A key name to use for a key file (could be a fingerprint)
625 :type key_name: str
626 :param key_material: A GPG key material (binary)
627 :type key_material: (str, bytes)
628 """
629 with open('/etc/apt/trusted.gpg.d/{}.gpg'.format(key_name),
630 'wb') as keyf:
631 keyf.write(key_material)
632
633
634def add_source(source, key=None, fail_invalid=False):
221 """Add a package source to this system.635 """Add a package source to this system.
222636
223 @param source: a URL or sources.list entry, as supported by637 @param source: a URL or sources.list entry, as supported by
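The new import_key() above accepts two input forms; a compact sketch with placeholder material (the keyid reuses CLOUD_ARCHIVE_KEY_ID defined earlier in this file):

    # Placeholder inputs, illustrating both branches of import_key() above.
    from charmhelpers.fetch.ubuntu import import_key

    # 1. Full ASCII-armored key: dearmored and written to
    #    /etc/apt/trusted.gpg.d/<fingerprint>.gpg
    import_key('-----BEGIN PGP PUBLIC KEY BLOCK-----\n...\n'
               '-----END PGP PUBLIC KEY BLOCK-----')

    # 2. Bare keyid: fetched over HTTPS from keyserver.ubuntu.com, dearmored
    #    and written to /etc/apt/trusted.gpg.d/<keyid>.gpg
    import_key('5EDB1B62EC4926EA')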
@@ -233,95 +647,349 @@
233 such as 'cloud:icehouse'647 such as 'cloud:icehouse'
234 'distro' may be used as a noop648 'distro' may be used as a noop
235649
650 The full list of source specifications supported by the function is:
651
652 'distro': A NOP; i.e. it has no effect.
653 'proposed': the proposed deb spec [2] is written to
654 /etc/apt/sources.list/proposed
655 'distro-proposed': adds <version>-proposed to the debs [2]
656 'ppa:<ppa-name>': add-apt-repository --yes <ppa_name>
657 'deb <deb-spec>': add-apt-repository --yes deb <deb-spec>
658 'http://....': add-apt-repository --yes http://...
659 'cloud-archive:<spec>': add-apt-repository --yes cloud-archive:<spec>
660 'cloud:<release>[-staging]': specify a Cloud Archive pocket <release> with
661 optional staging version. If staging is used then the staging PPA [2]
662 will be used. If staging is NOT used then the cloud archive [3] will be
663 added, and the 'ubuntu-cloud-keyring' package will be added for the
664 current distro.
665 '<openstack-version>': translate to cloud:<release> based on the current
666 distro version (i.e. for 'ussuri' this will either be 'bionic-ussuri' or
667 'distro').
668 '<openstack-version>/proposed': as above, but for proposed.
669
670 Otherwise the source is not recognised and this is logged to the juju log.
671 However, no error is raised unless fail_invalid is True.
672
673 [1] deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main
674 where {} is replaced with the derived pocket name.
675 [2] deb http://archive.ubuntu.com/ubuntu {}-proposed \
676 main universe multiverse restricted
677 where {} is replaced with the lsb_release codename (e.g. xenial)
678 [3] deb http://ubuntu-cloud.archive.canonical.com/ubuntu <pocket>
679 to /etc/apt/sources.list.d/cloud-archive-list
680
236 @param key: A key to be added to the system's APT keyring and used681 @param key: A key to be added to the system's APT keyring and used
237 to verify the signatures on packages. Ideally, this should be an682 to verify the signatures on packages. Ideally, this should be an
238 ASCII format GPG public key including the block headers. A GPG key683 ASCII format GPG public key including the block headers. A GPG key
239 id may also be used, but be aware that only insecure protocols are684 id may also be used, but be aware that only insecure protocols are
240 available to retrieve the actual public key from a public keyserver685 available to retrieve the actual public key from a public keyserver
241 placing your Juju environment at risk. ppa and cloud archive keys686 placing your Juju environment at risk. ppa and cloud archive keys
242 are securely added automtically, so sould not be provided.687 are securely added automatically, so should not be provided.
688
689 @param fail_invalid: (boolean) if True, then the function raises a
690 SourceConfigError is there is no matching installation source.
691 SourceConfigError if there is no matching installation source.
692 @raises SourceConfigError() if for cloud:<pocket>, the <pocket> is not a
693 valid pocket in CLOUD_ARCHIVE_POCKETS
243 """694 """
695 # extract the OpenStack versions from the CLOUD_ARCHIVE_POCKETS; can't use
696 # the list in contrib.openstack.utils as it might not be included in
697 # classic charms and would break everything. Having OpenStack specific
698 # code in this file is a bit of an antipattern, anyway.
699 os_versions_regex = "({})".format("|".join(OPENSTACK_RELEASES))
700
701 _mapping = OrderedDict([
702 (r"^distro$", lambda: None), # This is a NOP
703 (r"^(?:proposed|distro-proposed)$", _add_proposed),
704 (r"^cloud-archive:(.*)$", _add_apt_repository),
705 (r"^((?:deb |http:|https:|ppa:).*)$", _add_apt_repository),
706 (r"^cloud:(.*)-(.*)\/staging$", _add_cloud_staging),
707 (r"^cloud:(.*)-(ovn-.*)$", _add_cloud_distro_check),
708 (r"^cloud:(.*)-(.*)$", _add_cloud_distro_check),
709 (r"^cloud:(.*)$", _add_cloud_pocket),
710 (r"^snap:.*-(.*)-(.*)$", _add_cloud_distro_check),
711 (r"^{}\/proposed$".format(os_versions_regex),
712 _add_bare_openstack_proposed),
713 (r"^{}$".format(os_versions_regex), _add_bare_openstack),
714 ])
244 if source is None:715 if source is None:
245 log('Source is not present. Skipping')
246 return
247
248 if (source.startswith('ppa:') or
249 source.startswith('http') or
250 source.startswith('deb ') or
The diff has been truncated for viewing.
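Although the rest of add_source() is truncated here, its docstring above lists the supported source forms; typical calls look like the following (source strings are examples only, not this charm's configuration):

    # Example source specs drawn from the add_source() docstring above.
    from charmhelpers.fetch.ubuntu import add_source, apt_update

    add_source('distro')                    # no-op
    add_source('ppa:some-team/some-ppa')    # hypothetical PPA
    add_source('cloud:xenial-ocata')        # Ubuntu Cloud Archive pocket
    add_source('proposed')                  # enable <series>-proposed
    apt_update(fatal=True)                  # refresh indexes afterwards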
