Merge lp:~verterok/charms/xenial/conn-check/focal into lp:~ubuntuone-hackers/charms/xenial/conn-check/focal
- Xenial Xerus (16.04)
- focal
- Merge into focal
Proposed by
Guillermo Gonzalez
Status: | Merged |
---|---|
Approved by: | Guillermo Gonzalez |
Approved revision: | 62 |
Merged at revision: | 58 |
Proposed branch: | lp:~verterok/charms/xenial/conn-check/focal |
Merge into: | lp:~ubuntuone-hackers/charms/xenial/conn-check/focal |
Diff against target: |
5838 lines (+3744/-563) 33 files modified
hooks/charmhelpers/__init__.py (+67/-19) hooks/charmhelpers/contrib/ansible/__init__.py (+153/-89) hooks/charmhelpers/contrib/charmsupport/nrpe.py (+187/-31) hooks/charmhelpers/contrib/templating/contexts.py (+7/-7) hooks/charmhelpers/core/decorators.py (+38/-0) hooks/charmhelpers/core/hookenv.py (+658/-59) hooks/charmhelpers/core/host.py (+655/-102) hooks/charmhelpers/core/host_factory/centos.py (+16/-0) hooks/charmhelpers/core/host_factory/ubuntu.py (+73/-5) hooks/charmhelpers/core/kernel.py (+2/-2) hooks/charmhelpers/core/services/base.py (+22/-10) hooks/charmhelpers/core/services/helpers.py (+2/-2) hooks/charmhelpers/core/strutils.py (+75/-14) hooks/charmhelpers/core/sysctl.py (+32/-11) hooks/charmhelpers/core/templating.py (+21/-17) hooks/charmhelpers/core/unitdata.py (+17/-9) hooks/charmhelpers/fetch/__init__.py (+29/-18) hooks/charmhelpers/fetch/archiveurl.py (+35/-27) hooks/charmhelpers/fetch/bzrurl.py (+2/-2) hooks/charmhelpers/fetch/centos.py (+4/-5) hooks/charmhelpers/fetch/giturl.py (+2/-2) hooks/charmhelpers/fetch/python/__init__.py (+13/-0) hooks/charmhelpers/fetch/python/debug.py (+52/-0) hooks/charmhelpers/fetch/python/packages.py (+148/-0) hooks/charmhelpers/fetch/python/rpdb.py (+56/-0) hooks/charmhelpers/fetch/python/version.py (+32/-0) hooks/charmhelpers/fetch/snap.py (+150/-0) hooks/charmhelpers/fetch/ubuntu.py (+822/-125) hooks/charmhelpers/fetch/ubuntu_apt_pkg.py (+327/-0) hooks/charmhelpers/osplatform.py (+32/-2) hooks/hooks.py (+1/-1) playbook.yaml (+13/-3) roles/nrpe-external-master/tasks/main.yaml (+1/-1) |
To merge this branch: | bzr merge lp:~verterok/charms/xenial/conn-check/focal |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
John Paraskevopoulos | Approve | ||
Review via email: mp+445757@code.launchpad.net |
Commit message
update charm to work in focal
Description of the change
Most of the changes come from an automated pull of a newer charmhelpers with Python 3.8 support. Please review the individual commits for easier reviewing.
To post a comment you must log in.
- 62. By Guillermo Gonzalez
-
use xenial for all distribution release comparisons
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'hooks/charmhelpers/__init__.py' | |||
2 | --- hooks/charmhelpers/__init__.py 2016-12-20 14:35:00 +0000 | |||
3 | +++ hooks/charmhelpers/__init__.py 2023-06-30 13:58:42 +0000 | |||
4 | @@ -14,23 +14,71 @@ | |||
5 | 14 | 14 | ||
6 | 15 | # Bootstrap charm-helpers, installing its dependencies if necessary using | 15 | # Bootstrap charm-helpers, installing its dependencies if necessary using |
7 | 16 | # only standard libraries. | 16 | # only standard libraries. |
8 | 17 | import functools | ||
9 | 18 | import inspect | ||
10 | 17 | import subprocess | 19 | import subprocess |
30 | 18 | import sys | 20 | |
31 | 19 | 21 | ||
32 | 20 | try: | 22 | try: |
33 | 21 | import six # flake8: noqa | 23 | import yaml # NOQA:F401 |
34 | 22 | except ImportError: | 24 | except ImportError: |
35 | 23 | if sys.version_info.major == 2: | 25 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml']) |
36 | 24 | subprocess.check_call(['apt-get', 'install', '-y', 'python-six']) | 26 | import yaml # NOQA:F401 |
37 | 25 | else: | 27 | |
38 | 26 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-six']) | 28 | |
39 | 27 | import six # flake8: noqa | 29 | # Holds a list of mapping of mangled function names that have been deprecated |
40 | 28 | 30 | # using the @deprecate decorator below. This is so that the warning is only | |
41 | 29 | try: | 31 | # printed once for each usage of the function. |
42 | 30 | import yaml # flake8: noqa | 32 | __deprecated_functions = {} |
43 | 31 | except ImportError: | 33 | |
44 | 32 | if sys.version_info.major == 2: | 34 | |
45 | 33 | subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml']) | 35 | def deprecate(warning, date=None, log=None): |
46 | 34 | else: | 36 | """Add a deprecation warning the first time the function is used. |
47 | 35 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml']) | 37 | |
48 | 36 | import yaml # flake8: noqa | 38 | The date which is a string in semi-ISO8660 format indicates the year-month |
49 | 39 | that the function is officially going to be removed. | ||
50 | 40 | |||
51 | 41 | usage: | ||
52 | 42 | |||
53 | 43 | @deprecate('use core/fetch/add_source() instead', '2017-04') | ||
54 | 44 | def contributed_add_source_thing(...): | ||
55 | 45 | ... | ||
56 | 46 | |||
57 | 47 | And it then prints to the log ONCE that the function is deprecated. | ||
58 | 48 | The reason for passing the logging function (log) is so that hookenv.log | ||
59 | 49 | can be used for a charm if needed. | ||
60 | 50 | |||
61 | 51 | :param warning: String to indicate what is to be used instead. | ||
62 | 52 | :param date: Optional string in YYYY-MM format to indicate when the | ||
63 | 53 | function will definitely (probably) be removed. | ||
64 | 54 | :param log: The log function to call in order to log. If None, logs to | ||
65 | 55 | stdout | ||
66 | 56 | """ | ||
67 | 57 | def wrap(f): | ||
68 | 58 | |||
69 | 59 | @functools.wraps(f) | ||
70 | 60 | def wrapped_f(*args, **kwargs): | ||
71 | 61 | try: | ||
72 | 62 | module = inspect.getmodule(f) | ||
73 | 63 | file = inspect.getsourcefile(f) | ||
74 | 64 | lines = inspect.getsourcelines(f) | ||
75 | 65 | f_name = "{}-{}-{}..{}-{}".format( | ||
76 | 66 | module.__name__, file, lines[0], lines[-1], f.__name__) | ||
77 | 67 | except (IOError, TypeError): | ||
78 | 68 | # assume it was local, so just use the name of the function | ||
79 | 69 | f_name = f.__name__ | ||
80 | 70 | if f_name not in __deprecated_functions: | ||
81 | 71 | __deprecated_functions[f_name] = True | ||
82 | 72 | s = "DEPRECATION WARNING: Function {} is being removed".format( | ||
83 | 73 | f.__name__) | ||
84 | 74 | if date: | ||
85 | 75 | s = "{} on/around {}".format(s, date) | ||
86 | 76 | if warning: | ||
87 | 77 | s = "{} : {}".format(s, warning) | ||
88 | 78 | if log: | ||
89 | 79 | log(s) | ||
90 | 80 | else: | ||
91 | 81 | print(s) | ||
92 | 82 | return f(*args, **kwargs) | ||
93 | 83 | return wrapped_f | ||
94 | 84 | return wrap | ||
95 | 37 | 85 | ||
96 | === modified file 'hooks/charmhelpers/contrib/ansible/__init__.py' | |||
97 | --- hooks/charmhelpers/contrib/ansible/__init__.py 2016-12-20 14:35:00 +0000 | |||
98 | +++ hooks/charmhelpers/contrib/ansible/__init__.py 2023-06-30 13:58:42 +0000 | |||
99 | @@ -16,90 +16,107 @@ | |||
100 | 16 | # | 16 | # |
101 | 17 | # Authors: | 17 | # Authors: |
102 | 18 | # Charm Helpers Developers <juju@lists.ubuntu.com> | 18 | # Charm Helpers Developers <juju@lists.ubuntu.com> |
113 | 19 | """Charm Helpers ansible - declare the state of your machines. | 19 | """ |
114 | 20 | 20 | The ansible package enables you to easily use the configuration management | |
115 | 21 | This helper enables you to declare your machine state, rather than | 21 | tool `Ansible`_ to setup and configure your charm. All of your charm |
116 | 22 | program it procedurally (and have to test each change to your procedures). | 22 | configuration options and relation-data are available as regular Ansible |
117 | 23 | Your install hook can be as simple as:: | 23 | variables which can be used in your playbooks and templates. |
118 | 24 | 24 | ||
119 | 25 | {{{ | 25 | .. _Ansible: https://www.ansible.com/ |
120 | 26 | import charmhelpers.contrib.ansible | 26 | |
121 | 27 | 27 | Usage | |
122 | 28 | 28 | ===== | |
123 | 29 | |||
124 | 30 | Here is an example directory structure for a charm to get you started:: | ||
125 | 31 | |||
126 | 32 | charm-ansible-example/ | ||
127 | 33 | |-- ansible | ||
128 | 34 | | |-- playbook.yaml | ||
129 | 35 | | `-- templates | ||
130 | 36 | | `-- example.j2 | ||
131 | 37 | |-- config.yaml | ||
132 | 38 | |-- copyright | ||
133 | 39 | |-- icon.svg | ||
134 | 40 | |-- layer.yaml | ||
135 | 41 | |-- metadata.yaml | ||
136 | 42 | |-- reactive | ||
137 | 43 | | `-- example.py | ||
138 | 44 | |-- README.md | ||
139 | 45 | |||
140 | 46 | Running a playbook called ``playbook.yaml`` when the ``install`` hook is run | ||
141 | 47 | can be as simple as:: | ||
142 | 48 | |||
143 | 49 | from charmhelpers.contrib import ansible | ||
144 | 50 | from charms.reactive import hook | ||
145 | 51 | |||
146 | 52 | @hook('install') | ||
147 | 29 | def install(): | 53 | def install(): |
160 | 30 | charmhelpers.contrib.ansible.install_ansible_support() | 54 | ansible.install_ansible_support() |
161 | 31 | charmhelpers.contrib.ansible.apply_playbook('playbooks/install.yaml') | 55 | ansible.apply_playbook('ansible/playbook.yaml') |
162 | 32 | }}} | 56 | |
163 | 33 | 57 | Here is an example playbook that uses the ``template`` module to template the | |
164 | 34 | and won't need to change (nor will its tests) when you change the machine | 58 | file ``example.j2`` to the charm host and then uses the ``debug`` module to |
165 | 35 | state. | 59 | print out all the host and Juju variables that you can use in your playbooks. |
166 | 36 | 60 | Note that you must target ``localhost`` as the playbook is run locally on the | |
167 | 37 | All of your juju config and relation-data are available as template | 61 | charm host:: |
168 | 38 | variables within your playbooks and templates. An install playbook looks | 62 | |
157 | 39 | something like:: | ||
158 | 40 | |||
159 | 41 | {{{ | ||
169 | 42 | --- | 63 | --- |
170 | 43 | - hosts: localhost | 64 | - hosts: localhost |
171 | 44 | user: root | ||
172 | 45 | |||
173 | 46 | tasks: | 65 | tasks: |
175 | 47 | - name: Add private repositories. | 66 | - name: Template a file |
176 | 48 | template: | 67 | template: |
205 | 49 | src: ../templates/private-repositories.list.jinja2 | 68 | src: templates/example.j2 |
206 | 50 | dest: /etc/apt/sources.list.d/private.list | 69 | dest: /tmp/example.j2 |
207 | 51 | 70 | ||
208 | 52 | - name: Update the cache. | 71 | - name: Print all variables available to Ansible |
209 | 53 | apt: update_cache=yes | 72 | debug: |
210 | 54 | 73 | var: vars | |
211 | 55 | - name: Install dependencies. | 74 | |
212 | 56 | apt: pkg={{ item }} | 75 | Read more online about `playbooks`_ and standard Ansible `modules`_. |
213 | 57 | with_items: | 76 | |
214 | 58 | - python-mimeparse | 77 | .. _playbooks: https://docs.ansible.com/ansible/latest/user_guide/playbooks.html |
215 | 59 | - python-webob | 78 | .. _modules: https://docs.ansible.com/ansible/latest/user_guide/modules.html |
216 | 60 | - sunburnt | 79 | |
217 | 61 | 80 | A further feature of the Ansible hooks is to provide a light weight "action" | |
190 | 62 | - name: Setup groups. | ||
191 | 63 | group: name={{ item.name }} gid={{ item.gid }} | ||
192 | 64 | with_items: | ||
193 | 65 | - { name: 'deploy_user', gid: 1800 } | ||
194 | 66 | - { name: 'service_user', gid: 1500 } | ||
195 | 67 | |||
196 | 68 | ... | ||
197 | 69 | }}} | ||
198 | 70 | |||
199 | 71 | Read more online about `playbooks`_ and standard ansible `modules`_. | ||
200 | 72 | |||
201 | 73 | .. _playbooks: http://www.ansibleworks.com/docs/playbooks.html | ||
202 | 74 | .. _modules: http://www.ansibleworks.com/docs/modules.html | ||
203 | 75 | |||
204 | 76 | A further feature os the ansible hooks is to provide a light weight "action" | ||
218 | 77 | scripting tool. This is a decorator that you apply to a function, and that | 81 | scripting tool. This is a decorator that you apply to a function, and that |
234 | 78 | function can now receive cli args, and can pass extra args to the playbook. | 82 | function can now receive cli args, and can pass extra args to the playbook:: |
235 | 79 | 83 | ||
236 | 80 | e.g. | 84 | @hooks.action() |
237 | 81 | 85 | def some_action(amount, force="False"): | |
238 | 82 | 86 | "Usage: some-action AMOUNT [force=True]" # <-- shown on error | |
239 | 83 | @hooks.action() | 87 | # process the arguments |
240 | 84 | def some_action(amount, force="False"): | 88 | # do some calls |
241 | 85 | "Usage: some-action AMOUNT [force=True]" # <-- shown on error | 89 | # return extra-vars to be passed to ansible-playbook |
242 | 86 | # process the arguments | 90 | return { |
243 | 87 | # do some calls | 91 | 'amount': int(amount), |
244 | 88 | # return extra-vars to be passed to ansible-playbook | 92 | 'type': force, |
245 | 89 | return { | 93 | } |
231 | 90 | 'amount': int(amount), | ||
232 | 91 | 'type': force, | ||
233 | 92 | } | ||
246 | 93 | 94 | ||
247 | 94 | You can now create a symlink to hooks.py that can be invoked like a hook, but | 95 | You can now create a symlink to hooks.py that can be invoked like a hook, but |
253 | 95 | with cli params: | 96 | with cli params:: |
254 | 96 | 97 | ||
255 | 97 | # link actions/some-action to hooks/hooks.py | 98 | # link actions/some-action to hooks/hooks.py |
256 | 98 | 99 | ||
257 | 99 | actions/some-action amount=10 force=true | 100 | actions/some-action amount=10 force=true |
258 | 101 | |||
259 | 102 | Install Ansible via pip | ||
260 | 103 | ======================= | ||
261 | 104 | |||
262 | 105 | If you want to install a specific version of Ansible via pip instead of | ||
263 | 106 | ``install_ansible_support`` which uses APT, consider using the layer options | ||
264 | 107 | of `layer-basic`_ to install Ansible in a virtualenv:: | ||
265 | 108 | |||
266 | 109 | options: | ||
267 | 110 | basic: | ||
268 | 111 | python_packages: ['ansible==2.9.0'] | ||
269 | 112 | include_system_packages: true | ||
270 | 113 | use_venv: true | ||
271 | 114 | |||
272 | 115 | .. _layer-basic: https://charmsreactive.readthedocs.io/en/latest/layer-basic.html#layer-configuration | ||
273 | 100 | 116 | ||
274 | 101 | """ | 117 | """ |
275 | 102 | import os | 118 | import os |
276 | 119 | import json | ||
277 | 103 | import stat | 120 | import stat |
278 | 104 | import subprocess | 121 | import subprocess |
279 | 105 | import functools | 122 | import functools |
280 | @@ -117,27 +134,63 @@ | |||
281 | 117 | ansible_vars_path = '/etc/ansible/host_vars/localhost' | 134 | ansible_vars_path = '/etc/ansible/host_vars/localhost' |
282 | 118 | 135 | ||
283 | 119 | 136 | ||
291 | 120 | def install_ansible_support(from_ppa=True, ppa_location='ppa:rquillo/ansible'): | 137 | def install_ansible_support(from_ppa=True, ppa_location='ppa:ansible/ansible'): |
292 | 121 | """Installs the ansible package. | 138 | """Installs Ansible via APT. |
293 | 122 | 139 | ||
294 | 123 | By default it is installed from the `PPA`_ linked from | 140 | By default this installs Ansible from the `PPA`_ linked from |
295 | 124 | the ansible `website`_ or from a ppa specified by a charm config.. | 141 | the Ansible `website`_ or from a PPA set in ``ppa_location``. |
296 | 125 | 142 | ||
297 | 126 | .. _PPA: https://launchpad.net/~rquillo/+archive/ansible | 143 | .. _PPA: https://launchpad.net/~ansible/+archive/ubuntu/ansible |
298 | 127 | .. _website: http://docs.ansible.com/intro_installation.html#latest-releases-via-apt-ubuntu | 144 | .. _website: http://docs.ansible.com/intro_installation.html#latest-releases-via-apt-ubuntu |
299 | 128 | 145 | ||
302 | 129 | If from_ppa is empty, you must ensure that the package is available | 146 | If ``from_ppa`` is ``False``, then Ansible will be installed from |
303 | 130 | from a configured repository. | 147 | Ubuntu's Universe repositories. |
304 | 131 | """ | 148 | """ |
305 | 132 | if from_ppa: | 149 | if from_ppa: |
306 | 133 | charmhelpers.fetch.add_source(ppa_location) | 150 | charmhelpers.fetch.add_source(ppa_location) |
307 | 134 | charmhelpers.fetch.apt_update(fatal=True) | 151 | charmhelpers.fetch.apt_update(fatal=True) |
308 | 135 | charmhelpers.fetch.apt_install('ansible') | 152 | charmhelpers.fetch.apt_install('ansible') |
309 | 136 | with open(ansible_hosts_path, 'w+') as hosts_file: | 153 | with open(ansible_hosts_path, 'w+') as hosts_file: |
311 | 137 | hosts_file.write('localhost ansible_connection=local') | 154 | hosts_file.write('localhost ansible_connection=local ansible_remote_tmp=/root/.ansible/tmp') |
312 | 138 | 155 | ||
313 | 139 | 156 | ||
314 | 140 | def apply_playbook(playbook, tags=None, extra_vars=None): | 157 | def apply_playbook(playbook, tags=None, extra_vars=None): |
315 | 158 | """Run a playbook. | ||
316 | 159 | |||
317 | 160 | This helper runs a playbook with juju state variables as context, | ||
318 | 161 | therefore variables set in application config can be used directly. | ||
319 | 162 | List of tags (--tags) and dictionary with extra_vars (--extra-vars) | ||
320 | 163 | can be passed as additional parameters. | ||
321 | 164 | |||
322 | 165 | Read more about playbook `_variables`_ online. | ||
323 | 166 | |||
324 | 167 | .. _variables: https://docs.ansible.com/ansible/latest/user_guide/playbooks_variables.html | ||
325 | 168 | |||
326 | 169 | Example:: | ||
327 | 170 | |||
328 | 171 | # Run ansible/playbook.yaml with tag install and pass extra | ||
329 | 172 | # variables var_a and var_b | ||
330 | 173 | apply_playbook( | ||
331 | 174 | playbook='ansible/playbook.yaml', | ||
332 | 175 | tags=['install'], | ||
333 | 176 | extra_vars={'var_a': 'val_a', 'var_b': 'val_b'} | ||
334 | 177 | ) | ||
335 | 178 | |||
336 | 179 | # Run ansible/playbook.yaml with tag config and extra variable nested, | ||
337 | 180 | # which is passed as json and can be used as dictionary in playbook | ||
338 | 181 | apply_playbook( | ||
339 | 182 | playbook='ansible/playbook.yaml', | ||
340 | 183 | tags=['config'], | ||
341 | 184 | extra_vars={'nested': {'a': 'value1', 'b': 'value2'}} | ||
342 | 185 | ) | ||
343 | 186 | |||
344 | 187 | # Custom config file can be passed within extra_vars | ||
345 | 188 | apply_playbook( | ||
346 | 189 | playbook='ansible/playbook.yaml', | ||
347 | 190 | extra_vars="@some_file.json" | ||
348 | 191 | ) | ||
349 | 192 | |||
350 | 193 | """ | ||
351 | 141 | tags = tags or [] | 194 | tags = tags or [] |
352 | 142 | tags = ",".join(tags) | 195 | tags = ",".join(tags) |
353 | 143 | charmhelpers.contrib.templating.contexts.juju_state_to_yaml( | 196 | charmhelpers.contrib.templating.contexts.juju_state_to_yaml( |
354 | @@ -146,9 +199,13 @@ | |||
355 | 146 | 199 | ||
356 | 147 | # we want ansible's log output to be unbuffered | 200 | # we want ansible's log output to be unbuffered |
357 | 148 | env = os.environ.copy() | 201 | env = os.environ.copy() |
358 | 202 | proxy_settings = charmhelpers.core.hookenv.env_proxy_settings() | ||
359 | 203 | if proxy_settings: | ||
360 | 204 | env.update(proxy_settings) | ||
361 | 149 | env['PYTHONUNBUFFERED'] = "1" | 205 | env['PYTHONUNBUFFERED'] = "1" |
362 | 150 | call = [ | 206 | call = [ |
363 | 151 | 'ansible-playbook', | 207 | 'ansible-playbook', |
364 | 208 | '-vvv', | ||
365 | 152 | '-c', | 209 | '-c', |
366 | 153 | 'local', | 210 | 'local', |
367 | 154 | playbook, | 211 | playbook, |
368 | @@ -156,9 +213,17 @@ | |||
369 | 156 | if tags: | 213 | if tags: |
370 | 157 | call.extend(['--tags', '{}'.format(tags)]) | 214 | call.extend(['--tags', '{}'.format(tags)]) |
371 | 158 | if extra_vars: | 215 | if extra_vars: |
375 | 159 | extra = ["%s=%s" % (k, v) for k, v in extra_vars.items()] | 216 | call.extend(['--extra-vars', json.dumps(extra_vars)]) |
376 | 160 | call.extend(['--extra-vars', " ".join(extra)]) | 217 | try: |
377 | 161 | subprocess.check_call(call, env=env) | 218 | subprocess.check_output(call, env=env) |
378 | 219 | except subprocess.CalledProcessError as e: | ||
379 | 220 | err_msg = e.output.decode().strip() | ||
380 | 221 | charmhelpers.core.hookenv.log("Ansible playbook failed with " | ||
381 | 222 | "{}".format(e), | ||
382 | 223 | level="ERROR") | ||
383 | 224 | charmhelpers.core.hookenv.log("Stdout: {}".format(err_msg), | ||
384 | 225 | level="ERROR") | ||
385 | 226 | raise e | ||
386 | 162 | 227 | ||
387 | 163 | 228 | ||
388 | 164 | class AnsibleHooks(charmhelpers.core.hookenv.Hooks): | 229 | class AnsibleHooks(charmhelpers.core.hookenv.Hooks): |
389 | @@ -170,7 +235,7 @@ | |||
390 | 170 | 235 | ||
391 | 171 | Example:: | 236 | Example:: |
392 | 172 | 237 | ||
394 | 173 | hooks = AnsibleHooks(playbook_path='playbooks/my_machine_state.yaml') | 238 | hooks = AnsibleHooks(playbook_path='ansible/my_machine_state.yaml') |
395 | 174 | 239 | ||
396 | 175 | # All the tasks within my_machine_state.yaml tagged with 'install' | 240 | # All the tasks within my_machine_state.yaml tagged with 'install' |
397 | 176 | # will be run automatically after do_custom_work() | 241 | # will be run automatically after do_custom_work() |
398 | @@ -188,13 +253,12 @@ | |||
399 | 188 | # the hooks which are handled by ansible-only and they'll be registered | 253 | # the hooks which are handled by ansible-only and they'll be registered |
400 | 189 | # for you: | 254 | # for you: |
401 | 190 | # hooks = AnsibleHooks( | 255 | # hooks = AnsibleHooks( |
403 | 191 | # 'playbooks/my_machine_state.yaml', | 256 | # 'ansible/my_machine_state.yaml', |
404 | 192 | # default_hooks=['config-changed', 'start', 'stop']) | 257 | # default_hooks=['config-changed', 'start', 'stop']) |
405 | 193 | 258 | ||
406 | 194 | if __name__ == "__main__": | 259 | if __name__ == "__main__": |
407 | 195 | # execute a hook based on the name the program is called by | 260 | # execute a hook based on the name the program is called by |
408 | 196 | hooks.execute(sys.argv) | 261 | hooks.execute(sys.argv) |
409 | 197 | |||
410 | 198 | """ | 262 | """ |
411 | 199 | 263 | ||
412 | 200 | def __init__(self, playbook_path, default_hooks=None): | 264 | def __init__(self, playbook_path, default_hooks=None): |
413 | 201 | 265 | ||
414 | === modified file 'hooks/charmhelpers/contrib/charmsupport/nrpe.py' | |||
415 | --- hooks/charmhelpers/contrib/charmsupport/nrpe.py 2016-12-20 14:35:00 +0000 | |||
416 | +++ hooks/charmhelpers/contrib/charmsupport/nrpe.py 2023-06-30 13:58:42 +0000 | |||
417 | @@ -1,4 +1,4 @@ | |||
419 | 1 | # Copyright 2014-2015 Canonical Limited. | 1 | # Copyright 2012-2021 Canonical Limited. |
420 | 2 | # | 2 | # |
421 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); |
422 | 4 | # you may not use this file except in compliance with the License. | 4 | # you may not use this file except in compliance with the License. |
423 | @@ -13,25 +13,29 @@ | |||
424 | 13 | # limitations under the License. | 13 | # limitations under the License. |
425 | 14 | 14 | ||
426 | 15 | """Compatibility with the nrpe-external-master charm""" | 15 | """Compatibility with the nrpe-external-master charm""" |
427 | 16 | # Copyright 2012 Canonical Ltd. | ||
428 | 17 | # | 16 | # |
429 | 18 | # Authors: | 17 | # Authors: |
430 | 19 | # Matthew Wedgwood <matthew.wedgwood@canonical.com> | 18 | # Matthew Wedgwood <matthew.wedgwood@canonical.com> |
431 | 20 | 19 | ||
434 | 21 | import subprocess | 20 | import glob |
433 | 22 | import pwd | ||
435 | 23 | import grp | 21 | import grp |
436 | 22 | import json | ||
437 | 24 | import os | 23 | import os |
440 | 25 | import glob | 24 | import pwd |
439 | 26 | import shutil | ||
441 | 27 | import re | 25 | import re |
442 | 28 | import shlex | 26 | import shlex |
443 | 27 | import shutil | ||
444 | 28 | import subprocess | ||
445 | 29 | import yaml | 29 | import yaml |
446 | 30 | 30 | ||
447 | 31 | from charmhelpers.core.hookenv import ( | 31 | from charmhelpers.core.hookenv import ( |
448 | 32 | application_name, | ||
449 | 32 | config, | 33 | config, |
450 | 34 | ERROR, | ||
451 | 35 | hook_name, | ||
452 | 33 | local_unit, | 36 | local_unit, |
453 | 34 | log, | 37 | log, |
454 | 38 | relation_get, | ||
455 | 35 | relation_ids, | 39 | relation_ids, |
456 | 36 | relation_set, | 40 | relation_set, |
457 | 37 | relations_of_type, | 41 | relations_of_type, |
458 | @@ -125,7 +129,7 @@ | |||
459 | 125 | 129 | ||
460 | 126 | 130 | ||
461 | 127 | class Check(object): | 131 | class Check(object): |
463 | 128 | shortname_re = '[A-Za-z0-9-_]+$' | 132 | shortname_re = '[A-Za-z0-9-_.@]+$' |
464 | 129 | service_template = (""" | 133 | service_template = (""" |
465 | 130 | #--------------------------------------------------- | 134 | #--------------------------------------------------- |
466 | 131 | # This file is Juju managed | 135 | # This file is Juju managed |
467 | @@ -137,10 +141,11 @@ | |||
468 | 137 | """{description} | 141 | """{description} |
469 | 138 | check_command check_nrpe!{command} | 142 | check_command check_nrpe!{command} |
470 | 139 | servicegroups {nagios_servicegroup} | 143 | servicegroups {nagios_servicegroup} |
471 | 144 | {service_config_overrides} | ||
472 | 140 | }} | 145 | }} |
473 | 141 | """) | 146 | """) |
474 | 142 | 147 | ||
476 | 143 | def __init__(self, shortname, description, check_cmd): | 148 | def __init__(self, shortname, description, check_cmd, max_check_attempts=None): |
477 | 144 | super(Check, self).__init__() | 149 | super(Check, self).__init__() |
478 | 145 | # XXX: could be better to calculate this from the service name | 150 | # XXX: could be better to calculate this from the service name |
479 | 146 | if not re.match(self.shortname_re, shortname): | 151 | if not re.match(self.shortname_re, shortname): |
480 | @@ -153,6 +158,7 @@ | |||
481 | 153 | # The default is: illegal_object_name_chars=`~!$%^&*"|'<>?,()= | 158 | # The default is: illegal_object_name_chars=`~!$%^&*"|'<>?,()= |
482 | 154 | self.description = description | 159 | self.description = description |
483 | 155 | self.check_cmd = self._locate_cmd(check_cmd) | 160 | self.check_cmd = self._locate_cmd(check_cmd) |
484 | 161 | self.max_check_attempts = max_check_attempts | ||
485 | 156 | 162 | ||
486 | 157 | def _get_check_filename(self): | 163 | def _get_check_filename(self): |
487 | 158 | return os.path.join(NRPE.nrpe_confdir, '{}.cfg'.format(self.command)) | 164 | return os.path.join(NRPE.nrpe_confdir, '{}.cfg'.format(self.command)) |
488 | @@ -171,7 +177,8 @@ | |||
489 | 171 | if os.path.exists(os.path.join(path, parts[0])): | 177 | if os.path.exists(os.path.join(path, parts[0])): |
490 | 172 | command = os.path.join(path, parts[0]) | 178 | command = os.path.join(path, parts[0]) |
491 | 173 | if len(parts) > 1: | 179 | if len(parts) > 1: |
493 | 174 | command += " " + " ".join(parts[1:]) | 180 | safe_args = [shlex.quote(arg) for arg in parts[1:]] |
494 | 181 | command += " " + " ".join(safe_args) | ||
495 | 175 | return command | 182 | return command |
496 | 176 | log('Check command not found: {}'.format(parts[0])) | 183 | log('Check command not found: {}'.format(parts[0])) |
497 | 177 | return '' | 184 | return '' |
498 | @@ -193,6 +200,13 @@ | |||
499 | 193 | nrpe_check_file = self._get_check_filename() | 200 | nrpe_check_file = self._get_check_filename() |
500 | 194 | with open(nrpe_check_file, 'w') as nrpe_check_config: | 201 | with open(nrpe_check_file, 'w') as nrpe_check_config: |
501 | 195 | nrpe_check_config.write("# check {}\n".format(self.shortname)) | 202 | nrpe_check_config.write("# check {}\n".format(self.shortname)) |
502 | 203 | if nagios_servicegroups: | ||
503 | 204 | nrpe_check_config.write( | ||
504 | 205 | "# The following header was added automatically by juju\n") | ||
505 | 206 | nrpe_check_config.write( | ||
506 | 207 | "# Modifying it will affect nagios monitoring and alerting\n") | ||
507 | 208 | nrpe_check_config.write( | ||
508 | 209 | "# servicegroups: {}\n".format(nagios_servicegroups)) | ||
509 | 196 | nrpe_check_config.write("command[{}]={}\n".format( | 210 | nrpe_check_config.write("command[{}]={}\n".format( |
510 | 197 | self.command, self.check_cmd)) | 211 | self.command, self.check_cmd)) |
511 | 198 | 212 | ||
512 | @@ -207,12 +221,19 @@ | |||
513 | 207 | nagios_servicegroups): | 221 | nagios_servicegroups): |
514 | 208 | self._remove_service_files() | 222 | self._remove_service_files() |
515 | 209 | 223 | ||
516 | 224 | if self.max_check_attempts: | ||
517 | 225 | service_config_overrides = ' max_check_attempts {}'.format( | ||
518 | 226 | self.max_check_attempts | ||
519 | 227 | ) # Note indentation is here rather than in the template to avoid trailing spaces | ||
520 | 228 | else: | ||
521 | 229 | service_config_overrides = '' # empty string to avoid printing 'None' | ||
522 | 210 | templ_vars = { | 230 | templ_vars = { |
523 | 211 | 'nagios_hostname': hostname, | 231 | 'nagios_hostname': hostname, |
524 | 212 | 'nagios_servicegroup': nagios_servicegroups, | 232 | 'nagios_servicegroup': nagios_servicegroups, |
525 | 213 | 'description': self.description, | 233 | 'description': self.description, |
526 | 214 | 'shortname': self.shortname, | 234 | 'shortname': self.shortname, |
527 | 215 | 'command': self.command, | 235 | 'command': self.command, |
528 | 236 | 'service_config_overrides': service_config_overrides, | ||
529 | 216 | } | 237 | } |
530 | 217 | nrpe_service_text = Check.service_template.format(**templ_vars) | 238 | nrpe_service_text = Check.service_template.format(**templ_vars) |
531 | 218 | nrpe_service_file = self._get_service_filename(hostname) | 239 | nrpe_service_file = self._get_service_filename(hostname) |
532 | @@ -227,6 +248,7 @@ | |||
533 | 227 | nagios_logdir = '/var/log/nagios' | 248 | nagios_logdir = '/var/log/nagios' |
534 | 228 | nagios_exportdir = '/var/lib/nagios/export' | 249 | nagios_exportdir = '/var/lib/nagios/export' |
535 | 229 | nrpe_confdir = '/etc/nagios/nrpe.d' | 250 | nrpe_confdir = '/etc/nagios/nrpe.d' |
536 | 251 | homedir = '/var/lib/nagios' # home dir provided by nagios-nrpe-server | ||
537 | 230 | 252 | ||
538 | 231 | def __init__(self, hostname=None, primary=True): | 253 | def __init__(self, hostname=None, primary=True): |
539 | 232 | super(NRPE, self).__init__() | 254 | super(NRPE, self).__init__() |
540 | @@ -251,11 +273,28 @@ | |||
541 | 251 | relation = relation_ids('nrpe-external-master') | 273 | relation = relation_ids('nrpe-external-master') |
542 | 252 | if relation: | 274 | if relation: |
543 | 253 | log("Setting charm primary status {}".format(primary)) | 275 | log("Setting charm primary status {}".format(primary)) |
545 | 254 | for rid in relation_ids('nrpe-external-master'): | 276 | for rid in relation: |
546 | 255 | relation_set(relation_id=rid, relation_settings={'primary': self.primary}) | 277 | relation_set(relation_id=rid, relation_settings={'primary': self.primary}) |
547 | 278 | self.remove_check_queue = set() | ||
548 | 279 | |||
549 | 280 | @classmethod | ||
550 | 281 | def does_nrpe_conf_dir_exist(cls): | ||
551 | 282 | """Return True if th nrpe_confdif directory exists.""" | ||
552 | 283 | return os.path.isdir(cls.nrpe_confdir) | ||
553 | 256 | 284 | ||
554 | 257 | def add_check(self, *args, **kwargs): | 285 | def add_check(self, *args, **kwargs): |
555 | 286 | shortname = None | ||
556 | 287 | if kwargs.get('shortname') is None: | ||
557 | 288 | if len(args) > 0: | ||
558 | 289 | shortname = args[0] | ||
559 | 290 | else: | ||
560 | 291 | shortname = kwargs['shortname'] | ||
561 | 292 | |||
562 | 258 | self.checks.append(Check(*args, **kwargs)) | 293 | self.checks.append(Check(*args, **kwargs)) |
563 | 294 | try: | ||
564 | 295 | self.remove_check_queue.remove(shortname) | ||
565 | 296 | except KeyError: | ||
566 | 297 | pass | ||
567 | 259 | 298 | ||
568 | 260 | def remove_check(self, *args, **kwargs): | 299 | def remove_check(self, *args, **kwargs): |
569 | 261 | if kwargs.get('shortname') is None: | 300 | if kwargs.get('shortname') is None: |
570 | @@ -272,12 +311,13 @@ | |||
571 | 272 | 311 | ||
572 | 273 | check = Check(*args, **kwargs) | 312 | check = Check(*args, **kwargs) |
573 | 274 | check.remove(self.hostname) | 313 | check.remove(self.hostname) |
574 | 314 | self.remove_check_queue.add(kwargs['shortname']) | ||
575 | 275 | 315 | ||
576 | 276 | def write(self): | 316 | def write(self): |
577 | 277 | try: | 317 | try: |
578 | 278 | nagios_uid = pwd.getpwnam('nagios').pw_uid | 318 | nagios_uid = pwd.getpwnam('nagios').pw_uid |
579 | 279 | nagios_gid = grp.getgrnam('nagios').gr_gid | 319 | nagios_gid = grp.getgrnam('nagios').gr_gid |
581 | 280 | except: | 320 | except Exception: |
582 | 281 | log("Nagios user not set up, nrpe checks not updated") | 321 | log("Nagios user not set up, nrpe checks not updated") |
583 | 282 | return | 322 | return |
584 | 283 | 323 | ||
585 | @@ -287,19 +327,50 @@ | |||
586 | 287 | 327 | ||
587 | 288 | nrpe_monitors = {} | 328 | nrpe_monitors = {} |
588 | 289 | monitors = {"monitors": {"remote": {"nrpe": nrpe_monitors}}} | 329 | monitors = {"monitors": {"remote": {"nrpe": nrpe_monitors}}} |
589 | 330 | |||
590 | 331 | # check that the charm can write to the conf dir. If not, then nagios | ||
591 | 332 | # probably isn't installed, and we can defer. | ||
592 | 333 | if not self.does_nrpe_conf_dir_exist(): | ||
593 | 334 | return | ||
594 | 335 | |||
595 | 290 | for nrpecheck in self.checks: | 336 | for nrpecheck in self.checks: |
596 | 291 | nrpecheck.write(self.nagios_context, self.hostname, | 337 | nrpecheck.write(self.nagios_context, self.hostname, |
597 | 292 | self.nagios_servicegroups) | 338 | self.nagios_servicegroups) |
598 | 293 | nrpe_monitors[nrpecheck.shortname] = { | 339 | nrpe_monitors[nrpecheck.shortname] = { |
599 | 294 | "command": nrpecheck.command, | 340 | "command": nrpecheck.command, |
600 | 295 | } | 341 | } |
601 | 342 | # If we were passed max_check_attempts, add that to the relation data | ||
602 | 343 | if nrpecheck.max_check_attempts is not None: | ||
603 | 344 | nrpe_monitors[nrpecheck.shortname]['max_check_attempts'] = nrpecheck.max_check_attempts | ||
604 | 296 | 345 | ||
606 | 297 | service('restart', 'nagios-nrpe-server') | 346 | # update-status hooks are configured to fire every 5 minutes by |
607 | 347 | # default. When nagios-nrpe-server is restarted, the nagios server | ||
608 | 348 | # reports checks failing causing unnecessary alerts. Let's not restart | ||
609 | 349 | # on update-status hooks. | ||
610 | 350 | if not hook_name() == 'update-status': | ||
611 | 351 | service('restart', 'nagios-nrpe-server') | ||
612 | 298 | 352 | ||
613 | 299 | monitor_ids = relation_ids("local-monitors") + \ | 353 | monitor_ids = relation_ids("local-monitors") + \ |
614 | 300 | relation_ids("nrpe-external-master") | 354 | relation_ids("nrpe-external-master") |
615 | 301 | for rid in monitor_ids: | 355 | for rid in monitor_ids: |
617 | 302 | relation_set(relation_id=rid, monitors=yaml.dump(monitors)) | 356 | reldata = relation_get(unit=local_unit(), rid=rid) |
618 | 357 | if 'monitors' in reldata: | ||
619 | 358 | # update the existing set of monitors with the new data | ||
620 | 359 | old_monitors = yaml.safe_load(reldata['monitors']) | ||
621 | 360 | old_nrpe_monitors = old_monitors['monitors']['remote']['nrpe'] | ||
622 | 361 | # remove keys that are in the remove_check_queue | ||
623 | 362 | old_nrpe_monitors = {k: v for k, v in old_nrpe_monitors.items() | ||
624 | 363 | if k not in self.remove_check_queue} | ||
625 | 364 | # update/add nrpe_monitors | ||
626 | 365 | old_nrpe_monitors.update(nrpe_monitors) | ||
627 | 366 | old_monitors['monitors']['remote']['nrpe'] = old_nrpe_monitors | ||
628 | 367 | # write back to the relation | ||
629 | 368 | relation_set(relation_id=rid, monitors=yaml.dump(old_monitors)) | ||
630 | 369 | else: | ||
631 | 370 | # write a brand new set of monitors, as no existing ones. | ||
632 | 371 | relation_set(relation_id=rid, monitors=yaml.dump(monitors)) | ||
633 | 372 | |||
634 | 373 | self.remove_check_queue.clear() | ||
635 | 303 | 374 | ||
636 | 304 | 375 | ||
637 | 305 | def get_nagios_hostcontext(relation_name='nrpe-external-master'): | 376 | def get_nagios_hostcontext(relation_name='nrpe-external-master'): |
638 | @@ -338,14 +409,29 @@ | |||
639 | 338 | return unit | 409 | return unit |
640 | 339 | 410 | ||
641 | 340 | 411 | ||
643 | 341 | def add_init_service_checks(nrpe, services, unit_name): | 412 | def add_init_service_checks(nrpe, services, unit_name, immediate_check=True): |
644 | 342 | """ | 413 | """ |
645 | 343 | Add checks for each service in list | 414 | Add checks for each service in list |
646 | 344 | 415 | ||
647 | 345 | :param NRPE nrpe: NRPE object to add check to | 416 | :param NRPE nrpe: NRPE object to add check to |
648 | 346 | :param list services: List of services to check | 417 | :param list services: List of services to check |
649 | 347 | :param str unit_name: Unit name to use in check description | 418 | :param str unit_name: Unit name to use in check description |
650 | 419 | :param bool immediate_check: For sysv init, run the service check immediately | ||
651 | 348 | """ | 420 | """ |
652 | 421 | # check_haproxy is redundant in the presence of check_crm. See LP Bug#1880601 for details. | ||
653 | 422 | # just remove check_haproxy if haproxy is added as a lsb resource in hacluster. | ||
654 | 423 | for rid in relation_ids("ha"): | ||
655 | 424 | ha_resources = relation_get("json_resources", rid=rid, unit=local_unit()) | ||
656 | 425 | if ha_resources: | ||
657 | 426 | try: | ||
658 | 427 | ha_resources_parsed = json.loads(ha_resources) | ||
659 | 428 | except ValueError as e: | ||
660 | 429 | log('Could not parse JSON from ha resources. {}'.format(e), level=ERROR) | ||
661 | 430 | raise | ||
662 | 431 | if "lsb:haproxy" in ha_resources_parsed.values(): | ||
663 | 432 | if "haproxy" in services: | ||
664 | 433 | log("removed check_haproxy. This service will be monitored by check_crm") | ||
665 | 434 | services.remove("haproxy") | ||
666 | 349 | for svc in services: | 435 | for svc in services: |
667 | 350 | # Don't add a check for these services from neutron-gateway | 436 | # Don't add a check for these services from neutron-gateway |
668 | 351 | if svc in ['ext-port', 'os-charm-phy-nic-mtu']: | 437 | if svc in ['ext-port', 'os-charm-phy-nic-mtu']: |
669 | @@ -354,7 +440,7 @@ | |||
670 | 354 | upstart_init = '/etc/init/%s.conf' % svc | 440 | upstart_init = '/etc/init/%s.conf' % svc |
671 | 355 | sysv_init = '/etc/init.d/%s' % svc | 441 | sysv_init = '/etc/init.d/%s' % svc |
672 | 356 | 442 | ||
674 | 357 | if host.init_is_systemd(): | 443 | if host.init_is_systemd(service_name=svc): |
675 | 358 | nrpe.add_check( | 444 | nrpe.add_check( |
676 | 359 | shortname=svc, | 445 | shortname=svc, |
677 | 360 | description='process check {%s}' % unit_name, | 446 | description='process check {%s}' % unit_name, |
678 | @@ -368,33 +454,53 @@ | |||
679 | 368 | ) | 454 | ) |
680 | 369 | elif os.path.exists(sysv_init): | 455 | elif os.path.exists(sysv_init): |
681 | 370 | cronpath = '/etc/cron.d/nagios-service-check-%s' % svc | 456 | cronpath = '/etc/cron.d/nagios-service-check-%s' % svc |
688 | 371 | cron_file = ('*/5 * * * * root ' | 457 | checkpath = '%s/service-check-%s.txt' % (nrpe.homedir, svc) |
689 | 372 | '/usr/local/lib/nagios/plugins/check_exit_status.pl ' | 458 | croncmd = ( |
690 | 373 | '-s /etc/init.d/%s status > ' | 459 | '/usr/local/lib/nagios/plugins/check_exit_status.pl ' |
691 | 374 | '/var/lib/nagios/service-check-%s.txt\n' % (svc, | 460 | '-e -s /etc/init.d/%s status' % svc |
692 | 375 | svc) | 461 | ) |
693 | 376 | ) | 462 | cron_file = '*/5 * * * * root %s > %s\n' % (croncmd, checkpath) |
694 | 377 | f = open(cronpath, 'w') | 463 | f = open(cronpath, 'w') |
695 | 378 | f.write(cron_file) | 464 | f.write(cron_file) |
696 | 379 | f.close() | 465 | f.close() |
697 | 380 | nrpe.add_check( | 466 | nrpe.add_check( |
698 | 381 | shortname=svc, | 467 | shortname=svc, |
702 | 382 | description='process check {%s}' % unit_name, | 468 | description='service check {%s}' % unit_name, |
703 | 383 | check_cmd='check_status_file.py -f ' | 469 | check_cmd='check_status_file.py -f %s' % checkpath, |
701 | 384 | '/var/lib/nagios/service-check-%s.txt' % svc, | ||
704 | 385 | ) | 470 | ) |
708 | 386 | 471 | # if /var/lib/nagios doesn't exist open(checkpath, 'w') will fail | |
709 | 387 | 472 | # (LP: #1670223). | |
710 | 388 | def copy_nrpe_checks(): | 473 | if immediate_check and os.path.isdir(nrpe.homedir): |
711 | 474 | f = open(checkpath, 'w') | ||
712 | 475 | subprocess.call( | ||
713 | 476 | croncmd.split(), | ||
714 | 477 | stdout=f, | ||
715 | 478 | stderr=subprocess.STDOUT | ||
716 | 479 | ) | ||
717 | 480 | f.close() | ||
718 | 481 | os.chmod(checkpath, 0o644) | ||
719 | 482 | |||
720 | 483 | |||
721 | 484 | def copy_nrpe_checks(nrpe_files_dir=None): | ||
722 | 389 | """ | 485 | """ |
723 | 390 | Copy the nrpe checks into place | 486 | Copy the nrpe checks into place |
724 | 391 | 487 | ||
725 | 392 | """ | 488 | """ |
726 | 393 | NAGIOS_PLUGINS = '/usr/local/lib/nagios/plugins' | 489 | NAGIOS_PLUGINS = '/usr/local/lib/nagios/plugins' |
731 | 394 | nrpe_files_dir = os.path.join(os.getenv('CHARM_DIR'), 'hooks', | 490 | if nrpe_files_dir is None: |
732 | 395 | 'charmhelpers', 'contrib', 'openstack', | 491 | # determine if "charmhelpers" is in CHARMDIR or CHARMDIR/hooks |
733 | 396 | 'files') | 492 | for segment in ['.', 'hooks']: |
734 | 397 | 493 | nrpe_files_dir = os.path.abspath(os.path.join( | |
735 | 494 | os.getenv('CHARM_DIR'), | ||
736 | 495 | segment, | ||
737 | 496 | 'charmhelpers', | ||
738 | 497 | 'contrib', | ||
739 | 498 | 'openstack', | ||
740 | 499 | 'files')) | ||
741 | 500 | if os.path.isdir(nrpe_files_dir): | ||
742 | 501 | break | ||
743 | 502 | else: | ||
744 | 503 | raise RuntimeError("Couldn't find charmhelpers directory") | ||
745 | 398 | if not os.path.exists(NAGIOS_PLUGINS): | 504 | if not os.path.exists(NAGIOS_PLUGINS): |
746 | 399 | os.makedirs(NAGIOS_PLUGINS) | 505 | os.makedirs(NAGIOS_PLUGINS) |
747 | 400 | for fname in glob.glob(os.path.join(nrpe_files_dir, "check_*")): | 506 | for fname in glob.glob(os.path.join(nrpe_files_dir, "check_*")): |
748 | @@ -418,3 +524,53 @@ | |||
749 | 418 | shortname='haproxy_queue', | 524 | shortname='haproxy_queue', |
750 | 419 | description='Check HAProxy queue depth {%s}' % unit_name, | 525 | description='Check HAProxy queue depth {%s}' % unit_name, |
751 | 420 | check_cmd='check_haproxy_queue_depth.sh') | 526 | check_cmd='check_haproxy_queue_depth.sh') |
752 | 527 | |||
753 | 528 | |||
754 | 529 | def remove_deprecated_check(nrpe, deprecated_services): | ||
755 | 530 | """ | ||
756 | 531 | Remove checks for deprecated services in list | ||
757 | 532 | |||
758 | 533 | :param nrpe: NRPE object to remove check from | ||
759 | 534 | :type nrpe: NRPE | ||
760 | 535 | :param deprecated_services: List of deprecated services that are removed | ||
761 | 536 | :type deprecated_services: list | ||
762 | 537 | """ | ||
763 | 538 | for dep_svc in deprecated_services: | ||
764 | 539 | log('Deprecated service: {}'.format(dep_svc)) | ||
765 | 540 | nrpe.remove_check(shortname=dep_svc) | ||
766 | 541 | |||
767 | 542 | |||
768 | 543 | def add_deferred_restarts_check(nrpe): | ||
769 | 544 | """ | ||
770 | 545 | Add NRPE check for services with deferred restarts. | ||
771 | 546 | |||
772 | 547 | :param NRPE nrpe: NRPE object to add check to | ||
773 | 548 | """ | ||
774 | 549 | unit_name = local_unit().replace('/', '-') | ||
775 | 550 | shortname = unit_name + '_deferred_restarts' | ||
776 | 551 | check_cmd = 'check_deferred_restarts.py --application {}'.format( | ||
777 | 552 | application_name()) | ||
778 | 553 | |||
779 | 554 | log('Adding deferred restarts nrpe check: {}'.format(shortname)) | ||
780 | 555 | nrpe.add_check( | ||
781 | 556 | shortname=shortname, | ||
782 | 557 | description='Check deferred service restarts {}'.format(unit_name), | ||
783 | 558 | check_cmd=check_cmd) | ||
784 | 559 | |||
785 | 560 | |||
786 | 561 | def remove_deferred_restarts_check(nrpe): | ||
787 | 562 | """ | ||
788 | 563 | Remove NRPE check for services with deferred service restarts. | ||
789 | 564 | |||
790 | 565 | :param NRPE nrpe: NRPE object to remove check from | ||
791 | 566 | """ | ||
792 | 567 | unit_name = local_unit().replace('/', '-') | ||
793 | 568 | shortname = unit_name + '_deferred_restarts' | ||
794 | 569 | check_cmd = 'check_deferred_restarts.py --application {}'.format( | ||
795 | 570 | application_name()) | ||
796 | 571 | |||
797 | 572 | log('Removing deferred restarts nrpe check: {}'.format(shortname)) | ||
798 | 573 | nrpe.remove_check( | ||
799 | 574 | shortname=shortname, | ||
800 | 575 | description='Check deferred service restarts {}'.format(unit_name), | ||
801 | 576 | check_cmd=check_cmd) | ||
802 | 421 | 577 | ||
803 | === modified file 'hooks/charmhelpers/contrib/templating/contexts.py' | |||
804 | --- hooks/charmhelpers/contrib/templating/contexts.py 2016-12-20 14:35:00 +0000 | |||
805 | +++ hooks/charmhelpers/contrib/templating/contexts.py 2023-06-30 13:58:42 +0000 | |||
806 | @@ -20,8 +20,6 @@ | |||
807 | 20 | import os | 20 | import os |
808 | 21 | import yaml | 21 | import yaml |
809 | 22 | 22 | ||
810 | 23 | import six | ||
811 | 24 | |||
812 | 25 | import charmhelpers.core.hookenv | 23 | import charmhelpers.core.hookenv |
813 | 26 | 24 | ||
814 | 27 | 25 | ||
815 | @@ -93,7 +91,8 @@ | |||
816 | 93 | By default, hyphens are allowed in keys as this is supported | 91 | By default, hyphens are allowed in keys as this is supported |
817 | 94 | by yaml, but for tools like ansible, hyphens are not valid [1]. | 92 | by yaml, but for tools like ansible, hyphens are not valid [1]. |
818 | 95 | 93 | ||
820 | 96 | [1] http://www.ansibleworks.com/docs/playbooks_variables.html#what-makes-a-valid-variable-name | 94 | [1] http://www.ansibleworks.com/docs/playbooks_variables.html |
821 | 95 | #what-makes-a-valid-variable-name | ||
822 | 97 | """ | 96 | """ |
823 | 98 | config = charmhelpers.core.hookenv.config() | 97 | config = charmhelpers.core.hookenv.config() |
824 | 99 | 98 | ||
825 | @@ -101,16 +100,17 @@ | |||
826 | 101 | # file resources etc. | 100 | # file resources etc. |
827 | 102 | config['charm_dir'] = charm_dir | 101 | config['charm_dir'] = charm_dir |
828 | 103 | config['local_unit'] = charmhelpers.core.hookenv.local_unit() | 102 | config['local_unit'] = charmhelpers.core.hookenv.local_unit() |
830 | 104 | config['unit_private_address'] = charmhelpers.core.hookenv.unit_private_ip() | 103 | config['unit_private_address'] = ( |
831 | 104 | charmhelpers.core.hookenv.unit_private_ip()) | ||
832 | 105 | config['unit_public_address'] = charmhelpers.core.hookenv.unit_get( | 105 | config['unit_public_address'] = charmhelpers.core.hookenv.unit_get( |
833 | 106 | 'public-address' | 106 | 'public-address' |
834 | 107 | ) | 107 | ) |
835 | 108 | 108 | ||
836 | 109 | # Don't use non-standard tags for unicode which will not | 109 | # Don't use non-standard tags for unicode which will not |
837 | 110 | # work when salt uses yaml.load_safe. | 110 | # work when salt uses yaml.load_safe. |
839 | 111 | yaml.add_representer(six.text_type, | 111 | yaml.add_representer(str, |
840 | 112 | lambda dumper, value: dumper.represent_scalar( | 112 | lambda dumper, value: dumper.represent_scalar( |
842 | 113 | six.u('tag:yaml.org,2002:str'), value)) | 113 | 'tag:yaml.org,2002:str', value)) |
843 | 114 | 114 | ||
844 | 115 | yaml_dir = os.path.dirname(yaml_path) | 115 | yaml_dir = os.path.dirname(yaml_path) |
845 | 116 | if not os.path.exists(yaml_dir): | 116 | if not os.path.exists(yaml_dir): |
846 | @@ -118,7 +118,7 @@ | |||
847 | 118 | 118 | ||
848 | 119 | if os.path.exists(yaml_path): | 119 | if os.path.exists(yaml_path): |
849 | 120 | with open(yaml_path, "r") as existing_vars_file: | 120 | with open(yaml_path, "r") as existing_vars_file: |
851 | 121 | existing_vars = yaml.load(existing_vars_file.read()) | 121 | existing_vars = yaml.safe_load(existing_vars_file.read()) |
852 | 122 | else: | 122 | else: |
853 | 123 | with open(yaml_path, "w+"): | 123 | with open(yaml_path, "w+"): |
854 | 124 | pass | 124 | pass |
855 | 125 | 125 | ||
856 | === modified file 'hooks/charmhelpers/core/decorators.py' | |||
857 | --- hooks/charmhelpers/core/decorators.py 2016-12-20 14:35:00 +0000 | |||
858 | +++ hooks/charmhelpers/core/decorators.py 2023-06-30 13:58:42 +0000 | |||
859 | @@ -53,3 +53,41 @@ | |||
860 | 53 | return _retry_on_exception_inner_2 | 53 | return _retry_on_exception_inner_2 |
861 | 54 | 54 | ||
862 | 55 | return _retry_on_exception_inner_1 | 55 | return _retry_on_exception_inner_1 |
863 | 56 | |||
864 | 57 | |||
865 | 58 | def retry_on_predicate(num_retries, predicate_fun, base_delay=0): | ||
866 | 59 | """Retry based on return value | ||
867 | 60 | |||
868 | 61 | The return value of the decorated function is passed to the given predicate_fun. If the | ||
869 | 62 | result of the predicate is False, retry the decorated function up to num_retries times | ||
870 | 63 | |||
871 | 64 | An exponential backoff up to base_delay^num_retries seconds can be introduced by setting | ||
872 | 65 | base_delay to a nonzero value. The default is to run with a zero (i.e. no) delay | ||
873 | 66 | |||
874 | 67 | :param num_retries: Max. number of retries to perform | ||
875 | 68 | :type num_retries: int | ||
876 | 69 | :param predicate_fun: Predicate function to determine if a retry is necessary | ||
877 | 70 | :type predicate_fun: callable | ||
878 | 71 | :param base_delay: Starting value in seconds for exponential delay, defaults to 0 (no delay) | ||
879 | 72 | :type base_delay: float | ||
880 | 73 | """ | ||
881 | 74 | def _retry_on_pred_inner_1(f): | ||
882 | 75 | def _retry_on_pred_inner_2(*args, **kwargs): | ||
883 | 76 | retries = num_retries | ||
884 | 77 | multiplier = 1 | ||
885 | 78 | delay = base_delay | ||
886 | 79 | while True: | ||
887 | 80 | result = f(*args, **kwargs) | ||
888 | 81 | if predicate_fun(result) or retries <= 0: | ||
889 | 82 | return result | ||
890 | 83 | delay *= multiplier | ||
891 | 84 | multiplier += 1 | ||
892 | 85 | log("Result {}, retrying '{}' {} more times (delay={})".format( | ||
893 | 86 | result, f.__name__, retries, delay), level=INFO) | ||
894 | 87 | retries -= 1 | ||
895 | 88 | if delay: | ||
896 | 89 | time.sleep(delay) | ||
897 | 90 | |||
898 | 91 | return _retry_on_pred_inner_2 | ||
899 | 92 | |||
900 | 93 | return _retry_on_pred_inner_1 | ||
901 | 56 | 94 | ||
902 | === modified file 'hooks/charmhelpers/core/hookenv.py' | |||
903 | --- hooks/charmhelpers/core/hookenv.py 2016-12-20 14:35:00 +0000 | |||
904 | +++ hooks/charmhelpers/core/hookenv.py 2023-06-30 13:58:42 +0000 | |||
905 | @@ -1,4 +1,4 @@ | |||
907 | 1 | # Copyright 2014-2015 Canonical Limited. | 1 | # Copyright 2013-2021 Canonical Limited. |
908 | 2 | # | 2 | # |
909 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); |
910 | 4 | # you may not use this file except in compliance with the License. | 4 | # you may not use this file except in compliance with the License. |
911 | @@ -13,37 +13,50 @@ | |||
912 | 13 | # limitations under the License. | 13 | # limitations under the License. |
913 | 14 | 14 | ||
914 | 15 | "Interactions with the Juju environment" | 15 | "Interactions with the Juju environment" |
915 | 16 | # Copyright 2013 Canonical Ltd. | ||
916 | 17 | # | 16 | # |
917 | 18 | # Authors: | 17 | # Authors: |
918 | 19 | # Charm Helpers Developers <juju@lists.ubuntu.com> | 18 | # Charm Helpers Developers <juju@lists.ubuntu.com> |
919 | 20 | 19 | ||
920 | 21 | from __future__ import print_function | ||
921 | 22 | import copy | 20 | import copy |
922 | 23 | from distutils.version import LooseVersion | 21 | from distutils.version import LooseVersion |
923 | 22 | from enum import Enum | ||
924 | 24 | from functools import wraps | 23 | from functools import wraps |
925 | 24 | from collections import namedtuple, UserDict | ||
926 | 25 | import glob | 25 | import glob |
927 | 26 | import os | 26 | import os |
928 | 27 | import json | 27 | import json |
929 | 28 | import yaml | 28 | import yaml |
930 | 29 | import re | ||
931 | 29 | import subprocess | 30 | import subprocess |
932 | 30 | import sys | 31 | import sys |
933 | 31 | import errno | 32 | import errno |
934 | 32 | import tempfile | 33 | import tempfile |
935 | 33 | from subprocess import CalledProcessError | 34 | from subprocess import CalledProcessError |
936 | 34 | 35 | ||
942 | 35 | import six | 36 | from charmhelpers import deprecate |
943 | 36 | if not six.PY3: | 37 | |
939 | 37 | from UserDict import UserDict | ||
940 | 38 | else: | ||
941 | 39 | from collections import UserDict | ||
944 | 40 | 38 | ||
945 | 41 | CRITICAL = "CRITICAL" | 39 | CRITICAL = "CRITICAL" |
946 | 42 | ERROR = "ERROR" | 40 | ERROR = "ERROR" |
947 | 43 | WARNING = "WARNING" | 41 | WARNING = "WARNING" |
948 | 44 | INFO = "INFO" | 42 | INFO = "INFO" |
949 | 45 | DEBUG = "DEBUG" | 43 | DEBUG = "DEBUG" |
950 | 44 | TRACE = "TRACE" | ||
951 | 46 | MARKER = object() | 45 | MARKER = object() |
952 | 46 | SH_MAX_ARG = 131071 | ||
953 | 47 | |||
954 | 48 | |||
955 | 49 | RANGE_WARNING = ('Passing NO_PROXY string that includes a cidr. ' | ||
956 | 50 | 'This may not be compatible with software you are ' | ||
957 | 51 | 'running in your shell.') | ||
958 | 52 | |||
959 | 53 | |||
960 | 54 | class WORKLOAD_STATES(Enum): | ||
961 | 55 | ACTIVE = 'active' | ||
962 | 56 | BLOCKED = 'blocked' | ||
963 | 57 | MAINTENANCE = 'maintenance' | ||
964 | 58 | WAITING = 'waiting' | ||
965 | 59 | |||
966 | 47 | 60 | ||
967 | 48 | cache = {} | 61 | cache = {} |
968 | 49 | 62 | ||
969 | @@ -64,7 +77,7 @@ | |||
970 | 64 | @wraps(func) | 77 | @wraps(func) |
971 | 65 | def wrapper(*args, **kwargs): | 78 | def wrapper(*args, **kwargs): |
972 | 66 | global cache | 79 | global cache |
974 | 67 | key = str((func, args, kwargs)) | 80 | key = json.dumps((func, args, kwargs), sort_keys=True, default=str) |
975 | 68 | try: | 81 | try: |
976 | 69 | return cache[key] | 82 | return cache[key] |
977 | 70 | except KeyError: | 83 | except KeyError: |
978 | @@ -92,9 +105,9 @@ | |||
979 | 92 | command = ['juju-log'] | 105 | command = ['juju-log'] |
980 | 93 | if level: | 106 | if level: |
981 | 94 | command += ['-l', level] | 107 | command += ['-l', level] |
983 | 95 | if not isinstance(message, six.string_types): | 108 | if not isinstance(message, str): |
984 | 96 | message = repr(message) | 109 | message = repr(message) |
986 | 97 | command += [message] | 110 | command += [message[:SH_MAX_ARG]] |
987 | 98 | # Missing juju-log should not cause failures in unit tests | 111 | # Missing juju-log should not cause failures in unit tests |
988 | 99 | # Send log output to stderr | 112 | # Send log output to stderr |
989 | 100 | try: | 113 | try: |
990 | @@ -109,6 +122,24 @@ | |||
991 | 109 | raise | 122 | raise |
992 | 110 | 123 | ||
993 | 111 | 124 | ||
994 | 125 | def function_log(message): | ||
995 | 126 | """Write a function progress message""" | ||
996 | 127 | command = ['function-log'] | ||
997 | 128 | if not isinstance(message, str): | ||
998 | 129 | message = repr(message) | ||
999 | 130 | command += [message[:SH_MAX_ARG]] | ||
1000 | 131 | # Missing function-log should not cause failures in unit tests | ||
1001 | 132 | # Send function_log output to stderr | ||
1002 | 133 | try: | ||
1003 | 134 | subprocess.call(command) | ||
1004 | 135 | except OSError as e: | ||
1005 | 136 | if e.errno == errno.ENOENT: | ||
1006 | 137 | message = "function-log: {}".format(message) | ||
1007 | 138 | print(message, file=sys.stderr) | ||
1008 | 139 | else: | ||
1009 | 140 | raise | ||
1010 | 141 | |||
1011 | 142 | |||
1012 | 112 | class Serializable(UserDict): | 143 | class Serializable(UserDict): |
1013 | 113 | """Wrapper, an object that can be serialized to yaml or json""" | 144 | """Wrapper, an object that can be serialized to yaml or json""" |
1014 | 114 | 145 | ||
1015 | @@ -187,6 +218,17 @@ | |||
1016 | 187 | raise ValueError('Must specify neither or both of relation_name and service_or_unit') | 218 | raise ValueError('Must specify neither or both of relation_name and service_or_unit') |
1017 | 188 | 219 | ||
1018 | 189 | 220 | ||
1019 | 221 | def departing_unit(): | ||
1020 | 222 | """The departing unit for the current relation hook. | ||
1021 | 223 | |||
1022 | 224 | Available since juju 2.8. | ||
1023 | 225 | |||
1024 | 226 | :returns: the departing unit, or None if the information isn't available. | ||
1025 | 227 | :rtype: Optional[str] | ||
1026 | 228 | """ | ||
1027 | 229 | return os.environ.get('JUJU_DEPARTING_UNIT', None) | ||
1028 | 230 | |||
1029 | 231 | |||
1030 | 190 | def local_unit(): | 232 | def local_unit(): |
1031 | 191 | """Local unit ID""" | 233 | """Local unit ID""" |
1032 | 192 | return os.environ['JUJU_UNIT_NAME'] | 234 | return os.environ['JUJU_UNIT_NAME'] |
1033 | @@ -197,9 +239,56 @@ | |||
1034 | 197 | return os.environ.get('JUJU_REMOTE_UNIT', None) | 239 | return os.environ.get('JUJU_REMOTE_UNIT', None) |
1035 | 198 | 240 | ||
1036 | 199 | 241 | ||
1037 | 242 | def application_name(): | ||
1038 | 243 | """ | ||
1039 | 244 | The name of the deployed application this unit belongs to. | ||
1040 | 245 | """ | ||
1041 | 246 | return local_unit().split('/')[0] | ||
1042 | 247 | |||
1043 | 248 | |||
1044 | 200 | def service_name(): | 249 | def service_name(): |
1047 | 201 | """The name service group this unit belongs to""" | 250 | """ |
1048 | 202 | return local_unit().split('/')[0] | 251 | .. deprecated:: 0.19.1 |
1049 | 252 | Alias for :func:`application_name`. | ||
1050 | 253 | """ | ||
1051 | 254 | return application_name() | ||
1052 | 255 | |||
1053 | 256 | |||
1054 | 257 | def model_name(): | ||
1055 | 258 | """ | ||
1056 | 259 | Name of the model that this unit is deployed in. | ||
1057 | 260 | """ | ||
1058 | 261 | return os.environ['JUJU_MODEL_NAME'] | ||
1059 | 262 | |||
1060 | 263 | |||
1061 | 264 | def model_uuid(): | ||
1062 | 265 | """ | ||
1063 | 266 | UUID of the model that this unit is deployed in. | ||
1064 | 267 | """ | ||
1065 | 268 | return os.environ['JUJU_MODEL_UUID'] | ||
1066 | 269 | |||
1067 | 270 | |||
1068 | 271 | def principal_unit(): | ||
1069 | 272 | """Returns the principal unit of this unit, otherwise None""" | ||
1070 | 273 | # Juju 2.2 and above provides JUJU_PRINCIPAL_UNIT | ||
1071 | 274 | principal_unit = os.environ.get('JUJU_PRINCIPAL_UNIT', None) | ||
1072 | 275 | # If it's empty, then this unit is the principal | ||
1073 | 276 | if principal_unit == '': | ||
1074 | 277 | return os.environ['JUJU_UNIT_NAME'] | ||
1075 | 278 | elif principal_unit is not None: | ||
1076 | 279 | return principal_unit | ||
1077 | 280 | # For Juju 2.1 and below, let's try to work out the principal unit by | ||
1078 | 281 | # the various charms' metadata.yaml. | ||
1079 | 282 | for reltype in relation_types(): | ||
1080 | 283 | for rid in relation_ids(reltype): | ||
1081 | 284 | for unit in related_units(rid): | ||
1082 | 285 | md = _metadata_unit(unit) | ||
1083 | 286 | if not md: | ||
1084 | 287 | continue | ||
1085 | 288 | subordinate = md.pop('subordinate', None) | ||
1086 | 289 | if not subordinate: | ||
1087 | 290 | return unit | ||
1088 | 291 | return None | ||
1089 | 203 | 292 | ||
1090 | 204 | 293 | ||
1091 | 205 | @cached | 294 | @cached |
1092 | @@ -263,7 +352,7 @@ | |||
1093 | 263 | self.implicit_save = True | 352 | self.implicit_save = True |
1094 | 264 | self._prev_dict = None | 353 | self._prev_dict = None |
1095 | 265 | self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME) | 354 | self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME) |
1097 | 266 | if os.path.exists(self.path): | 355 | if os.path.exists(self.path) and os.stat(self.path).st_size: |
1098 | 267 | self.load_previous() | 356 | self.load_previous() |
1099 | 268 | atexit(self._implicit_save) | 357 | atexit(self._implicit_save) |
1100 | 269 | 358 | ||
1101 | @@ -283,7 +372,13 @@ | |||
1102 | 283 | """ | 372 | """ |
1103 | 284 | self.path = path or self.path | 373 | self.path = path or self.path |
1104 | 285 | with open(self.path) as f: | 374 | with open(self.path) as f: |
1106 | 286 | self._prev_dict = json.load(f) | 375 | try: |
1107 | 376 | self._prev_dict = json.load(f) | ||
1108 | 377 | except ValueError as e: | ||
1109 | 378 | log('Found but was unable to parse previous config data, ' | ||
1110 | 379 | 'ignoring which will report all values as changed - {}' | ||
1111 | 380 | .format(str(e)), level=ERROR) | ||
1112 | 381 | return | ||
1113 | 287 | for k, v in copy.deepcopy(self._prev_dict).items(): | 382 | for k, v in copy.deepcopy(self._prev_dict).items(): |
1114 | 288 | if k not in self: | 383 | if k not in self: |
1115 | 289 | self[k] = v | 384 | self[k] = v |
1116 | @@ -319,6 +414,7 @@ | |||
1117 | 319 | 414 | ||
1118 | 320 | """ | 415 | """ |
1119 | 321 | with open(self.path, 'w') as f: | 416 | with open(self.path, 'w') as f: |
1120 | 417 | os.fchmod(f.fileno(), 0o600) | ||
1121 | 322 | json.dump(self, f) | 418 | json.dump(self, f) |
1122 | 323 | 419 | ||
1123 | 324 | def _implicit_save(self): | 420 | def _implicit_save(self): |
1124 | @@ -326,35 +422,52 @@ | |||
1125 | 326 | self.save() | 422 | self.save() |
1126 | 327 | 423 | ||
1127 | 328 | 424 | ||
1129 | 329 | @cached | 425 | _cache_config = None |
1130 | 426 | |||
1131 | 427 | |||
1132 | 330 | def config(scope=None): | 428 | def config(scope=None): |
1140 | 331 | """Juju charm configuration""" | 429 | """ |
1141 | 332 | config_cmd_line = ['config-get'] | 430 | Get the juju charm configuration (scope==None) or individual key, |
1142 | 333 | if scope is not None: | 431 | (scope=str). The returned value is a Python data structure loaded as |
1143 | 334 | config_cmd_line.append(scope) | 432 | JSON from the Juju config command. |
1144 | 335 | else: | 433 | |
1145 | 336 | config_cmd_line.append('--all') | 434 | :param scope: If set, return the value for the specified key. |
1146 | 337 | config_cmd_line.append('--format=json') | 435 | :type scope: Optional[str] |
1147 | 436 | :returns: Either the whole config as a Config, or a key from it. | ||
1148 | 437 | :rtype: Any | ||
1149 | 438 | """ | ||
1150 | 439 | global _cache_config | ||
1151 | 440 | config_cmd_line = ['config-get', '--all', '--format=json'] | ||
1152 | 338 | try: | 441 | try: |
1155 | 339 | config_data = json.loads( | 442 | if _cache_config is None: |
1156 | 340 | subprocess.check_output(config_cmd_line).decode('UTF-8')) | 443 | config_data = json.loads( |
1157 | 444 | subprocess.check_output(config_cmd_line).decode('UTF-8')) | ||
1158 | 445 | _cache_config = Config(config_data) | ||
1159 | 341 | if scope is not None: | 446 | if scope is not None: |
1163 | 342 | return config_data | 447 | return _cache_config.get(scope) |
1164 | 343 | return Config(config_data) | 448 | return _cache_config |
1165 | 344 | except ValueError: | 449 | except (json.decoder.JSONDecodeError, UnicodeDecodeError) as e: |
1166 | 450 | log('Unable to parse output from config-get: config_cmd_line="{}" ' | ||
1167 | 451 | 'message="{}"' | ||
1168 | 452 | .format(config_cmd_line, str(e)), level=ERROR) | ||
1169 | 345 | return None | 453 | return None |
1170 | 346 | 454 | ||
1171 | 347 | 455 | ||
1172 | 348 | @cached | 456 | @cached |
1174 | 349 | def relation_get(attribute=None, unit=None, rid=None): | 457 | def relation_get(attribute=None, unit=None, rid=None, app=None): |
1175 | 350 | """Get relation information""" | 458 | """Get relation information""" |
1176 | 351 | _args = ['relation-get', '--format=json'] | 459 | _args = ['relation-get', '--format=json'] |
1177 | 460 | if app is not None: | ||
1178 | 461 | if unit is not None: | ||
1179 | 462 | raise ValueError("Cannot use both 'unit' and 'app'") | ||
1180 | 463 | _args.append('--app') | ||
1181 | 352 | if rid: | 464 | if rid: |
1182 | 353 | _args.append('-r') | 465 | _args.append('-r') |
1183 | 354 | _args.append(rid) | 466 | _args.append(rid) |
1184 | 355 | _args.append(attribute or '-') | 467 | _args.append(attribute or '-') |
1187 | 356 | if unit: | 468 | # unit or application name |
1188 | 357 | _args.append(unit) | 469 | if unit or app: |
1189 | 470 | _args.append(unit or app) | ||
1190 | 358 | try: | 471 | try: |
1191 | 359 | return json.loads(subprocess.check_output(_args).decode('UTF-8')) | 472 | return json.loads(subprocess.check_output(_args).decode('UTF-8')) |
1192 | 360 | except ValueError: | 473 | except ValueError: |
1193 | @@ -365,12 +478,28 @@ | |||
1194 | 365 | raise | 478 | raise |
1195 | 366 | 479 | ||
1196 | 367 | 480 | ||
1198 | 368 | def relation_set(relation_id=None, relation_settings=None, **kwargs): | 481 | @cached |
1199 | 482 | def _relation_set_accepts_file(): | ||
1200 | 483 | """Return True if the juju relation-set command accepts a file. | ||
1201 | 484 | |||
1202 | 485 | Cache the result as it won't change during the execution of a hook, and | ||
1203 | 486 | thus we can make relation_set() more efficient by only checking for the | ||
1204 | 487 | first relation_set() call. | ||
1205 | 488 | |||
1206 | 489 | :returns: True if relation_set accepts a file. | ||
1207 | 490 | :rtype: bool | ||
1208 | 491 | :raises: subprocess.CalledProcessError if the check fails. | ||
1209 | 492 | """ | ||
1210 | 493 | return "--file" in subprocess.check_output( | ||
1211 | 494 | ["relation-set", "--help"], universal_newlines=True) | ||
1212 | 495 | |||
1213 | 496 | |||
1214 | 497 | def relation_set(relation_id=None, relation_settings=None, app=False, **kwargs): | ||
1215 | 369 | """Set relation information for the current unit""" | 498 | """Set relation information for the current unit""" |
1216 | 370 | relation_settings = relation_settings if relation_settings else {} | 499 | relation_settings = relation_settings if relation_settings else {} |
1217 | 371 | relation_cmd_line = ['relation-set'] | 500 | relation_cmd_line = ['relation-set'] |
1220 | 372 | accepts_file = "--file" in subprocess.check_output( | 501 | if app: |
1221 | 373 | relation_cmd_line + ["--help"], universal_newlines=True) | 502 | relation_cmd_line.append('--app') |
1222 | 374 | if relation_id is not None: | 503 | if relation_id is not None: |
1223 | 375 | relation_cmd_line.extend(('-r', relation_id)) | 504 | relation_cmd_line.extend(('-r', relation_id)) |
1224 | 376 | settings = relation_settings.copy() | 505 | settings = relation_settings.copy() |
1225 | @@ -380,7 +509,7 @@ | |||
1226 | 380 | # sites pass in things like dicts or numbers. | 509 | # sites pass in things like dicts or numbers. |
1227 | 381 | if value is not None: | 510 | if value is not None: |
1228 | 382 | settings[key] = "{}".format(value) | 511 | settings[key] = "{}".format(value) |
1230 | 383 | if accepts_file: | 512 | if _relation_set_accepts_file(): |
1231 | 384 | # --file was introduced in Juju 1.23.2. Use it by default if | 513 | # --file was introduced in Juju 1.23.2. Use it by default if |
1232 | 385 | # available, since otherwise we'll break if the relation data is | 514 | # available, since otherwise we'll break if the relation data is |
1233 | 386 | # too big. Ideally we should tell relation-set to read the data from | 515 | # too big. Ideally we should tell relation-set to read the data from |
1234 | @@ -435,9 +564,70 @@ | |||
1235 | 435 | subprocess.check_output(units_cmd_line).decode('UTF-8')) or [] | 564 | subprocess.check_output(units_cmd_line).decode('UTF-8')) or [] |
1236 | 436 | 565 | ||
1237 | 437 | 566 | ||
1238 | 567 | def expected_peer_units(): | ||
1239 | 568 | """Get a generator for units we expect to join peer relation based on | ||
1240 | 569 | goal-state. | ||
1241 | 570 | |||
1242 | 571 | The local unit is excluded from the result to make it easy to gauge | ||
1243 | 572 | completion of all peers joining the relation with existing hook tools. | ||
1244 | 573 | |||
1245 | 574 | Example usage: | ||
1246 | 575 | log('peer {} of {} joined peer relation' | ||
1247 | 576 | .format(len(related_units()), | ||
1248 | 577 | len(list(expected_peer_units())))) | ||
1249 | 578 | |||
1250 | 579 | This function will raise NotImplementedError if used with juju versions | ||
1251 | 580 | without goal-state support. | ||
1252 | 581 | |||
1253 | 582 | :returns: iterator | ||
1254 | 583 | :rtype: types.GeneratorType | ||
1255 | 584 | :raises: NotImplementedError | ||
1256 | 585 | """ | ||
1257 | 586 | if not has_juju_version("2.4.0"): | ||
1258 | 587 | # goal-state first appeared in 2.4.0. | ||
1259 | 588 | raise NotImplementedError("goal-state") | ||
1260 | 589 | _goal_state = goal_state() | ||
1261 | 590 | return (key for key in _goal_state['units'] | ||
1262 | 591 | if '/' in key and key != local_unit()) | ||
1263 | 592 | |||
1264 | 593 | |||
1265 | 594 | def expected_related_units(reltype=None): | ||
1266 | 595 | """Get a generator for units we expect to join relation based on | ||
1267 | 596 | goal-state. | ||
1268 | 597 | |||
1269 | 598 | Note that you can not use this function for the peer relation, take a look | ||
1270 | 599 | at expected_peer_units() for that. | ||
1271 | 600 | |||
1272 | 601 | This function will raise KeyError if you request information for a | ||
1273 | 602 | relation type for which juju goal-state does not have information. It will | ||
1274 | 603 | raise NotImplementedError if used with juju versions without goal-state | ||
1275 | 604 | support. | ||
1276 | 605 | |||
1277 | 606 | Example usage: | ||
1278 | 607 | log('participant {} of {} joined relation {}' | ||
1279 | 608 | .format(len(related_units()), | ||
1280 | 609 | len(list(expected_related_units())), | ||
1281 | 610 | relation_type())) | ||
1282 | 611 | |||
1283 | 612 | :param reltype: Relation type to list data for, default is to list data for | ||
1284 | 613 | the relation type we are currently executing a hook for. | ||
1285 | 614 | :type reltype: str | ||
1286 | 615 | :returns: iterator | ||
1287 | 616 | :rtype: types.GeneratorType | ||
1288 | 617 | :raises: KeyError, NotImplementedError | ||
1289 | 618 | """ | ||
1290 | 619 | if not has_juju_version("2.4.4"): | ||
1291 | 620 | # goal-state existed in 2.4.0, but did not list individual units to | ||
1292 | 621 | # join a relation in 2.4.1 through 2.4.3. (LP: #1794739) | ||
1293 | 622 | raise NotImplementedError("goal-state relation unit count") | ||
1294 | 623 | reltype = reltype or relation_type() | ||
1295 | 624 | _goal_state = goal_state() | ||
1296 | 625 | return (key for key in _goal_state['relations'][reltype] if '/' in key) | ||
1297 | 626 | |||
1298 | 627 | |||
1299 | 438 | @cached | 628 | @cached |
1300 | 439 | def relation_for_unit(unit=None, rid=None): | 629 | def relation_for_unit(unit=None, rid=None): |
1302 | 440 | """Get the json represenation of a unit's relation""" | 630 | """Get the json representation of a unit's relation""" |
1303 | 441 | unit = unit or remote_unit() | 631 | unit = unit or remote_unit() |
1304 | 442 | relation = relation_get(unit=unit, rid=rid) | 632 | relation = relation_get(unit=unit, rid=rid) |
1305 | 443 | for key in relation: | 633 | for key in relation: |
1306 | @@ -478,6 +668,24 @@ | |||
1307 | 478 | return yaml.safe_load(md) | 668 | return yaml.safe_load(md) |
1308 | 479 | 669 | ||
1309 | 480 | 670 | ||
1310 | 671 | def _metadata_unit(unit): | ||
1311 | 672 | """Given the name of a unit (e.g. apache2/0), get the unit charm's | ||
1312 | 673 | metadata.yaml. Very similar to metadata() but allows us to inspect | ||
1313 | 674 | other units. Unit needs to be co-located, such as a subordinate or | ||
1314 | 675 | principal/primary. | ||
1315 | 676 | |||
1316 | 677 | :returns: metadata.yaml as a python object. | ||
1317 | 678 | |||
1318 | 679 | """ | ||
1319 | 680 | basedir = os.sep.join(charm_dir().split(os.sep)[:-2]) | ||
1320 | 681 | unitdir = 'unit-{}'.format(unit.replace(os.sep, '-')) | ||
1321 | 682 | joineddir = os.path.join(basedir, unitdir, 'charm', 'metadata.yaml') | ||
1322 | 683 | if not os.path.exists(joineddir): | ||
1323 | 684 | return None | ||
1324 | 685 | with open(joineddir) as md: | ||
1325 | 686 | return yaml.safe_load(md) | ||
1326 | 687 | |||
1327 | 688 | |||
1328 | 481 | @cached | 689 | @cached |
1329 | 482 | def relation_types(): | 690 | def relation_types(): |
1330 | 483 | """Get a list of relation types supported by this charm""" | 691 | """Get a list of relation types supported by this charm""" |
1331 | @@ -602,18 +810,31 @@ | |||
1332 | 602 | return False | 810 | return False |
1333 | 603 | 811 | ||
1334 | 604 | 812 | ||
1335 | 813 | def _port_op(op_name, port, protocol="TCP"): | ||
1336 | 814 | """Open or close a service network port""" | ||
1337 | 815 | _args = [op_name] | ||
1338 | 816 | icmp = protocol.upper() == "ICMP" | ||
1339 | 817 | if icmp: | ||
1340 | 818 | _args.append(protocol) | ||
1341 | 819 | else: | ||
1342 | 820 | _args.append('{}/{}'.format(port, protocol)) | ||
1343 | 821 | try: | ||
1344 | 822 | subprocess.check_call(_args) | ||
1345 | 823 | except subprocess.CalledProcessError: | ||
1346 | 824 | # Older Juju pre 2.3 doesn't support ICMP | ||
1347 | 825 | # so treat it as a no-op if it fails. | ||
1348 | 826 | if not icmp: | ||
1349 | 827 | raise | ||
1350 | 828 | |||
1351 | 829 | |||
1352 | 605 | def open_port(port, protocol="TCP"): | 830 | def open_port(port, protocol="TCP"): |
1353 | 606 | """Open a service network port""" | 831 | """Open a service network port""" |
1357 | 607 | _args = ['open-port'] | 832 | _port_op('open-port', port, protocol) |
1355 | 608 | _args.append('{}/{}'.format(port, protocol)) | ||
1356 | 609 | subprocess.check_call(_args) | ||
1358 | 610 | 833 | ||
1359 | 611 | 834 | ||
1360 | 612 | def close_port(port, protocol="TCP"): | 835 | def close_port(port, protocol="TCP"): |
1361 | 613 | """Close a service network port""" | 836 | """Close a service network port""" |
1365 | 614 | _args = ['close-port'] | 837 | _port_op('close-port', port, protocol) |
1363 | 615 | _args.append('{}/{}'.format(port, protocol)) | ||
1364 | 616 | subprocess.check_call(_args) | ||
1366 | 617 | 838 | ||
1367 | 618 | 839 | ||
1368 | 619 | def open_ports(start, end, protocol="TCP"): | 840 | def open_ports(start, end, protocol="TCP"): |
1369 | @@ -630,6 +851,17 @@ | |||
1370 | 630 | subprocess.check_call(_args) | 851 | subprocess.check_call(_args) |
1371 | 631 | 852 | ||
1372 | 632 | 853 | ||
1373 | 854 | def opened_ports(): | ||
1374 | 855 | """Get the opened ports | ||
1375 | 856 | |||
1376 | 857 | *Note that this will only show ports opened in a previous hook* | ||
1377 | 858 | |||
1378 | 859 | :returns: Opened ports as a list of strings: ``['8080/tcp', '8081-8083/tcp']`` | ||
1379 | 860 | """ | ||
1380 | 861 | _args = ['opened-ports', '--format=json'] | ||
1381 | 862 | return json.loads(subprocess.check_output(_args).decode('UTF-8')) | ||
1382 | 863 | |||
1383 | 864 | |||
1384 | 633 | @cached | 865 | @cached |
1385 | 634 | def unit_get(attribute): | 866 | def unit_get(attribute): |
1386 | 635 | """Get the unit ID for the remote unit""" | 867 | """Get the unit ID for the remote unit""" |
1387 | @@ -751,14 +983,29 @@ | |||
1388 | 751 | return wrapper | 983 | return wrapper |
1389 | 752 | 984 | ||
1390 | 753 | 985 | ||
1391 | 986 | class NoNetworkBinding(Exception): | ||
1392 | 987 | pass | ||
1393 | 988 | |||
1394 | 989 | |||
1395 | 754 | def charm_dir(): | 990 | def charm_dir(): |
1396 | 755 | """Return the root directory of the current charm""" | 991 | """Return the root directory of the current charm""" |
1397 | 992 | d = os.environ.get('JUJU_CHARM_DIR') | ||
1398 | 993 | if d is not None: | ||
1399 | 994 | return d | ||
1400 | 756 | return os.environ.get('CHARM_DIR') | 995 | return os.environ.get('CHARM_DIR') |
1401 | 757 | 996 | ||
1402 | 758 | 997 | ||
1403 | 998 | def cmd_exists(cmd): | ||
1404 | 999 | """Return True if the specified cmd exists in the path""" | ||
1405 | 1000 | return any( | ||
1406 | 1001 | os.access(os.path.join(path, cmd), os.X_OK) | ||
1407 | 1002 | for path in os.environ["PATH"].split(os.pathsep) | ||
1408 | 1003 | ) | ||
1409 | 1004 | |||
1410 | 1005 | |||
1411 | 759 | @cached | 1006 | @cached |
1412 | 760 | def action_get(key=None): | 1007 | def action_get(key=None): |
1414 | 761 | """Gets the value of an action parameter, or all key/value param pairs""" | 1008 | """Gets the value of an action parameter, or all key/value param pairs.""" |
1415 | 762 | cmd = ['action-get'] | 1009 | cmd = ['action-get'] |
1416 | 763 | if key is not None: | 1010 | if key is not None: |
1417 | 764 | cmd.append(key) | 1011 | cmd.append(key) |
1418 | @@ -767,52 +1014,132 @@ | |||
1419 | 767 | return action_data | 1014 | return action_data |
1420 | 768 | 1015 | ||
1421 | 769 | 1016 | ||
1422 | 1017 | @cached | ||
1423 | 1018 | @deprecate("moved to action_get()", log=log) | ||
1424 | 1019 | def function_get(key=None): | ||
1425 | 1020 | """ | ||
1426 | 1021 | .. deprecated:: | ||
1427 | 1022 | Gets the value of an action parameter, or all key/value param pairs. | ||
1428 | 1023 | """ | ||
1429 | 1024 | cmd = ['function-get'] | ||
1430 | 1025 | # Fallback for older charms. | ||
1431 | 1026 | if not cmd_exists('function-get'): | ||
1432 | 1027 | cmd = ['action-get'] | ||
1433 | 1028 | |||
1434 | 1029 | if key is not None: | ||
1435 | 1030 | cmd.append(key) | ||
1436 | 1031 | cmd.append('--format=json') | ||
1437 | 1032 | function_data = json.loads(subprocess.check_output(cmd).decode('UTF-8')) | ||
1438 | 1033 | return function_data | ||
1439 | 1034 | |||
1440 | 1035 | |||
1441 | 770 | def action_set(values): | 1036 | def action_set(values): |
1443 | 771 | """Sets the values to be returned after the action finishes""" | 1037 | """Sets the values to be returned after the action finishes.""" |
1444 | 772 | cmd = ['action-set'] | 1038 | cmd = ['action-set'] |
1445 | 773 | for k, v in list(values.items()): | 1039 | for k, v in list(values.items()): |
1446 | 774 | cmd.append('{}={}'.format(k, v)) | 1040 | cmd.append('{}={}'.format(k, v)) |
1447 | 775 | subprocess.check_call(cmd) | 1041 | subprocess.check_call(cmd) |
1448 | 776 | 1042 | ||
1449 | 777 | 1043 | ||
1450 | 1044 | @deprecate("moved to action_set()", log=log) | ||
1451 | 1045 | def function_set(values): | ||
1452 | 1046 | """ | ||
1453 | 1047 | .. deprecated:: | ||
1454 | 1048 | Sets the values to be returned after the function finishes. | ||
1455 | 1049 | """ | ||
1456 | 1050 | cmd = ['function-set'] | ||
1457 | 1051 | # Fallback for older charms. | ||
1458 | 1052 | if not cmd_exists('function-get'): | ||
1459 | 1053 | cmd = ['action-set'] | ||
1460 | 1054 | |||
1461 | 1055 | for k, v in list(values.items()): | ||
1462 | 1056 | cmd.append('{}={}'.format(k, v)) | ||
1463 | 1057 | subprocess.check_call(cmd) | ||
1464 | 1058 | |||
1465 | 1059 | |||
1466 | 778 | def action_fail(message): | 1060 | def action_fail(message): |
1468 | 779 | """Sets the action status to failed and sets the error message. | 1061 | """ |
1469 | 1062 | Sets the action status to failed and sets the error message. | ||
1470 | 780 | 1063 | ||
1472 | 781 | The results set by action_set are preserved.""" | 1064 | The results set by action_set are preserved. |
1473 | 1065 | """ | ||
1474 | 782 | subprocess.check_call(['action-fail', message]) | 1066 | subprocess.check_call(['action-fail', message]) |
1475 | 783 | 1067 | ||
1476 | 784 | 1068 | ||
1477 | 1069 | @deprecate("moved to action_fail()", log=log) | ||
1478 | 1070 | def function_fail(message): | ||
1479 | 1071 | """ | ||
1480 | 1072 | .. deprecated:: | ||
1481 | 1073 | Sets the function status to failed and sets the error message. | ||
1482 | 1074 | |||
1483 | 1075 | The results set by function_set are preserved. | ||
1484 | 1076 | """ | ||
1485 | 1077 | cmd = ['function-fail'] | ||
1486 | 1078 | # Fallback for older charms. | ||
1487 | 1079 | if not cmd_exists('function-fail'): | ||
1488 | 1080 | cmd = ['action-fail'] | ||
1489 | 1081 | cmd.append(message) | ||
1490 | 1082 | |||
1491 | 1083 | subprocess.check_call(cmd) | ||
1492 | 1084 | |||
1493 | 1085 | |||
1494 | 785 | def action_name(): | 1086 | def action_name(): |
1495 | 786 | """Get the name of the currently executing action.""" | 1087 | """Get the name of the currently executing action.""" |
1496 | 787 | return os.environ.get('JUJU_ACTION_NAME') | 1088 | return os.environ.get('JUJU_ACTION_NAME') |
1497 | 788 | 1089 | ||
1498 | 789 | 1090 | ||
1499 | 1091 | def function_name(): | ||
1500 | 1092 | """Get the name of the currently executing function.""" | ||
1501 | 1093 | return os.environ.get('JUJU_FUNCTION_NAME') or action_name() | ||
1502 | 1094 | |||
1503 | 1095 | |||
1504 | 790 | def action_uuid(): | 1096 | def action_uuid(): |
1505 | 791 | """Get the UUID of the currently executing action.""" | 1097 | """Get the UUID of the currently executing action.""" |
1506 | 792 | return os.environ.get('JUJU_ACTION_UUID') | 1098 | return os.environ.get('JUJU_ACTION_UUID') |
1507 | 793 | 1099 | ||
1508 | 794 | 1100 | ||
1509 | 1101 | def function_id(): | ||
1510 | 1102 | """Get the ID of the currently executing function.""" | ||
1511 | 1103 | return os.environ.get('JUJU_FUNCTION_ID') or action_uuid() | ||
1512 | 1104 | |||
1513 | 1105 | |||
1514 | 795 | def action_tag(): | 1106 | def action_tag(): |
1515 | 796 | """Get the tag for the currently executing action.""" | 1107 | """Get the tag for the currently executing action.""" |
1516 | 797 | return os.environ.get('JUJU_ACTION_TAG') | 1108 | return os.environ.get('JUJU_ACTION_TAG') |
1517 | 798 | 1109 | ||
1518 | 799 | 1110 | ||
1520 | 800 | def status_set(workload_state, message): | 1111 | def function_tag(): |
1521 | 1112 | """Get the tag for the currently executing function.""" | ||
1522 | 1113 | return os.environ.get('JUJU_FUNCTION_TAG') or action_tag() | ||
1523 | 1114 | |||
1524 | 1115 | |||
1525 | 1116 | def status_set(workload_state, message, application=False): | ||
1526 | 801 | """Set the workload state with a message | 1117 | """Set the workload state with a message |
1527 | 802 | 1118 | ||
1528 | 803 | Use status-set to set the workload state with a message which is visible | 1119 | Use status-set to set the workload state with a message which is visible |
1529 | 804 | to the user via juju status. If the status-set command is not found then | 1120 | to the user via juju status. If the status-set command is not found then |
1531 | 805 | assume this is juju < 1.23 and juju-log the message unstead. | 1121 | assume this is juju < 1.23 and juju-log the message instead. |
1532 | 806 | 1122 | ||
1535 | 807 | workload_state -- valid juju workload state. | 1123 | workload_state -- valid juju workload state. str or WORKLOAD_STATES |
1536 | 808 | message -- status update message | 1124 | message -- status update message |
1537 | 1125 | application -- Whether this is an application state set | ||
1538 | 809 | """ | 1126 | """ |
1545 | 810 | valid_states = ['maintenance', 'blocked', 'waiting', 'active'] | 1127 | bad_state_msg = '{!r} is not a valid workload state' |
1546 | 811 | if workload_state not in valid_states: | 1128 | |
1547 | 812 | raise ValueError( | 1129 | if isinstance(workload_state, str): |
1548 | 813 | '{!r} is not a valid workload state'.format(workload_state) | 1130 | try: |
1549 | 814 | ) | 1131 | # Convert string to enum. |
1550 | 815 | cmd = ['status-set', workload_state, message] | 1132 | workload_state = WORKLOAD_STATES[workload_state.upper()] |
1551 | 1133 | except KeyError: | ||
1552 | 1134 | raise ValueError(bad_state_msg.format(workload_state)) | ||
1553 | 1135 | |||
1554 | 1136 | if workload_state not in WORKLOAD_STATES: | ||
1555 | 1137 | raise ValueError(bad_state_msg.format(workload_state)) | ||
1556 | 1138 | |||
1557 | 1139 | cmd = ['status-set'] | ||
1558 | 1140 | if application: | ||
1559 | 1141 | cmd.append('--application') | ||
1560 | 1142 | cmd.extend([workload_state.value, message]) | ||
1561 | 816 | try: | 1143 | try: |
1562 | 817 | ret = subprocess.call(cmd) | 1144 | ret = subprocess.call(cmd) |
1563 | 818 | if ret == 0: | 1145 | if ret == 0: |
1564 | @@ -820,7 +1147,7 @@ | |||
1565 | 820 | except OSError as e: | 1147 | except OSError as e: |
1566 | 821 | if e.errno != errno.ENOENT: | 1148 | if e.errno != errno.ENOENT: |
1567 | 822 | raise | 1149 | raise |
1569 | 823 | log_message = 'status-set failed: {} {}'.format(workload_state, | 1150 | log_message = 'status-set failed: {} {}'.format(workload_state.value, |
1570 | 824 | message) | 1151 | message) |
1571 | 825 | log(log_message, level='INFO') | 1152 | log(log_message, level='INFO') |
1572 | 826 | 1153 | ||
1573 | @@ -874,6 +1201,14 @@ | |||
1574 | 874 | 1201 | ||
1575 | 875 | 1202 | ||
1576 | 876 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | 1203 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) |
1577 | 1204 | @cached | ||
1578 | 1205 | def goal_state(): | ||
1579 | 1206 | """Juju goal state values""" | ||
1580 | 1207 | cmd = ['goal-state', '--format=json'] | ||
1581 | 1208 | return json.loads(subprocess.check_output(cmd).decode('UTF-8')) | ||
1582 | 1209 | |||
1583 | 1210 | |||
1584 | 1211 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | ||
1585 | 877 | def is_leader(): | 1212 | def is_leader(): |
1586 | 878 | """Does the current unit hold the juju leadership | 1213 | """Does the current unit hold the juju leadership |
1587 | 879 | 1214 | ||
1588 | @@ -967,7 +1302,6 @@ | |||
1589 | 967 | universal_newlines=True).strip() | 1302 | universal_newlines=True).strip() |
1590 | 968 | 1303 | ||
1591 | 969 | 1304 | ||
1592 | 970 | @cached | ||
1593 | 971 | def has_juju_version(minimum_version): | 1305 | def has_juju_version(minimum_version): |
1594 | 972 | """Return True if the Juju version is at least the provided version""" | 1306 | """Return True if the Juju version is at least the provided version""" |
1595 | 973 | return LooseVersion(juju_version()) >= LooseVersion(minimum_version) | 1307 | return LooseVersion(juju_version()) >= LooseVersion(minimum_version) |
1596 | @@ -1027,6 +1361,8 @@ | |||
1597 | 1027 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | 1361 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) |
1598 | 1028 | def network_get_primary_address(binding): | 1362 | def network_get_primary_address(binding): |
1599 | 1029 | ''' | 1363 | ''' |
1600 | 1364 | Deprecated since Juju 2.3; use network_get() | ||
1601 | 1365 | |||
1602 | 1030 | Retrieve the primary network address for a named binding | 1366 | Retrieve the primary network address for a named binding |
1603 | 1031 | 1367 | ||
1604 | 1032 | :param binding: string. The name of a relation of extra-binding | 1368 | :param binding: string. The name of a relation of extra-binding |
1605 | @@ -1034,4 +1370,267 @@ | |||
1606 | 1034 | :raise: NotImplementedError if run on Juju < 2.0 | 1370 | :raise: NotImplementedError if run on Juju < 2.0 |
1607 | 1035 | ''' | 1371 | ''' |
1608 | 1036 | cmd = ['network-get', '--primary-address', binding] | 1372 | cmd = ['network-get', '--primary-address', binding] |
1610 | 1037 | return subprocess.check_output(cmd).decode('UTF-8').strip() | 1373 | try: |
1611 | 1374 | response = subprocess.check_output( | ||
1612 | 1375 | cmd, | ||
1613 | 1376 | stderr=subprocess.STDOUT).decode('UTF-8').strip() | ||
1614 | 1377 | except CalledProcessError as e: | ||
1615 | 1378 | if 'no network config found for binding' in e.output.decode('UTF-8'): | ||
1616 | 1379 | raise NoNetworkBinding("No network binding for {}" | ||
1617 | 1380 | .format(binding)) | ||
1618 | 1381 | else: | ||
1619 | 1382 | raise | ||
1620 | 1383 | return response | ||
1621 | 1384 | |||
1622 | 1385 | |||
1623 | 1386 | def network_get(endpoint, relation_id=None): | ||
1624 | 1387 | """ | ||
1625 | 1388 | Retrieve the network details for a relation endpoint | ||
1626 | 1389 | |||
1627 | 1390 | :param endpoint: string. The name of a relation endpoint | ||
1628 | 1391 | :param relation_id: int. The ID of the relation for the current context. | ||
1629 | 1392 | :return: dict. The loaded YAML output of the network-get query. | ||
1630 | 1393 | :raise: NotImplementedError if request not supported by the Juju version. | ||
1631 | 1394 | """ | ||
1632 | 1395 | if not has_juju_version('2.2'): | ||
1633 | 1396 | raise NotImplementedError(juju_version()) # earlier versions require --primary-address | ||
1634 | 1397 | if relation_id and not has_juju_version('2.3'): | ||
1635 | 1398 | raise NotImplementedError # 2.3 added the -r option | ||
1636 | 1399 | |||
1637 | 1400 | cmd = ['network-get', endpoint, '--format', 'yaml'] | ||
1638 | 1401 | if relation_id: | ||
1639 | 1402 | cmd.append('-r') | ||
1640 | 1403 | cmd.append(relation_id) | ||
1641 | 1404 | response = subprocess.check_output( | ||
1642 | 1405 | cmd, | ||
1643 | 1406 | stderr=subprocess.STDOUT).decode('UTF-8').strip() | ||
1644 | 1407 | return yaml.safe_load(response) | ||
1645 | 1408 | |||
1646 | 1409 | |||
1647 | 1410 | def add_metric(*args, **kwargs): | ||
1648 | 1411 | """Add metric values. Values may be expressed with keyword arguments. For | ||
1649 | 1412 | metric names containing dashes, these may be expressed as one or more | ||
1650 | 1413 | 'key=value' positional arguments. May only be called from the collect-metrics | ||
1651 | 1414 | hook.""" | ||
1652 | 1415 | _args = ['add-metric'] | ||
1653 | 1416 | _kvpairs = [] | ||
1654 | 1417 | _kvpairs.extend(args) | ||
1655 | 1418 | _kvpairs.extend(['{}={}'.format(k, v) for k, v in kwargs.items()]) | ||
1656 | 1419 | _args.extend(sorted(_kvpairs)) | ||
1657 | 1420 | try: | ||
1658 | 1421 | subprocess.check_call(_args) | ||
1659 | 1422 | return | ||
1660 | 1423 | except EnvironmentError as e: | ||
1661 | 1424 | if e.errno != errno.ENOENT: | ||
1662 | 1425 | raise | ||
1663 | 1426 | log_message = 'add-metric failed: {}'.format(' '.join(_kvpairs)) | ||
1664 | 1427 | log(log_message, level='INFO') | ||
1665 | 1428 | |||
1666 | 1429 | |||
1667 | 1430 | def meter_status(): | ||
1668 | 1431 | """Get the meter status, if running in the meter-status-changed hook.""" | ||
1669 | 1432 | return os.environ.get('JUJU_METER_STATUS') | ||
1670 | 1433 | |||
1671 | 1434 | |||
1672 | 1435 | def meter_info(): | ||
1673 | 1436 | """Get the meter status information, if running in the meter-status-changed | ||
1674 | 1437 | hook.""" | ||
1675 | 1438 | return os.environ.get('JUJU_METER_INFO') | ||
1676 | 1439 | |||
1677 | 1440 | |||
1678 | 1441 | def iter_units_for_relation_name(relation_name): | ||
1679 | 1442 | """Iterate through all units in a relation | ||
1680 | 1443 | |||
1681 | 1444 | Generator that iterates through all the units in a relation and yields | ||
1682 | 1445 | a named tuple with rid and unit field names. | ||
1683 | 1446 | |||
1684 | 1447 | Usage: | ||
1685 | 1448 | data = [(u.rid, u.unit) | ||
1686 | 1449 | for u in iter_units_for_relation_name(relation_name)] | ||
1687 | 1450 | |||
1688 | 1451 | :param relation_name: string relation name | ||
1689 | 1452 | :yield: Named Tuple with rid and unit field names | ||
1690 | 1453 | """ | ||
1691 | 1454 | RelatedUnit = namedtuple('RelatedUnit', 'rid, unit') | ||
1692 | 1455 | for rid in relation_ids(relation_name): | ||
1693 | 1456 | for unit in related_units(rid): | ||
1694 | 1457 | yield RelatedUnit(rid, unit) | ||
1695 | 1458 | |||
1696 | 1459 | |||
1697 | 1460 | def ingress_address(rid=None, unit=None): | ||
1698 | 1461 | """ | ||
1699 | 1462 | Retrieve the ingress-address from a relation when available. | ||
1700 | 1463 | Otherwise, return the private-address. | ||
1701 | 1464 | |||
1702 | 1465 | When used on the consuming side of the relation (unit is a remote | ||
1703 | 1466 | unit), the ingress-address is the IP address that this unit needs | ||
1704 | 1467 | to use to reach the provided service on the remote unit. | ||
1705 | 1468 | |||
1706 | 1469 | When used on the providing side of the relation (unit == local_unit()), | ||
1707 | 1470 | the ingress-address is the IP address that is advertised to remote | ||
1708 | 1471 | units on this relation. Remote units need to use this address to | ||
1709 | 1472 | reach the local provided service on this unit. | ||
1710 | 1473 | |||
1711 | 1474 | Note that charms may document some other method to use in | ||
1712 | 1475 | preference to the ingress_address(), such as an address provided | ||
1713 | 1476 | on a different relation attribute or a service discovery mechanism. | ||
1714 | 1477 | This allows charms to redirect inbound connections to their peers | ||
1715 | 1478 | or different applications such as load balancers. | ||
1716 | 1479 | |||
1717 | 1480 | Usage: | ||
1718 | 1481 | addresses = [ingress_address(rid=u.rid, unit=u.unit) | ||
1719 | 1482 | for u in iter_units_for_relation_name(relation_name)] | ||
1720 | 1483 | |||
1721 | 1484 | :param rid: string relation id | ||
1722 | 1485 | :param unit: string unit name | ||
1723 | 1486 | :side effect: calls relation_get | ||
1724 | 1487 | :return: string IP address | ||
1725 | 1488 | """ | ||
1726 | 1489 | settings = relation_get(rid=rid, unit=unit) | ||
1727 | 1490 | return (settings.get('ingress-address') or | ||
1728 | 1491 | settings.get('private-address')) | ||
1729 | 1492 | |||
1730 | 1493 | |||
1731 | 1494 | def egress_subnets(rid=None, unit=None): | ||
1732 | 1495 | """ | ||
1733 | 1496 | Retrieve the egress-subnets from a relation. | ||
1734 | 1497 | |||
1735 | 1498 | This function is to be used on the providing side of the | ||
1736 | 1499 | relation, and provides the ranges of addresses that client | ||
1737 | 1500 | connections may come from. The result is uninteresting on | ||
1738 | 1501 | the consuming side of a relation (unit == local_unit()). | ||
1739 | 1502 | |||
1740 | 1503 | Returns a stable list of subnets in CIDR format. | ||
1741 | 1504 | eg. ['192.168.1.0/24', '2001::F00F/128'] | ||
1742 | 1505 | |||
1743 | 1506 | If egress-subnets is not available, falls back to using the published | ||
1744 | 1507 | ingress-address, or finally private-address. | ||
1745 | 1508 | |||
1746 | 1509 | :param rid: string relation id | ||
1747 | 1510 | :param unit: string unit name | ||
1748 | 1511 | :side effect: calls relation_get | ||
1749 | 1512 | :return: list of subnets in CIDR format. eg. ['192.168.1.0/24', '2001::F00F/128'] | ||
1750 | 1513 | """ | ||
1751 | 1514 | def _to_range(addr): | ||
1752 | 1515 | if re.search(r'^(?:\d{1,3}\.){3}\d{1,3}$', addr) is not None: | ||
1753 | 1516 | addr += '/32' | ||
1754 | 1517 | elif ':' in addr and '/' not in addr: # IPv6 | ||
1755 | 1518 | addr += '/128' | ||
1756 | 1519 | return addr | ||
1757 | 1520 | |||
1758 | 1521 | settings = relation_get(rid=rid, unit=unit) | ||
1759 | 1522 | if 'egress-subnets' in settings: | ||
1760 | 1523 | return [n.strip() for n in settings['egress-subnets'].split(',') if n.strip()] | ||
1761 | 1524 | if 'ingress-address' in settings: | ||
1762 | 1525 | return [_to_range(settings['ingress-address'])] | ||
1763 | 1526 | if 'private-address' in settings: | ||
1764 | 1527 | return [_to_range(settings['private-address'])] | ||
1765 | 1528 | return [] # Should never happen | ||
1766 | 1529 | |||
1767 | 1530 | |||
1768 | 1531 | def unit_doomed(unit=None): | ||
1769 | 1532 | """Determines if the unit is being removed from the model | ||
1770 | 1533 | |||
1771 | 1534 | Requires Juju 2.4.1. | ||
1772 | 1535 | |||
1773 | 1536 | :param unit: string unit name, defaults to local_unit | ||
1774 | 1537 | :side effect: calls goal_state | ||
1775 | 1538 | :side effect: calls local_unit | ||
1776 | 1539 | :side effect: calls has_juju_version | ||
1777 | 1540 | :return: True if the unit is being removed, already gone, or never existed | ||
1778 | 1541 | """ | ||
1779 | 1542 | if not has_juju_version("2.4.1"): | ||
1780 | 1543 | # We cannot risk blindly returning False for 'we don't know', | ||
1781 | 1544 | # because that could cause data loss; if call sites don't | ||
1782 | 1545 | # need an accurate answer, they likely don't need this helper | ||
1783 | 1546 | # at all. | ||
1784 | 1547 | # goal-state existed in 2.4.0, but did not handle removals | ||
1785 | 1548 | # correctly until 2.4.1. | ||
1786 | 1549 | raise NotImplementedError("is_doomed") | ||
1787 | 1550 | if unit is None: | ||
1788 | 1551 | unit = local_unit() | ||
1789 | 1552 | gs = goal_state() | ||
1790 | 1553 | units = gs.get('units', {}) | ||
1791 | 1554 | if unit not in units: | ||
1792 | 1555 | return True | ||
1793 | 1556 | # I don't think 'dead' units ever show up in the goal-state, but | ||
1794 | 1557 | # check anyway in addition to 'dying'. | ||
1795 | 1558 | return units[unit]['status'] in ('dying', 'dead') | ||
1796 | 1559 | |||
1797 | 1560 | |||
1798 | 1561 | def env_proxy_settings(selected_settings=None): | ||
1799 | 1562 | """Get proxy settings from process environment variables. | ||
1800 | 1563 | |||
1801 | 1564 | Get charm proxy settings from environment variables that correspond to | ||
1802 | 1565 | juju-http-proxy, juju-https-proxy, juju-no-proxy (available as of 2.4.2, see | ||
1803 | 1566 | lp:1782236) and juju-ftp-proxy in a format suitable for passing to an | ||
1804 | 1567 | application that reacts to proxy settings passed as environment variables. | ||
1805 | 1568 | Some applications support lowercase or uppercase notation (e.g. curl), some | ||
1806 | 1569 | support only lowercase (e.g. wget), there are also subjectively rare cases | ||
1807 | 1570 | of only uppercase notation support. no_proxy CIDR and wildcard support also | ||
1808 | 1571 | varies between runtimes and applications as there is no enforced standard. | ||
1809 | 1572 | |||
1810 | 1573 | Some applications may connect to multiple destinations and expose config | ||
1811 | 574 | options that would affect only proxy settings for a specific destination; | ||
1812 | 1575 | these should be handled in charms in an application-specific manner. | ||
1813 | 1576 | |||
1814 | 1577 | :param selected_settings: format only a subset of possible settings | ||
1815 | 1578 | :type selected_settings: list | ||
1816 | 1579 | :rtype: Option(None, dict[str, str]) | ||
1817 | 1580 | """ | ||
1818 | 1581 | SUPPORTED_SETTINGS = { | ||
1819 | 1582 | 'http': 'HTTP_PROXY', | ||
1820 | 1583 | 'https': 'HTTPS_PROXY', | ||
1821 | 1584 | 'no_proxy': 'NO_PROXY', | ||
1822 | 1585 | 'ftp': 'FTP_PROXY' | ||
1823 | 1586 | } | ||
1824 | 1587 | if selected_settings is None: | ||
1825 | 1588 | selected_settings = SUPPORTED_SETTINGS | ||
1826 | 1589 | |||
1827 | 1590 | selected_vars = [v for k, v in SUPPORTED_SETTINGS.items() | ||
1828 | 1591 | if k in selected_settings] | ||
1829 | 1592 | proxy_settings = {} | ||
1830 | 1593 | for var in selected_vars: | ||
1831 | 1594 | var_val = os.getenv(var) | ||
1832 | 1595 | if var_val: | ||
1833 | 1596 | proxy_settings[var] = var_val | ||
1834 | 1597 | proxy_settings[var.lower()] = var_val | ||
1835 | 1598 | # Now handle juju-prefixed environment variables. The legacy vs new | ||
1836 | 1599 | # environment variable usage is mutually exclusive | ||
1837 | 1600 | charm_var_val = os.getenv('JUJU_CHARM_{}'.format(var)) | ||
1838 | 1601 | if charm_var_val: | ||
1839 | 1602 | proxy_settings[var] = charm_var_val | ||
1840 | 1603 | proxy_settings[var.lower()] = charm_var_val | ||
1841 | 1604 | if 'no_proxy' in proxy_settings: | ||
1842 | 1605 | if _contains_range(proxy_settings['no_proxy']): | ||
1843 | 1606 | log(RANGE_WARNING, level=WARNING) | ||
1844 | 1607 | return proxy_settings if proxy_settings else None | ||
1845 | 1608 | |||
1846 | 1609 | |||
1847 | 1610 | def _contains_range(addresses): | ||
1848 | 1611 | """Check for cidr or wildcard domain in a string. | ||
1849 | 1612 | |||
1850 | 1613 | Given a string comprising a comma separated list of ip addresses | ||
1851 | 1614 | and domain names, determine whether the string contains IP ranges | ||
1852 | 1615 | or wildcard domains. | ||
1853 | 1616 | |||
1854 | 1617 | :param addresses: comma separated list of domains and ip addresses. | ||
1855 | 1618 | :type addresses: str | ||
1856 | 1619 | """ | ||
1857 | 1620 | return ( | ||
1858 | 1621 | # Test for cidr (e.g. 10.20.20.0/24) | ||
1859 | 1622 | "/" in addresses or | ||
1860 | 1623 | # Test for wildcard domains (*.foo.com or .foo.com) | ||
1861 | 1624 | "*" in addresses or | ||
1862 | 1625 | addresses.startswith(".") or | ||
1863 | 1626 | ",." in addresses or | ||
1864 | 1627 | " ." in addresses) | ||
1865 | 1628 | |||
1866 | 1629 | |||
1867 | 1630 | def is_subordinate(): | ||
1868 | 1631 | """Check whether charm is subordinate in unit metadata. | ||
1869 | 1632 | |||
1870 | 1633 | :returns: True if unit is subordinate, False otherwise. | ||
1871 | 1634 | :rtype: bool | ||
1872 | 1635 | """ | ||
1873 | 1636 | return metadata().get('subordinate') is True | ||
1874 | 1038 | 1637 | ||
1875 | === modified file 'hooks/charmhelpers/core/host.py' | |||
1876 | --- hooks/charmhelpers/core/host.py 2017-01-16 16:28:40 +0000 | |||
1877 | +++ hooks/charmhelpers/core/host.py 2023-06-30 13:58:42 +0000 | |||
1878 | @@ -1,4 +1,4 @@ | |||
1880 | 1 | # Copyright 2014-2015 Canonical Limited. | 1 | # Copyright 2014-2021 Canonical Limited. |
1881 | 2 | # | 2 | # |
1882 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); |
1883 | 4 | # you may not use this file except in compliance with the License. | 4 | # you may not use this file except in compliance with the License. |
1884 | @@ -19,6 +19,7 @@ | |||
1885 | 19 | # Nick Moffitt <nick.moffitt@canonical.com> | 19 | # Nick Moffitt <nick.moffitt@canonical.com> |
1886 | 20 | # Matthew Wedgwood <matthew.wedgwood@canonical.com> | 20 | # Matthew Wedgwood <matthew.wedgwood@canonical.com> |
1887 | 21 | 21 | ||
1888 | 22 | import errno | ||
1889 | 22 | import os | 23 | import os |
1890 | 23 | import re | 24 | import re |
1891 | 24 | import pwd | 25 | import pwd |
1892 | @@ -30,66 +31,199 @@ | |||
1893 | 30 | import hashlib | 31 | import hashlib |
1894 | 31 | import functools | 32 | import functools |
1895 | 32 | import itertools | 33 | import itertools |
1896 | 33 | import six | ||
1897 | 34 | 34 | ||
1898 | 35 | from contextlib import contextmanager | 35 | from contextlib import contextmanager |
1901 | 36 | from collections import OrderedDict | 36 | from collections import OrderedDict, defaultdict |
1902 | 37 | from .hookenv import log | 37 | from .hookenv import log, INFO, DEBUG, local_unit, charm_name |
1903 | 38 | from .fstab import Fstab | 38 | from .fstab import Fstab |
1904 | 39 | from charmhelpers.osplatform import get_platform | 39 | from charmhelpers.osplatform import get_platform |
1905 | 40 | 40 | ||
1906 | 41 | __platform__ = get_platform() | 41 | __platform__ = get_platform() |
1907 | 42 | if __platform__ == "ubuntu": | 42 | if __platform__ == "ubuntu": |
1909 | 43 | from charmhelpers.core.host_factory.ubuntu import ( | 43 | from charmhelpers.core.host_factory.ubuntu import ( # NOQA:F401 |
1910 | 44 | service_available, | 44 | service_available, |
1911 | 45 | add_new_group, | 45 | add_new_group, |
1912 | 46 | lsb_release, | 46 | lsb_release, |
1913 | 47 | cmp_pkgrevno, | 47 | cmp_pkgrevno, |
1914 | 48 | CompareHostReleases, | ||
1915 | 49 | get_distrib_codename, | ||
1916 | 50 | arch | ||
1917 | 48 | ) # flake8: noqa -- ignore F401 for this import | 51 | ) # flake8: noqa -- ignore F401 for this import |
1918 | 49 | elif __platform__ == "centos": | 52 | elif __platform__ == "centos": |
1920 | 50 | from charmhelpers.core.host_factory.centos import ( | 53 | from charmhelpers.core.host_factory.centos import ( # NOQA:F401 |
1921 | 51 | service_available, | 54 | service_available, |
1922 | 52 | add_new_group, | 55 | add_new_group, |
1923 | 53 | lsb_release, | 56 | lsb_release, |
1924 | 54 | cmp_pkgrevno, | 57 | cmp_pkgrevno, |
1925 | 58 | CompareHostReleases, | ||
1926 | 55 | ) # flake8: noqa -- ignore F401 for this import | 59 | ) # flake8: noqa -- ignore F401 for this import |
1927 | 56 | 60 | ||
1941 | 57 | 61 | UPDATEDB_PATH = '/etc/updatedb.conf' | |
1942 | 58 | def service_start(service_name): | 62 | CA_CERT_DIR = '/usr/local/share/ca-certificates' |
1943 | 59 | """Start a system service""" | 63 | |
1944 | 60 | return service('start', service_name) | 64 | |
1945 | 61 | 65 | def service_start(service_name, **kwargs): | |
1946 | 62 | 66 | """Start a system service. | |
1947 | 63 | def service_stop(service_name): | 67 | |
1948 | 64 | """Stop a system service""" | 68 | The specified service name is managed via the system level init system. |
1949 | 65 | return service('stop', service_name) | 69 | Some init systems (e.g. upstart) require that additional arguments be |
1950 | 66 | 70 | provided in order to directly control service instances whereas other init | |
1951 | 67 | 71 | systems allow for addressing instances of a service directly by name (e.g. | |
1952 | 68 | def service_restart(service_name): | 72 | systemd). |
1953 | 69 | """Restart a system service""" | 73 | |
1954 | 74 | The kwargs allow for the additional parameters to be passed to underlying | ||
1955 | 75 | init systems for those systems which require/allow for them. For example, | ||
1956 | 76 | the ceph-osd upstart script requires the id parameter to be passed along | ||
1957 | 77 | in order to identify which running daemon should be reloaded. The follow- | ||
1958 | 78 | ing example stops the ceph-osd service for instance id=4: | ||
1959 | 79 | |||
1960 | 80 | service_stop('ceph-osd', id=4) | ||
1961 | 81 | |||
1962 | 82 | :param service_name: the name of the service to stop | ||
1963 | 83 | :param **kwargs: additional parameters to pass to the init system when | ||
1964 | 84 | managing services. These will be passed as key=value | ||
1965 | 85 | parameters to the init system's commandline. kwargs | ||
1966 | 86 | are ignored for systemd enabled systems. | ||
1967 | 87 | """ | ||
1968 | 88 | return service('start', service_name, **kwargs) | ||
1969 | 89 | |||
1970 | 90 | |||
1971 | 91 | def service_stop(service_name, **kwargs): | ||
1972 | 92 | """Stop a system service. | ||
1973 | 93 | |||
1974 | 94 | The specified service name is managed via the system level init system. | ||
1975 | 95 | Some init systems (e.g. upstart) require that additional arguments be | ||
1976 | 96 | provided in order to directly control service instances whereas other init | ||
1977 | 97 | systems allow for addressing instances of a service directly by name (e.g. | ||
1978 | 98 | systemd). | ||
1979 | 99 | |||
1980 | 100 | The kwargs allow for the additional parameters to be passed to underlying | ||
1981 | 101 | init systems for those systems which require/allow for them. For example, | ||
1982 | 102 | the ceph-osd upstart script requires the id parameter to be passed along | ||
1983 | 103 | in order to identify which running daemon should be reloaded. The follow- | ||
1984 | 104 | ing example stops the ceph-osd service for instance id=4: | ||
1985 | 105 | |||
1986 | 106 | service_stop('ceph-osd', id=4) | ||
1987 | 107 | |||
1988 | 108 | :param service_name: the name of the service to stop | ||
1989 | 109 | :param **kwargs: additional parameters to pass to the init system when | ||
1990 | 110 | managing services. These will be passed as key=value | ||
1991 | 111 | parameters to the init system's commandline. kwargs | ||
1992 | 112 | are ignored for systemd enabled systems. | ||
1993 | 113 | """ | ||
1994 | 114 | return service('stop', service_name, **kwargs) | ||
1995 | 115 | |||
1996 | 116 | |||
1997 | 117 | def service_enable(service_name, **kwargs): | ||
1998 | 118 | """Enable a system service. | ||
1999 | 119 | |||
2000 | 120 | The specified service name is managed via the system level init system. | ||
2001 | 121 | Some init systems (e.g. upstart) require that additional arguments be | ||
2002 | 122 | provided in order to directly control service instances whereas other init | ||
2003 | 123 | systems allow for addressing instances of a service directly by name (e.g. | ||
2004 | 124 | systemd). | ||
2005 | 125 | |||
2006 | 126 | The kwargs allow for the additional parameters to be passed to underlying | ||
2007 | 127 | init systems for those systems which require/allow for them. For example, | ||
2008 | 128 | the ceph-osd upstart script requires the id parameter to be passed along | ||
2009 | 129 | in order to identify which running daemon should be restarted. The follow- | ||
2010 | 130 | ing example restarts the ceph-osd service for instance id=4: | ||
2011 | 131 | |||
2012 | 132 | service_enable('ceph-osd', id=4) | ||
2013 | 133 | |||
2014 | 134 | :param service_name: the name of the service to enable | ||
2015 | 135 | :param **kwargs: additional parameters to pass to the init system when | ||
2016 | 136 | managing services. These will be passed as key=value | ||
2017 | 137 | parameters to the init system's commandline. kwargs | ||
2018 | 138 | are ignored for init systems not allowing additional | ||
2019 | 139 | parameters via the commandline (systemd). | ||
2020 | 140 | """ | ||
2021 | 141 | return service('enable', service_name, **kwargs) | ||
2022 | 142 | |||
2023 | 143 | |||
2024 | 144 | def service_restart(service_name, **kwargs): | ||
2025 | 145 | """Restart a system service. | ||
2026 | 146 | |||
2027 | 147 | The specified service name is managed via the system level init system. | ||
2028 | 148 | Some init systems (e.g. upstart) require that additional arguments be | ||
2029 | 149 | provided in order to directly control service instances whereas other init | ||
2030 | 150 | systems allow for addressing instances of a service directly by name (e.g. | ||
2031 | 151 | systemd). | ||
2032 | 152 | |||
2033 | 153 | The kwargs allow for the additional parameters to be passed to underlying | ||
2034 | 154 | init systems for those systems which require/allow for them. For example, | ||
2035 | 155 | the ceph-osd upstart script requires the id parameter to be passed along | ||
2036 | 156 | in order to identify which running daemon should be restarted. The follow- | ||
2037 | 157 | ing example restarts the ceph-osd service for instance id=4: | ||
2038 | 158 | |||
2039 | 159 | service_restart('ceph-osd', id=4) | ||
2040 | 160 | |||
2041 | 161 | :param service_name: the name of the service to restart | ||
2042 | 162 | :param **kwargs: additional parameters to pass to the init system when | ||
2043 | 163 | managing services. These will be passed as key=value | ||
2044 | 164 | parameters to the init system's commandline. kwargs | ||
2045 | 165 | are ignored for init systems not allowing additional | ||
2046 | 166 | parameters via the commandline (systemd). | ||
2047 | 167 | """ | ||
2048 | 70 | return service('restart', service_name) | 168 | return service('restart', service_name) |
2049 | 71 | 169 | ||
2050 | 72 | 170 | ||
2052 | 73 | def service_reload(service_name, restart_on_failure=False): | 171 | def service_reload(service_name, restart_on_failure=False, **kwargs): |
2053 | 74 | """Reload a system service, optionally falling back to restart if | 172 | """Reload a system service, optionally falling back to restart if |
2056 | 75 | reload fails""" | 173 | reload fails. |
2057 | 76 | service_result = service('reload', service_name) | 174 | |
2058 | 175 | The specified service name is managed via the system level init system. | ||
2059 | 176 | Some init systems (e.g. upstart) require that additional arguments be | ||
2060 | 177 | provided in order to directly control service instances whereas other init | ||
2061 | 178 | systems allow for addressing instances of a service directly by name (e.g. | ||
2062 | 179 | systemd). | ||
2063 | 180 | |||
2064 | 181 | The kwargs allow for the additional parameters to be passed to underlying | ||
2065 | 182 | init systems for those systems which require/allow for them. For example, | ||
2066 | 183 | the ceph-osd upstart script requires the id parameter to be passed along | ||
2067 | 184 | in order to identify which running daemon should be reloaded. The follow- | ||
2068 | 185 | ing example restarts the ceph-osd service for instance id=4: | ||
2069 | 186 | |||
2070 | 187 | service_reload('ceph-osd', id=4) | ||
2071 | 188 | |||
2072 | 189 | :param service_name: the name of the service to reload | ||
2073 | 190 | :param restart_on_failure: boolean indicating whether to fallback to a | ||
2074 | 191 | restart if the reload fails. | ||
2075 | 192 | :param **kwargs: additional parameters to pass to the init system when | ||
2076 | 193 | managing services. These will be passed as key=value | ||
2077 | 194 | parameters to the init system's commandline. kwargs | ||
2078 | 195 | are ignored for init systems not allowing additional | ||
2079 | 196 | parameters via the commandline (systemd). | ||
2080 | 197 | """ | ||
2081 | 198 | service_result = service('reload', service_name, **kwargs) | ||
2082 | 77 | if not service_result and restart_on_failure: | 199 | if not service_result and restart_on_failure: |
2084 | 78 | service_result = service('restart', service_name) | 200 | service_result = service('restart', service_name, **kwargs) |
2085 | 79 | return service_result | 201 | return service_result |
2086 | 80 | 202 | ||
2087 | 81 | 203 | ||
2089 | 82 | def service_pause(service_name, init_dir="/etc/init", initd_dir="/etc/init.d"): | 204 | def service_pause(service_name, init_dir="/etc/init", initd_dir="/etc/init.d", |
2090 | 205 | **kwargs): | ||
2091 | 83 | """Pause a system service. | 206 | """Pause a system service. |
2092 | 84 | 207 | ||
2094 | 85 | Stop it, and prevent it from starting again at boot.""" | 208 | Stop it, and prevent it from starting again at boot. |
2095 | 209 | |||
2096 | 210 | :param service_name: the name of the service to pause | ||
2097 | 211 | :param init_dir: path to the upstart init directory | ||
2098 | 212 | :param initd_dir: path to the sysv init directory | ||
2099 | 213 | :param **kwargs: additional parameters to pass to the init system when | ||
2100 | 214 | managing services. These will be passed as key=value | ||
2101 | 215 | parameters to the init system's commandline. kwargs | ||
2102 | 216 | are ignored for init systems which do not support | ||
2103 | 217 | key=value arguments via the commandline. | ||
2104 | 218 | """ | ||
2105 | 86 | stopped = True | 219 | stopped = True |
2108 | 87 | if service_running(service_name): | 220 | if service_running(service_name, **kwargs): |
2109 | 88 | stopped = service_stop(service_name) | 221 | stopped = service_stop(service_name, **kwargs) |
2110 | 89 | upstart_file = os.path.join(init_dir, "{}.conf".format(service_name)) | 222 | upstart_file = os.path.join(init_dir, "{}.conf".format(service_name)) |
2111 | 90 | sysv_file = os.path.join(initd_dir, service_name) | 223 | sysv_file = os.path.join(initd_dir, service_name) |
2113 | 91 | if init_is_systemd(): | 224 | if init_is_systemd(service_name=service_name): |
2114 | 92 | service('disable', service_name) | 225 | service('disable', service_name) |
2115 | 226 | service('mask', service_name) | ||
2116 | 93 | elif os.path.exists(upstart_file): | 227 | elif os.path.exists(upstart_file): |
2117 | 94 | override_path = os.path.join( | 228 | override_path = os.path.join( |
2118 | 95 | init_dir, '{}.override'.format(service_name)) | 229 | init_dir, '{}.override'.format(service_name)) |
2119 | @@ -106,13 +240,23 @@ | |||
2120 | 106 | 240 | ||
2121 | 107 | 241 | ||
2122 | 108 | def service_resume(service_name, init_dir="/etc/init", | 242 | def service_resume(service_name, init_dir="/etc/init", |
2124 | 109 | initd_dir="/etc/init.d"): | 243 | initd_dir="/etc/init.d", **kwargs): |
2125 | 110 | """Resume a system service. | 244 | """Resume a system service. |
2126 | 111 | 245 | ||
2128 | 112 | Reenable starting again at boot. Start the service""" | 246 | Re-enable starting again at boot. Start the service. |
2129 | 247 | |||
2130 | 248 | :param service_name: the name of the service to resume | ||
2131 | 249 | :param init_dir: the path to the init dir | ||
2132 | 250 | :param initd_dir: the path to the initd dir | ||
2133 | 251 | :param **kwargs: additional parameters to pass to the init system when | ||
2134 | 252 | managing services. These will be passed as key=value | ||
2135 | 253 | parameters to the init system's commandline. kwargs | ||
2136 | 254 | are ignored for systemd enabled systems. | ||
2137 | 255 | """ | ||
2138 | 113 | upstart_file = os.path.join(init_dir, "{}.conf".format(service_name)) | 256 | upstart_file = os.path.join(init_dir, "{}.conf".format(service_name)) |
2139 | 114 | sysv_file = os.path.join(initd_dir, service_name) | 257 | sysv_file = os.path.join(initd_dir, service_name) |
2141 | 115 | if init_is_systemd(): | 258 | if init_is_systemd(service_name=service_name): |
2142 | 259 | service('unmask', service_name) | ||
2143 | 116 | service('enable', service_name) | 260 | service('enable', service_name) |
2144 | 117 | elif os.path.exists(upstart_file): | 261 | elif os.path.exists(upstart_file): |
2145 | 118 | override_path = os.path.join( | 262 | override_path = os.path.join( |
2146 | @@ -126,19 +270,30 @@ | |||
2147 | 126 | "Unable to detect {0} as SystemD, Upstart {1} or" | 270 | "Unable to detect {0} as SystemD, Upstart {1} or" |
2148 | 127 | " SysV {2}".format( | 271 | " SysV {2}".format( |
2149 | 128 | service_name, upstart_file, sysv_file)) | 272 | service_name, upstart_file, sysv_file)) |
2150 | 273 | started = service_running(service_name, **kwargs) | ||
2151 | 129 | 274 | ||
2152 | 130 | started = service_running(service_name) | ||
2153 | 131 | if not started: | 275 | if not started: |
2155 | 132 | started = service_start(service_name) | 276 | started = service_start(service_name, **kwargs) |
2156 | 133 | return started | 277 | return started |
2157 | 134 | 278 | ||
2158 | 135 | 279 | ||
2163 | 136 | def service(action, service_name): | 280 | def service(action, service_name=None, **kwargs): |
2164 | 137 | """Control a system service""" | 281 | """Control a system service. |
2165 | 138 | if init_is_systemd(): | 282 | |
2166 | 139 | cmd = ['systemctl', action, service_name] | 283 | :param action: the action to take on the service |
2167 | 284 | :param service_name: the name of the service to perform the action on | ||
2168 | 285 | :param **kwargs: additional params to be passed to the service command in | ||
2169 | 286 | the form of key=value. | ||
2170 | 287 | """ | ||
2171 | 288 | if init_is_systemd(service_name=service_name): | ||
2172 | 289 | cmd = ['systemctl', action] | ||
2173 | 290 | if service_name is not None: | ||
2174 | 291 | cmd.append(service_name) | ||
2175 | 140 | else: | 292 | else: |
2176 | 141 | cmd = ['service', service_name, action] | 293 | cmd = ['service', service_name, action] |
2177 | 294 | for key, value in kwargs.items(): | ||
2178 | 295 | parameter = '%s=%s' % (key, value) | ||
2179 | 296 | cmd.append(parameter) | ||
2180 | 142 | return subprocess.call(cmd) == 0 | 297 | return subprocess.call(cmd) == 0 |
2181 | 143 | 298 | ||
2182 | 144 | 299 | ||
2183 | @@ -146,16 +301,27 @@ | |||
2184 | 146 | _INIT_D_CONF = "/etc/init.d/{}" | 301 | _INIT_D_CONF = "/etc/init.d/{}" |
2185 | 147 | 302 | ||
2186 | 148 | 303 | ||
2190 | 149 | def service_running(service_name): | 304 | def service_running(service_name, **kwargs): |
2191 | 150 | """Determine whether a system service is running""" | 305 | """Determine whether a system service is running. |
2192 | 151 | if init_is_systemd(): | 306 | |
2193 | 307 | :param service_name: the name of the service | ||
2194 | 308 | :param **kwargs: additional args to pass to the service command. This is | ||
2195 | 309 | used to pass additional key=value arguments to the | ||
2196 | 310 | service command line for managing specific instance | ||
2197 | 311 | units (e.g. service ceph-osd status id=2). The kwargs | ||
2198 | 312 | are ignored in systemd services. | ||
2199 | 313 | """ | ||
2200 | 314 | if init_is_systemd(service_name=service_name): | ||
2201 | 152 | return service('is-active', service_name) | 315 | return service('is-active', service_name) |
2202 | 153 | else: | 316 | else: |
2203 | 154 | if os.path.exists(_UPSTART_CONF.format(service_name)): | 317 | if os.path.exists(_UPSTART_CONF.format(service_name)): |
2204 | 155 | try: | 318 | try: |
2205 | 319 | cmd = ['status', service_name] | ||
2206 | 320 | for key, value in kwargs.items(): | ||
2207 | 321 | parameter = '%s=%s' % (key, value) | ||
2208 | 322 | cmd.append(parameter) | ||
2209 | 156 | output = subprocess.check_output( | 323 | output = subprocess.check_output( |
2212 | 157 | ['status', service_name], | 324 | cmd, stderr=subprocess.STDOUT).decode('UTF-8') |
2211 | 158 | stderr=subprocess.STDOUT).decode('UTF-8') | ||
2213 | 159 | except subprocess.CalledProcessError: | 325 | except subprocess.CalledProcessError: |
2214 | 160 | return False | 326 | return False |
2215 | 161 | else: | 327 | else: |
2216 | @@ -175,8 +341,16 @@ | |||
2217 | 175 | SYSTEMD_SYSTEM = '/run/systemd/system' | 341 | SYSTEMD_SYSTEM = '/run/systemd/system' |
2218 | 176 | 342 | ||
2219 | 177 | 343 | ||
2222 | 178 | def init_is_systemd(): | 344 | def init_is_systemd(service_name=None): |
2223 | 179 | """Return True if the host system uses systemd, False otherwise.""" | 345 | """ |
2224 | 346 | Returns whether the host uses systemd for the specified service. | ||
2225 | 347 | |||
2226 | 348 | @param Optional[str] service_name: specific name of service | ||
2227 | 349 | """ | ||
2228 | 350 | if str(service_name).startswith("snap."): | ||
2229 | 351 | return True | ||
2230 | 352 | if lsb_release()['DISTRIB_CODENAME'] == 'trusty': | ||
2231 | 353 | return False | ||
2232 | 180 | return os.path.isdir(SYSTEMD_SYSTEM) | 354 | return os.path.isdir(SYSTEMD_SYSTEM) |
2233 | 181 | 355 | ||
2234 | 182 | 356 | ||
2235 | @@ -306,6 +480,51 @@ | |||
2236 | 306 | subprocess.check_call(cmd) | 480 | subprocess.check_call(cmd) |
2237 | 307 | 481 | ||
2238 | 308 | 482 | ||
2239 | 483 | def chage(username, lastday=None, expiredate=None, inactive=None, | ||
2240 | 484 | mindays=None, maxdays=None, root=None, warndays=None): | ||
2241 | 485 | """Change user password expiry information | ||
2242 | 486 | |||
2243 | 487 | :param str username: User to update | ||
2244 | 488 | :param str lastday: Set when password was changed in YYYY-MM-DD format | ||
2245 | 489 | :param str expiredate: Set when user's account will no longer be | ||
2246 | 490 | accessible in YYYY-MM-DD format. | ||
2247 | 491 | -1 will remove an account expiration date. | ||
2248 | 492 | :param str inactive: Set the number of days of inactivity after a password | ||
2249 | 493 | has expired before the account is locked. | ||
2250 | 494 | -1 will remove an account's inactivity. | ||
2251 | 495 | :param str mindays: Set the minimum number of days between password | ||
2252 | 496 | changes to MIN_DAYS. | ||
2253 | 497 | 0 indicates the password can be changed anytime. | ||
2254 | 498 | :param str maxdays: Set the maximum number of days during which a | ||
2255 | 499 | password is valid. | ||
2256 | 500 | -1 as MAX_DAYS will remove checking maxdays | ||
2257 | 501 | :param str root: Apply changes in the CHROOT_DIR directory | ||
2258 | 502 | :param str warndays: Set the number of days of warning before a password | ||
2259 | 503 | change is required | ||
2260 | 504 | :raises subprocess.CalledProcessError: if call to chage fails | ||
2261 | 505 | """ | ||
2262 | 506 | cmd = ['chage'] | ||
2263 | 507 | if root: | ||
2264 | 508 | cmd.extend(['--root', root]) | ||
2265 | 509 | if lastday: | ||
2266 | 510 | cmd.extend(['--lastday', lastday]) | ||
2267 | 511 | if expiredate: | ||
2268 | 512 | cmd.extend(['--expiredate', expiredate]) | ||
2269 | 513 | if inactive: | ||
2270 | 514 | cmd.extend(['--inactive', inactive]) | ||
2271 | 515 | if mindays: | ||
2272 | 516 | cmd.extend(['--mindays', mindays]) | ||
2273 | 517 | if maxdays: | ||
2274 | 518 | cmd.extend(['--maxdays', maxdays]) | ||
2275 | 519 | if warndays: | ||
2276 | 520 | cmd.extend(['--warndays', warndays]) | ||
2277 | 521 | cmd.append(username) | ||
2278 | 522 | subprocess.check_call(cmd) | ||
2279 | 523 | |||
2280 | 524 | |||
2281 | 525 | remove_password_expiry = functools.partial(chage, expiredate='-1', inactive='-1', mindays='0', maxdays='-1') | ||
2282 | 526 | |||
2283 | 527 | |||
2284 | 309 | def rsync(from_path, to_path, flags='-r', options=None, timeout=None): | 528 | def rsync(from_path, to_path, flags='-r', options=None, timeout=None): |
2285 | 310 | """Replicate the contents of a path""" | 529 | """Replicate the contents of a path""" |
2286 | 311 | options = options or ['--delete', '--executability'] | 530 | options = options or ['--delete', '--executability'] |
2287 | @@ -352,13 +571,45 @@ | |||
2288 | 352 | 571 | ||
2289 | 353 | def write_file(path, content, owner='root', group='root', perms=0o444): | 572 | def write_file(path, content, owner='root', group='root', perms=0o444): |
2290 | 354 | """Create or overwrite a file with the contents of a byte string.""" | 573 | """Create or overwrite a file with the contents of a byte string.""" |
2291 | 355 | log("Writing file {} {}:{} {:o}".format(path, owner, group, perms)) | ||
2292 | 356 | uid = pwd.getpwnam(owner).pw_uid | 574 | uid = pwd.getpwnam(owner).pw_uid |
2293 | 357 | gid = grp.getgrnam(group).gr_gid | 575 | gid = grp.getgrnam(group).gr_gid |
2298 | 358 | with open(path, 'wb') as target: | 576 | # let's see if we can grab the file and compare the content, to avoid doing
2299 | 359 | os.fchown(target.fileno(), uid, gid) | 577 | # a write. |
2300 | 360 | os.fchmod(target.fileno(), perms) | 578 | existing_content = None |
2301 | 361 | target.write(content) | 579 | existing_uid, existing_gid, existing_perms = None, None, None |
2302 | 580 | try: | ||
2303 | 581 | with open(path, 'rb') as target: | ||
2304 | 582 | existing_content = target.read() | ||
2305 | 583 | stat = os.stat(path) | ||
2306 | 584 | existing_uid, existing_gid, existing_perms = ( | ||
2307 | 585 | stat.st_uid, stat.st_gid, stat.st_mode | ||
2308 | 586 | ) | ||
2309 | 587 | except Exception: | ||
2310 | 588 | pass | ||
2311 | 589 | if content != existing_content: | ||
2312 | 590 | log("Writing file {} {}:{} {:o}".format(path, owner, group, perms), | ||
2313 | 591 | level=DEBUG) | ||
2314 | 592 | with open(path, 'wb') as target: | ||
2315 | 593 | os.fchown(target.fileno(), uid, gid) | ||
2316 | 594 | os.fchmod(target.fileno(), perms) | ||
2317 | 595 | if isinstance(content, str): | ||
2318 | 596 | content = content.encode('UTF-8') | ||
2319 | 597 | target.write(content) | ||
2320 | 598 | return | ||
2321 | 599 | # the contents were the same, but we might still need to change the | ||
2322 | 600 | # ownership or permissions. | ||
2323 | 601 | if existing_uid != uid: | ||
2324 | 602 | log("Changing uid on already existing content: {} -> {}" | ||
2325 | 603 | .format(existing_uid, uid), level=DEBUG) | ||
2326 | 604 | os.chown(path, uid, -1) | ||
2327 | 605 | if existing_gid != gid: | ||
2328 | 606 | log("Changing gid on already existing content: {} -> {}" | ||
2329 | 607 | .format(existing_gid, gid), level=DEBUG) | ||
2330 | 608 | os.chown(path, -1, gid) | ||
2331 | 609 | if existing_perms != perms: | ||
2332 | 610 | log("Changing permissions on existing content: {} -> {}" | ||
2333 | 611 | .format(existing_perms, perms), level=DEBUG) | ||
2334 | 612 | os.chmod(path, perms) | ||
2335 | 362 | 613 | ||
2336 | 363 | 614 | ||
2337 | 364 | def fstab_remove(mp): | 615 | def fstab_remove(mp): |
2338 | @@ -456,7 +707,7 @@ | |||
2339 | 456 | 707 | ||
2340 | 457 | :param str checksum: Value of the checksum used to validate the file. | 708 | :param str checksum: Value of the checksum used to validate the file. |
2341 | 458 | :param str hash_type: Hash algorithm used to generate `checksum`. | 709 | :param str hash_type: Hash algorithm used to generate `checksum`. |
2343 | 459 | Can be any hash alrgorithm supported by :mod:`hashlib`, | 710 | Can be any hash algorithm supported by :mod:`hashlib`, |
2344 | 460 | such as md5, sha1, sha256, sha512, etc. | 711 | such as md5, sha1, sha256, sha512, etc. |
2345 | 461 | :raises ChecksumError: If the file fails the checksum | 712 | :raises ChecksumError: If the file fails the checksum |
2346 | 462 | 713 | ||
2347 | @@ -471,78 +722,227 @@ | |||
2348 | 471 | pass | 722 | pass |
2349 | 472 | 723 | ||
2350 | 473 | 724 | ||
2375 | 474 | def restart_on_change(restart_map, stopstart=False, restart_functions=None): | 725 | class restart_on_change(object): |
2376 | 475 | """Restart services based on configuration files changing | 726 | """Decorator and context manager to handle restarts. |
2377 | 476 | 727 | ||
2378 | 477 | This function is used a decorator, for example:: | 728 | Usage: |
2379 | 478 | 729 | ||
2380 | 479 | @restart_on_change({ | 730 | @restart_on_change(restart_map, ...) |
2381 | 480 | '/etc/ceph/ceph.conf': [ 'cinder-api', 'cinder-volume' ] | 731 | def function_that_might_trigger_a_restart(...) |
2382 | 481 | '/etc/apache/sites-enabled/*': [ 'apache2' ] | 732 | ... |
2383 | 482 | }) | 733 | |
2384 | 483 | def config_changed(): | 734 | Or: |
2385 | 484 | pass # your code here | 735 | |
2386 | 485 | 736 | with restart_on_change(restart_map, ...): | |
2387 | 486 | In this example, the cinder-api and cinder-volume services | 737 | do_stuff_that_might_trigger_a_restart() |
2388 | 487 | would be restarted if /etc/ceph/ceph.conf is changed by the | 738 | ... |
2365 | 488 | ceph_client_changed function. The apache2 service would be | ||
2366 | 489 | restarted if any file matching the pattern got changed, created | ||
2367 | 490 | or removed. Standard wildcards are supported, see documentation | ||
2368 | 491 | for the 'glob' module for more information. | ||
2369 | 492 | |||
2370 | 493 | @param restart_map: {path_file_name: [service_name, ...] | ||
2371 | 494 | @param stopstart: DEFAULT false; whether to stop, start OR restart | ||
2372 | 495 | @param restart_functions: nonstandard functions to use to restart services | ||
2373 | 496 | {svc: func, ...} | ||
2374 | 497 | @returns result from decorated function | ||
2389 | 498 | """ | 739 | """ |
2391 | 499 | def wrap(f): | 740 | |
2392 | 741 | def __init__(self, restart_map, stopstart=False, restart_functions=None, | ||
2393 | 742 | can_restart_now_f=None, post_svc_restart_f=None, | ||
2394 | 743 | pre_restarts_wait_f=None): | ||
2395 | 744 | """ | ||
2396 | 745 | :param restart_map: {file: [service, ...]} | ||
2397 | 746 | :type restart_map: Dict[str, List[str,]] | ||
2398 | 747 | :param stopstart: whether to stop, start or restart a service | ||
2399 | 748 | :type stopstart: booleean | ||
2400 | 749 | :param restart_functions: nonstandard functions to use to restart | ||
2401 | 750 | services {svc: func, ...} | ||
2402 | 751 | :type restart_functions: Dict[str, Callable[[str], None]] | ||
2403 | 752 | :param can_restart_now_f: A function used to check if the restart is | ||
2404 | 753 | permitted. | ||
2405 | 754 | :type can_restart_now_f: Callable[[str, List[str]], boolean] | ||
2406 | 755 | :param post_svc_restart_f: A function run after a service has | ||
2407 | 756 | restarted. | ||
2408 | 757 | :type post_svc_restart_f: Callable[[str], None] | ||
2409 | 758 | :param pre_restarts_wait_f: A function called before any restarts. | ||
2410 | 759 | :type pre_restarts_wait_f: Callable[None, None] | ||
2411 | 760 | """ | ||
2412 | 761 | self.restart_map = restart_map | ||
2413 | 762 | self.stopstart = stopstart | ||
2414 | 763 | self.restart_functions = restart_functions | ||
2415 | 764 | self.can_restart_now_f = can_restart_now_f | ||
2416 | 765 | self.post_svc_restart_f = post_svc_restart_f | ||
2417 | 766 | self.pre_restarts_wait_f = pre_restarts_wait_f | ||
2418 | 767 | |||
2419 | 768 | def __call__(self, f): | ||
2420 | 769 | """Work like a decorator. | ||
2421 | 770 | |||
2422 | 771 | Returns a wrapped function that performs the restart if triggered. | ||
2423 | 772 | |||
2424 | 773 | :param f: The function that is being wrapped. | ||
2425 | 774 | :type f: Callable[[Any], Any] | ||
2426 | 775 | :returns: the wrapped function | ||
2427 | 776 | :rtype: Callable[[Any], Any] | ||
2428 | 777 | """ | ||
2429 | 500 | @functools.wraps(f) | 778 | @functools.wraps(f) |
2430 | 501 | def wrapped_f(*args, **kwargs): | 779 | def wrapped_f(*args, **kwargs): |
2431 | 502 | return restart_on_change_helper( | 780 | return restart_on_change_helper( |
2434 | 503 | (lambda: f(*args, **kwargs)), restart_map, stopstart, | 781 | (lambda: f(*args, **kwargs)), |
2435 | 504 | restart_functions) | 782 | self.restart_map, |
2436 | 783 | stopstart=self.stopstart, | ||
2437 | 784 | restart_functions=self.restart_functions, | ||
2438 | 785 | can_restart_now_f=self.can_restart_now_f, | ||
2439 | 786 | post_svc_restart_f=self.post_svc_restart_f, | ||
2440 | 787 | pre_restarts_wait_f=self.pre_restarts_wait_f) | ||
2441 | 505 | return wrapped_f | 788 | return wrapped_f |
2443 | 506 | return wrap | 789 | |
2444 | 790 | def __enter__(self): | ||
2445 | 791 | """Enter the runtime context related to this object. """ | ||
2446 | 792 | self.checksums = _pre_restart_on_change_helper(self.restart_map) | ||
2447 | 793 | |||
2448 | 794 | def __exit__(self, exc_type, exc_val, exc_tb): | ||
2449 | 795 | """Exit the runtime context related to this object. | ||
2450 | 796 | |||
2451 | 797 | The parameters describe the exception that caused the context to be | ||
2452 | 798 | exited. If the context was exited without an exception, all three | ||
2453 | 799 | arguments will be None. | ||
2454 | 800 | """ | ||
2455 | 801 | if exc_type is None: | ||
2456 | 802 | _post_restart_on_change_helper( | ||
2457 | 803 | self.checksums, | ||
2458 | 804 | self.restart_map, | ||
2459 | 805 | stopstart=self.stopstart, | ||
2460 | 806 | restart_functions=self.restart_functions, | ||
2461 | 807 | can_restart_now_f=self.can_restart_now_f, | ||
2462 | 808 | post_svc_restart_f=self.post_svc_restart_f, | ||
2463 | 809 | pre_restarts_wait_f=self.pre_restarts_wait_f) | ||
2464 | 810 | # All is good, so return False; any exceptions will propagate. | ||
2465 | 811 | return False | ||
2466 | 507 | 812 | ||
2467 | 508 | 813 | ||
2468 | 509 | def restart_on_change_helper(lambda_f, restart_map, stopstart=False, | 814 | def restart_on_change_helper(lambda_f, restart_map, stopstart=False, |
2470 | 510 | restart_functions=None): | 815 | restart_functions=None, |
2471 | 816 | can_restart_now_f=None, | ||
2472 | 817 | post_svc_restart_f=None, | ||
2473 | 818 | pre_restarts_wait_f=None): | ||
2474 | 511 | """Helper function to perform the restart_on_change function. | 819 | """Helper function to perform the restart_on_change function. |
2475 | 512 | 820 | ||
2476 | 513 | This is provided for decorators to restart services if files described | 821 | This is provided for decorators to restart services if files described |
2477 | 514 | in the restart_map have changed after an invocation of lambda_f(). | 822 | in the restart_map have changed after an invocation of lambda_f(). |
2478 | 515 | 823 | ||
2485 | 516 | @param lambda_f: function to call. | 824 | This functions allows for a number of helper functions to be passed. |
2486 | 517 | @param restart_map: {file: [service, ...]} | 825 | |
2487 | 518 | @param stopstart: whether to stop, start or restart a service | 826 | `restart_functions` is a map with a service as the key and the |
2488 | 519 | @param restart_functions: nonstandard functions to use to restart services | 827 | corresponding value being the function to call to restart the service. For |
2489 | 520 | {svc: func, ...} | 828 | example if `restart_functions={'some-service': my_restart_func}` then |
2490 | 521 | @returns result of lambda_f() | 829 | `my_restart_func` should a function which takes one argument which is the |
2491 | 830 | service name to be retstarted. | ||
2492 | 831 | |||
2493 | 832 | `can_restart_now_f` is a function which checks that a restart is permitted. | ||
2494 | 833 | It should return a bool which indicates if a restart is allowed and should | ||
2495 | 834 | take a service name (str) and a list of changed files (List[str]) as | ||
2496 | 835 | arguments. | ||
2497 | 836 | |||
2498 | 837 | `post_svc_restart_f` is a function which runs after a service has been | ||
2499 | 838 | restarted. It takes the service name that was restarted as an argument. | ||
2500 | 839 | |||
2501 | 840 | `pre_restarts_wait_f` is a function which is called before any restarts | ||
2502 | 841 | occur. The use case for this is an application which wants to try and | ||
2503 | 842 | stagger restarts between units. | ||
2504 | 843 | |||
2505 | 844 | :param lambda_f: function to call. | ||
2506 | 845 | :type lambda_f: Callable[[], ANY] | ||
2507 | 846 | :param restart_map: {file: [service, ...]} | ||
2508 | 847 | :type restart_map: Dict[str, List[str,]] | ||
2509 | 848 | :param stopstart: whether to stop, start or restart a service | ||
2510 | 849 | :type stopstart: booleean | ||
2511 | 850 | :param restart_functions: nonstandard functions to use to restart services | ||
2512 | 851 | {svc: func, ...} | ||
2513 | 852 | :type restart_functions: Dict[str, Callable[[str], None]] | ||
2514 | 853 | :param can_restart_now_f: A function used to check if the restart is | ||
2515 | 854 | permitted. | ||
2516 | 855 | :type can_restart_now_f: Callable[[str, List[str]], boolean] | ||
2517 | 856 | :param post_svc_restart_f: A function run after a service has | ||
2518 | 857 | restarted. | ||
2519 | 858 | :type post_svc_restart_f: Callable[[str], None] | ||
2520 | 859 | :param pre_restarts_wait_f: A function called before any restarts. | ||
2521 | 860 | :type pre_restarts_wait_f: Callable[None, None] | ||
2522 | 861 | :returns: result of lambda_f() | ||
2523 | 862 | :rtype: ANY | ||
2524 | 863 | """ | ||
2525 | 864 | checksums = _pre_restart_on_change_helper(restart_map) | ||
2526 | 865 | r = lambda_f() | ||
2527 | 866 | _post_restart_on_change_helper(checksums, | ||
2528 | 867 | restart_map, | ||
2529 | 868 | stopstart, | ||
2530 | 869 | restart_functions, | ||
2531 | 870 | can_restart_now_f, | ||
2532 | 871 | post_svc_restart_f, | ||
2533 | 872 | pre_restarts_wait_f) | ||
2534 | 873 | return r | ||
2535 | 874 | |||
2536 | 875 | |||
2537 | 876 | def _pre_restart_on_change_helper(restart_map): | ||
2538 | 877 | """Take a snapshot of file hashes. | ||
2539 | 878 | |||
2540 | 879 | :param restart_map: {file: [service, ...]} | ||
2541 | 880 | :type restart_map: Dict[str, List[str,]] | ||
2542 | 881 | :returns: Dictionary of file paths and the files checksum. | ||
2543 | 882 | :rtype: Dict[str, str] | ||
2544 | 883 | """ | ||
2545 | 884 | return {path: path_hash(path) for path in restart_map} | ||
2546 | 885 | |||
2547 | 886 | |||
2548 | 887 | def _post_restart_on_change_helper(checksums, | ||
2549 | 888 | restart_map, | ||
2550 | 889 | stopstart=False, | ||
2551 | 890 | restart_functions=None, | ||
2552 | 891 | can_restart_now_f=None, | ||
2553 | 892 | post_svc_restart_f=None, | ||
2554 | 893 | pre_restarts_wait_f=None): | ||
2555 | 894 | """Check whether files have changed. | ||
2556 | 895 | |||
2557 | 896 | :param checksums: Dictionary of file paths and the files checksum. | ||
2558 | 897 | :type checksums: Dict[str, str] | ||
2559 | 898 | :param restart_map: {file: [service, ...]} | ||
2560 | 899 | :type restart_map: Dict[str, List[str,]] | ||
2561 | 900 | :param stopstart: whether to stop, start or restart a service | ||
2562 | 901 | :type stopstart: booleean | ||
2563 | 902 | :param restart_functions: nonstandard functions to use to restart services | ||
2564 | 903 | {svc: func, ...} | ||
2565 | 904 | :type restart_functions: Dict[str, Callable[[str], None]] | ||
2566 | 905 | :param can_restart_now_f: A function used to check if the restart is | ||
2567 | 906 | permitted. | ||
2568 | 907 | :type can_restart_now_f: Callable[[str, List[str]], boolean] | ||
2569 | 908 | :param post_svc_restart_f: A function run after a service has | ||
2570 | 909 | restarted. | ||
2571 | 910 | :type post_svc_restart_f: Callable[[str], None] | ||
2572 | 911 | :param pre_restarts_wait_f: A function called before any restarts. | ||
2573 | 912 | :type pre_restarts_wait_f: Callable[None, None] | ||
2574 | 522 | """ | 913 | """ |
2575 | 523 | if restart_functions is None: | 914 | if restart_functions is None: |
2576 | 524 | restart_functions = {} | 915 | restart_functions = {} |
2579 | 525 | checksums = {path: path_hash(path) for path in restart_map} | 916 | changed_files = defaultdict(list) |
2580 | 526 | r = lambda_f() | 917 | restarts = [] |
2581 | 527 | # create a list of lists of the services to restart | 918 | # create a list of lists of the services to restart |
2585 | 528 | restarts = [restart_map[path] | 919 | for path, services in restart_map.items(): |
2586 | 529 | for path in restart_map | 920 | if path_hash(path) != checksums[path]: |
2587 | 530 | if path_hash(path) != checksums[path]] | 921 | restarts.append(services) |
2588 | 922 | for svc in services: | ||
2589 | 923 | changed_files[svc].append(path) | ||
2590 | 531 | # create a flat list of ordered services without duplicates from lists | 924 | # create a flat list of ordered services without duplicates from lists |
2591 | 532 | services_list = list(OrderedDict.fromkeys(itertools.chain(*restarts))) | 925 | services_list = list(OrderedDict.fromkeys(itertools.chain(*restarts))) |
2592 | 533 | if services_list: | 926 | if services_list: |
2593 | 927 | if pre_restarts_wait_f: | ||
2594 | 928 | pre_restarts_wait_f() | ||
2595 | 534 | actions = ('stop', 'start') if stopstart else ('restart',) | 929 | actions = ('stop', 'start') if stopstart else ('restart',) |
2596 | 535 | for service_name in services_list: | 930 | for service_name in services_list: |
2597 | 931 | if can_restart_now_f: | ||
2598 | 932 | if not can_restart_now_f(service_name, | ||
2599 | 933 | changed_files[service_name]): | ||
2600 | 934 | continue | ||
2601 | 536 | if service_name in restart_functions: | 935 | if service_name in restart_functions: |
2602 | 537 | restart_functions[service_name](service_name) | 936 | restart_functions[service_name](service_name) |
2603 | 538 | else: | 937 | else: |
2604 | 539 | for action in actions: | 938 | for action in actions: |
2605 | 540 | service(action, service_name) | 939 | service(action, service_name) |
2607 | 541 | return r | 940 | if post_svc_restart_f: |
2608 | 941 | post_svc_restart_f(service_name) | ||
2609 | 542 | 942 | ||
2610 | 543 | 943 | ||
2611 | 544 | def pwgen(length=None): | 944 | def pwgen(length=None): |
2613 | 545 | """Generate a random pasword.""" | 945 | """Generate a random password.""" |
2614 | 546 | if length is None: | 946 | if length is None: |
2615 | 547 | # A random length is ok to use a weak PRNG | 947 | # A random length is ok to use a weak PRNG |
2616 | 548 | length = random.choice(range(35, 45)) | 948 | length = random.choice(range(35, 45)) |
2617 | @@ -554,7 +954,7 @@ | |||
2618 | 554 | random_generator = random.SystemRandom() | 954 | random_generator = random.SystemRandom() |
2619 | 555 | random_chars = [ | 955 | random_chars = [ |
2620 | 556 | random_generator.choice(alphanumeric_chars) for _ in range(length)] | 956 | random_generator.choice(alphanumeric_chars) for _ in range(length)] |
2622 | 557 | return(''.join(random_chars)) | 957 | return ''.join(random_chars) |
2623 | 558 | 958 | ||
2624 | 559 | 959 | ||
2625 | 560 | def is_phy_iface(interface): | 960 | def is_phy_iface(interface): |
2626 | @@ -595,7 +995,7 @@ | |||
2627 | 595 | 995 | ||
2628 | 596 | def list_nics(nic_type=None): | 996 | def list_nics(nic_type=None): |
2629 | 597 | """Return a list of nics of given type(s)""" | 997 | """Return a list of nics of given type(s)""" |
2631 | 598 | if isinstance(nic_type, six.string_types): | 998 | if isinstance(nic_type, str): |
2632 | 599 | int_types = [nic_type] | 999 | int_types = [nic_type] |
2633 | 600 | else: | 1000 | else: |
2634 | 601 | int_types = nic_type | 1001 | int_types = nic_type |
2635 | @@ -604,7 +1004,8 @@ | |||
2636 | 604 | if nic_type: | 1004 | if nic_type: |
2637 | 605 | for int_type in int_types: | 1005 | for int_type in int_types: |
2638 | 606 | cmd = ['ip', 'addr', 'show', 'label', int_type + '*'] | 1006 | cmd = ['ip', 'addr', 'show', 'label', int_type + '*'] |
2640 | 607 | ip_output = subprocess.check_output(cmd).decode('UTF-8') | 1007 | ip_output = subprocess.check_output( |
2641 | 1008 | cmd).decode('UTF-8', errors='replace') | ||
2642 | 608 | ip_output = ip_output.split('\n') | 1009 | ip_output = ip_output.split('\n') |
2643 | 609 | ip_output = (line for line in ip_output if line) | 1010 | ip_output = (line for line in ip_output if line) |
2644 | 610 | for line in ip_output: | 1011 | for line in ip_output: |
2645 | @@ -620,10 +1021,11 @@ | |||
2646 | 620 | interfaces.append(iface) | 1021 | interfaces.append(iface) |
2647 | 621 | else: | 1022 | else: |
2648 | 622 | cmd = ['ip', 'a'] | 1023 | cmd = ['ip', 'a'] |
2650 | 623 | ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n') | 1024 | ip_output = subprocess.check_output( |
2651 | 1025 | cmd).decode('UTF-8', errors='replace').split('\n') | ||
2652 | 624 | ip_output = (line.strip() for line in ip_output if line) | 1026 | ip_output = (line.strip() for line in ip_output if line) |
2653 | 625 | 1027 | ||
2655 | 626 | key = re.compile('^[0-9]+:\s+(.+):') | 1028 | key = re.compile(r'^[0-9]+:\s+(.+):') |
2656 | 627 | for line in ip_output: | 1029 | for line in ip_output: |
2657 | 628 | matched = re.search(key, line) | 1030 | matched = re.search(key, line) |
2658 | 629 | if matched: | 1031 | if matched: |
2659 | @@ -644,7 +1046,8 @@ | |||
2660 | 644 | def get_nic_mtu(nic): | 1046 | def get_nic_mtu(nic): |
2661 | 645 | """Return the Maximum Transmission Unit (MTU) for a network interface.""" | 1047 | """Return the Maximum Transmission Unit (MTU) for a network interface.""" |
2662 | 646 | cmd = ['ip', 'addr', 'show', nic] | 1048 | cmd = ['ip', 'addr', 'show', nic] |
2664 | 647 | ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n') | 1049 | ip_output = subprocess.check_output( |
2665 | 1050 | cmd).decode('UTF-8', errors='replace').split('\n') | ||
2666 | 648 | mtu = "" | 1051 | mtu = "" |
2667 | 649 | for line in ip_output: | 1052 | for line in ip_output: |
2668 | 650 | words = line.split() | 1053 | words = line.split() |
2669 | @@ -656,7 +1059,7 @@ | |||
2670 | 656 | def get_nic_hwaddr(nic): | 1059 | def get_nic_hwaddr(nic): |
2671 | 657 | """Return the Media Access Control (MAC) for a network interface.""" | 1060 | """Return the Media Access Control (MAC) for a network interface.""" |
2672 | 658 | cmd = ['ip', '-o', '-0', 'addr', 'show', nic] | 1061 | cmd = ['ip', '-o', '-0', 'addr', 'show', nic] |
2674 | 659 | ip_output = subprocess.check_output(cmd).decode('UTF-8') | 1062 | ip_output = subprocess.check_output(cmd).decode('UTF-8', errors='replace') |
2675 | 660 | hwaddr = "" | 1063 | hwaddr = "" |
2676 | 661 | words = ip_output.split() | 1064 | words = ip_output.split() |
2677 | 662 | if 'link/ether' in words: | 1065 | if 'link/ether' in words: |
2678 | @@ -668,7 +1071,7 @@ | |||
2679 | 668 | def chdir(directory): | 1071 | def chdir(directory): |
2680 | 669 | """Change the current working directory to a different directory for a code | 1072 | """Change the current working directory to a different directory for a code |
2681 | 670 | block and return the previous directory after the block exits. Useful to | 1073 | block and return the previous directory after the block exits. Useful to |
2683 | 671 | run commands from a specificed directory. | 1074 | run commands from a specified directory. |
2684 | 672 | 1075 | ||
2685 | 673 | :param str directory: The directory path to change to for this context. | 1076 | :param str directory: The directory path to change to for this context. |
2686 | 674 | """ | 1077 | """ |
2687 | @@ -703,9 +1106,12 @@ | |||
2688 | 703 | for root, dirs, files in os.walk(path, followlinks=follow_links): | 1106 | for root, dirs, files in os.walk(path, followlinks=follow_links): |
2689 | 704 | for name in dirs + files: | 1107 | for name in dirs + files: |
2690 | 705 | full = os.path.join(root, name) | 1108 | full = os.path.join(root, name) |
2693 | 706 | broken_symlink = os.path.lexists(full) and not os.path.exists(full) | 1109 | try: |
2692 | 707 | if not broken_symlink: | ||
2694 | 708 | chown(full, uid, gid) | 1110 | chown(full, uid, gid) |
2695 | 1111 | except (IOError, OSError) as e: | ||
2696 | 1112 | # Intended to ignore "file not found". | ||
2697 | 1113 | if e.errno == errno.ENOENT: | ||
2698 | 1114 | pass | ||
2699 | 709 | 1115 | ||
2700 | 710 | 1116 | ||
2701 | 711 | def lchownr(path, owner, group): | 1117 | def lchownr(path, owner, group): |
2702 | @@ -720,6 +1126,20 @@ | |||
2703 | 720 | chownr(path, owner, group, follow_links=False) | 1126 | chownr(path, owner, group, follow_links=False) |
2704 | 721 | 1127 | ||
2705 | 722 | 1128 | ||
2706 | 1129 | def owner(path): | ||
2707 | 1130 | """Returns a tuple containing the username & groupname owning the path. | ||
2708 | 1131 | |||
2709 | 1132 | :param str path: the string path to retrieve the ownership | ||
2710 | 1133 | :return tuple(str, str): A (username, groupname) tuple containing the | ||
2711 | 1134 | name of the user and group owning the path. | ||
2712 | 1135 | :raises OSError: if the specified path does not exist | ||
2713 | 1136 | """ | ||
2714 | 1137 | stat = os.stat(path) | ||
2715 | 1138 | username = pwd.getpwuid(stat.st_uid)[0] | ||
2716 | 1139 | groupname = grp.getgrgid(stat.st_gid)[0] | ||
2717 | 1140 | return username, groupname | ||
2718 | 1141 | |||
2719 | 1142 | |||
2720 | 723 | def get_total_ram(): | 1143 | def get_total_ram(): |
2721 | 724 | """The total amount of system RAM in bytes. | 1144 | """The total amount of system RAM in bytes. |
2722 | 725 | 1145 | ||
2723 | @@ -751,3 +1171,136 @@ | |||
2724 | 751 | else: | 1171 | else: |
2725 | 752 | # Detect using upstart container file marker | 1172 | # Detect using upstart container file marker |
2726 | 753 | return os.path.exists(UPSTART_CONTAINER_TYPE) | 1173 | return os.path.exists(UPSTART_CONTAINER_TYPE) |
2727 | 1174 | |||
2728 | 1175 | |||
2729 | 1176 | def add_to_updatedb_prunepath(path, updatedb_path=UPDATEDB_PATH): | ||
2730 | 1177 | """Adds the specified path to the mlocate's udpatedb.conf PRUNEPATH list. | ||
2731 | 1178 | |||
2732 | 1179 | This method has no effect if the path specified by updatedb_path does not | ||
2733 | 1180 | exist or is not a file. | ||
2734 | 1181 | |||
2735 | 1182 | @param path: string the path to add to the updatedb.conf PRUNEPATHS value | ||
2736 | 1183 | @param updatedb_path: the path the updatedb.conf file | ||
2737 | 1184 | """ | ||
2738 | 1185 | if not os.path.exists(updatedb_path) or os.path.isdir(updatedb_path): | ||
2739 | 1186 | # If the updatedb.conf file doesn't exist then don't attempt to update | ||
2740 | 1187 | # the file as the package providing mlocate may not be installed on | ||
2741 | 1188 | # the local system | ||
2742 | 1189 | return | ||
2743 | 1190 | |||
2744 | 1191 | with open(updatedb_path, 'r+') as f_id: | ||
2745 | 1192 | updatedb_text = f_id.read() | ||
2746 | 1193 | output = updatedb(updatedb_text, path) | ||
2747 | 1194 | f_id.seek(0) | ||
2748 | 1195 | f_id.write(output) | ||
2749 | 1196 | f_id.truncate() | ||
2750 | 1197 | |||
2751 | 1198 | |||
2752 | 1199 | def updatedb(updatedb_text, new_path): | ||
2753 | 1200 | lines = [line for line in updatedb_text.split("\n")] | ||
2754 | 1201 | for i, line in enumerate(lines): | ||
2755 | 1202 | if line.startswith("PRUNEPATHS="): | ||
2756 | 1203 | paths_line = line.split("=")[1].replace('"', '') | ||
2757 | 1204 | paths = paths_line.split(" ") | ||
2758 | 1205 | if new_path not in paths: | ||
2759 | 1206 | paths.append(new_path) | ||
2760 | 1207 | lines[i] = 'PRUNEPATHS="{}"'.format(' '.join(paths)) | ||
2761 | 1208 | output = "\n".join(lines) | ||
2762 | 1209 | return output | ||
2763 | 1210 | |||
2764 | 1211 | |||
2765 | 1212 | def modulo_distribution(modulo=3, wait=30, non_zero_wait=False): | ||
2766 | 1213 | """ Modulo distribution | ||
2767 | 1214 | |||
2768 | 1215 | This helper uses the unit number, a modulo value and a constant wait time | ||
2769 | 1216 | to produce a calculated wait time distribution. This is useful in large | ||
2770 | 1217 | scale deployments to distribute load during an expensive operation such as | ||
2771 | 1218 | service restarts. | ||
2772 | 1219 | |||
2773 | 1220 | If you have 1000 nodes that need to restart 100 at a time 1 minute at a | ||
2774 | 1221 | time: | ||
2775 | 1222 | |||
2776 | 1223 | time.wait(modulo_distribution(modulo=100, wait=60)) | ||
2777 | 1224 | restart() | ||
2778 | 1225 | |||
2779 | 1226 | If you need restarts to happen serially set modulo to the exact number of | ||
2780 | 1227 | nodes and set a high constant wait time: | ||
2781 | 1228 | |||
2782 | 1229 | time.wait(modulo_distribution(modulo=10, wait=120)) | ||
2783 | 1230 | restart() | ||
2784 | 1231 | |||
2785 | 1232 | @param modulo: int The modulo number creates the group distribution | ||
2786 | 1233 | @param wait: int The constant time wait value | ||
2787 | 1234 | @param non_zero_wait: boolean Override unit % modulo == 0, | ||
2788 | 1235 | return modulo * wait. Used to avoid collisions with | ||
2789 | 1236 | leader nodes which are often given priority. | ||
2790 | 1237 | @return: int Calculated time to wait for unit operation | ||
2791 | 1238 | """ | ||
2792 | 1239 | unit_number = int(local_unit().split('/')[1]) | ||
2793 | 1240 | calculated_wait_time = (unit_number % modulo) * wait | ||
2794 | 1241 | if non_zero_wait and calculated_wait_time == 0: | ||
2795 | 1242 | return modulo * wait | ||
2796 | 1243 | else: | ||
2797 | 1244 | return calculated_wait_time | ||
2798 | 1245 | |||
2799 | 1246 | |||
2800 | 1247 | def ca_cert_absolute_path(basename_without_extension): | ||
2801 | 1248 | """Returns absolute path to CA certificate. | ||
2802 | 1249 | |||
2803 | 1250 | :param basename_without_extension: Filename without extension | ||
2804 | 1251 | :type basename_without_extension: str | ||
2805 | 1252 | :returns: Absolute full path | ||
2806 | 1253 | :rtype: str | ||
2807 | 1254 | """ | ||
2808 | 1255 | return '{}/{}.crt'.format(CA_CERT_DIR, basename_without_extension) | ||
2809 | 1256 | |||
2810 | 1257 | |||
2811 | 1258 | def install_ca_cert(ca_cert, name=None): | ||
2812 | 1259 | """ | ||
2813 | 1260 | Install the given cert as a trusted CA. | ||
2814 | 1261 | |||
2815 | 1262 | The ``name`` is the stem of the filename where the cert is written, and if | ||
2816 | 1263 | not provided, it will default to ``juju-{charm_name}``. | ||
2817 | 1264 | |||
2818 | 1265 | If the cert is empty or None, or is unchanged, nothing is done. | ||
2819 | 1266 | """ | ||
2820 | 1267 | if not ca_cert: | ||
2821 | 1268 | return | ||
2822 | 1269 | if not isinstance(ca_cert, bytes): | ||
2823 | 1270 | ca_cert = ca_cert.encode('utf8') | ||
2824 | 1271 | if not name: | ||
2825 | 1272 | name = 'juju-{}'.format(charm_name()) | ||
2826 | 1273 | cert_file = ca_cert_absolute_path(name) | ||
2827 | 1274 | new_hash = hashlib.md5(ca_cert).hexdigest() | ||
2828 | 1275 | if file_hash(cert_file) == new_hash: | ||
2829 | 1276 | return | ||
2830 | 1277 | log("Installing new CA cert at: {}".format(cert_file), level=INFO) | ||
2831 | 1278 | write_file(cert_file, ca_cert) | ||
2832 | 1279 | subprocess.check_call(['update-ca-certificates', '--fresh']) | ||
2833 | 1280 | |||
2834 | 1281 | |||
2835 | 1282 | def get_system_env(key, default=None): | ||
2836 | 1283 | """Get data from system environment as represented in ``/etc/environment``. | ||
2837 | 1284 | |||
2838 | 1285 | :param key: Key to look up | ||
2839 | 1286 | :type key: str | ||
2840 | 1287 | :param default: Value to return if key is not found | ||
2841 | 1288 | :type default: any | ||
2842 | 1289 | :returns: Value for key if found or contents of default parameter | ||
2843 | 1290 | :rtype: any | ||
2844 | 1291 | :raises: subprocess.CalledProcessError | ||
2845 | 1292 | """ | ||
2846 | 1293 | env_file = '/etc/environment' | ||
2847 | 1294 | # use the shell and env(1) to parse the global environments file. This is | ||
2848 | 1295 | # done to get the correct result even if the user has shell variable | ||
2849 | 1296 | # substitutions or other shell logic in that file. | ||
2850 | 1297 | output = subprocess.check_output( | ||
2851 | 1298 | ['env', '-i', '/bin/bash', '-c', | ||
2852 | 1299 | 'set -a && source {} && env'.format(env_file)], | ||
2853 | 1300 | universal_newlines=True) | ||
2854 | 1301 | for k, v in (line.split('=', 1) | ||
2855 | 1302 | for line in output.splitlines() if '=' in line): | ||
2856 | 1303 | if k == key: | ||
2857 | 1304 | return v | ||
2858 | 1305 | else: | ||
2859 | 1306 | return default | ||
2860 | 754 | 1307 | ||
2861 | === modified file 'hooks/charmhelpers/core/host_factory/centos.py' | |||
2862 | --- hooks/charmhelpers/core/host_factory/centos.py 2016-12-20 20:15:28 +0000 | |||
2863 | +++ hooks/charmhelpers/core/host_factory/centos.py 2023-06-30 13:58:42 +0000 | |||
2864 | @@ -2,6 +2,22 @@ | |||
2865 | 2 | import yum | 2 | import yum |
2866 | 3 | import os | 3 | import os |
2867 | 4 | 4 | ||
2868 | 5 | from charmhelpers.core.strutils import BasicStringComparator | ||
2869 | 6 | |||
2870 | 7 | |||
2871 | 8 | class CompareHostReleases(BasicStringComparator): | ||
2872 | 9 | """Provide comparisons of Host releases. | ||
2873 | 10 | |||
2874 | 11 | Use in the form of | ||
2875 | 12 | |||
2876 | 13 | if CompareHostReleases(release) > 'trusty': | ||
2877 | 14 | # do something with mitaka | ||
2878 | 15 | """ | ||
2879 | 16 | |||
2880 | 17 | def __init__(self, item): | ||
2881 | 18 | raise NotImplementedError( | ||
2882 | 19 | "CompareHostReleases() is not implemented for CentOS") | ||
2883 | 20 | |||
2884 | 5 | 21 | ||
2885 | 6 | def service_available(service_name): | 22 | def service_available(service_name): |
2886 | 7 | # """Determine whether a system service is available.""" | 23 | # """Determine whether a system service is available.""" |
2887 | 8 | 24 | ||
2888 | === modified file 'hooks/charmhelpers/core/host_factory/ubuntu.py' | |||
2889 | --- hooks/charmhelpers/core/host_factory/ubuntu.py 2016-12-20 20:15:28 +0000 | |||
2890 | +++ hooks/charmhelpers/core/host_factory/ubuntu.py 2023-06-30 13:58:42 +0000 | |||
2891 | @@ -1,5 +1,50 @@ | |||
2892 | 1 | import subprocess | 1 | import subprocess |
2893 | 2 | 2 | ||
2894 | 3 | from charmhelpers.core.hookenv import cached | ||
2895 | 4 | from charmhelpers.core.strutils import BasicStringComparator | ||
2896 | 5 | |||
2897 | 6 | |||
2898 | 7 | UBUNTU_RELEASES = ( | ||
2899 | 8 | 'lucid', | ||
2900 | 9 | 'maverick', | ||
2901 | 10 | 'natty', | ||
2902 | 11 | 'oneiric', | ||
2903 | 12 | 'precise', | ||
2904 | 13 | 'quantal', | ||
2905 | 14 | 'raring', | ||
2906 | 15 | 'saucy', | ||
2907 | 16 | 'trusty', | ||
2908 | 17 | 'utopic', | ||
2909 | 18 | 'vivid', | ||
2910 | 19 | 'wily', | ||
2911 | 20 | 'xenial', | ||
2912 | 21 | 'yakkety', | ||
2913 | 22 | 'zesty', | ||
2914 | 23 | 'artful', | ||
2915 | 24 | 'bionic', | ||
2916 | 25 | 'cosmic', | ||
2917 | 26 | 'disco', | ||
2918 | 27 | 'eoan', | ||
2919 | 28 | 'focal', | ||
2920 | 29 | 'groovy', | ||
2921 | 30 | 'hirsute', | ||
2922 | 31 | 'impish', | ||
2923 | 32 | 'jammy', | ||
2924 | 33 | 'kinetic', | ||
2925 | 34 | 'lunar', | ||
2926 | 35 | ) | ||
2927 | 36 | |||
2928 | 37 | |||
2929 | 38 | class CompareHostReleases(BasicStringComparator): | ||
2930 | 39 | """Provide comparisons of Ubuntu releases. | ||
2931 | 40 | |||
2932 | 41 | Use in the form of | ||
2933 | 42 | |||
2934 | 43 | if CompareHostReleases(release) > 'trusty': | ||
2935 | 44 | # do something with mitaka | ||
2936 | 45 | """ | ||
2937 | 46 | _list = UBUNTU_RELEASES | ||
2938 | 47 | |||
2939 | 3 | 48 | ||
2940 | 4 | def service_available(service_name): | 49 | def service_available(service_name): |
2941 | 5 | """Determine whether a system service is available""" | 50 | """Determine whether a system service is available""" |
2942 | @@ -37,6 +82,14 @@ | |||
2943 | 37 | return d | 82 | return d |
2944 | 38 | 83 | ||
2945 | 39 | 84 | ||
2946 | 85 | def get_distrib_codename(): | ||
2947 | 86 | """Return the codename of the distribution | ||
2948 | 87 | :returns: The codename | ||
2949 | 88 | :rtype: str | ||
2950 | 89 | """ | ||
2951 | 90 | return lsb_release()['DISTRIB_CODENAME'].lower() | ||
2952 | 91 | |||
2953 | 92 | |||
2954 | 40 | def cmp_pkgrevno(package, revno, pkgcache=None): | 93 | def cmp_pkgrevno(package, revno, pkgcache=None): |
2955 | 41 | """Compare supplied revno with the revno of the installed package. | 94 | """Compare supplied revno with the revno of the installed package. |
2956 | 42 | 95 | ||
2957 | @@ -48,9 +101,24 @@ | |||
2958 | 48 | the pkgcache argument is None. Be sure to add charmhelpers.fetch if | 101 | the pkgcache argument is None. Be sure to add charmhelpers.fetch if |
2959 | 49 | you call this function, or pass an apt_pkg.Cache() instance. | 102 | you call this function, or pass an apt_pkg.Cache() instance. |
2960 | 50 | """ | 103 | """ |
2962 | 51 | import apt_pkg | 104 | from charmhelpers.fetch import apt_pkg, get_installed_version |
2963 | 52 | if not pkgcache: | 105 | if not pkgcache: |
2968 | 53 | from charmhelpers.fetch import apt_cache | 106 | current_ver = get_installed_version(package) |
2969 | 54 | pkgcache = apt_cache() | 107 | else: |
2970 | 55 | pkg = pkgcache[package] | 108 | pkg = pkgcache[package] |
2971 | 56 | return apt_pkg.version_compare(pkg.current_ver.ver_str, revno) | 109 | current_ver = pkg.current_ver |
2972 | 110 | |||
2973 | 111 | return apt_pkg.version_compare(current_ver.ver_str, revno) | ||
2974 | 112 | |||
2975 | 113 | |||
2976 | 114 | @cached | ||
2977 | 115 | def arch(): | ||
2978 | 116 | """Return the package architecture as a string. | ||
2979 | 117 | |||
2980 | 118 | :returns: the architecture | ||
2981 | 119 | :rtype: str | ||
2982 | 120 | :raises: subprocess.CalledProcessError if dpkg command fails | ||
2983 | 121 | """ | ||
2984 | 122 | return subprocess.check_output( | ||
2985 | 123 | ['dpkg', '--print-architecture'] | ||
2986 | 124 | ).rstrip().decode('UTF-8') | ||
2987 | 57 | 125 | ||
2988 | === modified file 'hooks/charmhelpers/core/kernel.py' | |||
2989 | --- hooks/charmhelpers/core/kernel.py 2016-12-20 20:15:28 +0000 | |||
2990 | +++ hooks/charmhelpers/core/kernel.py 2023-06-30 13:58:42 +0000 | |||
2991 | @@ -26,12 +26,12 @@ | |||
2992 | 26 | 26 | ||
2993 | 27 | __platform__ = get_platform() | 27 | __platform__ = get_platform() |
2994 | 28 | if __platform__ == "ubuntu": | 28 | if __platform__ == "ubuntu": |
2996 | 29 | from charmhelpers.core.kernel_factory.ubuntu import ( | 29 | from charmhelpers.core.kernel_factory.ubuntu import ( # NOQA:F401 |
2997 | 30 | persistent_modprobe, | 30 | persistent_modprobe, |
2998 | 31 | update_initramfs, | 31 | update_initramfs, |
2999 | 32 | ) # flake8: noqa -- ignore F401 for this import | 32 | ) # flake8: noqa -- ignore F401 for this import |
3000 | 33 | elif __platform__ == "centos": | 33 | elif __platform__ == "centos": |
3002 | 34 | from charmhelpers.core.kernel_factory.centos import ( | 34 | from charmhelpers.core.kernel_factory.centos import ( # NOQA:F401 |
3003 | 35 | persistent_modprobe, | 35 | persistent_modprobe, |
3004 | 36 | update_initramfs, | 36 | update_initramfs, |
3005 | 37 | ) # flake8: noqa -- ignore F401 for this import | 37 | ) # flake8: noqa -- ignore F401 for this import |
3006 | 38 | 38 | ||
3007 | === modified file 'hooks/charmhelpers/core/services/base.py' | |||
3008 | --- hooks/charmhelpers/core/services/base.py 2016-12-20 14:35:00 +0000 | |||
3009 | +++ hooks/charmhelpers/core/services/base.py 2023-06-30 13:58:42 +0000 | |||
3010 | @@ -14,8 +14,9 @@ | |||
3011 | 14 | 14 | ||
3012 | 15 | import os | 15 | import os |
3013 | 16 | import json | 16 | import json |
3016 | 17 | from inspect import getargspec | 17 | import inspect |
3017 | 18 | from collections import Iterable, OrderedDict | 18 | from collections import OrderedDict |
3018 | 19 | from collections.abc import Iterable | ||
3019 | 19 | 20 | ||
3020 | 20 | from charmhelpers.core import host | 21 | from charmhelpers.core import host |
3021 | 21 | from charmhelpers.core import hookenv | 22 | from charmhelpers.core import hookenv |
3022 | @@ -169,7 +170,7 @@ | |||
3023 | 169 | if not units: | 170 | if not units: |
3024 | 170 | continue | 171 | continue |
3025 | 171 | remote_service = units[0].split('/')[0] | 172 | remote_service = units[0].split('/')[0] |
3027 | 172 | argspec = getargspec(provider.provide_data) | 173 | argspec = inspect.getfullargspec(provider.provide_data) |
3028 | 173 | if len(argspec.args) > 1: | 174 | if len(argspec.args) > 1: |
3029 | 174 | data = provider.provide_data(remote_service, service_ready) | 175 | data = provider.provide_data(remote_service, service_ready) |
3030 | 175 | else: | 176 | else: |
3031 | @@ -307,23 +308,34 @@ | |||
3032 | 307 | """ | 308 | """ |
3033 | 308 | def __call__(self, manager, service_name, event_name): | 309 | def __call__(self, manager, service_name, event_name): |
3034 | 309 | service = manager.get_service(service_name) | 310 | service = manager.get_service(service_name) |
3036 | 310 | new_ports = service.get('ports', []) | 311 | # turn this generator into a list, |
3037 | 312 | # as we'll be going over it multiple times | ||
3038 | 313 | new_ports = list(service.get('ports', [])) | ||
3039 | 311 | port_file = os.path.join(hookenv.charm_dir(), '.{}.ports'.format(service_name)) | 314 | port_file = os.path.join(hookenv.charm_dir(), '.{}.ports'.format(service_name)) |
3040 | 312 | if os.path.exists(port_file): | 315 | if os.path.exists(port_file): |
3041 | 313 | with open(port_file) as fp: | 316 | with open(port_file) as fp: |
3042 | 314 | old_ports = fp.read().split(',') | 317 | old_ports = fp.read().split(',') |
3043 | 315 | for old_port in old_ports: | 318 | for old_port in old_ports: |
3048 | 316 | if bool(old_port): | 319 | if bool(old_port) and not self.ports_contains(old_port, new_ports): |
3049 | 317 | old_port = int(old_port) | 320 | hookenv.close_port(old_port) |
3046 | 318 | if old_port not in new_ports: | ||
3047 | 319 | hookenv.close_port(old_port) | ||
3050 | 320 | with open(port_file, 'w') as fp: | 321 | with open(port_file, 'w') as fp: |
3051 | 321 | fp.write(','.join(str(port) for port in new_ports)) | 322 | fp.write(','.join(str(port) for port in new_ports)) |
3052 | 322 | for port in new_ports: | 323 | for port in new_ports: |
3053 | 324 | # A port is either a number or 'ICMP' | ||
3054 | 325 | protocol = 'TCP' | ||
3055 | 326 | if str(port).upper() == 'ICMP': | ||
3056 | 327 | protocol = 'ICMP' | ||
3057 | 323 | if event_name == 'start': | 328 | if event_name == 'start': |
3059 | 324 | hookenv.open_port(port) | 329 | hookenv.open_port(port, protocol) |
3060 | 325 | elif event_name == 'stop': | 330 | elif event_name == 'stop': |
3062 | 326 | hookenv.close_port(port) | 331 | hookenv.close_port(port, protocol) |
3063 | 332 | |||
3064 | 333 | def ports_contains(self, port, ports): | ||
3065 | 334 | if not bool(port): | ||
3066 | 335 | return False | ||
3067 | 336 | if str(port).upper() != 'ICMP': | ||
3068 | 337 | port = int(port) | ||
3069 | 338 | return port in ports | ||
3070 | 327 | 339 | ||
3071 | 328 | 340 | ||
3072 | 329 | def service_stop(service_name): | 341 | def service_stop(service_name): |
3073 | 330 | 342 | ||
3074 | === modified file 'hooks/charmhelpers/core/services/helpers.py' | |||
3075 | --- hooks/charmhelpers/core/services/helpers.py 2016-12-20 14:35:00 +0000 | |||
3076 | +++ hooks/charmhelpers/core/services/helpers.py 2023-06-30 13:58:42 +0000 | |||
3077 | @@ -179,7 +179,7 @@ | |||
3078 | 179 | self.required_options = args | 179 | self.required_options = args |
3079 | 180 | self['config'] = hookenv.config() | 180 | self['config'] = hookenv.config() |
3080 | 181 | with open(os.path.join(hookenv.charm_dir(), 'config.yaml')) as fp: | 181 | with open(os.path.join(hookenv.charm_dir(), 'config.yaml')) as fp: |
3082 | 182 | self.config = yaml.load(fp).get('options', {}) | 182 | self.config = yaml.safe_load(fp).get('options', {}) |
3083 | 183 | 183 | ||
3084 | 184 | def __bool__(self): | 184 | def __bool__(self): |
3085 | 185 | for option in self.required_options: | 185 | for option in self.required_options: |
3086 | @@ -227,7 +227,7 @@ | |||
3087 | 227 | if not os.path.isabs(file_name): | 227 | if not os.path.isabs(file_name): |
3088 | 228 | file_name = os.path.join(hookenv.charm_dir(), file_name) | 228 | file_name = os.path.join(hookenv.charm_dir(), file_name) |
3089 | 229 | with open(file_name, 'r') as file_stream: | 229 | with open(file_name, 'r') as file_stream: |
3091 | 230 | data = yaml.load(file_stream) | 230 | data = yaml.safe_load(file_stream) |
3092 | 231 | if not data: | 231 | if not data: |
3093 | 232 | raise OSError("%s is empty" % file_name) | 232 | raise OSError("%s is empty" % file_name) |
3094 | 233 | return data | 233 | return data |
3095 | 234 | 234 | ||
3096 | === modified file 'hooks/charmhelpers/core/strutils.py' | |||
3097 | --- hooks/charmhelpers/core/strutils.py 2016-12-20 14:35:00 +0000 | |||
3098 | +++ hooks/charmhelpers/core/strutils.py 2023-06-30 13:58:42 +0000 | |||
3099 | @@ -15,26 +15,28 @@ | |||
3100 | 15 | # See the License for the specific language governing permissions and | 15 | # See the License for the specific language governing permissions and |
3101 | 16 | # limitations under the License. | 16 | # limitations under the License. |
3102 | 17 | 17 | ||
3103 | 18 | import six | ||
3104 | 19 | import re | 18 | import re |
3105 | 20 | 19 | ||
3108 | 21 | 20 | TRUTHY_STRINGS = {'y', 'yes', 'true', 't', 'on'} | |
3109 | 22 | def bool_from_string(value): | 21 | FALSEY_STRINGS = {'n', 'no', 'false', 'f', 'off'} |
3110 | 22 | |||
3111 | 23 | |||
3112 | 24 | def bool_from_string(value, truthy_strings=TRUTHY_STRINGS, falsey_strings=FALSEY_STRINGS, assume_false=False): | ||
3113 | 23 | """Interpret string value as boolean. | 25 | """Interpret string value as boolean. |
3114 | 24 | 26 | ||
3115 | 25 | Returns True if value translates to True otherwise False. | 27 | Returns True if value translates to True otherwise False. |
3116 | 26 | """ | 28 | """ |
3119 | 27 | if isinstance(value, six.string_types): | 29 | if isinstance(value, str): |
3120 | 28 | value = six.text_type(value) | 30 | value = str(value) |
3121 | 29 | else: | 31 | else: |
3122 | 30 | msg = "Unable to interpret non-string value '%s' as boolean" % (value) | 32 | msg = "Unable to interpret non-string value '%s' as boolean" % (value) |
3123 | 31 | raise ValueError(msg) | 33 | raise ValueError(msg) |
3124 | 32 | 34 | ||
3125 | 33 | value = value.strip().lower() | 35 | value = value.strip().lower() |
3126 | 34 | 36 | ||
3128 | 35 | if value in ['y', 'yes', 'true', 't', 'on']: | 37 | if value in truthy_strings: |
3129 | 36 | return True | 38 | return True |
3131 | 37 | elif value in ['n', 'no', 'false', 'f', 'off']: | 39 | elif value in falsey_strings or assume_false: |
3132 | 38 | return False | 40 | return False |
3133 | 39 | 41 | ||
3134 | 40 | msg = "Unable to interpret string value '%s' as boolean" % (value) | 42 | msg = "Unable to interpret string value '%s' as boolean" % (value) |
3135 | @@ -58,13 +60,72 @@ | |||
3136 | 58 | 'P': 5, | 60 | 'P': 5, |
3137 | 59 | 'PB': 5, | 61 | 'PB': 5, |
3138 | 60 | } | 62 | } |
3141 | 61 | if isinstance(value, six.string_types): | 63 | if isinstance(value, str): |
3142 | 62 | value = six.text_type(value) | 64 | value = str(value) |
3143 | 63 | else: | 65 | else: |
3145 | 64 | msg = "Unable to interpret non-string value '%s' as boolean" % (value) | 66 | msg = "Unable to interpret non-string value '%s' as bytes" % (value) |
3146 | 65 | raise ValueError(msg) | 67 | raise ValueError(msg) |
3147 | 66 | matches = re.match("([0-9]+)([a-zA-Z]+)", value) | 68 | matches = re.match("([0-9]+)([a-zA-Z]+)", value) |
3152 | 67 | if not matches: | 69 | if matches: |
3153 | 68 | msg = "Unable to interpret string value '%s' as bytes" % (value) | 70 | size = int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)]) |
3154 | 69 | raise ValueError(msg) | 71 | else: |
3155 | 70 | return int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)]) | 72 | # Assume that value passed in is bytes |
3156 | 73 | try: | ||
3157 | 74 | size = int(value) | ||
3158 | 75 | except ValueError: | ||
3159 | 76 | msg = "Unable to interpret string value '%s' as bytes" % (value) | ||
3160 | 77 | raise ValueError(msg) | ||
3161 | 78 | return size | ||
3162 | 79 | |||
3163 | 80 | |||
3164 | 81 | class BasicStringComparator(object): | ||
3165 | 82 | """Provides a class that will compare strings from an iterator type object. | ||
3166 | 83 | Used to provide > and < comparisons on strings that may not necessarily be | ||
3167 | 84 | alphanumerically ordered. e.g. OpenStack or Ubuntu releases AFTER the | ||
3168 | 85 | z-wrap. | ||
3169 | 86 | """ | ||
3170 | 87 | |||
3171 | 88 | _list = None | ||
3172 | 89 | |||
3173 | 90 | def __init__(self, item): | ||
3174 | 91 | if self._list is None: | ||
3175 | 92 | raise Exception("Must define the _list in the class definition!") | ||
3176 | 93 | try: | ||
3177 | 94 | self.index = self._list.index(item) | ||
3178 | 95 | except Exception: | ||
3179 | 96 | raise KeyError("Item '{}' is not in list '{}'" | ||
3180 | 97 | .format(item, self._list)) | ||
3181 | 98 | |||
3182 | 99 | def __eq__(self, other): | ||
3183 | 100 | assert isinstance(other, str) or isinstance(other, self.__class__) | ||
3184 | 101 | return self.index == self._list.index(other) | ||
3185 | 102 | |||
3186 | 103 | def __ne__(self, other): | ||
3187 | 104 | return not self.__eq__(other) | ||
3188 | 105 | |||
3189 | 106 | def __lt__(self, other): | ||
3190 | 107 | assert isinstance(other, str) or isinstance(other, self.__class__) | ||
3191 | 108 | return self.index < self._list.index(other) | ||
3192 | 109 | |||
3193 | 110 | def __ge__(self, other): | ||
3194 | 111 | return not self.__lt__(other) | ||
3195 | 112 | |||
3196 | 113 | def __gt__(self, other): | ||
3197 | 114 | assert isinstance(other, str) or isinstance(other, self.__class__) | ||
3198 | 115 | return self.index > self._list.index(other) | ||
3199 | 116 | |||
3200 | 117 | def __le__(self, other): | ||
3201 | 118 | return not self.__gt__(other) | ||
3202 | 119 | |||
3203 | 120 | def __str__(self): | ||
3204 | 121 | """Always give back the item at the index so it can be used in | ||
3205 | 122 | comparisons like: | ||
3206 | 123 | |||
3207 | 124 | s_mitaka = CompareOpenStack('mitaka') | ||
3208 | 125 | s_newton = CompareOpenstack('newton') | ||
3209 | 126 | |||
3210 | 127 | assert s_newton > s_mitaka | ||
3211 | 128 | |||
3212 | 129 | @returns: <string> | ||
3213 | 130 | """ | ||
3214 | 131 | return self._list[self.index] | ||
3215 | 71 | 132 | ||
3216 | === modified file 'hooks/charmhelpers/core/sysctl.py' | |||
3217 | --- hooks/charmhelpers/core/sysctl.py 2016-12-20 14:35:00 +0000 | |||
3218 | +++ hooks/charmhelpers/core/sysctl.py 2023-06-30 13:58:42 +0000 | |||
3219 | @@ -17,38 +17,59 @@ | |||
3220 | 17 | 17 | ||
3221 | 18 | import yaml | 18 | import yaml |
3222 | 19 | 19 | ||
3224 | 20 | from subprocess import check_call | 20 | from subprocess import check_call, CalledProcessError |
3225 | 21 | 21 | ||
3226 | 22 | from charmhelpers.core.hookenv import ( | 22 | from charmhelpers.core.hookenv import ( |
3227 | 23 | log, | 23 | log, |
3228 | 24 | DEBUG, | 24 | DEBUG, |
3229 | 25 | ERROR, | 25 | ERROR, |
3230 | 26 | WARNING, | ||
3231 | 26 | ) | 27 | ) |
3232 | 27 | 28 | ||
3233 | 29 | from charmhelpers.core.host import is_container | ||
3234 | 30 | |||
3235 | 28 | __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>' | 31 | __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>' |
3236 | 29 | 32 | ||
3237 | 30 | 33 | ||
3239 | 31 | def create(sysctl_dict, sysctl_file): | 34 | def create(sysctl_dict, sysctl_file, ignore=False): |
3240 | 32 | """Creates a sysctl.conf file from a YAML associative array | 35 | """Creates a sysctl.conf file from a YAML associative array |
3241 | 33 | 36 | ||
3243 | 34 | :param sysctl_dict: a YAML-formatted string of sysctl options eg "{ 'kernel.max_pid': 1337 }" | 37 | :param sysctl_dict: a dict or YAML-formatted string of sysctl |
3244 | 38 | options eg "{ 'kernel.max_pid': 1337 }" | ||
3245 | 35 | :type sysctl_dict: str | 39 | :type sysctl_dict: str |
3246 | 36 | :param sysctl_file: path to the sysctl file to be saved | 40 | :param sysctl_file: path to the sysctl file to be saved |
3247 | 37 | :type sysctl_file: str or unicode | 41 | :type sysctl_file: str or unicode |
3248 | 42 | :param ignore: If True, ignore "unknown variable" errors. | ||
3249 | 43 | :type ignore: bool | ||
3250 | 38 | :returns: None | 44 | :returns: None |
3251 | 39 | """ | 45 | """ |
3258 | 40 | try: | 46 | if type(sysctl_dict) is not dict: |
3259 | 41 | sysctl_dict_parsed = yaml.safe_load(sysctl_dict) | 47 | try: |
3260 | 42 | except yaml.YAMLError: | 48 | sysctl_dict_parsed = yaml.safe_load(sysctl_dict) |
3261 | 43 | log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict), | 49 | except yaml.YAMLError: |
3262 | 44 | level=ERROR) | 50 | log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict), |
3263 | 45 | return | 51 | level=ERROR) |
3264 | 52 | return | ||
3265 | 53 | else: | ||
3266 | 54 | sysctl_dict_parsed = sysctl_dict | ||
3267 | 46 | 55 | ||
3268 | 47 | with open(sysctl_file, "w") as fd: | 56 | with open(sysctl_file, "w") as fd: |
3269 | 48 | for key, value in sysctl_dict_parsed.items(): | 57 | for key, value in sysctl_dict_parsed.items(): |
3270 | 49 | fd.write("{}={}\n".format(key, value)) | 58 | fd.write("{}={}\n".format(key, value)) |
3271 | 50 | 59 | ||
3273 | 51 | log("Updating sysctl_file: %s values: %s" % (sysctl_file, sysctl_dict_parsed), | 60 | log("Updating sysctl_file: {} values: {}".format(sysctl_file, |
3274 | 61 | sysctl_dict_parsed), | ||
3275 | 52 | level=DEBUG) | 62 | level=DEBUG) |
3276 | 53 | 63 | ||
3278 | 54 | check_call(["sysctl", "-p", sysctl_file]) | 64 | call = ["sysctl", "-p", sysctl_file] |
3279 | 65 | if ignore: | ||
3280 | 66 | call.append("-e") | ||
3281 | 67 | |||
3282 | 68 | try: | ||
3283 | 69 | check_call(call) | ||
3284 | 70 | except CalledProcessError as e: | ||
3285 | 71 | if is_container(): | ||
3286 | 72 | log("Error setting some sysctl keys in this container: {}".format(e.output), | ||
3287 | 73 | level=WARNING) | ||
3288 | 74 | else: | ||
3289 | 75 | raise e | ||
3290 | 55 | 76 | ||
3291 | === modified file 'hooks/charmhelpers/core/templating.py' | |||
3292 | --- hooks/charmhelpers/core/templating.py 2016-12-20 14:35:00 +0000 | |||
3293 | +++ hooks/charmhelpers/core/templating.py 2023-06-30 13:58:42 +0000 | |||
3294 | @@ -13,14 +13,14 @@ | |||
3295 | 13 | # limitations under the License. | 13 | # limitations under the License. |
3296 | 14 | 14 | ||
3297 | 15 | import os | 15 | import os |
3298 | 16 | import sys | ||
3299 | 17 | 16 | ||
3300 | 18 | from charmhelpers.core import host | 17 | from charmhelpers.core import host |
3301 | 19 | from charmhelpers.core import hookenv | 18 | from charmhelpers.core import hookenv |
3302 | 20 | 19 | ||
3303 | 21 | 20 | ||
3304 | 22 | def render(source, target, context, owner='root', group='root', | 21 | def render(source, target, context, owner='root', group='root', |
3306 | 23 | perms=0o444, templates_dir=None, encoding='UTF-8', template_loader=None): | 22 | perms=0o444, templates_dir=None, encoding='UTF-8', |
3307 | 23 | template_loader=None, config_template=None): | ||
3308 | 24 | """ | 24 | """ |
3309 | 25 | Render a template. | 25 | Render a template. |
3310 | 26 | 26 | ||
3311 | @@ -32,6 +32,9 @@ | |||
3312 | 32 | The context should be a dict containing the values to be replaced in the | 32 | The context should be a dict containing the values to be replaced in the |
3313 | 33 | template. | 33 | template. |
3314 | 34 | 34 | ||
3315 | 35 | config_template may be provided to render from a provided template instead | ||
3316 | 36 | of loading from a file. | ||
3317 | 37 | |||
3318 | 35 | The `owner`, `group`, and `perms` options will be passed to `write_file`. | 38 | The `owner`, `group`, and `perms` options will be passed to `write_file`. |
3319 | 36 | 39 | ||
3320 | 37 | If omitted, `templates_dir` defaults to the `templates` folder in the charm. | 40 | If omitted, `templates_dir` defaults to the `templates` folder in the charm. |
3321 | @@ -39,9 +42,8 @@ | |||
3322 | 39 | The rendered template will be written to the file as well as being returned | 42 | The rendered template will be written to the file as well as being returned |
3323 | 40 | as a string. | 43 | as a string. |
3324 | 41 | 44 | ||
3328 | 42 | Note: Using this requires python-jinja2 or python3-jinja2; if it is not | 45 | Note: Using this requires python3-jinja2; if it is not installed, calling |
3329 | 43 | installed, calling this will attempt to use charmhelpers.fetch.apt_install | 46 | this will attempt to use charmhelpers.fetch.apt_install to install it. |
3327 | 44 | to install it. | ||
3330 | 45 | """ | 47 | """ |
3331 | 46 | try: | 48 | try: |
3332 | 47 | from jinja2 import FileSystemLoader, Environment, exceptions | 49 | from jinja2 import FileSystemLoader, Environment, exceptions |
3333 | @@ -53,10 +55,7 @@ | |||
3334 | 53 | 'charmhelpers.fetch to install it', | 55 | 'charmhelpers.fetch to install it', |
3335 | 54 | level=hookenv.ERROR) | 56 | level=hookenv.ERROR) |
3336 | 55 | raise | 57 | raise |
3341 | 56 | if sys.version_info.major == 2: | 58 | apt_install('python3-jinja2', fatal=True) |
3338 | 57 | apt_install('python-jinja2', fatal=True) | ||
3339 | 58 | else: | ||
3340 | 59 | apt_install('python3-jinja2', fatal=True) | ||
3342 | 60 | from jinja2 import FileSystemLoader, Environment, exceptions | 59 | from jinja2 import FileSystemLoader, Environment, exceptions |
3343 | 61 | 60 | ||
3344 | 62 | if template_loader: | 61 | if template_loader: |
3345 | @@ -65,14 +64,19 @@ | |||
3346 | 65 | if templates_dir is None: | 64 | if templates_dir is None: |
3347 | 66 | templates_dir = os.path.join(hookenv.charm_dir(), 'templates') | 65 | templates_dir = os.path.join(hookenv.charm_dir(), 'templates') |
3348 | 67 | template_env = Environment(loader=FileSystemLoader(templates_dir)) | 66 | template_env = Environment(loader=FileSystemLoader(templates_dir)) |
3357 | 68 | try: | 67 | |
3358 | 69 | source = source | 68 | # load from a string if provided explicitly |
3359 | 70 | template = template_env.get_template(source) | 69 | if config_template is not None: |
3360 | 71 | except exceptions.TemplateNotFound as e: | 70 | template = template_env.from_string(config_template) |
3361 | 72 | hookenv.log('Could not load template %s from %s.' % | 71 | else: |
3362 | 73 | (source, templates_dir), | 72 | try: |
3363 | 74 | level=hookenv.ERROR) | 73 | source = source |
3364 | 75 | raise e | 74 | template = template_env.get_template(source) |
3365 | 75 | except exceptions.TemplateNotFound as e: | ||
3366 | 76 | hookenv.log('Could not load template %s from %s.' % | ||
3367 | 77 | (source, templates_dir), | ||
3368 | 78 | level=hookenv.ERROR) | ||
3369 | 79 | raise e | ||
3370 | 76 | content = template.render(context) | 80 | content = template.render(context) |
3371 | 77 | if target is not None: | 81 | if target is not None: |
3372 | 78 | target_dir = os.path.dirname(target) | 82 | target_dir = os.path.dirname(target) |
3373 | 79 | 83 | ||
3374 | === modified file 'hooks/charmhelpers/core/unitdata.py' | |||
3375 | --- hooks/charmhelpers/core/unitdata.py 2016-12-20 14:35:00 +0000 | |||
3376 | +++ hooks/charmhelpers/core/unitdata.py 2023-06-30 13:58:42 +0000 | |||
3377 | @@ -1,7 +1,7 @@ | |||
3378 | 1 | #!/usr/bin/env python | 1 | #!/usr/bin/env python |
3379 | 2 | # -*- coding: utf-8 -*- | 2 | # -*- coding: utf-8 -*- |
3380 | 3 | # | 3 | # |
3382 | 4 | # Copyright 2014-2015 Canonical Limited. | 4 | # Copyright 2014-2021 Canonical Limited. |
3383 | 5 | # | 5 | # |
3384 | 6 | # Licensed under the Apache License, Version 2.0 (the "License"); | 6 | # Licensed under the Apache License, Version 2.0 (the "License"); |
3385 | 7 | # you may not use this file except in compliance with the License. | 7 | # you may not use this file except in compliance with the License. |
3386 | @@ -61,7 +61,7 @@ | |||
3387 | 61 | 'previous value', prev, | 61 | 'previous value', prev, |
3388 | 62 | 'current value', cur) | 62 | 'current value', cur) |
3389 | 63 | 63 | ||
3391 | 64 | # Get some unit specific bookeeping | 64 | # Get some unit specific bookkeeping |
3392 | 65 | if not db.get('pkg_key'): | 65 | if not db.get('pkg_key'): |
3393 | 66 | key = urllib.urlopen('https://example.com/pkg_key').read() | 66 | key = urllib.urlopen('https://example.com/pkg_key').read() |
3394 | 67 | db.set('pkg_key', key) | 67 | db.set('pkg_key', key) |
3395 | @@ -166,15 +166,23 @@ | |||
3396 | 166 | 166 | ||
3397 | 167 | To support dicts, lists, integer, floats, and booleans values | 167 | To support dicts, lists, integer, floats, and booleans values |
3398 | 168 | are automatically json encoded/decoded. | 168 | are automatically json encoded/decoded. |
3399 | 169 | |||
3400 | 170 | Note: to facilitate unit testing, ':memory:' can be passed as the | ||
3401 | 171 | path parameter which causes sqlite3 to only build the db in memory. | ||
3402 | 172 | This should only be used for testing purposes. | ||
3403 | 169 | """ | 173 | """ |
3405 | 170 | def __init__(self, path=None): | 174 | def __init__(self, path=None, keep_revisions=False): |
3406 | 171 | self.db_path = path | 175 | self.db_path = path |
3407 | 176 | self.keep_revisions = keep_revisions | ||
3408 | 172 | if path is None: | 177 | if path is None: |
3409 | 173 | if 'UNIT_STATE_DB' in os.environ: | 178 | if 'UNIT_STATE_DB' in os.environ: |
3410 | 174 | self.db_path = os.environ['UNIT_STATE_DB'] | 179 | self.db_path = os.environ['UNIT_STATE_DB'] |
3411 | 175 | else: | 180 | else: |
3412 | 176 | self.db_path = os.path.join( | 181 | self.db_path = os.path.join( |
3413 | 177 | os.environ.get('CHARM_DIR', ''), '.unit-state.db') | 182 | os.environ.get('CHARM_DIR', ''), '.unit-state.db') |
3414 | 183 | if self.db_path != ':memory:': | ||
3415 | 184 | with open(self.db_path, 'a') as f: | ||
3416 | 185 | os.fchmod(f.fileno(), 0o600) | ||
3417 | 178 | self.conn = sqlite3.connect('%s' % self.db_path) | 186 | self.conn = sqlite3.connect('%s' % self.db_path) |
3418 | 179 | self.cursor = self.conn.cursor() | 187 | self.cursor = self.conn.cursor() |
3419 | 180 | self.revision = None | 188 | self.revision = None |
3420 | @@ -235,7 +243,7 @@ | |||
3421 | 235 | Remove a key from the database entirely. | 243 | Remove a key from the database entirely. |
3422 | 236 | """ | 244 | """ |
3423 | 237 | self.cursor.execute('delete from kv where key=?', [key]) | 245 | self.cursor.execute('delete from kv where key=?', [key]) |
3425 | 238 | if self.revision and self.cursor.rowcount: | 246 | if self.keep_revisions and self.revision and self.cursor.rowcount: |
3426 | 239 | self.cursor.execute( | 247 | self.cursor.execute( |
3427 | 240 | 'insert into kv_revisions values (?, ?, ?)', | 248 | 'insert into kv_revisions values (?, ?, ?)', |
3428 | 241 | [key, self.revision, json.dumps('DELETED')]) | 249 | [key, self.revision, json.dumps('DELETED')]) |
3429 | @@ -252,14 +260,14 @@ | |||
3430 | 252 | if keys is not None: | 260 | if keys is not None: |
3431 | 253 | keys = ['%s%s' % (prefix, key) for key in keys] | 261 | keys = ['%s%s' % (prefix, key) for key in keys] |
3432 | 254 | self.cursor.execute('delete from kv where key in (%s)' % ','.join(['?'] * len(keys)), keys) | 262 | self.cursor.execute('delete from kv where key in (%s)' % ','.join(['?'] * len(keys)), keys) |
3434 | 255 | if self.revision and self.cursor.rowcount: | 263 | if self.keep_revisions and self.revision and self.cursor.rowcount: |
3435 | 256 | self.cursor.execute( | 264 | self.cursor.execute( |
3436 | 257 | 'insert into kv_revisions values %s' % ','.join(['(?, ?, ?)'] * len(keys)), | 265 | 'insert into kv_revisions values %s' % ','.join(['(?, ?, ?)'] * len(keys)), |
3437 | 258 | list(itertools.chain.from_iterable((key, self.revision, json.dumps('DELETED')) for key in keys))) | 266 | list(itertools.chain.from_iterable((key, self.revision, json.dumps('DELETED')) for key in keys))) |
3438 | 259 | else: | 267 | else: |
3439 | 260 | self.cursor.execute('delete from kv where key like ?', | 268 | self.cursor.execute('delete from kv where key like ?', |
3440 | 261 | ['%s%%' % prefix]) | 269 | ['%s%%' % prefix]) |
3442 | 262 | if self.revision and self.cursor.rowcount: | 270 | if self.keep_revisions and self.revision and self.cursor.rowcount: |
3443 | 263 | self.cursor.execute( | 271 | self.cursor.execute( |
3444 | 264 | 'insert into kv_revisions values (?, ?, ?)', | 272 | 'insert into kv_revisions values (?, ?, ?)', |
3445 | 265 | ['%s%%' % prefix, self.revision, json.dumps('DELETED')]) | 273 | ['%s%%' % prefix, self.revision, json.dumps('DELETED')]) |
3446 | @@ -292,7 +300,7 @@ | |||
3447 | 292 | where key = ?''', [serialized, key]) | 300 | where key = ?''', [serialized, key]) |
3448 | 293 | 301 | ||
3449 | 294 | # Save | 302 | # Save |
3451 | 295 | if not self.revision: | 303 | if (not self.keep_revisions) or (not self.revision): |
3452 | 296 | return value | 304 | return value |
3453 | 297 | 305 | ||
3454 | 298 | self.cursor.execute( | 306 | self.cursor.execute( |
3455 | @@ -358,7 +366,7 @@ | |||
3456 | 358 | try: | 366 | try: |
3457 | 359 | yield self.revision | 367 | yield self.revision |
3458 | 360 | self.revision = None | 368 | self.revision = None |
3460 | 361 | except: | 369 | except Exception: |
3461 | 362 | self.flush(False) | 370 | self.flush(False) |
3462 | 363 | self.revision = None | 371 | self.revision = None |
3463 | 364 | raise | 372 | raise |
3464 | @@ -442,7 +450,7 @@ | |||
3465 | 442 | 'previous value', prev, | 450 | 'previous value', prev, |
3466 | 443 | 'current value', cur) | 451 | 'current value', cur) |
3467 | 444 | 452 | ||
3469 | 445 | # Get some unit specific bookeeping | 453 | # Get some unit specific bookkeeping |
3470 | 446 | if not db.get('pkg_key'): | 454 | if not db.get('pkg_key'): |
3471 | 447 | key = urllib.urlopen('https://example.com/pkg_key').read() | 455 | key = urllib.urlopen('https://example.com/pkg_key').read() |
3472 | 448 | db.set('pkg_key', key) | 456 | db.set('pkg_key', key) |
3473 | 449 | 457 | ||
3474 | === modified file 'hooks/charmhelpers/fetch/__init__.py' | |||
3475 | --- hooks/charmhelpers/fetch/__init__.py 2016-12-20 14:35:00 +0000 | |||
3476 | +++ hooks/charmhelpers/fetch/__init__.py 2023-06-30 13:58:42 +0000 | |||
3477 | @@ -1,4 +1,4 @@ | |||
3479 | 1 | # Copyright 2014-2015 Canonical Limited. | 1 | # Copyright 2014-2021 Canonical Limited. |
3480 | 2 | # | 2 | # |
3481 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); |
3482 | 4 | # you may not use this file except in compliance with the License. | 4 | # you may not use this file except in compliance with the License. |
3483 | @@ -20,11 +20,7 @@ | |||
3484 | 20 | log, | 20 | log, |
3485 | 21 | ) | 21 | ) |
3486 | 22 | 22 | ||
3492 | 23 | import six | 23 | from urllib.parse import urlparse, urlunparse |
3488 | 24 | if six.PY3: | ||
3489 | 25 | from urllib.parse import urlparse, urlunparse | ||
3490 | 26 | else: | ||
3491 | 27 | from urlparse import urlparse, urlunparse | ||
3493 | 28 | 24 | ||
3494 | 29 | 25 | ||
3495 | 30 | # The order of this list is very important. Handlers should be listed in from | 26 | # The order of this list is very important. Handlers should be listed in from |
3496 | @@ -48,6 +44,13 @@ | |||
3497 | 48 | pass | 44 | pass |
3498 | 49 | 45 | ||
3499 | 50 | 46 | ||
3500 | 47 | class GPGKeyError(Exception): | ||
3501 | 48 | """Exception occurs when a GPG key cannot be fetched or used. The message | ||
3502 | 49 | indicates what the problem is. | ||
3503 | 50 | """ | ||
3504 | 51 | pass | ||
3505 | 52 | |||
3506 | 53 | |||
3507 | 51 | class BaseFetchHandler(object): | 54 | class BaseFetchHandler(object): |
3508 | 52 | 55 | ||
3509 | 53 | """Base class for FetchHandler implementations in fetch plugins""" | 56 | """Base class for FetchHandler implementations in fetch plugins""" |
3510 | @@ -77,22 +80,30 @@ | |||
3511 | 77 | fetch = importlib.import_module(module) | 80 | fetch = importlib.import_module(module) |
3512 | 78 | 81 | ||
3513 | 79 | filter_installed_packages = fetch.filter_installed_packages | 82 | filter_installed_packages = fetch.filter_installed_packages |
3518 | 80 | install = fetch.install | 83 | filter_missing_packages = fetch.filter_missing_packages |
3519 | 81 | upgrade = fetch.upgrade | 84 | install = fetch.apt_install |
3520 | 82 | update = fetch.update | 85 | upgrade = fetch.apt_upgrade |
3521 | 83 | purge = fetch.purge | 86 | update = _fetch_update = fetch.apt_update |
3522 | 87 | purge = fetch.apt_purge | ||
3523 | 84 | add_source = fetch.add_source | 88 | add_source = fetch.add_source |
3524 | 85 | 89 | ||
3525 | 86 | if __platform__ == "ubuntu": | 90 | if __platform__ == "ubuntu": |
3526 | 87 | apt_cache = fetch.apt_cache | 91 | apt_cache = fetch.apt_cache |
3531 | 88 | apt_install = fetch.install | 92 | apt_install = fetch.apt_install |
3532 | 89 | apt_update = fetch.update | 93 | apt_update = fetch.apt_update |
3533 | 90 | apt_upgrade = fetch.upgrade | 94 | apt_upgrade = fetch.apt_upgrade |
3534 | 91 | apt_purge = fetch.purge | 95 | apt_purge = fetch.apt_purge |
3535 | 96 | apt_autoremove = fetch.apt_autoremove | ||
3536 | 92 | apt_mark = fetch.apt_mark | 97 | apt_mark = fetch.apt_mark |
3537 | 93 | apt_hold = fetch.apt_hold | 98 | apt_hold = fetch.apt_hold |
3538 | 94 | apt_unhold = fetch.apt_unhold | 99 | apt_unhold = fetch.apt_unhold |
3539 | 100 | import_key = fetch.import_key | ||
3540 | 95 | get_upstream_version = fetch.get_upstream_version | 101 | get_upstream_version = fetch.get_upstream_version |
3541 | 102 | apt_pkg = fetch.ubuntu_apt_pkg | ||
3542 | 103 | get_apt_dpkg_env = fetch.get_apt_dpkg_env | ||
3543 | 104 | get_installed_version = fetch.get_installed_version | ||
3544 | 105 | OPENSTACK_RELEASES = fetch.OPENSTACK_RELEASES | ||
3545 | 106 | UBUNTU_OPENSTACK_RELEASE = fetch.UBUNTU_OPENSTACK_RELEASE | ||
3546 | 96 | elif __platform__ == "centos": | 107 | elif __platform__ == "centos": |
3547 | 97 | yum_search = fetch.yum_search | 108 | yum_search = fetch.yum_search |
3548 | 98 | 109 | ||
3549 | @@ -119,14 +130,14 @@ | |||
3550 | 119 | sources = safe_load((config(sources_var) or '').strip()) or [] | 130 | sources = safe_load((config(sources_var) or '').strip()) or [] |
3551 | 120 | keys = safe_load((config(keys_var) or '').strip()) or None | 131 | keys = safe_load((config(keys_var) or '').strip()) or None |
3552 | 121 | 132 | ||
3554 | 122 | if isinstance(sources, six.string_types): | 133 | if isinstance(sources, str): |
3555 | 123 | sources = [sources] | 134 | sources = [sources] |
3556 | 124 | 135 | ||
3557 | 125 | if keys is None: | 136 | if keys is None: |
3558 | 126 | for source in sources: | 137 | for source in sources: |
3559 | 127 | add_source(source, None) | 138 | add_source(source, None) |
3560 | 128 | else: | 139 | else: |
3562 | 129 | if isinstance(keys, six.string_types): | 140 | if isinstance(keys, str): |
3563 | 130 | keys = [keys] | 141 | keys = [keys] |
3564 | 131 | 142 | ||
3565 | 132 | if len(sources) != len(keys): | 143 | if len(sources) != len(keys): |
3566 | @@ -135,7 +146,7 @@ | |||
3567 | 135 | for source, key in zip(sources, keys): | 146 | for source, key in zip(sources, keys): |
3568 | 136 | add_source(source, key) | 147 | add_source(source, key) |
3569 | 137 | if update: | 148 | if update: |
3571 | 138 | fetch.update(fatal=True) | 149 | _fetch_update(fatal=True) |
3572 | 139 | 150 | ||
3573 | 140 | 151 | ||
3574 | 141 | def install_remote(source, *args, **kwargs): | 152 | def install_remote(source, *args, **kwargs): |
3575 | @@ -190,7 +201,7 @@ | |||
3576 | 190 | classname) | 201 | classname) |
3577 | 191 | plugin_list.append(handler_class()) | 202 | plugin_list.append(handler_class()) |
3578 | 192 | except NotImplementedError: | 203 | except NotImplementedError: |
3580 | 193 | # Skip missing plugins so that they can be ommitted from | 204 | # Skip missing plugins so that they can be omitted from |
3581 | 194 | # installation if desired | 205 | # installation if desired |
3582 | 195 | log("FetchHandler {} not found, skipping plugin".format( | 206 | log("FetchHandler {} not found, skipping plugin".format( |
3583 | 196 | handler_name)) | 207 | handler_name)) |
3584 | 197 | 208 | ||
3585 | === modified file 'hooks/charmhelpers/fetch/archiveurl.py' | |||
3586 | --- hooks/charmhelpers/fetch/archiveurl.py 2016-12-20 14:35:00 +0000 | |||
3587 | +++ hooks/charmhelpers/fetch/archiveurl.py 2023-06-30 13:58:42 +0000 | |||
3588 | @@ -12,6 +12,7 @@ | |||
3589 | 12 | # See the License for the specific language governing permissions and | 12 | # See the License for the specific language governing permissions and |
3590 | 13 | # limitations under the License. | 13 | # limitations under the License. |
3591 | 14 | 14 | ||
3592 | 15 | import contextlib | ||
3593 | 15 | import os | 16 | import os |
3594 | 16 | import hashlib | 17 | import hashlib |
3595 | 17 | import re | 18 | import re |
3596 | @@ -24,28 +25,21 @@ | |||
3597 | 24 | get_archive_handler, | 25 | get_archive_handler, |
3598 | 25 | extract, | 26 | extract, |
3599 | 26 | ) | 27 | ) |
3600 | 28 | from charmhelpers.core.hookenv import ( | ||
3601 | 29 | env_proxy_settings, | ||
3602 | 30 | ) | ||
3603 | 27 | from charmhelpers.core.host import mkdir, check_hash | 31 | from charmhelpers.core.host import mkdir, check_hash |
3604 | 28 | 32 | ||
3621 | 29 | import six | 33 | from urllib.request import ( |
3622 | 30 | if six.PY3: | 34 | build_opener, install_opener, urlopen, urlretrieve, |
3623 | 31 | from urllib.request import ( | 35 | HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler, |
3624 | 32 | build_opener, install_opener, urlopen, urlretrieve, | 36 | ProxyHandler |
3625 | 33 | HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler, | 37 | ) |
3626 | 34 | ) | 38 | from urllib.parse import urlparse, urlunparse, parse_qs |
3627 | 35 | from urllib.parse import urlparse, urlunparse, parse_qs | 39 | from urllib.error import URLError |
3612 | 36 | from urllib.error import URLError | ||
3613 | 37 | else: | ||
3614 | 38 | from urllib import urlretrieve | ||
3615 | 39 | from urllib2 import ( | ||
3616 | 40 | build_opener, install_opener, urlopen, | ||
3617 | 41 | HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler, | ||
3618 | 42 | URLError | ||
3619 | 43 | ) | ||
3620 | 44 | from urlparse import urlparse, urlunparse, parse_qs | ||
3628 | 45 | 40 | ||
3629 | 46 | 41 | ||
3630 | 47 | def splituser(host): | 42 | def splituser(host): |
3631 | 48 | '''urllib.splituser(), but six's support of this seems broken''' | ||
3632 | 49 | _userprog = re.compile('^(.*)@(.*)$') | 43 | _userprog = re.compile('^(.*)@(.*)$') |
3633 | 50 | match = _userprog.match(host) | 44 | match = _userprog.match(host) |
3634 | 51 | if match: | 45 | if match: |
3635 | @@ -54,7 +48,6 @@ | |||
3636 | 54 | 48 | ||
3637 | 55 | 49 | ||
3638 | 56 | def splitpasswd(user): | 50 | def splitpasswd(user): |
3639 | 57 | '''urllib.splitpasswd(), but six's support of this is missing''' | ||
3640 | 58 | _passwdprog = re.compile('^([^:]*):(.*)$', re.S) | 51 | _passwdprog = re.compile('^([^:]*):(.*)$', re.S) |
3641 | 59 | match = _passwdprog.match(user) | 52 | match = _passwdprog.match(user) |
3642 | 60 | if match: | 53 | if match: |
3643 | @@ -62,6 +55,20 @@ | |||
3644 | 62 | return user, None | 55 | return user, None |
3645 | 63 | 56 | ||
3646 | 64 | 57 | ||
3647 | 58 | @contextlib.contextmanager | ||
3648 | 59 | def proxy_env(): | ||
3649 | 60 | """ | ||
3650 | 61 | Creates a context which temporarily modifies the proxy settings in os.environ. | ||
3651 | 62 | """ | ||
3652 | 63 | restore = {**os.environ} # Copy the current os.environ | ||
3653 | 64 | juju_proxies = env_proxy_settings() or {} | ||
3654 | 65 | os.environ.update(**juju_proxies) # Insert or Update the os.environ | ||
3655 | 66 | yield os.environ | ||
3656 | 67 | for key in juju_proxies: | ||
3657 | 68 | del os.environ[key] # remove any keys which were added or updated | ||
3658 | 69 | os.environ.update(**restore) # restore any original values | ||
3659 | 70 | |||
3660 | 71 | |||
3661 | 65 | class ArchiveUrlFetchHandler(BaseFetchHandler): | 72 | class ArchiveUrlFetchHandler(BaseFetchHandler): |
3662 | 66 | """ | 73 | """ |
3663 | 67 | Handler to download archive files from arbitrary URLs. | 74 | Handler to download archive files from arbitrary URLs. |
3664 | @@ -89,9 +96,10 @@ | |||
3665 | 89 | :param str source: URL pointing to an archive file. | 96 | :param str source: URL pointing to an archive file. |
3666 | 90 | :param str dest: Local path location to download archive file to. | 97 | :param str dest: Local path location to download archive file to. |
3667 | 91 | """ | 98 | """ |
3669 | 92 | # propogate all exceptions | 99 | # propagate all exceptions |
3670 | 93 | # URLError, OSError, etc | 100 | # URLError, OSError, etc |
3671 | 94 | proto, netloc, path, params, query, fragment = urlparse(source) | 101 | proto, netloc, path, params, query, fragment = urlparse(source) |
3672 | 102 | handlers = [] | ||
3673 | 95 | if proto in ('http', 'https'): | 103 | if proto in ('http', 'https'): |
3674 | 96 | auth, barehost = splituser(netloc) | 104 | auth, barehost = splituser(netloc) |
3675 | 97 | if auth is not None: | 105 | if auth is not None: |
3676 | @@ -101,10 +109,13 @@ | |||
3677 | 101 | # Realm is set to None in add_password to force the username and password | 109 | # Realm is set to None in add_password to force the username and password |
3678 | 102 | # to be used whatever the realm | 110 | # to be used whatever the realm |
3679 | 103 | passman.add_password(None, source, username, password) | 111 | passman.add_password(None, source, username, password) |
3684 | 104 | authhandler = HTTPBasicAuthHandler(passman) | 112 | handlers.append(HTTPBasicAuthHandler(passman)) |
3685 | 105 | opener = build_opener(authhandler) | 113 | |
3686 | 106 | install_opener(opener) | 114 | with proxy_env(): |
3687 | 107 | response = urlopen(source) | 115 | handlers.append(ProxyHandler()) |
3688 | 116 | opener = build_opener(*handlers) | ||
3689 | 117 | install_opener(opener) | ||
3690 | 118 | response = urlopen(source) | ||
3691 | 108 | try: | 119 | try: |
3692 | 109 | with open(dest, 'wb') as dest_file: | 120 | with open(dest, 'wb') as dest_file: |
3693 | 110 | dest_file.write(response.read()) | 121 | dest_file.write(response.read()) |
3694 | @@ -150,10 +161,7 @@ | |||
3695 | 150 | raise UnhandledSource(e.strerror) | 161 | raise UnhandledSource(e.strerror) |
3696 | 151 | options = parse_qs(url_parts.fragment) | 162 | options = parse_qs(url_parts.fragment) |
3697 | 152 | for key, value in options.items(): | 163 | for key, value in options.items(): |
3702 | 153 | if not six.PY3: | 164 | algorithms = hashlib.algorithms_available |
3699 | 154 | algorithms = hashlib.algorithms | ||
3700 | 155 | else: | ||
3701 | 156 | algorithms = hashlib.algorithms_available | ||
3703 | 157 | if key in algorithms: | 165 | if key in algorithms: |
3704 | 158 | if len(value) != 1: | 166 | if len(value) != 1: |
3705 | 159 | raise TypeError( | 167 | raise TypeError( |
3706 | 160 | 168 | ||
3707 | === modified file 'hooks/charmhelpers/fetch/bzrurl.py' | |||
3708 | --- hooks/charmhelpers/fetch/bzrurl.py 2016-12-20 14:35:00 +0000 | |||
3709 | +++ hooks/charmhelpers/fetch/bzrurl.py 2023-06-30 13:58:42 +0000 | |||
3710 | @@ -13,7 +13,7 @@ | |||
3711 | 13 | # limitations under the License. | 13 | # limitations under the License. |
3712 | 14 | 14 | ||
3713 | 15 | import os | 15 | import os |
3715 | 16 | from subprocess import check_call | 16 | from subprocess import STDOUT, check_output |
3716 | 17 | from charmhelpers.fetch import ( | 17 | from charmhelpers.fetch import ( |
3717 | 18 | BaseFetchHandler, | 18 | BaseFetchHandler, |
3718 | 19 | UnhandledSource, | 19 | UnhandledSource, |
3719 | @@ -55,7 +55,7 @@ | |||
3720 | 55 | cmd = ['bzr', 'branch'] | 55 | cmd = ['bzr', 'branch'] |
3721 | 56 | cmd += cmd_opts | 56 | cmd += cmd_opts |
3722 | 57 | cmd += [source, dest] | 57 | cmd += [source, dest] |
3724 | 58 | check_call(cmd) | 58 | check_output(cmd, stderr=STDOUT) |
3725 | 59 | 59 | ||
3726 | 60 | def install(self, source, dest=None, revno=None): | 60 | def install(self, source, dest=None, revno=None): |
3727 | 61 | url_parts = self.parse_url(source) | 61 | url_parts = self.parse_url(source) |
3728 | 62 | 62 | ||
3729 | === modified file 'hooks/charmhelpers/fetch/centos.py' | |||
3730 | --- hooks/charmhelpers/fetch/centos.py 2016-12-20 20:15:28 +0000 | |||
3731 | +++ hooks/charmhelpers/fetch/centos.py 2023-06-30 13:58:42 +0000 | |||
3732 | @@ -15,7 +15,6 @@ | |||
3733 | 15 | import subprocess | 15 | import subprocess |
3734 | 16 | import os | 16 | import os |
3735 | 17 | import time | 17 | import time |
3736 | 18 | import six | ||
3737 | 19 | import yum | 18 | import yum |
3738 | 20 | 19 | ||
3739 | 21 | from tempfile import NamedTemporaryFile | 20 | from tempfile import NamedTemporaryFile |
3740 | @@ -42,7 +41,7 @@ | |||
3741 | 42 | if options is not None: | 41 | if options is not None: |
3742 | 43 | cmd.extend(options) | 42 | cmd.extend(options) |
3743 | 44 | cmd.append('install') | 43 | cmd.append('install') |
3745 | 45 | if isinstance(packages, six.string_types): | 44 | if isinstance(packages, str): |
3746 | 46 | cmd.append(packages) | 45 | cmd.append(packages) |
3747 | 47 | else: | 46 | else: |
3748 | 48 | cmd.extend(packages) | 47 | cmd.extend(packages) |
3749 | @@ -71,7 +70,7 @@ | |||
3750 | 71 | def purge(packages, fatal=False): | 70 | def purge(packages, fatal=False): |
3751 | 72 | """Purge one or more packages.""" | 71 | """Purge one or more packages.""" |
3752 | 73 | cmd = ['yum', '--assumeyes', 'remove'] | 72 | cmd = ['yum', '--assumeyes', 'remove'] |
3754 | 74 | if isinstance(packages, six.string_types): | 73 | if isinstance(packages, str): |
3755 | 75 | cmd.append(packages) | 74 | cmd.append(packages) |
3756 | 76 | else: | 75 | else: |
3757 | 77 | cmd.extend(packages) | 76 | cmd.extend(packages) |
3758 | @@ -83,7 +82,7 @@ | |||
3759 | 83 | """Search for a package.""" | 82 | """Search for a package.""" |
3760 | 84 | output = {} | 83 | output = {} |
3761 | 85 | cmd = ['yum', 'search'] | 84 | cmd = ['yum', 'search'] |
3763 | 86 | if isinstance(packages, six.string_types): | 85 | if isinstance(packages, str): |
3764 | 87 | cmd.append(packages) | 86 | cmd.append(packages) |
3765 | 88 | else: | 87 | else: |
3766 | 89 | cmd.extend(packages) | 88 | cmd.extend(packages) |
3767 | @@ -132,7 +131,7 @@ | |||
3768 | 132 | key_file.write(key) | 131 | key_file.write(key) |
3769 | 133 | key_file.flush() | 132 | key_file.flush() |
3770 | 134 | key_file.seek(0) | 133 | key_file.seek(0) |
3772 | 135 | subprocess.check_call(['rpm', '--import', key_file]) | 134 | subprocess.check_call(['rpm', '--import', key_file.name]) |
3773 | 136 | else: | 135 | else: |
3774 | 137 | subprocess.check_call(['rpm', '--import', key]) | 136 | subprocess.check_call(['rpm', '--import', key]) |
3775 | 138 | 137 | ||
3776 | 139 | 138 | ||
3777 | === modified file 'hooks/charmhelpers/fetch/giturl.py' | |||
3778 | --- hooks/charmhelpers/fetch/giturl.py 2016-12-20 14:35:00 +0000 | |||
3779 | +++ hooks/charmhelpers/fetch/giturl.py 2023-06-30 13:58:42 +0000 | |||
3780 | @@ -13,7 +13,7 @@ | |||
3781 | 13 | # limitations under the License. | 13 | # limitations under the License. |
3782 | 14 | 14 | ||
3783 | 15 | import os | 15 | import os |
3785 | 16 | from subprocess import check_call, CalledProcessError | 16 | from subprocess import check_output, CalledProcessError, STDOUT |
3786 | 17 | from charmhelpers.fetch import ( | 17 | from charmhelpers.fetch import ( |
3787 | 18 | BaseFetchHandler, | 18 | BaseFetchHandler, |
3788 | 19 | UnhandledSource, | 19 | UnhandledSource, |
3789 | @@ -50,7 +50,7 @@ | |||
3790 | 50 | cmd = ['git', 'clone', source, dest, '--branch', branch] | 50 | cmd = ['git', 'clone', source, dest, '--branch', branch] |
3791 | 51 | if depth: | 51 | if depth: |
3792 | 52 | cmd.extend(['--depth', depth]) | 52 | cmd.extend(['--depth', depth]) |
3794 | 53 | check_call(cmd) | 53 | check_output(cmd, stderr=STDOUT) |
3795 | 54 | 54 | ||
3796 | 55 | def install(self, source, branch="master", dest=None, depth=None): | 55 | def install(self, source, branch="master", dest=None, depth=None): |
3797 | 56 | url_parts = self.parse_url(source) | 56 | url_parts = self.parse_url(source) |
3798 | 57 | 57 | ||
3799 | === added directory 'hooks/charmhelpers/fetch/python' | |||
3800 | === added file 'hooks/charmhelpers/fetch/python/__init__.py' | |||
3801 | --- hooks/charmhelpers/fetch/python/__init__.py 1970-01-01 00:00:00 +0000 | |||
3802 | +++ hooks/charmhelpers/fetch/python/__init__.py 2023-06-30 13:58:42 +0000 | |||
3803 | @@ -0,0 +1,13 @@ | |||
3804 | 1 | # Copyright 2014-2019 Canonical Limited. | ||
3805 | 2 | # | ||
3806 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
3807 | 4 | # you may not use this file except in compliance with the License. | ||
3808 | 5 | # You may obtain a copy of the License at | ||
3809 | 6 | # | ||
3810 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
3811 | 8 | # | ||
3812 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
3813 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
3814 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
3815 | 12 | # See the License for the specific language governing permissions and | ||
3816 | 13 | # limitations under the License. | ||
3817 | 0 | 14 | ||
3818 | === added file 'hooks/charmhelpers/fetch/python/debug.py' | |||
3819 | --- hooks/charmhelpers/fetch/python/debug.py 1970-01-01 00:00:00 +0000 | |||
3820 | +++ hooks/charmhelpers/fetch/python/debug.py 2023-06-30 13:58:42 +0000 | |||
3821 | @@ -0,0 +1,52 @@ | |||
3822 | 1 | #!/usr/bin/env python | ||
3823 | 2 | # coding: utf-8 | ||
3824 | 3 | |||
3825 | 4 | # Copyright 2014-2015 Canonical Limited. | ||
3826 | 5 | # | ||
3827 | 6 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
3828 | 7 | # you may not use this file except in compliance with the License. | ||
3829 | 8 | # You may obtain a copy of the License at | ||
3830 | 9 | # | ||
3831 | 10 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
3832 | 11 | # | ||
3833 | 12 | # Unless required by applicable law or agreed to in writing, software | ||
3834 | 13 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
3835 | 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
3836 | 15 | # See the License for the specific language governing permissions and | ||
3837 | 16 | # limitations under the License. | ||
3838 | 17 | |||
3839 | 18 | import atexit | ||
3840 | 19 | import sys | ||
3841 | 20 | |||
3842 | 21 | from charmhelpers.fetch.python.rpdb import Rpdb | ||
3843 | 22 | from charmhelpers.core.hookenv import ( | ||
3844 | 23 | open_port, | ||
3845 | 24 | close_port, | ||
3846 | 25 | ERROR, | ||
3847 | 26 | log | ||
3848 | 27 | ) | ||
3849 | 28 | |||
3850 | 29 | __author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" | ||
3851 | 30 | |||
3852 | 31 | DEFAULT_ADDR = "0.0.0.0" | ||
3853 | 32 | DEFAULT_PORT = 4444 | ||
3854 | 33 | |||
3855 | 34 | |||
3856 | 35 | def _error(message): | ||
3857 | 36 | log(message, level=ERROR) | ||
3858 | 37 | |||
3859 | 38 | |||
3860 | 39 | def set_trace(addr=DEFAULT_ADDR, port=DEFAULT_PORT): | ||
3861 | 40 | """ | ||
3862 | 41 | Set a trace point using the remote debugger | ||
3863 | 42 | """ | ||
3864 | 43 | atexit.register(close_port, port) | ||
3865 | 44 | try: | ||
3866 | 45 | log("Starting a remote python debugger session on %s:%s" % (addr, | ||
3867 | 46 | port)) | ||
3868 | 47 | open_port(port) | ||
3869 | 48 | debugger = Rpdb(addr=addr, port=port) | ||
3870 | 49 | debugger.set_trace(sys._getframe().f_back) | ||
3871 | 50 | except Exception: | ||
3872 | 51 | _error("Cannot start a remote debug session on %s:%s" % (addr, | ||
3873 | 52 | port)) | ||
3874 | 0 | 53 | ||
3875 | === added file 'hooks/charmhelpers/fetch/python/packages.py' | |||
3876 | --- hooks/charmhelpers/fetch/python/packages.py 1970-01-01 00:00:00 +0000 | |||
3877 | +++ hooks/charmhelpers/fetch/python/packages.py 2023-06-30 13:58:42 +0000 | |||
3878 | @@ -0,0 +1,148 @@ | |||
3879 | 1 | #!/usr/bin/env python | ||
3880 | 2 | # coding: utf-8 | ||
3881 | 3 | |||
3882 | 4 | # Copyright 2014-2021 Canonical Limited. | ||
3883 | 5 | # | ||
3884 | 6 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
3885 | 7 | # you may not use this file except in compliance with the License. | ||
3886 | 8 | # You may obtain a copy of the License at | ||
3887 | 9 | # | ||
3888 | 10 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
3889 | 11 | # | ||
3890 | 12 | # Unless required by applicable law or agreed to in writing, software | ||
3891 | 13 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
3892 | 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
3893 | 15 | # See the License for the specific language governing permissions and | ||
3894 | 16 | # limitations under the License. | ||
3895 | 17 | |||
3896 | 18 | import os | ||
3897 | 19 | import subprocess | ||
3898 | 20 | import sys | ||
3899 | 21 | |||
3900 | 22 | from charmhelpers.fetch import apt_install, apt_update | ||
3901 | 23 | from charmhelpers.core.hookenv import charm_dir, log | ||
3902 | 24 | |||
3903 | 25 | __author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" | ||
3904 | 26 | |||
3905 | 27 | |||
3906 | 28 | def pip_execute(*args, **kwargs): | ||
3907 | 29 | """Overridden pip_execute() to stop sys.path being changed. | ||
3908 | 30 | |||
3909 | 31 | The act of importing main from the pip module seems to cause add wheels | ||
3910 | 32 | from the /usr/share/python-wheels which are installed by various tools. | ||
3911 | 33 | This function ensures that sys.path remains the same after the call is | ||
3912 | 34 | executed. | ||
3913 | 35 | """ | ||
3914 | 36 | try: | ||
3915 | 37 | _path = sys.path | ||
3916 | 38 | try: | ||
3917 | 39 | from pip import main as _pip_execute | ||
3918 | 40 | except ImportError: | ||
3919 | 41 | apt_update() | ||
3920 | 42 | apt_install('python3-pip') | ||
3921 | 43 | from pip import main as _pip_execute | ||
3922 | 44 | _pip_execute(*args, **kwargs) | ||
3923 | 45 | finally: | ||
3924 | 46 | sys.path = _path | ||
3925 | 47 | |||
3926 | 48 | |||
3927 | 49 | def parse_options(given, available): | ||
3928 | 50 | """Given a set of options, check if available""" | ||
3929 | 51 | for key, value in sorted(given.items()): | ||
3930 | 52 | if not value: | ||
3931 | 53 | continue | ||
3932 | 54 | if key in available: | ||
3933 | 55 | yield "--{0}={1}".format(key, value) | ||
3934 | 56 | |||
3935 | 57 | |||
3936 | 58 | def pip_install_requirements(requirements, constraints=None, **options): | ||
3937 | 59 | """Install a requirements file. | ||
3938 | 60 | |||
3939 | 61 | :param constraints: Path to pip constraints file. | ||
3940 | 62 | http://pip.readthedocs.org/en/stable/user_guide/#constraints-files | ||
3941 | 63 | """ | ||
3942 | 64 | command = ["install"] | ||
3943 | 65 | |||
3944 | 66 | available_options = ('proxy', 'src', 'log', ) | ||
3945 | 67 | for option in parse_options(options, available_options): | ||
3946 | 68 | command.append(option) | ||
3947 | 69 | |||
3948 | 70 | command.append("-r {0}".format(requirements)) | ||
3949 | 71 | if constraints: | ||
3950 | 72 | command.append("-c {0}".format(constraints)) | ||
3951 | 73 | log("Installing from file: {} with constraints {} " | ||
3952 | 74 | "and options: {}".format(requirements, constraints, command)) | ||
3953 | 75 | else: | ||
3954 | 76 | log("Installing from file: {} with options: {}".format(requirements, | ||
3955 | 77 | command)) | ||
3956 | 78 | pip_execute(command) | ||
3957 | 79 | |||
3958 | 80 | |||
3959 | 81 | def pip_install(package, fatal=False, upgrade=False, venv=None, | ||
3960 | 82 | constraints=None, **options): | ||
3961 | 83 | """Install a python package""" | ||
3962 | 84 | if venv: | ||
3963 | 85 | venv_python = os.path.join(venv, 'bin/pip') | ||
3964 | 86 | command = [venv_python, "install"] | ||
3965 | 87 | else: | ||
3966 | 88 | command = ["install"] | ||
3967 | 89 | |||
3968 | 90 | available_options = ('proxy', 'src', 'log', 'index-url', ) | ||
3969 | 91 | for option in parse_options(options, available_options): | ||
3970 | 92 | command.append(option) | ||
3971 | 93 | |||
3972 | 94 | if upgrade: | ||
3973 | 95 | command.append('--upgrade') | ||
3974 | 96 | |||
3975 | 97 | if constraints: | ||
3976 | 98 | command.extend(['-c', constraints]) | ||
3977 | 99 | |||
3978 | 100 | if isinstance(package, list): | ||
3979 | 101 | command.extend(package) | ||
3980 | 102 | else: | ||
3981 | 103 | command.append(package) | ||
3982 | 104 | |||
3983 | 105 | log("Installing {} package with options: {}".format(package, | ||
3984 | 106 | command)) | ||
3985 | 107 | if venv: | ||
3986 | 108 | subprocess.check_call(command) | ||
3987 | 109 | else: | ||
3988 | 110 | pip_execute(command) | ||
3989 | 111 | |||
3990 | 112 | |||
3991 | 113 | def pip_uninstall(package, **options): | ||
3992 | 114 | """Uninstall a python package""" | ||
3993 | 115 | command = ["uninstall", "-q", "-y"] | ||
3994 | 116 | |||
3995 | 117 | available_options = ('proxy', 'log', ) | ||
3996 | 118 | for option in parse_options(options, available_options): | ||
3997 | 119 | command.append(option) | ||
3998 | 120 | |||
3999 | 121 | if isinstance(package, list): | ||
4000 | 122 | command.extend(package) | ||
4001 | 123 | else: | ||
4002 | 124 | command.append(package) | ||
4003 | 125 | |||
4004 | 126 | log("Uninstalling {} package with options: {}".format(package, | ||
4005 | 127 | command)) | ||
4006 | 128 | pip_execute(command) | ||
4007 | 129 | |||
4008 | 130 | |||
4009 | 131 | def pip_list(): | ||
4010 | 132 | """Returns the list of current python installed packages | ||
4011 | 133 | """ | ||
4012 | 134 | return pip_execute(["list"]) | ||
4013 | 135 | |||
4014 | 136 | |||
4015 | 137 | def pip_create_virtualenv(path=None): | ||
4016 | 138 | """Create an isolated Python environment.""" | ||
4017 | 139 | apt_install(['python3-virtualenv', 'virtualenv']) | ||
4018 | 140 | extra_flags = ['--python=python3'] | ||
4019 | 141 | |||
4020 | 142 | if path: | ||
4021 | 143 | venv_path = path | ||
4022 | 144 | else: | ||
4023 | 145 | venv_path = os.path.join(charm_dir(), 'venv') | ||
4024 | 146 | |||
4025 | 147 | if not os.path.exists(venv_path): | ||
4026 | 148 | subprocess.check_call(['virtualenv', venv_path] + extra_flags) | ||
4027 | 0 | 149 | ||
4028 | === added file 'hooks/charmhelpers/fetch/python/rpdb.py' | |||
4029 | --- hooks/charmhelpers/fetch/python/rpdb.py 1970-01-01 00:00:00 +0000 | |||
4030 | +++ hooks/charmhelpers/fetch/python/rpdb.py 2023-06-30 13:58:42 +0000 | |||
4031 | @@ -0,0 +1,56 @@ | |||
4032 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
4033 | 2 | # | ||
4034 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
4035 | 4 | # you may not use this file except in compliance with the License. | ||
4036 | 5 | # You may obtain a copy of the License at | ||
4037 | 6 | # | ||
4038 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
4039 | 8 | # | ||
4040 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
4041 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
4042 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
4043 | 12 | # See the License for the specific language governing permissions and | ||
4044 | 13 | # limitations under the License. | ||
4045 | 14 | |||
4046 | 15 | """Remote Python Debugger (pdb wrapper).""" | ||
4047 | 16 | |||
4048 | 17 | import pdb | ||
4049 | 18 | import socket | ||
4050 | 19 | import sys | ||
4051 | 20 | |||
4052 | 21 | __author__ = "Bertrand Janin <b@janin.com>" | ||
4053 | 22 | __version__ = "0.1.3" | ||
4054 | 23 | |||
4055 | 24 | |||
4056 | 25 | class Rpdb(pdb.Pdb): | ||
4057 | 26 | |||
4058 | 27 | def __init__(self, addr="127.0.0.1", port=4444): | ||
4059 | 28 | """Initialize the socket and initialize pdb.""" | ||
4060 | 29 | |||
4061 | 30 | # Backup stdin and stdout before replacing them by the socket handle | ||
4062 | 31 | self.old_stdout = sys.stdout | ||
4063 | 32 | self.old_stdin = sys.stdin | ||
4064 | 33 | |||
4065 | 34 | # Open a 'reusable' socket to let the webapp reload on the same port | ||
4066 | 35 | self.skt = socket.socket(socket.AF_INET, socket.SOCK_STREAM) | ||
4067 | 36 | self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True) | ||
4068 | 37 | self.skt.bind((addr, port)) | ||
4069 | 38 | self.skt.listen(1) | ||
4070 | 39 | (clientsocket, address) = self.skt.accept() | ||
4071 | 40 | handle = clientsocket.makefile('rw') | ||
4072 | 41 | pdb.Pdb.__init__(self, completekey='tab', stdin=handle, stdout=handle) | ||
4073 | 42 | sys.stdout = sys.stdin = handle | ||
4074 | 43 | |||
4075 | 44 | def shutdown(self): | ||
4076 | 45 | """Revert stdin and stdout, close the socket.""" | ||
4077 | 46 | sys.stdout = self.old_stdout | ||
4078 | 47 | sys.stdin = self.old_stdin | ||
4079 | 48 | self.skt.close() | ||
4080 | 49 | self.set_continue() | ||
4081 | 50 | |||
4082 | 51 | def do_continue(self, arg): | ||
4083 | 52 | """Stop all operation on ``continue``.""" | ||
4084 | 53 | self.shutdown() | ||
4085 | 54 | return 1 | ||
4086 | 55 | |||
4087 | 56 | do_EOF = do_quit = do_exit = do_c = do_cont = do_continue | ||
4088 | 0 | 57 | ||
4089 | === added file 'hooks/charmhelpers/fetch/python/version.py' | |||
4090 | --- hooks/charmhelpers/fetch/python/version.py 1970-01-01 00:00:00 +0000 | |||
4091 | +++ hooks/charmhelpers/fetch/python/version.py 2023-06-30 13:58:42 +0000 | |||
4092 | @@ -0,0 +1,32 @@ | |||
4093 | 1 | #!/usr/bin/env python | ||
4094 | 2 | # coding: utf-8 | ||
4095 | 3 | |||
4096 | 4 | # Copyright 2014-2015 Canonical Limited. | ||
4097 | 5 | # | ||
4098 | 6 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
4099 | 7 | # you may not use this file except in compliance with the License. | ||
4100 | 8 | # You may obtain a copy of the License at | ||
4101 | 9 | # | ||
4102 | 10 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
4103 | 11 | # | ||
4104 | 12 | # Unless required by applicable law or agreed to in writing, software | ||
4105 | 13 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
4106 | 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
4107 | 15 | # See the License for the specific language governing permissions and | ||
4108 | 16 | # limitations under the License. | ||
4109 | 17 | |||
4110 | 18 | import sys | ||
4111 | 19 | |||
4112 | 20 | __author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" | ||
4113 | 21 | |||
4114 | 22 | |||
4115 | 23 | def current_version(): | ||
4116 | 24 | """Current system python version""" | ||
4117 | 25 | return sys.version_info | ||
4118 | 26 | |||
4119 | 27 | |||
4120 | 28 | def current_version_string(): | ||
4121 | 29 | """Current system python version as string major.minor.micro""" | ||
4122 | 30 | return "{0}.{1}.{2}".format(sys.version_info.major, | ||
4123 | 31 | sys.version_info.minor, | ||
4124 | 32 | sys.version_info.micro) | ||
4125 | 0 | 33 | ||
4126 | === added file 'hooks/charmhelpers/fetch/snap.py' | |||
4127 | --- hooks/charmhelpers/fetch/snap.py 1970-01-01 00:00:00 +0000 | |||
4128 | +++ hooks/charmhelpers/fetch/snap.py 2023-06-30 13:58:42 +0000 | |||
4129 | @@ -0,0 +1,150 @@ | |||
4130 | 1 | # Copyright 2014-2021 Canonical Limited. | ||
4131 | 2 | # | ||
4132 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
4133 | 4 | # you may not use this file except in compliance with the License. | ||
4134 | 5 | # You may obtain a copy of the License at | ||
4135 | 6 | # | ||
4136 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
4137 | 8 | # | ||
4138 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
4139 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
4140 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
4141 | 12 | # See the License for the specific language governing permissions and | ||
4142 | 13 | # limitations under the License. | ||
4143 | 14 | """ | ||
4144 | 15 | Charm helpers snap for classic charms. | ||
4145 | 16 | |||
4146 | 17 | If writing reactive charms, use the snap layer: | ||
4147 | 18 | https://lists.ubuntu.com/archives/snapcraft/2016-September/001114.html | ||
4148 | 19 | """ | ||
4149 | 20 | import subprocess | ||
4150 | 21 | import os | ||
4151 | 22 | from time import sleep | ||
4152 | 23 | from charmhelpers.core.hookenv import log | ||
4153 | 24 | |||
4154 | 25 | __author__ = 'Joseph Borg <joseph.borg@canonical.com>' | ||
4155 | 26 | |||
# The return code for "couldn't acquire lock" in Snap
# (hopefully this will be improved).
SNAP_NO_LOCK = 1
SNAP_NO_LOCK_RETRY_DELAY = 10  # Wait X seconds between Snap lock checks.
SNAP_NO_LOCK_RETRY_COUNT = 30  # Retry to acquire the lock X times.
# Channel names accepted by valid_snap_channel(), ordered from least to
# most stable.
SNAP_CHANNELS = [
    'edge',
    'beta',
    'candidate',
    'stable',
]
4167 | 38 | |||
4168 | 39 | |||
class CouldNotAcquireLockException(Exception):
    """Raised when the snap lock cannot be acquired after repeated retries."""
    pass
4171 | 42 | |||
4172 | 43 | |||
class InvalidSnapChannel(Exception):
    """Raised when a channel name is not one of the known SNAP_CHANNELS."""
    pass
4175 | 46 | |||
4176 | 47 | |||
def _snap_exec(commands):
    """
    Execute snap commands, retrying while the snap lock is held elsewhere.

    :param commands: List of command arguments to pass to ``snap``
    :return: Integer exit code from snap
    :raises CouldNotAcquireLockException: if the snap lock could not be
        acquired after SNAP_NO_LOCK_RETRY_COUNT attempts.
    """
    assert isinstance(commands, list)

    retry_count = 0
    return_code = None

    while return_code is None or return_code == SNAP_NO_LOCK:
        try:
            return_code = subprocess.check_call(['snap'] + commands,
                                                env=os.environ)
        except subprocess.CalledProcessError as e:
            return_code = e.returncode
            if return_code != SNAP_NO_LOCK:
                # A genuine failure, not lock contention: don't sleep or
                # log a misleading lock message, just hand back the code.
                break
            retry_count += 1
            if retry_count > SNAP_NO_LOCK_RETRY_COUNT:
                raise CouldNotAcquireLockException(
                    'Could not acquire lock after {} attempts'
                    .format(SNAP_NO_LOCK_RETRY_COUNT))
            log('Snap failed to acquire lock, trying again in {} seconds.'
                .format(SNAP_NO_LOCK_RETRY_DELAY), level='WARN')
            sleep(SNAP_NO_LOCK_RETRY_DELAY)

    return return_code
4205 | 76 | |||
4206 | 77 | |||
def snap_install(packages, *flags):
    """
    Install one or more snap packages.

    :param packages: String or List String package name
    :param flags: List String flags to pass to install command
    :return: Integer return code from snap
    """
    pkg_list = packages if type(packages) is list else [packages]
    opts = list(flags)

    message = 'Installing snap(s) "%s"' % ', '.join(pkg_list)
    if opts:
        message += ' with option(s) "%s"' % ', '.join(opts)
    log(message, level='INFO')

    return _snap_exec(['install'] + opts + pkg_list)
4226 | 97 | |||
4227 | 98 | |||
def snap_remove(packages, *flags):
    """
    Remove one or more snap packages.

    :param packages: String or List String package name
    :param flags: List String flags to pass to remove command
    :return: Integer return code from snap
    """
    pkg_list = packages if type(packages) is list else [packages]
    opts = list(flags)

    message = 'Removing snap(s) "%s"' % ', '.join(pkg_list)
    if opts:
        message += ' with options "%s"' % ', '.join(opts)
    log(message, level='INFO')

    return _snap_exec(['remove'] + opts + pkg_list)
4247 | 118 | |||
4248 | 119 | |||
def snap_refresh(packages, *flags):
    """
    Refresh (update) one or more snap packages.

    :param packages: String or List String package name
    :param flags: List String flags to pass to refresh command
    :return: Integer return code from snap
    """
    pkg_list = packages if type(packages) is list else [packages]
    opts = list(flags)

    message = 'Refreshing snap(s) "%s"' % ', '.join(pkg_list)
    if opts:
        message += ' with options "%s"' % ', '.join(opts)
    log(message, level='INFO')

    return _snap_exec(['refresh'] + opts + pkg_list)
4268 | 139 | |||
4269 | 140 | |||
def valid_snap_channel(channel):
    """Validate that a snap channel name is one of the known channels.

    :raises InvalidSnapChannel: When channel does not exist
    :return: Boolean
    """
    if channel.lower() not in SNAP_CHANNELS:
        raise InvalidSnapChannel("Invalid Snap Channel: {}".format(channel))
    return True
4280 | 0 | 151 | ||
4281 | === modified file 'hooks/charmhelpers/fetch/ubuntu.py' | |||
4282 | --- hooks/charmhelpers/fetch/ubuntu.py 2016-12-20 20:15:28 +0000 | |||
4283 | +++ hooks/charmhelpers/fetch/ubuntu.py 2023-06-30 13:58:42 +0000 | |||
4284 | @@ -1,4 +1,4 @@ | |||
4286 | 1 | # Copyright 2014-2015 Canonical Limited. | 1 | # Copyright 2014-2021 Canonical Limited. |
4287 | 2 | # | 2 | # |
4288 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); |
4289 | 4 | # you may not use this file except in compliance with the License. | 4 | # you may not use this file except in compliance with the License. |
4290 | @@ -12,29 +12,49 @@ | |||
4291 | 12 | # See the License for the specific language governing permissions and | 12 | # See the License for the specific language governing permissions and |
4292 | 13 | # limitations under the License. | 13 | # limitations under the License. |
4293 | 14 | 14 | ||
4296 | 15 | import os | 15 | from collections import OrderedDict |
4297 | 16 | import six | 16 | import platform |
4298 | 17 | import re | ||
4299 | 18 | import subprocess | ||
4300 | 19 | import sys | ||
4301 | 17 | import time | 20 | import time |
4307 | 18 | import subprocess | 21 | |
4308 | 19 | 22 | from charmhelpers import deprecate | |
4309 | 20 | from tempfile import NamedTemporaryFile | 23 | from charmhelpers.core.host import get_distrib_codename, get_system_env |
4310 | 21 | from charmhelpers.core.host import ( | 24 | |
4311 | 22 | lsb_release | 25 | from charmhelpers.core.hookenv import ( |
4312 | 26 | log, | ||
4313 | 27 | DEBUG, | ||
4314 | 28 | WARNING, | ||
4315 | 29 | env_proxy_settings, | ||
4316 | 23 | ) | 30 | ) |
4319 | 24 | from charmhelpers.core.hookenv import log | 31 | from charmhelpers.fetch import SourceConfigError, GPGKeyError |
4320 | 25 | from charmhelpers.fetch import SourceConfigError | 32 | from charmhelpers.fetch import ubuntu_apt_pkg |
4321 | 26 | 33 | ||
4322 | 34 | PROPOSED_POCKET = ( | ||
4323 | 35 | "# Proposed\n" | ||
4324 | 36 | "deb http://archive.ubuntu.com/ubuntu {}-proposed main universe " | ||
4325 | 37 | "multiverse restricted\n") | ||
4326 | 38 | PROPOSED_PORTS_POCKET = ( | ||
4327 | 39 | "# Proposed\n" | ||
4328 | 40 | "deb http://ports.ubuntu.com/ubuntu-ports {}-proposed main universe " | ||
4329 | 41 | "multiverse restricted\n") | ||
4330 | 42 | # Only supports 64bit and ppc64 at the moment. | ||
4331 | 43 | ARCH_TO_PROPOSED_POCKET = { | ||
4332 | 44 | 'x86_64': PROPOSED_POCKET, | ||
4333 | 45 | 'ppc64le': PROPOSED_PORTS_POCKET, | ||
4334 | 46 | 'aarch64': PROPOSED_PORTS_POCKET, | ||
4335 | 47 | 's390x': PROPOSED_PORTS_POCKET, | ||
4336 | 48 | } | ||
4337 | 49 | CLOUD_ARCHIVE_URL = "http://ubuntu-cloud.archive.canonical.com/ubuntu" | ||
4338 | 50 | CLOUD_ARCHIVE_KEY_ID = '5EDB1B62EC4926EA' | ||
4339 | 27 | CLOUD_ARCHIVE = """# Ubuntu Cloud Archive | 51 | CLOUD_ARCHIVE = """# Ubuntu Cloud Archive |
4340 | 28 | deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main | 52 | deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main |
4341 | 29 | """ | 53 | """ |
4342 | 30 | |||
4343 | 31 | PROPOSED_POCKET = """# Proposed | ||
4344 | 32 | deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted | ||
4345 | 33 | """ | ||
4346 | 34 | |||
4347 | 35 | CLOUD_ARCHIVE_POCKETS = { | 54 | CLOUD_ARCHIVE_POCKETS = { |
4348 | 36 | # Folsom | 55 | # Folsom |
4349 | 37 | 'folsom': 'precise-updates/folsom', | 56 | 'folsom': 'precise-updates/folsom', |
4350 | 57 | 'folsom/updates': 'precise-updates/folsom', | ||
4351 | 38 | 'precise-folsom': 'precise-updates/folsom', | 58 | 'precise-folsom': 'precise-updates/folsom', |
4352 | 39 | 'precise-folsom/updates': 'precise-updates/folsom', | 59 | 'precise-folsom/updates': 'precise-updates/folsom', |
4353 | 40 | 'precise-updates/folsom': 'precise-updates/folsom', | 60 | 'precise-updates/folsom': 'precise-updates/folsom', |
4354 | @@ -43,6 +63,7 @@ | |||
4355 | 43 | 'precise-proposed/folsom': 'precise-proposed/folsom', | 63 | 'precise-proposed/folsom': 'precise-proposed/folsom', |
4356 | 44 | # Grizzly | 64 | # Grizzly |
4357 | 45 | 'grizzly': 'precise-updates/grizzly', | 65 | 'grizzly': 'precise-updates/grizzly', |
4358 | 66 | 'grizzly/updates': 'precise-updates/grizzly', | ||
4359 | 46 | 'precise-grizzly': 'precise-updates/grizzly', | 67 | 'precise-grizzly': 'precise-updates/grizzly', |
4360 | 47 | 'precise-grizzly/updates': 'precise-updates/grizzly', | 68 | 'precise-grizzly/updates': 'precise-updates/grizzly', |
4361 | 48 | 'precise-updates/grizzly': 'precise-updates/grizzly', | 69 | 'precise-updates/grizzly': 'precise-updates/grizzly', |
4362 | @@ -51,6 +72,7 @@ | |||
4363 | 51 | 'precise-proposed/grizzly': 'precise-proposed/grizzly', | 72 | 'precise-proposed/grizzly': 'precise-proposed/grizzly', |
4364 | 52 | # Havana | 73 | # Havana |
4365 | 53 | 'havana': 'precise-updates/havana', | 74 | 'havana': 'precise-updates/havana', |
4366 | 75 | 'havana/updates': 'precise-updates/havana', | ||
4367 | 54 | 'precise-havana': 'precise-updates/havana', | 76 | 'precise-havana': 'precise-updates/havana', |
4368 | 55 | 'precise-havana/updates': 'precise-updates/havana', | 77 | 'precise-havana/updates': 'precise-updates/havana', |
4369 | 56 | 'precise-updates/havana': 'precise-updates/havana', | 78 | 'precise-updates/havana': 'precise-updates/havana', |
4370 | @@ -59,6 +81,7 @@ | |||
4371 | 59 | 'precise-proposed/havana': 'precise-proposed/havana', | 81 | 'precise-proposed/havana': 'precise-proposed/havana', |
4372 | 60 | # Icehouse | 82 | # Icehouse |
4373 | 61 | 'icehouse': 'precise-updates/icehouse', | 83 | 'icehouse': 'precise-updates/icehouse', |
4374 | 84 | 'icehouse/updates': 'precise-updates/icehouse', | ||
4375 | 62 | 'precise-icehouse': 'precise-updates/icehouse', | 85 | 'precise-icehouse': 'precise-updates/icehouse', |
4376 | 63 | 'precise-icehouse/updates': 'precise-updates/icehouse', | 86 | 'precise-icehouse/updates': 'precise-updates/icehouse', |
4377 | 64 | 'precise-updates/icehouse': 'precise-updates/icehouse', | 87 | 'precise-updates/icehouse': 'precise-updates/icehouse', |
4378 | @@ -67,6 +90,7 @@ | |||
4379 | 67 | 'precise-proposed/icehouse': 'precise-proposed/icehouse', | 90 | 'precise-proposed/icehouse': 'precise-proposed/icehouse', |
4380 | 68 | # Juno | 91 | # Juno |
4381 | 69 | 'juno': 'trusty-updates/juno', | 92 | 'juno': 'trusty-updates/juno', |
4382 | 93 | 'juno/updates': 'trusty-updates/juno', | ||
4383 | 70 | 'trusty-juno': 'trusty-updates/juno', | 94 | 'trusty-juno': 'trusty-updates/juno', |
4384 | 71 | 'trusty-juno/updates': 'trusty-updates/juno', | 95 | 'trusty-juno/updates': 'trusty-updates/juno', |
4385 | 72 | 'trusty-updates/juno': 'trusty-updates/juno', | 96 | 'trusty-updates/juno': 'trusty-updates/juno', |
4386 | @@ -75,6 +99,7 @@ | |||
4387 | 75 | 'trusty-proposed/juno': 'trusty-proposed/juno', | 99 | 'trusty-proposed/juno': 'trusty-proposed/juno', |
4388 | 76 | # Kilo | 100 | # Kilo |
4389 | 77 | 'kilo': 'trusty-updates/kilo', | 101 | 'kilo': 'trusty-updates/kilo', |
4390 | 102 | 'kilo/updates': 'trusty-updates/kilo', | ||
4391 | 78 | 'trusty-kilo': 'trusty-updates/kilo', | 103 | 'trusty-kilo': 'trusty-updates/kilo', |
4392 | 79 | 'trusty-kilo/updates': 'trusty-updates/kilo', | 104 | 'trusty-kilo/updates': 'trusty-updates/kilo', |
4393 | 80 | 'trusty-updates/kilo': 'trusty-updates/kilo', | 105 | 'trusty-updates/kilo': 'trusty-updates/kilo', |
4394 | @@ -83,6 +108,7 @@ | |||
4395 | 83 | 'trusty-proposed/kilo': 'trusty-proposed/kilo', | 108 | 'trusty-proposed/kilo': 'trusty-proposed/kilo', |
4396 | 84 | # Liberty | 109 | # Liberty |
4397 | 85 | 'liberty': 'trusty-updates/liberty', | 110 | 'liberty': 'trusty-updates/liberty', |
4398 | 111 | 'liberty/updates': 'trusty-updates/liberty', | ||
4399 | 86 | 'trusty-liberty': 'trusty-updates/liberty', | 112 | 'trusty-liberty': 'trusty-updates/liberty', |
4400 | 87 | 'trusty-liberty/updates': 'trusty-updates/liberty', | 113 | 'trusty-liberty/updates': 'trusty-updates/liberty', |
4401 | 88 | 'trusty-updates/liberty': 'trusty-updates/liberty', | 114 | 'trusty-updates/liberty': 'trusty-updates/liberty', |
4402 | @@ -91,6 +117,7 @@ | |||
4403 | 91 | 'trusty-proposed/liberty': 'trusty-proposed/liberty', | 117 | 'trusty-proposed/liberty': 'trusty-proposed/liberty', |
4404 | 92 | # Mitaka | 118 | # Mitaka |
4405 | 93 | 'mitaka': 'trusty-updates/mitaka', | 119 | 'mitaka': 'trusty-updates/mitaka', |
4406 | 120 | 'mitaka/updates': 'trusty-updates/mitaka', | ||
4407 | 94 | 'trusty-mitaka': 'trusty-updates/mitaka', | 121 | 'trusty-mitaka': 'trusty-updates/mitaka', |
4408 | 95 | 'trusty-mitaka/updates': 'trusty-updates/mitaka', | 122 | 'trusty-mitaka/updates': 'trusty-updates/mitaka', |
4409 | 96 | 'trusty-updates/mitaka': 'trusty-updates/mitaka', | 123 | 'trusty-updates/mitaka': 'trusty-updates/mitaka', |
4410 | @@ -99,6 +126,7 @@ | |||
4411 | 99 | 'trusty-proposed/mitaka': 'trusty-proposed/mitaka', | 126 | 'trusty-proposed/mitaka': 'trusty-proposed/mitaka', |
4412 | 100 | # Newton | 127 | # Newton |
4413 | 101 | 'newton': 'xenial-updates/newton', | 128 | 'newton': 'xenial-updates/newton', |
4414 | 129 | 'newton/updates': 'xenial-updates/newton', | ||
4415 | 102 | 'xenial-newton': 'xenial-updates/newton', | 130 | 'xenial-newton': 'xenial-updates/newton', |
4416 | 103 | 'xenial-newton/updates': 'xenial-updates/newton', | 131 | 'xenial-newton/updates': 'xenial-updates/newton', |
4417 | 104 | 'xenial-updates/newton': 'xenial-updates/newton', | 132 | 'xenial-updates/newton': 'xenial-updates/newton', |
4418 | @@ -107,17 +135,175 @@ | |||
4419 | 107 | 'xenial-proposed/newton': 'xenial-proposed/newton', | 135 | 'xenial-proposed/newton': 'xenial-proposed/newton', |
4420 | 108 | # Ocata | 136 | # Ocata |
4421 | 109 | 'ocata': 'xenial-updates/ocata', | 137 | 'ocata': 'xenial-updates/ocata', |
4422 | 138 | 'ocata/updates': 'xenial-updates/ocata', | ||
4423 | 110 | 'xenial-ocata': 'xenial-updates/ocata', | 139 | 'xenial-ocata': 'xenial-updates/ocata', |
4424 | 111 | 'xenial-ocata/updates': 'xenial-updates/ocata', | 140 | 'xenial-ocata/updates': 'xenial-updates/ocata', |
4425 | 112 | 'xenial-updates/ocata': 'xenial-updates/ocata', | 141 | 'xenial-updates/ocata': 'xenial-updates/ocata', |
4426 | 113 | 'ocata/proposed': 'xenial-proposed/ocata', | 142 | 'ocata/proposed': 'xenial-proposed/ocata', |
4427 | 114 | 'xenial-ocata/proposed': 'xenial-proposed/ocata', | 143 | 'xenial-ocata/proposed': 'xenial-proposed/ocata', |
4429 | 115 | 'xenial-ocata/newton': 'xenial-proposed/ocata', | 144 | 'xenial-proposed/ocata': 'xenial-proposed/ocata', |
4430 | 145 | # Pike | ||
4431 | 146 | 'pike': 'xenial-updates/pike', | ||
4432 | 147 | 'xenial-pike': 'xenial-updates/pike', | ||
4433 | 148 | 'xenial-pike/updates': 'xenial-updates/pike', | ||
4434 | 149 | 'xenial-updates/pike': 'xenial-updates/pike', | ||
4435 | 150 | 'pike/proposed': 'xenial-proposed/pike', | ||
4436 | 151 | 'xenial-pike/proposed': 'xenial-proposed/pike', | ||
4437 | 152 | 'xenial-proposed/pike': 'xenial-proposed/pike', | ||
4438 | 153 | # Queens | ||
4439 | 154 | 'queens': 'xenial-updates/queens', | ||
4440 | 155 | 'xenial-queens': 'xenial-updates/queens', | ||
4441 | 156 | 'xenial-queens/updates': 'xenial-updates/queens', | ||
4442 | 157 | 'xenial-updates/queens': 'xenial-updates/queens', | ||
4443 | 158 | 'queens/proposed': 'xenial-proposed/queens', | ||
4444 | 159 | 'xenial-queens/proposed': 'xenial-proposed/queens', | ||
4445 | 160 | 'xenial-proposed/queens': 'xenial-proposed/queens', | ||
4446 | 161 | # Rocky | ||
4447 | 162 | 'rocky': 'bionic-updates/rocky', | ||
4448 | 163 | 'bionic-rocky': 'bionic-updates/rocky', | ||
4449 | 164 | 'bionic-rocky/updates': 'bionic-updates/rocky', | ||
4450 | 165 | 'bionic-updates/rocky': 'bionic-updates/rocky', | ||
4451 | 166 | 'rocky/proposed': 'bionic-proposed/rocky', | ||
4452 | 167 | 'bionic-rocky/proposed': 'bionic-proposed/rocky', | ||
4453 | 168 | 'bionic-proposed/rocky': 'bionic-proposed/rocky', | ||
4454 | 169 | # Stein | ||
4455 | 170 | 'stein': 'bionic-updates/stein', | ||
4456 | 171 | 'bionic-stein': 'bionic-updates/stein', | ||
4457 | 172 | 'bionic-stein/updates': 'bionic-updates/stein', | ||
4458 | 173 | 'bionic-updates/stein': 'bionic-updates/stein', | ||
4459 | 174 | 'stein/proposed': 'bionic-proposed/stein', | ||
4460 | 175 | 'bionic-stein/proposed': 'bionic-proposed/stein', | ||
4461 | 176 | 'bionic-proposed/stein': 'bionic-proposed/stein', | ||
4462 | 177 | # Train | ||
4463 | 178 | 'train': 'bionic-updates/train', | ||
4464 | 179 | 'bionic-train': 'bionic-updates/train', | ||
4465 | 180 | 'bionic-train/updates': 'bionic-updates/train', | ||
4466 | 181 | 'bionic-updates/train': 'bionic-updates/train', | ||
4467 | 182 | 'train/proposed': 'bionic-proposed/train', | ||
4468 | 183 | 'bionic-train/proposed': 'bionic-proposed/train', | ||
4469 | 184 | 'bionic-proposed/train': 'bionic-proposed/train', | ||
4470 | 185 | # Ussuri | ||
4471 | 186 | 'ussuri': 'bionic-updates/ussuri', | ||
4472 | 187 | 'bionic-ussuri': 'bionic-updates/ussuri', | ||
4473 | 188 | 'bionic-ussuri/updates': 'bionic-updates/ussuri', | ||
4474 | 189 | 'bionic-updates/ussuri': 'bionic-updates/ussuri', | ||
4475 | 190 | 'ussuri/proposed': 'bionic-proposed/ussuri', | ||
4476 | 191 | 'bionic-ussuri/proposed': 'bionic-proposed/ussuri', | ||
4477 | 192 | 'bionic-proposed/ussuri': 'bionic-proposed/ussuri', | ||
4478 | 193 | # Victoria | ||
4479 | 194 | 'victoria': 'focal-updates/victoria', | ||
4480 | 195 | 'focal-victoria': 'focal-updates/victoria', | ||
4481 | 196 | 'focal-victoria/updates': 'focal-updates/victoria', | ||
4482 | 197 | 'focal-updates/victoria': 'focal-updates/victoria', | ||
4483 | 198 | 'victoria/proposed': 'focal-proposed/victoria', | ||
4484 | 199 | 'focal-victoria/proposed': 'focal-proposed/victoria', | ||
4485 | 200 | 'focal-proposed/victoria': 'focal-proposed/victoria', | ||
4486 | 201 | # Wallaby | ||
4487 | 202 | 'wallaby': 'focal-updates/wallaby', | ||
4488 | 203 | 'focal-wallaby': 'focal-updates/wallaby', | ||
4489 | 204 | 'focal-wallaby/updates': 'focal-updates/wallaby', | ||
4490 | 205 | 'focal-updates/wallaby': 'focal-updates/wallaby', | ||
4491 | 206 | 'wallaby/proposed': 'focal-proposed/wallaby', | ||
4492 | 207 | 'focal-wallaby/proposed': 'focal-proposed/wallaby', | ||
4493 | 208 | 'focal-proposed/wallaby': 'focal-proposed/wallaby', | ||
4494 | 209 | # Xena | ||
4495 | 210 | 'xena': 'focal-updates/xena', | ||
4496 | 211 | 'focal-xena': 'focal-updates/xena', | ||
4497 | 212 | 'focal-xena/updates': 'focal-updates/xena', | ||
4498 | 213 | 'focal-updates/xena': 'focal-updates/xena', | ||
4499 | 214 | 'xena/proposed': 'focal-proposed/xena', | ||
4500 | 215 | 'focal-xena/proposed': 'focal-proposed/xena', | ||
4501 | 216 | 'focal-proposed/xena': 'focal-proposed/xena', | ||
4502 | 217 | # Yoga | ||
4503 | 218 | 'yoga': 'focal-updates/yoga', | ||
4504 | 219 | 'focal-yoga': 'focal-updates/yoga', | ||
4505 | 220 | 'focal-yoga/updates': 'focal-updates/yoga', | ||
4506 | 221 | 'focal-updates/yoga': 'focal-updates/yoga', | ||
4507 | 222 | 'yoga/proposed': 'focal-proposed/yoga', | ||
4508 | 223 | 'focal-yoga/proposed': 'focal-proposed/yoga', | ||
4509 | 224 | 'focal-proposed/yoga': 'focal-proposed/yoga', | ||
4510 | 225 | # Zed | ||
4511 | 226 | 'zed': 'jammy-updates/zed', | ||
4512 | 227 | 'jammy-zed': 'jammy-updates/zed', | ||
4513 | 228 | 'jammy-zed/updates': 'jammy-updates/zed', | ||
4514 | 229 | 'jammy-updates/zed': 'jammy-updates/zed', | ||
4515 | 230 | 'zed/proposed': 'jammy-proposed/zed', | ||
4516 | 231 | 'jammy-zed/proposed': 'jammy-proposed/zed', | ||
4517 | 232 | 'jammy-proposed/zed': 'jammy-proposed/zed', | ||
4518 | 233 | # antelope | ||
4519 | 234 | 'antelope': 'jammy-updates/antelope', | ||
4520 | 235 | 'jammy-antelope': 'jammy-updates/antelope', | ||
4521 | 236 | 'jammy-antelope/updates': 'jammy-updates/antelope', | ||
4522 | 237 | 'jammy-updates/antelope': 'jammy-updates/antelope', | ||
4523 | 238 | 'antelope/proposed': 'jammy-proposed/antelope', | ||
4524 | 239 | 'jammy-antelope/proposed': 'jammy-proposed/antelope', | ||
4525 | 240 | 'jammy-proposed/antelope': 'jammy-proposed/antelope', | ||
4526 | 241 | |||
4527 | 242 | # OVN | ||
4528 | 243 | 'focal-ovn-22.03': 'focal-updates/ovn-22.03', | ||
4529 | 244 | 'focal-ovn-22.03/proposed': 'focal-proposed/ovn-22.03', | ||
4530 | 116 | } | 245 | } |
4531 | 117 | 246 | ||
4532 | 247 | |||
4533 | 248 | OPENSTACK_RELEASES = ( | ||
4534 | 249 | 'diablo', | ||
4535 | 250 | 'essex', | ||
4536 | 251 | 'folsom', | ||
4537 | 252 | 'grizzly', | ||
4538 | 253 | 'havana', | ||
4539 | 254 | 'icehouse', | ||
4540 | 255 | 'juno', | ||
4541 | 256 | 'kilo', | ||
4542 | 257 | 'liberty', | ||
4543 | 258 | 'mitaka', | ||
4544 | 259 | 'newton', | ||
4545 | 260 | 'ocata', | ||
4546 | 261 | 'pike', | ||
4547 | 262 | 'queens', | ||
4548 | 263 | 'rocky', | ||
4549 | 264 | 'stein', | ||
4550 | 265 | 'train', | ||
4551 | 266 | 'ussuri', | ||
4552 | 267 | 'victoria', | ||
4553 | 268 | 'wallaby', | ||
4554 | 269 | 'xena', | ||
4555 | 270 | 'yoga', | ||
4556 | 271 | 'zed', | ||
4557 | 272 | 'antelope', | ||
4558 | 273 | ) | ||
4559 | 274 | |||
4560 | 275 | |||
4561 | 276 | UBUNTU_OPENSTACK_RELEASE = OrderedDict([ | ||
4562 | 277 | ('oneiric', 'diablo'), | ||
4563 | 278 | ('precise', 'essex'), | ||
4564 | 279 | ('quantal', 'folsom'), | ||
4565 | 280 | ('raring', 'grizzly'), | ||
4566 | 281 | ('saucy', 'havana'), | ||
4567 | 282 | ('trusty', 'icehouse'), | ||
4568 | 283 | ('utopic', 'juno'), | ||
4569 | 284 | ('vivid', 'kilo'), | ||
4570 | 285 | ('wily', 'liberty'), | ||
4571 | 286 | ('xenial', 'mitaka'), | ||
4572 | 287 | ('yakkety', 'newton'), | ||
4573 | 288 | ('zesty', 'ocata'), | ||
4574 | 289 | ('artful', 'pike'), | ||
4575 | 290 | ('bionic', 'queens'), | ||
4576 | 291 | ('cosmic', 'rocky'), | ||
4577 | 292 | ('disco', 'stein'), | ||
4578 | 293 | ('eoan', 'train'), | ||
4579 | 294 | ('focal', 'ussuri'), | ||
4580 | 295 | ('groovy', 'victoria'), | ||
4581 | 296 | ('hirsute', 'wallaby'), | ||
4582 | 297 | ('impish', 'xena'), | ||
4583 | 298 | ('jammy', 'yoga'), | ||
4584 | 299 | ('kinetic', 'zed'), | ||
4585 | 300 | ('lunar', 'antelope'), | ||
4586 | 301 | ]) | ||
4587 | 302 | |||
4588 | 303 | |||
4589 | 118 | APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT. | 304 | APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT. |
4592 | 119 | APT_NO_LOCK_RETRY_DELAY = 10 # Wait 10 seconds between apt lock checks. | 305 | CMD_RETRY_DELAY = 10 # Wait 10 seconds between command retries. |
4593 | 120 | APT_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times. | 306 | CMD_RETRY_COUNT = 10 # Retry a failing fatal command X times. |
4594 | 121 | 307 | ||
4595 | 122 | 308 | ||
4596 | 123 | def filter_installed_packages(packages): | 309 | def filter_installed_packages(packages): |
4597 | @@ -135,35 +321,93 @@ | |||
4598 | 135 | return _pkgs | 321 | return _pkgs |
4599 | 136 | 322 | ||
4600 | 137 | 323 | ||
4613 | 138 | def apt_cache(in_memory=True, progress=None): | 324 | def filter_missing_packages(packages): |
4614 | 139 | """Build and return an apt cache.""" | 325 | """Return a list of packages that are installed. |
4615 | 140 | from apt import apt_pkg | 326 | |
4616 | 141 | apt_pkg.init() | 327 | :param packages: list of packages to evaluate. |
4617 | 142 | if in_memory: | 328 | :returns list: Packages that are installed. |
4618 | 143 | apt_pkg.config.set("Dir::Cache::pkgcache", "") | 329 | """ |
4619 | 144 | apt_pkg.config.set("Dir::Cache::srcpkgcache", "") | 330 | return list( |
4620 | 145 | return apt_pkg.Cache(progress) | 331 | set(packages) - |
4621 | 146 | 332 | set(filter_installed_packages(packages)) | |
4622 | 147 | 333 | ) | |
4623 | 148 | def install(packages, options=None, fatal=False): | 334 | |
4624 | 149 | """Install one or more packages.""" | 335 | |
4625 | 336 | def apt_cache(*_, **__): | ||
4626 | 337 | """Shim returning an object simulating the apt_pkg Cache. | ||
4627 | 338 | |||
4628 | 339 | :param _: Accept arguments for compatibility, not used. | ||
4629 | 340 | :type _: any | ||
4630 | 341 | :param __: Accept keyword arguments for compatibility, not used. | ||
4631 | 342 | :type __: any | ||
4632 | 343 | :returns:Object used to interrogate the system apt and dpkg databases. | ||
4633 | 344 | :rtype:ubuntu_apt_pkg.Cache | ||
4634 | 345 | """ | ||
4635 | 346 | if 'apt_pkg' in sys.modules: | ||
4636 | 347 | # NOTE(fnordahl): When our consumer use the upstream ``apt_pkg`` module | ||
4637 | 348 | # in conjunction with the apt_cache helper function, they may expect us | ||
4638 | 349 | # to call ``apt_pkg.init()`` for them. | ||
4639 | 350 | # | ||
4640 | 351 | # Detect this situation, log a warning and make the call to | ||
4641 | 352 | # ``apt_pkg.init()`` to avoid the consumer Python interpreter from | ||
4642 | 353 | # crashing with a segmentation fault. | ||
4643 | 354 | @deprecate( | ||
4644 | 355 | 'Support for use of upstream ``apt_pkg`` module in conjunction' | ||
4645 | 356 | 'with charm-helpers is deprecated since 2019-06-25', | ||
4646 | 357 | date=None, log=lambda x: log(x, level=WARNING)) | ||
4647 | 358 | def one_shot_log(): | ||
4648 | 359 | pass | ||
4649 | 360 | |||
4650 | 361 | one_shot_log() | ||
4651 | 362 | sys.modules['apt_pkg'].init() | ||
4652 | 363 | return ubuntu_apt_pkg.Cache() | ||
4653 | 364 | |||
4654 | 365 | |||
4655 | 366 | def apt_install(packages, options=None, fatal=False, quiet=False): | ||
4656 | 367 | """Install one or more packages. | ||
4657 | 368 | |||
4658 | 369 | :param packages: Package(s) to install | ||
4659 | 370 | :type packages: Option[str, List[str]] | ||
4660 | 371 | :param options: Options to pass on to apt-get | ||
4661 | 372 | :type options: Option[None, List[str]] | ||
4662 | 373 | :param fatal: Whether the command's output should be checked and | ||
4663 | 374 | retried. | ||
4664 | 375 | :type fatal: bool | ||
4665 | 376 | :param quiet: if True (default), suppress log message to stdout/stderr | ||
4666 | 377 | :type quiet: bool | ||
4667 | 378 | :raises: subprocess.CalledProcessError | ||
4668 | 379 | """ | ||
4669 | 380 | if not packages: | ||
4670 | 381 | log("Nothing to install", level=DEBUG) | ||
4671 | 382 | return | ||
4672 | 150 | if options is None: | 383 | if options is None: |
4673 | 151 | options = ['--option=Dpkg::Options::=--force-confold'] | 384 | options = ['--option=Dpkg::Options::=--force-confold'] |
4674 | 152 | 385 | ||
4675 | 153 | cmd = ['apt-get', '--assume-yes'] | 386 | cmd = ['apt-get', '--assume-yes'] |
4676 | 154 | cmd.extend(options) | 387 | cmd.extend(options) |
4677 | 155 | cmd.append('install') | 388 | cmd.append('install') |
4679 | 156 | if isinstance(packages, six.string_types): | 389 | if isinstance(packages, str): |
4680 | 157 | cmd.append(packages) | 390 | cmd.append(packages) |
4681 | 158 | else: | 391 | else: |
4682 | 159 | cmd.extend(packages) | 392 | cmd.extend(packages) |
4690 | 160 | log("Installing {} with options: {}".format(packages, | 393 | if not quiet: |
4691 | 161 | options)) | 394 | log("Installing {} with options: {}" |
4692 | 162 | _run_apt_command(cmd, fatal) | 395 | .format(packages, options)) |
4693 | 163 | 396 | _run_apt_command(cmd, fatal, quiet=quiet) | |
4694 | 164 | 397 | ||
4695 | 165 | def upgrade(options=None, fatal=False, dist=False): | 398 | |
4696 | 166 | """Upgrade all packages.""" | 399 | def apt_upgrade(options=None, fatal=False, dist=False): |
4697 | 400 | """Upgrade all packages. | ||
4698 | 401 | |||
4699 | 402 | :param options: Options to pass on to apt-get | ||
4700 | 403 | :type options: Option[None, List[str]] | ||
4701 | 404 | :param fatal: Whether the command's output should be checked and | ||
4702 | 405 | retried. | ||
4703 | 406 | :type fatal: bool | ||
4704 | 407 | :param dist: Whether ``dist-upgrade`` should be used over ``upgrade`` | ||
4705 | 408 | :type dist: bool | ||
4706 | 409 | :raises: subprocess.CalledProcessError | ||
4707 | 410 | """ | ||
4708 | 167 | if options is None: | 411 | if options is None: |
4709 | 168 | options = ['--option=Dpkg::Options::=--force-confold'] | 412 | options = ['--option=Dpkg::Options::=--force-confold'] |
4710 | 169 | 413 | ||
4711 | @@ -177,16 +421,24 @@ | |||
4712 | 177 | _run_apt_command(cmd, fatal) | 421 | _run_apt_command(cmd, fatal) |
4713 | 178 | 422 | ||
4714 | 179 | 423 | ||
4716 | 180 | def update(fatal=False): | 424 | def apt_update(fatal=False): |
4717 | 181 | """Update local apt cache.""" | 425 | """Update local apt cache.""" |
4718 | 182 | cmd = ['apt-get', 'update'] | 426 | cmd = ['apt-get', 'update'] |
4719 | 183 | _run_apt_command(cmd, fatal) | 427 | _run_apt_command(cmd, fatal) |
4720 | 184 | 428 | ||
4721 | 185 | 429 | ||
4724 | 186 | def purge(packages, fatal=False): | 430 | def apt_purge(packages, fatal=False): |
4725 | 187 | """Purge one or more packages.""" | 431 | """Purge one or more packages. |
4726 | 432 | |||
4727 | 433 | :param packages: Package(s) to install | ||
4728 | 434 | :type packages: Option[str, List[str]] | ||
4729 | 435 | :param fatal: Whether the command's output should be checked and | ||
4730 | 436 | retried. | ||
4731 | 437 | :type fatal: bool | ||
4732 | 438 | :raises: subprocess.CalledProcessError | ||
4733 | 439 | """ | ||
4734 | 188 | cmd = ['apt-get', '--assume-yes', 'purge'] | 440 | cmd = ['apt-get', '--assume-yes', 'purge'] |
4736 | 189 | if isinstance(packages, six.string_types): | 441 | if isinstance(packages, str): |
4737 | 190 | cmd.append(packages) | 442 | cmd.append(packages) |
4738 | 191 | else: | 443 | else: |
4739 | 192 | cmd.extend(packages) | 444 | cmd.extend(packages) |
4740 | @@ -194,11 +446,26 @@ | |||
4741 | 194 | _run_apt_command(cmd, fatal) | 446 | _run_apt_command(cmd, fatal) |
4742 | 195 | 447 | ||
4743 | 196 | 448 | ||
4744 | 449 | def apt_autoremove(purge=True, fatal=False): | ||
4745 | 450 | """Purge one or more packages. | ||
4746 | 451 | :param purge: Whether the ``--purge`` option should be passed on or not. | ||
4747 | 452 | :type purge: bool | ||
4748 | 453 | :param fatal: Whether the command's output should be checked and | ||
4749 | 454 | retried. | ||
4750 | 455 | :type fatal: bool | ||
4751 | 456 | :raises: subprocess.CalledProcessError | ||
4752 | 457 | """ | ||
4753 | 458 | cmd = ['apt-get', '--assume-yes', 'autoremove'] | ||
4754 | 459 | if purge: | ||
4755 | 460 | cmd.append('--purge') | ||
4756 | 461 | _run_apt_command(cmd, fatal) | ||
4757 | 462 | |||
4758 | 463 | |||
4759 | 197 | def apt_mark(packages, mark, fatal=False): | 464 | def apt_mark(packages, mark, fatal=False): |
4760 | 198 | """Flag one or more packages using apt-mark.""" | 465 | """Flag one or more packages using apt-mark.""" |
4761 | 199 | log("Marking {} as {}".format(packages, mark)) | 466 | log("Marking {} as {}".format(packages, mark)) |
4762 | 200 | cmd = ['apt-mark', mark] | 467 | cmd = ['apt-mark', mark] |
4764 | 201 | if isinstance(packages, six.string_types): | 468 | if isinstance(packages, str): |
4765 | 202 | cmd.append(packages) | 469 | cmd.append(packages) |
4766 | 203 | else: | 470 | else: |
4767 | 204 | cmd.extend(packages) | 471 | cmd.extend(packages) |
4768 | @@ -217,7 +484,154 @@ | |||
4769 | 217 | return apt_mark(packages, 'unhold', fatal=fatal) | 484 | return apt_mark(packages, 'unhold', fatal=fatal) |
4770 | 218 | 485 | ||
4771 | 219 | 486 | ||
4773 | 220 | def add_source(source, key=None): | 487 | def import_key(key): |
4774 | 488 | """Import an ASCII Armor key. | ||
4775 | 489 | |||
4776 | 490 | A Radix64 format keyid is also supported for backwards | ||
4777 | 491 | compatibility. In this case Ubuntu keyserver will be | ||
4778 | 492 | queried for a key via HTTPS by its keyid. This method | ||
4779 | 493 | is less preferable because https proxy servers may | ||
4780 | 494 | require traffic decryption which is equivalent to a | ||
4781 | 495 | man-in-the-middle attack (a proxy server impersonates | ||
4782 | 496 | keyserver TLS certificates and has to be explicitly | ||
4783 | 497 | trusted by the system). | ||
4784 | 498 | |||
4785 | 499 | :param key: A GPG key in ASCII armor format, | ||
4786 | 500 | including BEGIN and END markers or a keyid. | ||
4787 | 501 | :type key: (bytes, str) | ||
4788 | 502 | :raises: GPGKeyError if the key could not be imported | ||
4789 | 503 | """ | ||
4790 | 504 | key = key.strip() | ||
4791 | 505 | if '-' in key or '\n' in key: | ||
4792 | 506 | # Send everything not obviously a keyid to GPG to import, as | ||
4793 | 507 | # we trust its validation better than our own. eg. handling | ||
4794 | 508 | # comments before the key. | ||
4795 | 509 | log("PGP key found (looks like ASCII Armor format)", level=DEBUG) | ||
4796 | 510 | if ('-----BEGIN PGP PUBLIC KEY BLOCK-----' in key and | ||
4797 | 511 | '-----END PGP PUBLIC KEY BLOCK-----' in key): | ||
4798 | 512 | log("Writing provided PGP key in the binary format", level=DEBUG) | ||
4799 | 513 | key_bytes = key.encode('utf-8') | ||
4800 | 514 | key_name = _get_keyid_by_gpg_key(key_bytes) | ||
4801 | 515 | key_gpg = _dearmor_gpg_key(key_bytes) | ||
4802 | 516 | _write_apt_gpg_keyfile(key_name=key_name, key_material=key_gpg) | ||
4803 | 517 | else: | ||
4804 | 518 | raise GPGKeyError("ASCII armor markers missing from GPG key") | ||
4805 | 519 | else: | ||
4806 | 520 | log("PGP key found (looks like Radix64 format)", level=WARNING) | ||
4807 | 521 | log("SECURELY importing PGP key from keyserver; " | ||
4808 | 522 | "full key not provided.", level=WARNING) | ||
4809 | 523 | # as of bionic add-apt-repository uses curl with an HTTPS keyserver URL | ||
4810 | 524 | # to retrieve GPG keys. `apt-key adv` command is deprecated as is | ||
4811 | 525 | # apt-key in general as noted in its manpage. See lp:1433761 for more | ||
4812 | 526 | # history. Instead, /etc/apt/trusted.gpg.d is used directly to drop | ||
4813 | 527 | # gpg | ||
4814 | 528 | key_asc = _get_key_by_keyid(key) | ||
4815 | 529 | # write the key in GPG format so that apt-key list shows it | ||
4816 | 530 | key_gpg = _dearmor_gpg_key(key_asc) | ||
4817 | 531 | _write_apt_gpg_keyfile(key_name=key, key_material=key_gpg) | ||
4818 | 532 | |||
4819 | 533 | |||
4820 | 534 | def _get_keyid_by_gpg_key(key_material): | ||
4821 | 535 | """Get a GPG key fingerprint by GPG key material. | ||
4822 | 536 | Gets a GPG key fingerprint (40-digit, 160-bit) by the ASCII armor-encoded | ||
4823 | 537 | or binary GPG key material. Can be used, for example, to generate file | ||
4824 | 538 | names for keys passed via charm options. | ||
4825 | 539 | |||
4826 | 540 | :param key_material: ASCII armor-encoded or binary GPG key material | ||
4827 | 541 | :type key_material: bytes | ||
4828 | 542 | :raises: GPGKeyError if invalid key material has been provided | ||
4829 | 543 | :returns: A GPG key fingerprint | ||
4830 | 544 | :rtype: str | ||
4831 | 545 | """ | ||
4832 | 546 | # Use the same gpg command for both Xenial and Bionic | ||
4833 | 547 | cmd = 'gpg --with-colons --with-fingerprint' | ||
4834 | 548 | ps = subprocess.Popen(cmd.split(), | ||
4835 | 549 | stdout=subprocess.PIPE, | ||
4836 | 550 | stderr=subprocess.PIPE, | ||
4837 | 551 | stdin=subprocess.PIPE) | ||
4838 | 552 | out, err = ps.communicate(input=key_material) | ||
4839 | 553 | out = out.decode('utf-8') | ||
4840 | 554 | err = err.decode('utf-8') | ||
4841 | 555 | if 'gpg: no valid OpenPGP data found.' in err: | ||
4842 | 556 | raise GPGKeyError('Invalid GPG key material provided') | ||
4843 | 557 | # from gnupg2 docs: fpr :: Fingerprint (fingerprint is in field 10) | ||
4844 | 558 | return re.search(r"^fpr:{9}([0-9A-F]{40}):$", out, re.MULTILINE).group(1) | ||
4845 | 559 | |||
4846 | 560 | |||
4847 | 561 | def _get_key_by_keyid(keyid): | ||
4848 | 562 | """Get a key via HTTPS from the Ubuntu keyserver. | ||
4849 | 563 | Different key ID formats are supported by SKS keyservers (the longer ones | ||
4850 | 564 | are more secure, see "dead beef attack" and https://evil32.com/). Since | ||
4851 | 565 | HTTPS is used, if SSLBump-like HTTPS proxies are in place, they will | ||
4852 | 566 | impersonate keyserver.ubuntu.com and generate a certificate with | ||
4853 | 567 | keyserver.ubuntu.com in the CN field or in SubjAltName fields of a | ||
4854 | 568 | certificate. If such proxy behavior is expected it is necessary to add the | ||
4855 | 569 | CA certificate chain containing the intermediate CA of the SSLBump proxy to | ||
4856 | 570 | every machine that this code runs on via ca-certs cloud-init directive (via | ||
4857 | 571 | cloudinit-userdata model-config) or via other means (such as through a | ||
4858 | 572 | custom charm option). Also note that DNS resolution for the hostname in a | ||
4859 | 573 | URL is done at a proxy server - not at the client side. | ||
4860 | 574 | |||
4861 | 575 | 8-digit (32 bit) key ID | ||
4862 | 576 | https://keyserver.ubuntu.com/pks/lookup?search=0x4652B4E6 | ||
4863 | 577 | 16-digit (64 bit) key ID | ||
4864 | 578 | https://keyserver.ubuntu.com/pks/lookup?search=0x6E85A86E4652B4E6 | ||
4865 | 579 | 40-digit key ID: | ||
4866 | 580 | https://keyserver.ubuntu.com/pks/lookup?search=0x35F77D63B5CEC106C577ED856E85A86E4652B4E6 | ||
4867 | 581 | |||
4868 | 582 | :param keyid: An 8, 16 or 40 hex digit keyid to find a key for | ||
4869 | 583 | :type keyid: (bytes, str) | ||
4870 | 584 | :returns: A key material for the specified GPG key id | ||
4871 | 585 | :rtype: (str, bytes) | ||
4872 | 586 | :raises: subprocess.CalledProcessError | ||
4873 | 587 | """ | ||
4874 | 588 | # options=mr - machine-readable output (disables html wrappers) | ||
4875 | 589 | keyserver_url = ('https://keyserver.ubuntu.com' | ||
4876 | 590 | '/pks/lookup?op=get&options=mr&exact=on&search=0x{}') | ||
4877 | 591 | curl_cmd = ['curl', keyserver_url.format(keyid)] | ||
4878 | 592 | # use proxy server settings in order to retrieve the key | ||
4879 | 593 | return subprocess.check_output(curl_cmd, | ||
4880 | 594 | env=env_proxy_settings(['https', 'no_proxy'])) | ||
4881 | 595 | |||
4882 | 596 | |||
4883 | 597 | def _dearmor_gpg_key(key_asc): | ||
4884 | 598 | """Converts a GPG key in the ASCII armor format to the binary format. | ||
4885 | 599 | |||
4886 | 600 | :param key_asc: A GPG key in ASCII armor format. | ||
4887 | 601 | :type key_asc: (str, bytes) | ||
4888 | 602 | :returns: A GPG key in binary format | ||
4889 | 603 | :rtype: (str, bytes) | ||
4890 | 604 | :raises: GPGKeyError | ||
4891 | 605 | """ | ||
4892 | 606 | ps = subprocess.Popen(['gpg', '--dearmor'], | ||
4893 | 607 | stdout=subprocess.PIPE, | ||
4894 | 608 | stderr=subprocess.PIPE, | ||
4895 | 609 | stdin=subprocess.PIPE) | ||
4896 | 610 | out, err = ps.communicate(input=key_asc) | ||
4897 | 611 | # no need to decode output as it is binary (invalid utf-8), only error | ||
4898 | 612 | err = err.decode('utf-8') | ||
4899 | 613 | if 'gpg: no valid OpenPGP data found.' in err: | ||
4900 | 614 | raise GPGKeyError('Invalid GPG key material. Check your network setup' | ||
4901 | 615 | ' (MTU, routing, DNS) and/or proxy server settings' | ||
4902 | 616 | ' as well as destination keyserver status.') | ||
4903 | 617 | else: | ||
4904 | 618 | return out | ||
4905 | 619 | |||
4906 | 620 | |||
4907 | 621 | def _write_apt_gpg_keyfile(key_name, key_material): | ||
4908 | 622 | """Writes GPG key material into a file at a provided path. | ||
4909 | 623 | |||
4910 | 624 | :param key_name: A key name to use for a key file (could be a fingerprint) | ||
4911 | 625 | :type key_name: str | ||
4912 | 626 | :param key_material: A GPG key material (binary) | ||
4913 | 627 | :type key_material: (str, bytes) | ||
4914 | 628 | """ | ||
4915 | 629 | with open('/etc/apt/trusted.gpg.d/{}.gpg'.format(key_name), | ||
4916 | 630 | 'wb') as keyf: | ||
4917 | 631 | keyf.write(key_material) | ||
4918 | 632 | |||
4919 | 633 | |||
4920 | 634 | def add_source(source, key=None, fail_invalid=False): | ||
4921 | 221 | """Add a package source to this system. | 635 | """Add a package source to this system. |
4922 | 222 | 636 | ||
4923 | 223 | @param source: a URL or sources.list entry, as supported by | 637 | @param source: a URL or sources.list entry, as supported by |
4924 | @@ -233,95 +647,349 @@ | |||
4925 | 233 | such as 'cloud:icehouse' | 647 | such as 'cloud:icehouse' |
4926 | 234 | 'distro' may be used as a noop | 648 | 'distro' may be used as a noop |
4927 | 235 | 649 | ||
4928 | 650 | Full list of source specifications supported by the function are: | ||
4929 | 651 | |||
4930 | 652 | 'distro': A NOP; i.e. it has no effect. | ||
4931 | 653 | 'proposed': the proposed deb spec [2] is wrtten to | ||
4932 | 654 | /etc/apt/sources.list/proposed | ||
4933 | 655 | 'distro-proposed': adds <version>-proposed to the debs [2] | ||
4934 | 656 | 'ppa:<ppa-name>': add-apt-repository --yes <ppa_name> | ||
4935 | 657 | 'deb <deb-spec>': add-apt-repository --yes deb <deb-spec> | ||
4936 | 658 | 'http://....': add-apt-repository --yes http://... | ||
4937 | 659 | 'cloud-archive:<spec>': add-apt-repository --yes cloud-archive:<spec> | ||
4938 | 660 | 'cloud:<release>[-staging]': specify a Cloud Archive pocket <release> with | ||
4939 | 661 | optional staging version. If staging is used then the staging PPA [2] | ||
4940 | 662 | will be used. If staging is NOT used then the cloud archive [3] will be | ||
4941 | 663 | added, and the 'ubuntu-cloud-keyring' package will be added for the | ||
4942 | 664 | current distro. | ||
4943 | 665 | '<openstack-version>': translate to cloud:<release> based on the current | ||
4944 | 666 | distro version (i.e. for 'ussuri' this will either be 'bionic-ussuri' or | ||
4945 | 667 | 'distro'). | ||
4946 | 668 | '<openstack-version>/proposed': as above, but for proposed. | ||
4947 | 669 | |||
4948 | 670 | Otherwise the source is not recognised and this is logged to the juju log. | ||
4949 | 671 | However, no error is raised, unless fail_invalid is True. | ||
4950 | 672 | |||
4951 | 673 | [1] deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main | ||
4952 | 674 | where {} is replaced with the derived pocket name. | ||
4953 | 675 | [2] deb http://archive.ubuntu.com/ubuntu {}-proposed \ | ||
4954 | 676 | main universe multiverse restricted | ||
4955 | 677 | where {} is replaced with the lsb_release codename (e.g. xenial) | ||
4956 | 678 | [3] deb http://ubuntu-cloud.archive.canonical.com/ubuntu <pocket> | ||
4957 | 679 | to /etc/apt/sources.list.d/cloud-archive-list | ||
4958 | 680 | |||
4959 | 236 | @param key: A key to be added to the system's APT keyring and used | 681 | @param key: A key to be added to the system's APT keyring and used |
4960 | 237 | to verify the signatures on packages. Ideally, this should be an | 682 | to verify the signatures on packages. Ideally, this should be an |
4961 | 238 | ASCII format GPG public key including the block headers. A GPG key | 683 | ASCII format GPG public key including the block headers. A GPG key |
4962 | 239 | id may also be used, but be aware that only insecure protocols are | 684 | id may also be used, but be aware that only insecure protocols are |
4963 | 240 | available to retrieve the actual public key from a public keyserver | 685 | available to retrieve the actual public key from a public keyserver |
4964 | 241 | placing your Juju environment at risk. ppa and cloud archive keys | 686 | placing your Juju environment at risk. ppa and cloud archive keys |
4966 | 242 | are securely added automtically, so sould not be provided. | 687 | are securely added automatically, so should not be provided. |
4967 | 688 | |||
4968 | 689 | @param fail_invalid: (boolean) if True, then the function raises a | ||
4969 | 690 | SourceConfigError if there is no matching installation source. | ||
4970 | 691 | |||
4971 | 692 | @raises SourceConfigError() if for cloud:<pocket>, the <pocket> is not a | ||
4972 | 693 | valid pocket in CLOUD_ARCHIVE_POCKETS | ||
4973 | 243 | """ | 694 | """ |
4974 | 695 | # extract the OpenStack versions from the CLOUD_ARCHIVE_POCKETS; can't use | ||
4975 | 696 | # the list in contrib.openstack.utils as it might not be included in | ||
4976 | 697 | # classic charms and would break everything. Having OpenStack specific | ||
4977 | 698 | # code in this file is a bit of an antipattern, anyway. | ||
4978 | 699 | os_versions_regex = "({})".format("|".join(OPENSTACK_RELEASES)) | ||
4979 | 700 | |||
4980 | 701 | _mapping = OrderedDict([ | ||
4981 | 702 | (r"^distro$", lambda: None), # This is a NOP | ||
4982 | 703 | (r"^(?:proposed|distro-proposed)$", _add_proposed), | ||
4983 | 704 | (r"^cloud-archive:(.*)$", _add_apt_repository), | ||
4984 | 705 | (r"^((?:deb |http:|https:|ppa:).*)$", _add_apt_repository), | ||
4985 | 706 | (r"^cloud:(.*)-(.*)\/staging$", _add_cloud_staging), | ||
4986 | 707 | (r"^cloud:(.*)-(ovn-.*)$", _add_cloud_distro_check), | ||
4987 | 708 | (r"^cloud:(.*)-(.*)$", _add_cloud_distro_check), | ||
4988 | 709 | (r"^cloud:(.*)$", _add_cloud_pocket), | ||
4989 | 710 | (r"^snap:.*-(.*)-(.*)$", _add_cloud_distro_check), | ||
4990 | 711 | (r"^{}\/proposed$".format(os_versions_regex), | ||
4991 | 712 | _add_bare_openstack_proposed), | ||
4992 | 713 | (r"^{}$".format(os_versions_regex), _add_bare_openstack), | ||
4993 | 714 | ]) | ||
4994 | 244 | if source is None: | 715 | if source is None: |
4995 | 245 | log('Source is not present. Skipping') | ||
4996 | 246 | return | ||
4997 | 247 | |||
4998 | 248 | if (source.startswith('ppa:') or | ||
4999 | 249 | source.startswith('http') or | ||
5000 | 250 | source.startswith('deb ') or |
The diff has been truncated for viewing.
lgtm thanks!