Merge lp:~verterok/charms/xenial/conn-check/focal into lp:~ubuntuone-hackers/charms/xenial/conn-check/focal
- Xenial Xerus (16.04)
- focal
- Merge into focal
Proposed by
Guillermo Gonzalez
Status: | Merged |
---|---|
Approved by: | Guillermo Gonzalez |
Approved revision: | 62 |
Merged at revision: | 58 |
Proposed branch: | lp:~verterok/charms/xenial/conn-check/focal |
Merge into: | lp:~ubuntuone-hackers/charms/xenial/conn-check/focal |
Diff against target: |
5838 lines (+3744/-563) 33 files modified
hooks/charmhelpers/__init__.py (+67/-19) hooks/charmhelpers/contrib/ansible/__init__.py (+153/-89) hooks/charmhelpers/contrib/charmsupport/nrpe.py (+187/-31) hooks/charmhelpers/contrib/templating/contexts.py (+7/-7) hooks/charmhelpers/core/decorators.py (+38/-0) hooks/charmhelpers/core/hookenv.py (+658/-59) hooks/charmhelpers/core/host.py (+655/-102) hooks/charmhelpers/core/host_factory/centos.py (+16/-0) hooks/charmhelpers/core/host_factory/ubuntu.py (+73/-5) hooks/charmhelpers/core/kernel.py (+2/-2) hooks/charmhelpers/core/services/base.py (+22/-10) hooks/charmhelpers/core/services/helpers.py (+2/-2) hooks/charmhelpers/core/strutils.py (+75/-14) hooks/charmhelpers/core/sysctl.py (+32/-11) hooks/charmhelpers/core/templating.py (+21/-17) hooks/charmhelpers/core/unitdata.py (+17/-9) hooks/charmhelpers/fetch/__init__.py (+29/-18) hooks/charmhelpers/fetch/archiveurl.py (+35/-27) hooks/charmhelpers/fetch/bzrurl.py (+2/-2) hooks/charmhelpers/fetch/centos.py (+4/-5) hooks/charmhelpers/fetch/giturl.py (+2/-2) hooks/charmhelpers/fetch/python/__init__.py (+13/-0) hooks/charmhelpers/fetch/python/debug.py (+52/-0) hooks/charmhelpers/fetch/python/packages.py (+148/-0) hooks/charmhelpers/fetch/python/rpdb.py (+56/-0) hooks/charmhelpers/fetch/python/version.py (+32/-0) hooks/charmhelpers/fetch/snap.py (+150/-0) hooks/charmhelpers/fetch/ubuntu.py (+822/-125) hooks/charmhelpers/fetch/ubuntu_apt_pkg.py (+327/-0) hooks/charmhelpers/osplatform.py (+32/-2) hooks/hooks.py (+1/-1) playbook.yaml (+13/-3) roles/nrpe-external-master/tasks/main.yaml (+1/-1) |
To merge this branch: | bzr merge lp:~verterok/charms/xenial/conn-check/focal |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
John Paraskevopoulos | Approve | ||
Review via email: mp+445757@code.launchpad.net |
Commit message
update charm to work in focal
Description of the change
Most of the changes come from an automated pull of a newer charmhelpers with Python 3.8 support. Please review the individual commits for easier reviewing.
To post a comment you must log in.
- 62. By Guillermo Gonzalez
-
use xenial for all distribution release comparisons
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'hooks/charmhelpers/__init__.py' |
2 | --- hooks/charmhelpers/__init__.py 2016-12-20 14:35:00 +0000 |
3 | +++ hooks/charmhelpers/__init__.py 2023-06-30 13:58:42 +0000 |
4 | @@ -14,23 +14,71 @@ |
5 | |
6 | # Bootstrap charm-helpers, installing its dependencies if necessary using |
7 | # only standard libraries. |
8 | +import functools |
9 | +import inspect |
10 | import subprocess |
11 | -import sys |
12 | - |
13 | -try: |
14 | - import six # flake8: noqa |
15 | -except ImportError: |
16 | - if sys.version_info.major == 2: |
17 | - subprocess.check_call(['apt-get', 'install', '-y', 'python-six']) |
18 | - else: |
19 | - subprocess.check_call(['apt-get', 'install', '-y', 'python3-six']) |
20 | - import six # flake8: noqa |
21 | - |
22 | -try: |
23 | - import yaml # flake8: noqa |
24 | -except ImportError: |
25 | - if sys.version_info.major == 2: |
26 | - subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml']) |
27 | - else: |
28 | - subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml']) |
29 | - import yaml # flake8: noqa |
30 | + |
31 | + |
32 | +try: |
33 | + import yaml # NOQA:F401 |
34 | +except ImportError: |
35 | + subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml']) |
36 | + import yaml # NOQA:F401 |
37 | + |
38 | + |
39 | +# Holds a list of mapping of mangled function names that have been deprecated |
40 | +# using the @deprecate decorator below. This is so that the warning is only |
41 | +# printed once for each usage of the function. |
42 | +__deprecated_functions = {} |
43 | + |
44 | + |
45 | +def deprecate(warning, date=None, log=None): |
46 | + """Add a deprecation warning the first time the function is used. |
47 | + |
48 | + The date which is a string in semi-ISO8660 format indicates the year-month |
49 | + that the function is officially going to be removed. |
50 | + |
51 | + usage: |
52 | + |
53 | + @deprecate('use core/fetch/add_source() instead', '2017-04') |
54 | + def contributed_add_source_thing(...): |
55 | + ... |
56 | + |
57 | + And it then prints to the log ONCE that the function is deprecated. |
58 | + The reason for passing the logging function (log) is so that hookenv.log |
59 | + can be used for a charm if needed. |
60 | + |
61 | + :param warning: String to indicate what is to be used instead. |
62 | + :param date: Optional string in YYYY-MM format to indicate when the |
63 | + function will definitely (probably) be removed. |
64 | + :param log: The log function to call in order to log. If None, logs to |
65 | + stdout |
66 | + """ |
67 | + def wrap(f): |
68 | + |
69 | + @functools.wraps(f) |
70 | + def wrapped_f(*args, **kwargs): |
71 | + try: |
72 | + module = inspect.getmodule(f) |
73 | + file = inspect.getsourcefile(f) |
74 | + lines = inspect.getsourcelines(f) |
75 | + f_name = "{}-{}-{}..{}-{}".format( |
76 | + module.__name__, file, lines[0], lines[-1], f.__name__) |
77 | + except (IOError, TypeError): |
78 | + # assume it was local, so just use the name of the function |
79 | + f_name = f.__name__ |
80 | + if f_name not in __deprecated_functions: |
81 | + __deprecated_functions[f_name] = True |
82 | + s = "DEPRECATION WARNING: Function {} is being removed".format( |
83 | + f.__name__) |
84 | + if date: |
85 | + s = "{} on/around {}".format(s, date) |
86 | + if warning: |
87 | + s = "{} : {}".format(s, warning) |
88 | + if log: |
89 | + log(s) |
90 | + else: |
91 | + print(s) |
92 | + return f(*args, **kwargs) |
93 | + return wrapped_f |
94 | + return wrap |
95 | |
96 | === modified file 'hooks/charmhelpers/contrib/ansible/__init__.py' |
97 | --- hooks/charmhelpers/contrib/ansible/__init__.py 2016-12-20 14:35:00 +0000 |
98 | +++ hooks/charmhelpers/contrib/ansible/__init__.py 2023-06-30 13:58:42 +0000 |
99 | @@ -16,90 +16,107 @@ |
100 | # |
101 | # Authors: |
102 | # Charm Helpers Developers <juju@lists.ubuntu.com> |
103 | -"""Charm Helpers ansible - declare the state of your machines. |
104 | - |
105 | -This helper enables you to declare your machine state, rather than |
106 | -program it procedurally (and have to test each change to your procedures). |
107 | -Your install hook can be as simple as:: |
108 | - |
109 | - {{{ |
110 | - import charmhelpers.contrib.ansible |
111 | - |
112 | - |
113 | +""" |
114 | +The ansible package enables you to easily use the configuration management |
115 | +tool `Ansible`_ to setup and configure your charm. All of your charm |
116 | +configuration options and relation-data are available as regular Ansible |
117 | +variables which can be used in your playbooks and templates. |
118 | + |
119 | +.. _Ansible: https://www.ansible.com/ |
120 | + |
121 | +Usage |
122 | +===== |
123 | + |
124 | +Here is an example directory structure for a charm to get you started:: |
125 | + |
126 | + charm-ansible-example/ |
127 | + |-- ansible |
128 | + | |-- playbook.yaml |
129 | + | `-- templates |
130 | + | `-- example.j2 |
131 | + |-- config.yaml |
132 | + |-- copyright |
133 | + |-- icon.svg |
134 | + |-- layer.yaml |
135 | + |-- metadata.yaml |
136 | + |-- reactive |
137 | + | `-- example.py |
138 | + |-- README.md |
139 | + |
140 | +Running a playbook called ``playbook.yaml`` when the ``install`` hook is run |
141 | +can be as simple as:: |
142 | + |
143 | + from charmhelpers.contrib import ansible |
144 | + from charms.reactive import hook |
145 | + |
146 | + @hook('install') |
147 | def install(): |
148 | - charmhelpers.contrib.ansible.install_ansible_support() |
149 | - charmhelpers.contrib.ansible.apply_playbook('playbooks/install.yaml') |
150 | - }}} |
151 | - |
152 | -and won't need to change (nor will its tests) when you change the machine |
153 | -state. |
154 | - |
155 | -All of your juju config and relation-data are available as template |
156 | -variables within your playbooks and templates. An install playbook looks |
157 | -something like:: |
158 | - |
159 | - {{{ |
160 | + ansible.install_ansible_support() |
161 | + ansible.apply_playbook('ansible/playbook.yaml') |
162 | + |
163 | +Here is an example playbook that uses the ``template`` module to template the |
164 | +file ``example.j2`` to the charm host and then uses the ``debug`` module to |
165 | +print out all the host and Juju variables that you can use in your playbooks. |
166 | +Note that you must target ``localhost`` as the playbook is run locally on the |
167 | +charm host:: |
168 | + |
169 | --- |
170 | - hosts: localhost |
171 | - user: root |
172 | - |
173 | tasks: |
174 | - - name: Add private repositories. |
175 | + - name: Template a file |
176 | template: |
177 | - src: ../templates/private-repositories.list.jinja2 |
178 | - dest: /etc/apt/sources.list.d/private.list |
179 | - |
180 | - - name: Update the cache. |
181 | - apt: update_cache=yes |
182 | - |
183 | - - name: Install dependencies. |
184 | - apt: pkg={{ item }} |
185 | - with_items: |
186 | - - python-mimeparse |
187 | - - python-webob |
188 | - - sunburnt |
189 | - |
190 | - - name: Setup groups. |
191 | - group: name={{ item.name }} gid={{ item.gid }} |
192 | - with_items: |
193 | - - { name: 'deploy_user', gid: 1800 } |
194 | - - { name: 'service_user', gid: 1500 } |
195 | - |
196 | - ... |
197 | - }}} |
198 | - |
199 | -Read more online about `playbooks`_ and standard ansible `modules`_. |
200 | - |
201 | -.. _playbooks: http://www.ansibleworks.com/docs/playbooks.html |
202 | -.. _modules: http://www.ansibleworks.com/docs/modules.html |
203 | - |
204 | -A further feature os the ansible hooks is to provide a light weight "action" |
205 | + src: templates/example.j2 |
206 | + dest: /tmp/example.j2 |
207 | + |
208 | + - name: Print all variables available to Ansible |
209 | + debug: |
210 | + var: vars |
211 | + |
212 | +Read more online about `playbooks`_ and standard Ansible `modules`_. |
213 | + |
214 | +.. _playbooks: https://docs.ansible.com/ansible/latest/user_guide/playbooks.html |
215 | +.. _modules: https://docs.ansible.com/ansible/latest/user_guide/modules.html |
216 | + |
217 | +A further feature of the Ansible hooks is to provide a light weight "action" |
218 | scripting tool. This is a decorator that you apply to a function, and that |
219 | -function can now receive cli args, and can pass extra args to the playbook. |
220 | - |
221 | -e.g. |
222 | - |
223 | - |
224 | -@hooks.action() |
225 | -def some_action(amount, force="False"): |
226 | - "Usage: some-action AMOUNT [force=True]" # <-- shown on error |
227 | - # process the arguments |
228 | - # do some calls |
229 | - # return extra-vars to be passed to ansible-playbook |
230 | - return { |
231 | - 'amount': int(amount), |
232 | - 'type': force, |
233 | - } |
234 | +function can now receive cli args, and can pass extra args to the playbook:: |
235 | + |
236 | + @hooks.action() |
237 | + def some_action(amount, force="False"): |
238 | + "Usage: some-action AMOUNT [force=True]" # <-- shown on error |
239 | + # process the arguments |
240 | + # do some calls |
241 | + # return extra-vars to be passed to ansible-playbook |
242 | + return { |
243 | + 'amount': int(amount), |
244 | + 'type': force, |
245 | + } |
246 | |
247 | You can now create a symlink to hooks.py that can be invoked like a hook, but |
248 | -with cli params: |
249 | - |
250 | -# link actions/some-action to hooks/hooks.py |
251 | - |
252 | -actions/some-action amount=10 force=true |
253 | +with cli params:: |
254 | + |
255 | + # link actions/some-action to hooks/hooks.py |
256 | + |
257 | + actions/some-action amount=10 force=true |
258 | + |
259 | +Install Ansible via pip |
260 | +======================= |
261 | + |
262 | +If you want to install a specific version of Ansible via pip instead of |
263 | +``install_ansible_support`` which uses APT, consider using the layer options |
264 | +of `layer-basic`_ to install Ansible in a virtualenv:: |
265 | + |
266 | + options: |
267 | + basic: |
268 | + python_packages: ['ansible==2.9.0'] |
269 | + include_system_packages: true |
270 | + use_venv: true |
271 | + |
272 | +.. _layer-basic: https://charmsreactive.readthedocs.io/en/latest/layer-basic.html#layer-configuration |
273 | |
274 | """ |
275 | import os |
276 | +import json |
277 | import stat |
278 | import subprocess |
279 | import functools |
280 | @@ -117,27 +134,63 @@ |
281 | ansible_vars_path = '/etc/ansible/host_vars/localhost' |
282 | |
283 | |
284 | -def install_ansible_support(from_ppa=True, ppa_location='ppa:rquillo/ansible'): |
285 | - """Installs the ansible package. |
286 | - |
287 | - By default it is installed from the `PPA`_ linked from |
288 | - the ansible `website`_ or from a ppa specified by a charm config.. |
289 | - |
290 | - .. _PPA: https://launchpad.net/~rquillo/+archive/ansible |
291 | +def install_ansible_support(from_ppa=True, ppa_location='ppa:ansible/ansible'): |
292 | + """Installs Ansible via APT. |
293 | + |
294 | + By default this installs Ansible from the `PPA`_ linked from |
295 | + the Ansible `website`_ or from a PPA set in ``ppa_location``. |
296 | + |
297 | + .. _PPA: https://launchpad.net/~ansible/+archive/ubuntu/ansible |
298 | .. _website: http://docs.ansible.com/intro_installation.html#latest-releases-via-apt-ubuntu |
299 | |
300 | - If from_ppa is empty, you must ensure that the package is available |
301 | - from a configured repository. |
302 | + If ``from_ppa`` is ``False``, then Ansible will be installed from |
303 | + Ubuntu's Universe repositories. |
304 | """ |
305 | if from_ppa: |
306 | charmhelpers.fetch.add_source(ppa_location) |
307 | charmhelpers.fetch.apt_update(fatal=True) |
308 | charmhelpers.fetch.apt_install('ansible') |
309 | with open(ansible_hosts_path, 'w+') as hosts_file: |
310 | - hosts_file.write('localhost ansible_connection=local') |
311 | + hosts_file.write('localhost ansible_connection=local ansible_remote_tmp=/root/.ansible/tmp') |
312 | |
313 | |
314 | def apply_playbook(playbook, tags=None, extra_vars=None): |
315 | + """Run a playbook. |
316 | + |
317 | + This helper runs a playbook with juju state variables as context, |
318 | + therefore variables set in application config can be used directly. |
319 | + List of tags (--tags) and dictionary with extra_vars (--extra-vars) |
320 | + can be passed as additional parameters. |
321 | + |
322 | + Read more about playbook `_variables`_ online. |
323 | + |
324 | + .. _variables: https://docs.ansible.com/ansible/latest/user_guide/playbooks_variables.html |
325 | + |
326 | + Example:: |
327 | + |
328 | + # Run ansible/playbook.yaml with tag install and pass extra |
329 | + # variables var_a and var_b |
330 | + apply_playbook( |
331 | + playbook='ansible/playbook.yaml', |
332 | + tags=['install'], |
333 | + extra_vars={'var_a': 'val_a', 'var_b': 'val_b'} |
334 | + ) |
335 | + |
336 | + # Run ansible/playbook.yaml with tag config and extra variable nested, |
337 | + # which is passed as json and can be used as dictionary in playbook |
338 | + apply_playbook( |
339 | + playbook='ansible/playbook.yaml', |
340 | + tags=['config'], |
341 | + extra_vars={'nested': {'a': 'value1', 'b': 'value2'}} |
342 | + ) |
343 | + |
344 | + # Custom config file can be passed within extra_vars |
345 | + apply_playbook( |
346 | + playbook='ansible/playbook.yaml', |
347 | + extra_vars="@some_file.json" |
348 | + ) |
349 | + |
350 | + """ |
351 | tags = tags or [] |
352 | tags = ",".join(tags) |
353 | charmhelpers.contrib.templating.contexts.juju_state_to_yaml( |
354 | @@ -146,9 +199,13 @@ |
355 | |
356 | # we want ansible's log output to be unbuffered |
357 | env = os.environ.copy() |
358 | + proxy_settings = charmhelpers.core.hookenv.env_proxy_settings() |
359 | + if proxy_settings: |
360 | + env.update(proxy_settings) |
361 | env['PYTHONUNBUFFERED'] = "1" |
362 | call = [ |
363 | 'ansible-playbook', |
364 | + '-vvv', |
365 | '-c', |
366 | 'local', |
367 | playbook, |
368 | @@ -156,9 +213,17 @@ |
369 | if tags: |
370 | call.extend(['--tags', '{}'.format(tags)]) |
371 | if extra_vars: |
372 | - extra = ["%s=%s" % (k, v) for k, v in extra_vars.items()] |
373 | - call.extend(['--extra-vars', " ".join(extra)]) |
374 | - subprocess.check_call(call, env=env) |
375 | + call.extend(['--extra-vars', json.dumps(extra_vars)]) |
376 | + try: |
377 | + subprocess.check_output(call, env=env) |
378 | + except subprocess.CalledProcessError as e: |
379 | + err_msg = e.output.decode().strip() |
380 | + charmhelpers.core.hookenv.log("Ansible playbook failed with " |
381 | + "{}".format(e), |
382 | + level="ERROR") |
383 | + charmhelpers.core.hookenv.log("Stdout: {}".format(err_msg), |
384 | + level="ERROR") |
385 | + raise e |
386 | |
387 | |
388 | class AnsibleHooks(charmhelpers.core.hookenv.Hooks): |
389 | @@ -170,7 +235,7 @@ |
390 | |
391 | Example:: |
392 | |
393 | - hooks = AnsibleHooks(playbook_path='playbooks/my_machine_state.yaml') |
394 | + hooks = AnsibleHooks(playbook_path='ansible/my_machine_state.yaml') |
395 | |
396 | # All the tasks within my_machine_state.yaml tagged with 'install' |
397 | # will be run automatically after do_custom_work() |
398 | @@ -188,13 +253,12 @@ |
399 | # the hooks which are handled by ansible-only and they'll be registered |
400 | # for you: |
401 | # hooks = AnsibleHooks( |
402 | - # 'playbooks/my_machine_state.yaml', |
403 | + # 'ansible/my_machine_state.yaml', |
404 | # default_hooks=['config-changed', 'start', 'stop']) |
405 | |
406 | if __name__ == "__main__": |
407 | # execute a hook based on the name the program is called by |
408 | hooks.execute(sys.argv) |
409 | - |
410 | """ |
411 | |
412 | def __init__(self, playbook_path, default_hooks=None): |
413 | |
414 | === modified file 'hooks/charmhelpers/contrib/charmsupport/nrpe.py' |
415 | --- hooks/charmhelpers/contrib/charmsupport/nrpe.py 2016-12-20 14:35:00 +0000 |
416 | +++ hooks/charmhelpers/contrib/charmsupport/nrpe.py 2023-06-30 13:58:42 +0000 |
417 | @@ -1,4 +1,4 @@ |
418 | -# Copyright 2014-2015 Canonical Limited. |
419 | +# Copyright 2012-2021 Canonical Limited. |
420 | # |
421 | # Licensed under the Apache License, Version 2.0 (the "License"); |
422 | # you may not use this file except in compliance with the License. |
423 | @@ -13,25 +13,29 @@ |
424 | # limitations under the License. |
425 | |
426 | """Compatibility with the nrpe-external-master charm""" |
427 | -# Copyright 2012 Canonical Ltd. |
428 | # |
429 | # Authors: |
430 | # Matthew Wedgwood <matthew.wedgwood@canonical.com> |
431 | |
432 | -import subprocess |
433 | -import pwd |
434 | +import glob |
435 | import grp |
436 | +import json |
437 | import os |
438 | -import glob |
439 | -import shutil |
440 | +import pwd |
441 | import re |
442 | import shlex |
443 | +import shutil |
444 | +import subprocess |
445 | import yaml |
446 | |
447 | from charmhelpers.core.hookenv import ( |
448 | + application_name, |
449 | config, |
450 | + ERROR, |
451 | + hook_name, |
452 | local_unit, |
453 | log, |
454 | + relation_get, |
455 | relation_ids, |
456 | relation_set, |
457 | relations_of_type, |
458 | @@ -125,7 +129,7 @@ |
459 | |
460 | |
461 | class Check(object): |
462 | - shortname_re = '[A-Za-z0-9-_]+$' |
463 | + shortname_re = '[A-Za-z0-9-_.@]+$' |
464 | service_template = (""" |
465 | #--------------------------------------------------- |
466 | # This file is Juju managed |
467 | @@ -137,10 +141,11 @@ |
468 | """{description} |
469 | check_command check_nrpe!{command} |
470 | servicegroups {nagios_servicegroup} |
471 | +{service_config_overrides} |
472 | }} |
473 | """) |
474 | |
475 | - def __init__(self, shortname, description, check_cmd): |
476 | + def __init__(self, shortname, description, check_cmd, max_check_attempts=None): |
477 | super(Check, self).__init__() |
478 | # XXX: could be better to calculate this from the service name |
479 | if not re.match(self.shortname_re, shortname): |
480 | @@ -153,6 +158,7 @@ |
481 | # The default is: illegal_object_name_chars=`~!$%^&*"|'<>?,()= |
482 | self.description = description |
483 | self.check_cmd = self._locate_cmd(check_cmd) |
484 | + self.max_check_attempts = max_check_attempts |
485 | |
486 | def _get_check_filename(self): |
487 | return os.path.join(NRPE.nrpe_confdir, '{}.cfg'.format(self.command)) |
488 | @@ -171,7 +177,8 @@ |
489 | if os.path.exists(os.path.join(path, parts[0])): |
490 | command = os.path.join(path, parts[0]) |
491 | if len(parts) > 1: |
492 | - command += " " + " ".join(parts[1:]) |
493 | + safe_args = [shlex.quote(arg) for arg in parts[1:]] |
494 | + command += " " + " ".join(safe_args) |
495 | return command |
496 | log('Check command not found: {}'.format(parts[0])) |
497 | return '' |
498 | @@ -193,6 +200,13 @@ |
499 | nrpe_check_file = self._get_check_filename() |
500 | with open(nrpe_check_file, 'w') as nrpe_check_config: |
501 | nrpe_check_config.write("# check {}\n".format(self.shortname)) |
502 | + if nagios_servicegroups: |
503 | + nrpe_check_config.write( |
504 | + "# The following header was added automatically by juju\n") |
505 | + nrpe_check_config.write( |
506 | + "# Modifying it will affect nagios monitoring and alerting\n") |
507 | + nrpe_check_config.write( |
508 | + "# servicegroups: {}\n".format(nagios_servicegroups)) |
509 | nrpe_check_config.write("command[{}]={}\n".format( |
510 | self.command, self.check_cmd)) |
511 | |
512 | @@ -207,12 +221,19 @@ |
513 | nagios_servicegroups): |
514 | self._remove_service_files() |
515 | |
516 | + if self.max_check_attempts: |
517 | + service_config_overrides = ' max_check_attempts {}'.format( |
518 | + self.max_check_attempts |
519 | + ) # Note indentation is here rather than in the template to avoid trailing spaces |
520 | + else: |
521 | + service_config_overrides = '' # empty string to avoid printing 'None' |
522 | templ_vars = { |
523 | 'nagios_hostname': hostname, |
524 | 'nagios_servicegroup': nagios_servicegroups, |
525 | 'description': self.description, |
526 | 'shortname': self.shortname, |
527 | 'command': self.command, |
528 | + 'service_config_overrides': service_config_overrides, |
529 | } |
530 | nrpe_service_text = Check.service_template.format(**templ_vars) |
531 | nrpe_service_file = self._get_service_filename(hostname) |
532 | @@ -227,6 +248,7 @@ |
533 | nagios_logdir = '/var/log/nagios' |
534 | nagios_exportdir = '/var/lib/nagios/export' |
535 | nrpe_confdir = '/etc/nagios/nrpe.d' |
536 | + homedir = '/var/lib/nagios' # home dir provided by nagios-nrpe-server |
537 | |
538 | def __init__(self, hostname=None, primary=True): |
539 | super(NRPE, self).__init__() |
540 | @@ -251,11 +273,28 @@ |
541 | relation = relation_ids('nrpe-external-master') |
542 | if relation: |
543 | log("Setting charm primary status {}".format(primary)) |
544 | - for rid in relation_ids('nrpe-external-master'): |
545 | + for rid in relation: |
546 | relation_set(relation_id=rid, relation_settings={'primary': self.primary}) |
547 | + self.remove_check_queue = set() |
548 | + |
549 | + @classmethod |
550 | + def does_nrpe_conf_dir_exist(cls): |
551 | + """Return True if th nrpe_confdif directory exists.""" |
552 | + return os.path.isdir(cls.nrpe_confdir) |
553 | |
554 | def add_check(self, *args, **kwargs): |
555 | + shortname = None |
556 | + if kwargs.get('shortname') is None: |
557 | + if len(args) > 0: |
558 | + shortname = args[0] |
559 | + else: |
560 | + shortname = kwargs['shortname'] |
561 | + |
562 | self.checks.append(Check(*args, **kwargs)) |
563 | + try: |
564 | + self.remove_check_queue.remove(shortname) |
565 | + except KeyError: |
566 | + pass |
567 | |
568 | def remove_check(self, *args, **kwargs): |
569 | if kwargs.get('shortname') is None: |
570 | @@ -272,12 +311,13 @@ |
571 | |
572 | check = Check(*args, **kwargs) |
573 | check.remove(self.hostname) |
574 | + self.remove_check_queue.add(kwargs['shortname']) |
575 | |
576 | def write(self): |
577 | try: |
578 | nagios_uid = pwd.getpwnam('nagios').pw_uid |
579 | nagios_gid = grp.getgrnam('nagios').gr_gid |
580 | - except: |
581 | + except Exception: |
582 | log("Nagios user not set up, nrpe checks not updated") |
583 | return |
584 | |
585 | @@ -287,19 +327,50 @@ |
586 | |
587 | nrpe_monitors = {} |
588 | monitors = {"monitors": {"remote": {"nrpe": nrpe_monitors}}} |
589 | + |
590 | + # check that the charm can write to the conf dir. If not, then nagios |
591 | + # probably isn't installed, and we can defer. |
592 | + if not self.does_nrpe_conf_dir_exist(): |
593 | + return |
594 | + |
595 | for nrpecheck in self.checks: |
596 | nrpecheck.write(self.nagios_context, self.hostname, |
597 | self.nagios_servicegroups) |
598 | nrpe_monitors[nrpecheck.shortname] = { |
599 | "command": nrpecheck.command, |
600 | } |
601 | + # If we were passed max_check_attempts, add that to the relation data |
602 | + if nrpecheck.max_check_attempts is not None: |
603 | + nrpe_monitors[nrpecheck.shortname]['max_check_attempts'] = nrpecheck.max_check_attempts |
604 | |
605 | - service('restart', 'nagios-nrpe-server') |
606 | + # update-status hooks are configured to firing every 5 minutes by |
607 | + # default. When nagios-nrpe-server is restarted, the nagios server |
608 | + # reports checks failing causing unnecessary alerts. Let's not restart |
609 | + # on update-status hooks. |
610 | + if not hook_name() == 'update-status': |
611 | + service('restart', 'nagios-nrpe-server') |
612 | |
613 | monitor_ids = relation_ids("local-monitors") + \ |
614 | relation_ids("nrpe-external-master") |
615 | for rid in monitor_ids: |
616 | - relation_set(relation_id=rid, monitors=yaml.dump(monitors)) |
617 | + reldata = relation_get(unit=local_unit(), rid=rid) |
618 | + if 'monitors' in reldata: |
619 | + # update the existing set of monitors with the new data |
620 | + old_monitors = yaml.safe_load(reldata['monitors']) |
621 | + old_nrpe_monitors = old_monitors['monitors']['remote']['nrpe'] |
622 | + # remove keys that are in the remove_check_queue |
623 | + old_nrpe_monitors = {k: v for k, v in old_nrpe_monitors.items() |
624 | + if k not in self.remove_check_queue} |
625 | + # update/add nrpe_monitors |
626 | + old_nrpe_monitors.update(nrpe_monitors) |
627 | + old_monitors['monitors']['remote']['nrpe'] = old_nrpe_monitors |
628 | + # write back to the relation |
629 | + relation_set(relation_id=rid, monitors=yaml.dump(old_monitors)) |
630 | + else: |
631 | + # write a brand new set of monitors, as no existing ones. |
632 | + relation_set(relation_id=rid, monitors=yaml.dump(monitors)) |
633 | + |
634 | + self.remove_check_queue.clear() |
635 | |
636 | |
637 | def get_nagios_hostcontext(relation_name='nrpe-external-master'): |
638 | @@ -338,14 +409,29 @@ |
639 | return unit |
640 | |
641 | |
642 | -def add_init_service_checks(nrpe, services, unit_name): |
643 | +def add_init_service_checks(nrpe, services, unit_name, immediate_check=True): |
644 | """ |
645 | Add checks for each service in list |
646 | |
647 | :param NRPE nrpe: NRPE object to add check to |
648 | :param list services: List of services to check |
649 | :param str unit_name: Unit name to use in check description |
650 | + :param bool immediate_check: For sysv init, run the service check immediately |
651 | """ |
652 | + # check_haproxy is redundant in the presence of check_crm. See LP Bug#1880601 for details. |
653 | + # just remove check_haproxy if haproxy is added as a lsb resource in hacluster. |
654 | + for rid in relation_ids("ha"): |
655 | + ha_resources = relation_get("json_resources", rid=rid, unit=local_unit()) |
656 | + if ha_resources: |
657 | + try: |
658 | + ha_resources_parsed = json.loads(ha_resources) |
659 | + except ValueError as e: |
660 | + log('Could not parse JSON from ha resources. {}'.format(e), level=ERROR) |
661 | + raise |
662 | + if "lsb:haproxy" in ha_resources_parsed.values(): |
663 | + if "haproxy" in services: |
664 | + log("removed check_haproxy. This service will be monitored by check_crm") |
665 | + services.remove("haproxy") |
666 | for svc in services: |
667 | # Don't add a check for these services from neutron-gateway |
668 | if svc in ['ext-port', 'os-charm-phy-nic-mtu']: |
669 | @@ -354,7 +440,7 @@ |
670 | upstart_init = '/etc/init/%s.conf' % svc |
671 | sysv_init = '/etc/init.d/%s' % svc |
672 | |
673 | - if host.init_is_systemd(): |
674 | + if host.init_is_systemd(service_name=svc): |
675 | nrpe.add_check( |
676 | shortname=svc, |
677 | description='process check {%s}' % unit_name, |
678 | @@ -368,33 +454,53 @@ |
679 | ) |
680 | elif os.path.exists(sysv_init): |
681 | cronpath = '/etc/cron.d/nagios-service-check-%s' % svc |
682 | - cron_file = ('*/5 * * * * root ' |
683 | - '/usr/local/lib/nagios/plugins/check_exit_status.pl ' |
684 | - '-s /etc/init.d/%s status > ' |
685 | - '/var/lib/nagios/service-check-%s.txt\n' % (svc, |
686 | - svc) |
687 | - ) |
688 | + checkpath = '%s/service-check-%s.txt' % (nrpe.homedir, svc) |
689 | + croncmd = ( |
690 | + '/usr/local/lib/nagios/plugins/check_exit_status.pl ' |
691 | + '-e -s /etc/init.d/%s status' % svc |
692 | + ) |
693 | + cron_file = '*/5 * * * * root %s > %s\n' % (croncmd, checkpath) |
694 | f = open(cronpath, 'w') |
695 | f.write(cron_file) |
696 | f.close() |
697 | nrpe.add_check( |
698 | shortname=svc, |
699 | - description='process check {%s}' % unit_name, |
700 | - check_cmd='check_status_file.py -f ' |
701 | - '/var/lib/nagios/service-check-%s.txt' % svc, |
702 | + description='service check {%s}' % unit_name, |
703 | + check_cmd='check_status_file.py -f %s' % checkpath, |
704 | ) |
705 | - |
706 | - |
707 | -def copy_nrpe_checks(): |
708 | + # if /var/lib/nagios doesn't exist open(checkpath, 'w') will fail |
709 | + # (LP: #1670223). |
710 | + if immediate_check and os.path.isdir(nrpe.homedir): |
711 | + f = open(checkpath, 'w') |
712 | + subprocess.call( |
713 | + croncmd.split(), |
714 | + stdout=f, |
715 | + stderr=subprocess.STDOUT |
716 | + ) |
717 | + f.close() |
718 | + os.chmod(checkpath, 0o644) |
719 | + |
720 | + |
721 | +def copy_nrpe_checks(nrpe_files_dir=None): |
722 | """ |
723 | Copy the nrpe checks into place |
724 | |
725 | """ |
726 | NAGIOS_PLUGINS = '/usr/local/lib/nagios/plugins' |
727 | - nrpe_files_dir = os.path.join(os.getenv('CHARM_DIR'), 'hooks', |
728 | - 'charmhelpers', 'contrib', 'openstack', |
729 | - 'files') |
730 | - |
731 | + if nrpe_files_dir is None: |
732 | + # determine if "charmhelpers" is in CHARMDIR or CHARMDIR/hooks |
733 | + for segment in ['.', 'hooks']: |
734 | + nrpe_files_dir = os.path.abspath(os.path.join( |
735 | + os.getenv('CHARM_DIR'), |
736 | + segment, |
737 | + 'charmhelpers', |
738 | + 'contrib', |
739 | + 'openstack', |
740 | + 'files')) |
741 | + if os.path.isdir(nrpe_files_dir): |
742 | + break |
743 | + else: |
744 | + raise RuntimeError("Couldn't find charmhelpers directory") |
745 | if not os.path.exists(NAGIOS_PLUGINS): |
746 | os.makedirs(NAGIOS_PLUGINS) |
747 | for fname in glob.glob(os.path.join(nrpe_files_dir, "check_*")): |
748 | @@ -418,3 +524,53 @@ |
749 | shortname='haproxy_queue', |
750 | description='Check HAProxy queue depth {%s}' % unit_name, |
751 | check_cmd='check_haproxy_queue_depth.sh') |
752 | + |
753 | + |
754 | +def remove_deprecated_check(nrpe, deprecated_services): |
755 | + """ |
756 | + Remove checks for deprecated services in list |
757 | + |
758 | + :param nrpe: NRPE object to remove check from |
759 | + :type nrpe: NRPE |
760 | + :param deprecated_services: List of deprecated services that are removed |
761 | + :type deprecated_services: list |
762 | + """ |
763 | + for dep_svc in deprecated_services: |
764 | + log('Deprecated service: {}'.format(dep_svc)) |
765 | + nrpe.remove_check(shortname=dep_svc) |
766 | + |
767 | + |
768 | +def add_deferred_restarts_check(nrpe): |
769 | + """ |
770 | + Add NRPE check for services with deferred restarts. |
771 | + |
772 | + :param NRPE nrpe: NRPE object to add check to |
773 | + """ |
774 | + unit_name = local_unit().replace('/', '-') |
775 | + shortname = unit_name + '_deferred_restarts' |
776 | + check_cmd = 'check_deferred_restarts.py --application {}'.format( |
777 | + application_name()) |
778 | + |
779 | + log('Adding deferred restarts nrpe check: {}'.format(shortname)) |
780 | + nrpe.add_check( |
781 | + shortname=shortname, |
782 | + description='Check deferred service restarts {}'.format(unit_name), |
783 | + check_cmd=check_cmd) |
784 | + |
785 | + |
786 | +def remove_deferred_restarts_check(nrpe): |
787 | + """ |
788 | + Remove NRPE check for services with deferred service restarts. |
789 | + |
790 | + :param NRPE nrpe: NRPE object to remove check from |
791 | + """ |
792 | + unit_name = local_unit().replace('/', '-') |
793 | + shortname = unit_name + '_deferred_restarts' |
794 | + check_cmd = 'check_deferred_restarts.py --application {}'.format( |
795 | + application_name()) |
796 | + |
797 | + log('Removing deferred restarts nrpe check: {}'.format(shortname)) |
798 | + nrpe.remove_check( |
799 | + shortname=shortname, |
800 | + description='Check deferred service restarts {}'.format(unit_name), |
801 | + check_cmd=check_cmd) |
802 | |
803 | === modified file 'hooks/charmhelpers/contrib/templating/contexts.py' |
804 | --- hooks/charmhelpers/contrib/templating/contexts.py 2016-12-20 14:35:00 +0000 |
805 | +++ hooks/charmhelpers/contrib/templating/contexts.py 2023-06-30 13:58:42 +0000 |
806 | @@ -20,8 +20,6 @@ |
807 | import os |
808 | import yaml |
809 | |
810 | -import six |
811 | - |
812 | import charmhelpers.core.hookenv |
813 | |
814 | |
815 | @@ -93,7 +91,8 @@ |
816 | By default, hyphens are allowed in keys as this is supported |
817 | by yaml, but for tools like ansible, hyphens are not valid [1]. |
818 | |
819 | - [1] http://www.ansibleworks.com/docs/playbooks_variables.html#what-makes-a-valid-variable-name |
820 | + [1] http://www.ansibleworks.com/docs/playbooks_variables.html |
821 | + #what-makes-a-valid-variable-name |
822 | """ |
823 | config = charmhelpers.core.hookenv.config() |
824 | |
825 | @@ -101,16 +100,17 @@ |
826 | # file resources etc. |
827 | config['charm_dir'] = charm_dir |
828 | config['local_unit'] = charmhelpers.core.hookenv.local_unit() |
829 | - config['unit_private_address'] = charmhelpers.core.hookenv.unit_private_ip() |
830 | + config['unit_private_address'] = ( |
831 | + charmhelpers.core.hookenv.unit_private_ip()) |
832 | config['unit_public_address'] = charmhelpers.core.hookenv.unit_get( |
833 | 'public-address' |
834 | ) |
835 | |
836 | # Don't use non-standard tags for unicode which will not |
837 | # work when salt uses yaml.load_safe. |
838 | - yaml.add_representer(six.text_type, |
839 | + yaml.add_representer(str, |
840 | lambda dumper, value: dumper.represent_scalar( |
841 | - six.u('tag:yaml.org,2002:str'), value)) |
842 | + 'tag:yaml.org,2002:str', value)) |
843 | |
844 | yaml_dir = os.path.dirname(yaml_path) |
845 | if not os.path.exists(yaml_dir): |
846 | @@ -118,7 +118,7 @@ |
847 | |
848 | if os.path.exists(yaml_path): |
849 | with open(yaml_path, "r") as existing_vars_file: |
850 | - existing_vars = yaml.load(existing_vars_file.read()) |
851 | + existing_vars = yaml.safe_load(existing_vars_file.read()) |
852 | else: |
853 | with open(yaml_path, "w+"): |
854 | pass |
855 | |
856 | === modified file 'hooks/charmhelpers/core/decorators.py' |
857 | --- hooks/charmhelpers/core/decorators.py 2016-12-20 14:35:00 +0000 |
858 | +++ hooks/charmhelpers/core/decorators.py 2023-06-30 13:58:42 +0000 |
859 | @@ -53,3 +53,41 @@ |
860 | return _retry_on_exception_inner_2 |
861 | |
862 | return _retry_on_exception_inner_1 |
863 | + |
864 | + |
865 | +def retry_on_predicate(num_retries, predicate_fun, base_delay=0): |
866 | + """Retry based on return value |
867 | + |
868 | + The return value of the decorated function is passed to the given predicate_fun. If the |
869 | + result of the predicate is False, retry the decorated function up to num_retries times |
870 | + |
871 | + An exponential backoff up to base_delay^num_retries seconds can be introduced by setting |
872 | + base_delay to a nonzero value. The default is to run with a zero (i.e. no) delay |
873 | + |
874 | + :param num_retries: Max. number of retries to perform |
875 | + :type num_retries: int |
876 | + :param predicate_fun: Predicate function to determine if a retry is necessary |
877 | + :type predicate_fun: callable |
878 | + :param base_delay: Starting value in seconds for exponential delay, defaults to 0 (no delay) |
879 | + :type base_delay: float |
880 | + """ |
881 | + def _retry_on_pred_inner_1(f): |
882 | + def _retry_on_pred_inner_2(*args, **kwargs): |
883 | + retries = num_retries |
884 | + multiplier = 1 |
885 | + delay = base_delay |
886 | + while True: |
887 | + result = f(*args, **kwargs) |
888 | + if predicate_fun(result) or retries <= 0: |
889 | + return result |
890 | + delay *= multiplier |
891 | + multiplier += 1 |
892 | + log("Result {}, retrying '{}' {} more times (delay={})".format( |
893 | + result, f.__name__, retries, delay), level=INFO) |
894 | + retries -= 1 |
895 | + if delay: |
896 | + time.sleep(delay) |
897 | + |
898 | + return _retry_on_pred_inner_2 |
899 | + |
900 | + return _retry_on_pred_inner_1 |
901 | |
902 | === modified file 'hooks/charmhelpers/core/hookenv.py' |
903 | --- hooks/charmhelpers/core/hookenv.py 2016-12-20 14:35:00 +0000 |
904 | +++ hooks/charmhelpers/core/hookenv.py 2023-06-30 13:58:42 +0000 |
905 | @@ -1,4 +1,4 @@ |
906 | -# Copyright 2014-2015 Canonical Limited. |
907 | +# Copyright 2013-2021 Canonical Limited. |
908 | # |
909 | # Licensed under the Apache License, Version 2.0 (the "License"); |
910 | # you may not use this file except in compliance with the License. |
911 | @@ -13,37 +13,50 @@ |
912 | # limitations under the License. |
913 | |
914 | "Interactions with the Juju environment" |
915 | -# Copyright 2013 Canonical Ltd. |
916 | # |
917 | # Authors: |
918 | # Charm Helpers Developers <juju@lists.ubuntu.com> |
919 | |
920 | -from __future__ import print_function |
921 | import copy |
922 | from distutils.version import LooseVersion |
923 | +from enum import Enum |
924 | from functools import wraps |
925 | +from collections import namedtuple, UserDict |
926 | import glob |
927 | import os |
928 | import json |
929 | import yaml |
930 | +import re |
931 | import subprocess |
932 | import sys |
933 | import errno |
934 | import tempfile |
935 | from subprocess import CalledProcessError |
936 | |
937 | -import six |
938 | -if not six.PY3: |
939 | - from UserDict import UserDict |
940 | -else: |
941 | - from collections import UserDict |
942 | +from charmhelpers import deprecate |
943 | + |
944 | |
945 | CRITICAL = "CRITICAL" |
946 | ERROR = "ERROR" |
947 | WARNING = "WARNING" |
948 | INFO = "INFO" |
949 | DEBUG = "DEBUG" |
950 | +TRACE = "TRACE" |
951 | MARKER = object() |
952 | +SH_MAX_ARG = 131071 |
953 | + |
954 | + |
955 | +RANGE_WARNING = ('Passing NO_PROXY string that includes a cidr. ' |
956 | + 'This may not be compatible with software you are ' |
957 | + 'running in your shell.') |
958 | + |
959 | + |
960 | +class WORKLOAD_STATES(Enum): |
961 | + ACTIVE = 'active' |
962 | + BLOCKED = 'blocked' |
963 | + MAINTENANCE = 'maintenance' |
964 | + WAITING = 'waiting' |
965 | + |
966 | |
967 | cache = {} |
968 | |
969 | @@ -64,7 +77,7 @@ |
970 | @wraps(func) |
971 | def wrapper(*args, **kwargs): |
972 | global cache |
973 | - key = str((func, args, kwargs)) |
974 | + key = json.dumps((func, args, kwargs), sort_keys=True, default=str) |
975 | try: |
976 | return cache[key] |
977 | except KeyError: |
978 | @@ -92,9 +105,9 @@ |
979 | command = ['juju-log'] |
980 | if level: |
981 | command += ['-l', level] |
982 | - if not isinstance(message, six.string_types): |
983 | + if not isinstance(message, str): |
984 | message = repr(message) |
985 | - command += [message] |
986 | + command += [message[:SH_MAX_ARG]] |
987 | # Missing juju-log should not cause failures in unit tests |
988 | # Send log output to stderr |
989 | try: |
990 | @@ -109,6 +122,24 @@ |
991 | raise |
992 | |
993 | |
994 | +def function_log(message): |
995 | + """Write a function progress message""" |
996 | + command = ['function-log'] |
997 | + if not isinstance(message, str): |
998 | + message = repr(message) |
999 | + command += [message[:SH_MAX_ARG]] |
1000 | + # Missing function-log should not cause failures in unit tests |
1001 | + # Send function_log output to stderr |
1002 | + try: |
1003 | + subprocess.call(command) |
1004 | + except OSError as e: |
1005 | + if e.errno == errno.ENOENT: |
1006 | + message = "function-log: {}".format(message) |
1007 | + print(message, file=sys.stderr) |
1008 | + else: |
1009 | + raise |
1010 | + |
1011 | + |
1012 | class Serializable(UserDict): |
1013 | """Wrapper, an object that can be serialized to yaml or json""" |
1014 | |
1015 | @@ -187,6 +218,17 @@ |
1016 | raise ValueError('Must specify neither or both of relation_name and service_or_unit') |
1017 | |
1018 | |
1019 | +def departing_unit(): |
1020 | + """The departing unit for the current relation hook. |
1021 | + |
1022 | + Available since juju 2.8. |
1023 | + |
1024 | + :returns: the departing unit, or None if the information isn't available. |
1025 | + :rtype: Optional[str] |
1026 | + """ |
1027 | + return os.environ.get('JUJU_DEPARTING_UNIT', None) |
1028 | + |
1029 | + |
1030 | def local_unit(): |
1031 | """Local unit ID""" |
1032 | return os.environ['JUJU_UNIT_NAME'] |
1033 | @@ -197,9 +239,56 @@ |
1034 | return os.environ.get('JUJU_REMOTE_UNIT', None) |
1035 | |
1036 | |
1037 | +def application_name(): |
1038 | + """ |
1039 | + The name of the deployed application this unit belongs to. |
1040 | + """ |
1041 | + return local_unit().split('/')[0] |
1042 | + |
1043 | + |
1044 | def service_name(): |
1045 | - """The name service group this unit belongs to""" |
1046 | - return local_unit().split('/')[0] |
1047 | + """ |
1048 | + .. deprecated:: 0.19.1 |
1049 | + Alias for :func:`application_name`. |
1050 | + """ |
1051 | + return application_name() |
1052 | + |
1053 | + |
1054 | +def model_name(): |
1055 | + """ |
1056 | + Name of the model that this unit is deployed in. |
1057 | + """ |
1058 | + return os.environ['JUJU_MODEL_NAME'] |
1059 | + |
1060 | + |
1061 | +def model_uuid(): |
1062 | + """ |
1063 | + UUID of the model that this unit is deployed in. |
1064 | + """ |
1065 | + return os.environ['JUJU_MODEL_UUID'] |
1066 | + |
1067 | + |
1068 | +def principal_unit(): |
1069 | + """Returns the principal unit of this unit, otherwise None""" |
1070 | + # Juju 2.2 and above provides JUJU_PRINCIPAL_UNIT |
1071 | + principal_unit = os.environ.get('JUJU_PRINCIPAL_UNIT', None) |
1072 | + # If it's empty, then this unit is the principal |
1073 | + if principal_unit == '': |
1074 | + return os.environ['JUJU_UNIT_NAME'] |
1075 | + elif principal_unit is not None: |
1076 | + return principal_unit |
1077 | + # For Juju 2.1 and below, let's try work out the principle unit by |
1078 | + # the various charms' metadata.yaml. |
1079 | + for reltype in relation_types(): |
1080 | + for rid in relation_ids(reltype): |
1081 | + for unit in related_units(rid): |
1082 | + md = _metadata_unit(unit) |
1083 | + if not md: |
1084 | + continue |
1085 | + subordinate = md.pop('subordinate', None) |
1086 | + if not subordinate: |
1087 | + return unit |
1088 | + return None |
1089 | |
1090 | |
1091 | @cached |
1092 | @@ -263,7 +352,7 @@ |
1093 | self.implicit_save = True |
1094 | self._prev_dict = None |
1095 | self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME) |
1096 | - if os.path.exists(self.path): |
1097 | + if os.path.exists(self.path) and os.stat(self.path).st_size: |
1098 | self.load_previous() |
1099 | atexit(self._implicit_save) |
1100 | |
1101 | @@ -283,7 +372,13 @@ |
1102 | """ |
1103 | self.path = path or self.path |
1104 | with open(self.path) as f: |
1105 | - self._prev_dict = json.load(f) |
1106 | + try: |
1107 | + self._prev_dict = json.load(f) |
1108 | + except ValueError as e: |
1109 | + log('Found but was unable to parse previous config data, ' |
1110 | + 'ignoring which will report all values as changed - {}' |
1111 | + .format(str(e)), level=ERROR) |
1112 | + return |
1113 | for k, v in copy.deepcopy(self._prev_dict).items(): |
1114 | if k not in self: |
1115 | self[k] = v |
1116 | @@ -319,6 +414,7 @@ |
1117 | |
1118 | """ |
1119 | with open(self.path, 'w') as f: |
1120 | + os.fchmod(f.fileno(), 0o600) |
1121 | json.dump(self, f) |
1122 | |
1123 | def _implicit_save(self): |
1124 | @@ -326,35 +422,52 @@ |
1125 | self.save() |
1126 | |
1127 | |
1128 | -@cached |
1129 | +_cache_config = None |
1130 | + |
1131 | + |
1132 | def config(scope=None): |
1133 | - """Juju charm configuration""" |
1134 | - config_cmd_line = ['config-get'] |
1135 | - if scope is not None: |
1136 | - config_cmd_line.append(scope) |
1137 | - else: |
1138 | - config_cmd_line.append('--all') |
1139 | - config_cmd_line.append('--format=json') |
1140 | + """ |
1141 | + Get the juju charm configuration (scope==None) or individual key, |
1142 | + (scope=str). The returned value is a Python data structure loaded as |
1143 | + JSON from the Juju config command. |
1144 | + |
1145 | + :param scope: If set, return the value for the specified key. |
1146 | + :type scope: Optional[str] |
1147 | + :returns: Either the whole config as a Config, or a key from it. |
1148 | + :rtype: Any |
1149 | + """ |
1150 | + global _cache_config |
1151 | + config_cmd_line = ['config-get', '--all', '--format=json'] |
1152 | try: |
1153 | - config_data = json.loads( |
1154 | - subprocess.check_output(config_cmd_line).decode('UTF-8')) |
1155 | + if _cache_config is None: |
1156 | + config_data = json.loads( |
1157 | + subprocess.check_output(config_cmd_line).decode('UTF-8')) |
1158 | + _cache_config = Config(config_data) |
1159 | if scope is not None: |
1160 | - return config_data |
1161 | - return Config(config_data) |
1162 | - except ValueError: |
1163 | + return _cache_config.get(scope) |
1164 | + return _cache_config |
1165 | + except (json.decoder.JSONDecodeError, UnicodeDecodeError) as e: |
1166 | + log('Unable to parse output from config-get: config_cmd_line="{}" ' |
1167 | + 'message="{}"' |
1168 | + .format(config_cmd_line, str(e)), level=ERROR) |
1169 | return None |
1170 | |
1171 | |
1172 | @cached |
1173 | -def relation_get(attribute=None, unit=None, rid=None): |
1174 | +def relation_get(attribute=None, unit=None, rid=None, app=None): |
1175 | """Get relation information""" |
1176 | _args = ['relation-get', '--format=json'] |
1177 | + if app is not None: |
1178 | + if unit is not None: |
1179 | + raise ValueError("Cannot use both 'unit' and 'app'") |
1180 | + _args.append('--app') |
1181 | if rid: |
1182 | _args.append('-r') |
1183 | _args.append(rid) |
1184 | _args.append(attribute or '-') |
1185 | - if unit: |
1186 | - _args.append(unit) |
1187 | + # unit or application name |
1188 | + if unit or app: |
1189 | + _args.append(unit or app) |
1190 | try: |
1191 | return json.loads(subprocess.check_output(_args).decode('UTF-8')) |
1192 | except ValueError: |
1193 | @@ -365,12 +478,28 @@ |
1194 | raise |
1195 | |
1196 | |
1197 | -def relation_set(relation_id=None, relation_settings=None, **kwargs): |
1198 | +@cached |
1199 | +def _relation_set_accepts_file(): |
1200 | + """Return True if the juju relation-set command accepts a file. |
1201 | + |
1202 | + Cache the result as it won't change during the execution of a hook, and |
1203 | + thus we can make relation_set() more efficient by only checking for the |
1204 | + first relation_set() call. |
1205 | + |
1206 | + :returns: True if relation_set accepts a file. |
1207 | + :rtype: bool |
1208 | + :raises: subprocess.CalledProcessError if the check fails. |
1209 | + """ |
1210 | + return "--file" in subprocess.check_output( |
1211 | + ["relation-set", "--help"], universal_newlines=True) |
1212 | + |
1213 | + |
1214 | +def relation_set(relation_id=None, relation_settings=None, app=False, **kwargs): |
1215 | """Set relation information for the current unit""" |
1216 | relation_settings = relation_settings if relation_settings else {} |
1217 | relation_cmd_line = ['relation-set'] |
1218 | - accepts_file = "--file" in subprocess.check_output( |
1219 | - relation_cmd_line + ["--help"], universal_newlines=True) |
1220 | + if app: |
1221 | + relation_cmd_line.append('--app') |
1222 | if relation_id is not None: |
1223 | relation_cmd_line.extend(('-r', relation_id)) |
1224 | settings = relation_settings.copy() |
1225 | @@ -380,7 +509,7 @@ |
1226 | # sites pass in things like dicts or numbers. |
1227 | if value is not None: |
1228 | settings[key] = "{}".format(value) |
1229 | - if accepts_file: |
1230 | + if _relation_set_accepts_file(): |
1231 | # --file was introduced in Juju 1.23.2. Use it by default if |
1232 | # available, since otherwise we'll break if the relation data is |
1233 | # too big. Ideally we should tell relation-set to read the data from |
1234 | @@ -435,9 +564,70 @@ |
1235 | subprocess.check_output(units_cmd_line).decode('UTF-8')) or [] |
1236 | |
1237 | |
1238 | +def expected_peer_units(): |
1239 | + """Get a generator for units we expect to join peer relation based on |
1240 | + goal-state. |
1241 | + |
1242 | + The local unit is excluded from the result to make it easy to gauge |
1243 | + completion of all peers joining the relation with existing hook tools. |
1244 | + |
1245 | + Example usage: |
1246 | + log('peer {} of {} joined peer relation' |
1247 | + .format(len(related_units()), |
1248 | + len(list(expected_peer_units())))) |
1249 | + |
1250 | + This function will raise NotImplementedError if used with juju versions |
1251 | + without goal-state support. |
1252 | + |
1253 | + :returns: iterator |
1254 | + :rtype: types.GeneratorType |
1255 | + :raises: NotImplementedError |
1256 | + """ |
1257 | + if not has_juju_version("2.4.0"): |
1258 | + # goal-state first appeared in 2.4.0. |
1259 | + raise NotImplementedError("goal-state") |
1260 | + _goal_state = goal_state() |
1261 | + return (key for key in _goal_state['units'] |
1262 | + if '/' in key and key != local_unit()) |
1263 | + |
1264 | + |
1265 | +def expected_related_units(reltype=None): |
1266 | + """Get a generator for units we expect to join relation based on |
1267 | + goal-state. |
1268 | + |
1269 | + Note that you can not use this function for the peer relation, take a look |
1270 | + at expected_peer_units() for that. |
1271 | + |
1272 | + This function will raise KeyError if you request information for a |
1273 | + relation type for which juju goal-state does not have information. It will |
1274 | + raise NotImplementedError if used with juju versions without goal-state |
1275 | + support. |
1276 | + |
1277 | + Example usage: |
1278 | + log('participant {} of {} joined relation {}' |
1279 | + .format(len(related_units()), |
1280 | + len(list(expected_related_units())), |
1281 | + relation_type())) |
1282 | + |
1283 | + :param reltype: Relation type to list data for, default is to list data for |
1284 | + the relation type we are currently executing a hook for. |
1285 | + :type reltype: str |
1286 | + :returns: iterator |
1287 | + :rtype: types.GeneratorType |
1288 | + :raises: KeyError, NotImplementedError |
1289 | + """ |
1290 | + if not has_juju_version("2.4.4"): |
1291 | + # goal-state existed in 2.4.0, but did not list individual units to |
1292 | + # join a relation in 2.4.1 through 2.4.3. (LP: #1794739) |
1293 | + raise NotImplementedError("goal-state relation unit count") |
1294 | + reltype = reltype or relation_type() |
1295 | + _goal_state = goal_state() |
1296 | + return (key for key in _goal_state['relations'][reltype] if '/' in key) |
1297 | + |
1298 | + |
1299 | @cached |
1300 | def relation_for_unit(unit=None, rid=None): |
1301 | - """Get the json represenation of a unit's relation""" |
1302 | + """Get the json representation of a unit's relation""" |
1303 | unit = unit or remote_unit() |
1304 | relation = relation_get(unit=unit, rid=rid) |
1305 | for key in relation: |
1306 | @@ -478,6 +668,24 @@ |
1307 | return yaml.safe_load(md) |
1308 | |
1309 | |
1310 | +def _metadata_unit(unit): |
1311 | + """Given the name of a unit (e.g. apache2/0), get the unit charm's |
1312 | + metadata.yaml. Very similar to metadata() but allows us to inspect |
1313 | + other units. Unit needs to be co-located, such as a subordinate or |
1314 | + principal/primary. |
1315 | + |
1316 | + :returns: metadata.yaml as a python object. |
1317 | + |
1318 | + """ |
1319 | + basedir = os.sep.join(charm_dir().split(os.sep)[:-2]) |
1320 | + unitdir = 'unit-{}'.format(unit.replace(os.sep, '-')) |
1321 | + joineddir = os.path.join(basedir, unitdir, 'charm', 'metadata.yaml') |
1322 | + if not os.path.exists(joineddir): |
1323 | + return None |
1324 | + with open(joineddir) as md: |
1325 | + return yaml.safe_load(md) |
1326 | + |
1327 | + |
1328 | @cached |
1329 | def relation_types(): |
1330 | """Get a list of relation types supported by this charm""" |
1331 | @@ -602,18 +810,31 @@ |
1332 | return False |
1333 | |
1334 | |
1335 | +def _port_op(op_name, port, protocol="TCP"): |
1336 | + """Open or close a service network port""" |
1337 | + _args = [op_name] |
1338 | + icmp = protocol.upper() == "ICMP" |
1339 | + if icmp: |
1340 | + _args.append(protocol) |
1341 | + else: |
1342 | + _args.append('{}/{}'.format(port, protocol)) |
1343 | + try: |
1344 | + subprocess.check_call(_args) |
1345 | + except subprocess.CalledProcessError: |
1346 | + # Older Juju pre 2.3 doesn't support ICMP |
1347 | + # so treat it as a no-op if it fails. |
1348 | + if not icmp: |
1349 | + raise |
1350 | + |
1351 | + |
1352 | def open_port(port, protocol="TCP"): |
1353 | """Open a service network port""" |
1354 | - _args = ['open-port'] |
1355 | - _args.append('{}/{}'.format(port, protocol)) |
1356 | - subprocess.check_call(_args) |
1357 | + _port_op('open-port', port, protocol) |
1358 | |
1359 | |
1360 | def close_port(port, protocol="TCP"): |
1361 | """Close a service network port""" |
1362 | - _args = ['close-port'] |
1363 | - _args.append('{}/{}'.format(port, protocol)) |
1364 | - subprocess.check_call(_args) |
1365 | + _port_op('close-port', port, protocol) |
1366 | |
1367 | |
1368 | def open_ports(start, end, protocol="TCP"): |
1369 | @@ -630,6 +851,17 @@ |
1370 | subprocess.check_call(_args) |
1371 | |
1372 | |
1373 | +def opened_ports(): |
1374 | + """Get the opened ports |
1375 | + |
1376 | + *Note that this will only show ports opened in a previous hook* |
1377 | + |
1378 | + :returns: Opened ports as a list of strings: ``['8080/tcp', '8081-8083/tcp']`` |
1379 | + """ |
1380 | + _args = ['opened-ports', '--format=json'] |
1381 | + return json.loads(subprocess.check_output(_args).decode('UTF-8')) |
1382 | + |
1383 | + |
1384 | @cached |
1385 | def unit_get(attribute): |
1386 | """Get the unit ID for the remote unit""" |
1387 | @@ -751,14 +983,29 @@ |
1388 | return wrapper |
1389 | |
1390 | |
1391 | +class NoNetworkBinding(Exception): |
1392 | + pass |
1393 | + |
1394 | + |
1395 | def charm_dir(): |
1396 | """Return the root directory of the current charm""" |
1397 | + d = os.environ.get('JUJU_CHARM_DIR') |
1398 | + if d is not None: |
1399 | + return d |
1400 | return os.environ.get('CHARM_DIR') |
1401 | |
1402 | |
1403 | +def cmd_exists(cmd): |
1404 | + """Return True if the specified cmd exists in the path""" |
1405 | + return any( |
1406 | + os.access(os.path.join(path, cmd), os.X_OK) |
1407 | + for path in os.environ["PATH"].split(os.pathsep) |
1408 | + ) |
1409 | + |
1410 | + |
1411 | @cached |
1412 | def action_get(key=None): |
1413 | - """Gets the value of an action parameter, or all key/value param pairs""" |
1414 | + """Gets the value of an action parameter, or all key/value param pairs.""" |
1415 | cmd = ['action-get'] |
1416 | if key is not None: |
1417 | cmd.append(key) |
1418 | @@ -767,52 +1014,132 @@ |
1419 | return action_data |
1420 | |
1421 | |
1422 | +@cached |
1423 | +@deprecate("moved to action_get()", log=log) |
1424 | +def function_get(key=None): |
1425 | + """ |
1426 | + .. deprecated:: |
1427 | + Gets the value of an action parameter, or all key/value param pairs. |
1428 | + """ |
1429 | + cmd = ['function-get'] |
1430 | + # Fallback for older charms. |
1431 | + if not cmd_exists('function-get'): |
1432 | + cmd = ['action-get'] |
1433 | + |
1434 | + if key is not None: |
1435 | + cmd.append(key) |
1436 | + cmd.append('--format=json') |
1437 | + function_data = json.loads(subprocess.check_output(cmd).decode('UTF-8')) |
1438 | + return function_data |
1439 | + |
1440 | + |
1441 | def action_set(values): |
1442 | - """Sets the values to be returned after the action finishes""" |
1443 | + """Sets the values to be returned after the action finishes.""" |
1444 | cmd = ['action-set'] |
1445 | for k, v in list(values.items()): |
1446 | cmd.append('{}={}'.format(k, v)) |
1447 | subprocess.check_call(cmd) |
1448 | |
1449 | |
1450 | +@deprecate("moved to action_set()", log=log) |
1451 | +def function_set(values): |
1452 | + """ |
1453 | + .. deprecated:: |
1454 | + Sets the values to be returned after the function finishes. |
1455 | + """ |
1456 | + cmd = ['function-set'] |
1457 | + # Fallback for older charms. |
1458 | + if not cmd_exists('function-get'): |
1459 | + cmd = ['action-set'] |
1460 | + |
1461 | + for k, v in list(values.items()): |
1462 | + cmd.append('{}={}'.format(k, v)) |
1463 | + subprocess.check_call(cmd) |
1464 | + |
1465 | + |
1466 | def action_fail(message): |
1467 | - """Sets the action status to failed and sets the error message. |
1468 | + """ |
1469 | + Sets the action status to failed and sets the error message. |
1470 | |
1471 | - The results set by action_set are preserved.""" |
1472 | + The results set by action_set are preserved. |
1473 | + """ |
1474 | subprocess.check_call(['action-fail', message]) |
1475 | |
1476 | |
1477 | +@deprecate("moved to action_fail()", log=log) |
1478 | +def function_fail(message): |
1479 | + """ |
1480 | + .. deprecated:: |
1481 | + Sets the function status to failed and sets the error message. |
1482 | + |
1483 | + The results set by function_set are preserved. |
1484 | + """ |
1485 | + cmd = ['function-fail'] |
1486 | + # Fallback for older charms. |
1487 | + if not cmd_exists('function-fail'): |
1488 | + cmd = ['action-fail'] |
1489 | + cmd.append(message) |
1490 | + |
1491 | + subprocess.check_call(cmd) |
1492 | + |
1493 | + |
1494 | def action_name(): |
1495 | """Get the name of the currently executing action.""" |
1496 | return os.environ.get('JUJU_ACTION_NAME') |
1497 | |
1498 | |
1499 | +def function_name(): |
1500 | + """Get the name of the currently executing function.""" |
1501 | + return os.environ.get('JUJU_FUNCTION_NAME') or action_name() |
1502 | + |
1503 | + |
1504 | def action_uuid(): |
1505 | """Get the UUID of the currently executing action.""" |
1506 | return os.environ.get('JUJU_ACTION_UUID') |
1507 | |
1508 | |
1509 | +def function_id(): |
1510 | + """Get the ID of the currently executing function.""" |
1511 | + return os.environ.get('JUJU_FUNCTION_ID') or action_uuid() |
1512 | + |
1513 | + |
1514 | def action_tag(): |
1515 | """Get the tag for the currently executing action.""" |
1516 | return os.environ.get('JUJU_ACTION_TAG') |
1517 | |
1518 | |
1519 | -def status_set(workload_state, message): |
1520 | +def function_tag(): |
1521 | + """Get the tag for the currently executing function.""" |
1522 | + return os.environ.get('JUJU_FUNCTION_TAG') or action_tag() |
1523 | + |
1524 | + |
1525 | +def status_set(workload_state, message, application=False): |
1526 | """Set the workload state with a message |
1527 | |
1528 | Use status-set to set the workload state with a message which is visible |
1529 | to the user via juju status. If the status-set command is not found then |
1530 | - assume this is juju < 1.23 and juju-log the message unstead. |
1531 | + assume this is juju < 1.23 and juju-log the message instead. |
1532 | |
1533 | - workload_state -- valid juju workload state. |
1534 | - message -- status update message |
1535 | + workload_state -- valid juju workload state. str or WORKLOAD_STATES |
1536 | + message -- status update message |
1537 | + application -- Whether this is an application state set |
1538 | """ |
1539 | - valid_states = ['maintenance', 'blocked', 'waiting', 'active'] |
1540 | - if workload_state not in valid_states: |
1541 | - raise ValueError( |
1542 | - '{!r} is not a valid workload state'.format(workload_state) |
1543 | - ) |
1544 | - cmd = ['status-set', workload_state, message] |
1545 | + bad_state_msg = '{!r} is not a valid workload state' |
1546 | + |
1547 | + if isinstance(workload_state, str): |
1548 | + try: |
1549 | + # Convert string to enum. |
1550 | + workload_state = WORKLOAD_STATES[workload_state.upper()] |
1551 | + except KeyError: |
1552 | + raise ValueError(bad_state_msg.format(workload_state)) |
1553 | + |
1554 | + if workload_state not in WORKLOAD_STATES: |
1555 | + raise ValueError(bad_state_msg.format(workload_state)) |
1556 | + |
1557 | + cmd = ['status-set'] |
1558 | + if application: |
1559 | + cmd.append('--application') |
1560 | + cmd.extend([workload_state.value, message]) |
1561 | try: |
1562 | ret = subprocess.call(cmd) |
1563 | if ret == 0: |
1564 | @@ -820,7 +1147,7 @@ |
1565 | except OSError as e: |
1566 | if e.errno != errno.ENOENT: |
1567 | raise |
1568 | - log_message = 'status-set failed: {} {}'.format(workload_state, |
1569 | + log_message = 'status-set failed: {} {}'.format(workload_state.value, |
1570 | message) |
1571 | log(log_message, level='INFO') |
1572 | |
1573 | @@ -874,6 +1201,14 @@ |
1574 | |
1575 | |
1576 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) |
1577 | +@cached |
1578 | +def goal_state(): |
1579 | + """Juju goal state values""" |
1580 | + cmd = ['goal-state', '--format=json'] |
1581 | + return json.loads(subprocess.check_output(cmd).decode('UTF-8')) |
1582 | + |
1583 | + |
1584 | +@translate_exc(from_exc=OSError, to_exc=NotImplementedError) |
1585 | def is_leader(): |
1586 | """Does the current unit hold the juju leadership |
1587 | |
1588 | @@ -967,7 +1302,6 @@ |
1589 | universal_newlines=True).strip() |
1590 | |
1591 | |
1592 | -@cached |
1593 | def has_juju_version(minimum_version): |
1594 | """Return True if the Juju version is at least the provided version""" |
1595 | return LooseVersion(juju_version()) >= LooseVersion(minimum_version) |
1596 | @@ -1027,6 +1361,8 @@ |
1597 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) |
1598 | def network_get_primary_address(binding): |
1599 | ''' |
1600 | + Deprecated since Juju 2.3; use network_get() |
1601 | + |
1602 | Retrieve the primary network address for a named binding |
1603 | |
1604 | :param binding: string. The name of a relation of extra-binding |
1605 | @@ -1034,4 +1370,267 @@ |
1606 | :raise: NotImplementedError if run on Juju < 2.0 |
1607 | ''' |
1608 | cmd = ['network-get', '--primary-address', binding] |
1609 | - return subprocess.check_output(cmd).decode('UTF-8').strip() |
1610 | + try: |
1611 | + response = subprocess.check_output( |
1612 | + cmd, |
1613 | + stderr=subprocess.STDOUT).decode('UTF-8').strip() |
1614 | + except CalledProcessError as e: |
1615 | + if 'no network config found for binding' in e.output.decode('UTF-8'): |
1616 | + raise NoNetworkBinding("No network binding for {}" |
1617 | + .format(binding)) |
1618 | + else: |
1619 | + raise |
1620 | + return response |
1621 | + |
1622 | + |
1623 | +def network_get(endpoint, relation_id=None): |
1624 | + """ |
1625 | + Retrieve the network details for a relation endpoint |
1626 | + |
1627 | + :param endpoint: string. The name of a relation endpoint |
1628 | + :param relation_id: int. The ID of the relation for the current context. |
1629 | + :return: dict. The loaded YAML output of the network-get query. |
1630 | + :raise: NotImplementedError if request not supported by the Juju version. |
1631 | + """ |
1632 | + if not has_juju_version('2.2'): |
1633 | + raise NotImplementedError(juju_version()) # earlier versions require --primary-address |
1634 | + if relation_id and not has_juju_version('2.3'): |
1635 | + raise NotImplementedError # 2.3 added the -r option |
1636 | + |
1637 | + cmd = ['network-get', endpoint, '--format', 'yaml'] |
1638 | + if relation_id: |
1639 | + cmd.append('-r') |
1640 | + cmd.append(relation_id) |
1641 | + response = subprocess.check_output( |
1642 | + cmd, |
1643 | + stderr=subprocess.STDOUT).decode('UTF-8').strip() |
1644 | + return yaml.safe_load(response) |
1645 | + |
1646 | + |
1647 | +def add_metric(*args, **kwargs): |
1648 | + """Add metric values. Values may be expressed with keyword arguments. For |
1649 | + metric names containing dashes, these may be expressed as one or more |
1650 | + 'key=value' positional arguments. May only be called from the collect-metrics |
1651 | + hook.""" |
1652 | + _args = ['add-metric'] |
1653 | + _kvpairs = [] |
1654 | + _kvpairs.extend(args) |
1655 | + _kvpairs.extend(['{}={}'.format(k, v) for k, v in kwargs.items()]) |
1656 | + _args.extend(sorted(_kvpairs)) |
1657 | + try: |
1658 | + subprocess.check_call(_args) |
1659 | + return |
1660 | + except EnvironmentError as e: |
1661 | + if e.errno != errno.ENOENT: |
1662 | + raise |
1663 | + log_message = 'add-metric failed: {}'.format(' '.join(_kvpairs)) |
1664 | + log(log_message, level='INFO') |
1665 | + |
1666 | + |
1667 | +def meter_status(): |
1668 | + """Get the meter status, if running in the meter-status-changed hook.""" |
1669 | + return os.environ.get('JUJU_METER_STATUS') |
1670 | + |
1671 | + |
1672 | +def meter_info(): |
1673 | + """Get the meter status information, if running in the meter-status-changed |
1674 | + hook.""" |
1675 | + return os.environ.get('JUJU_METER_INFO') |
1676 | + |
1677 | + |
1678 | +def iter_units_for_relation_name(relation_name): |
1679 | + """Iterate through all units in a relation |
1680 | + |
1681 | + Generator that iterates through all the units in a relation and yields |
1682 | + a named tuple with rid and unit field names. |
1683 | + |
1684 | + Usage: |
1685 | + data = [(u.rid, u.unit) |
1686 | + for u in iter_units_for_relation_name(relation_name)] |
1687 | + |
1688 | + :param relation_name: string relation name |
1689 | + :yield: Named Tuple with rid and unit field names |
1690 | + """ |
1691 | + RelatedUnit = namedtuple('RelatedUnit', 'rid, unit') |
1692 | + for rid in relation_ids(relation_name): |
1693 | + for unit in related_units(rid): |
1694 | + yield RelatedUnit(rid, unit) |
1695 | + |
1696 | + |
1697 | +def ingress_address(rid=None, unit=None): |
1698 | + """ |
1699 | + Retrieve the ingress-address from a relation when available. |
1700 | + Otherwise, return the private-address. |
1701 | + |
1702 | + When used on the consuming side of the relation (unit is a remote |
1703 | + unit), the ingress-address is the IP address that this unit needs |
1704 | + to use to reach the provided service on the remote unit. |
1705 | + |
1706 | + When used on the providing side of the relation (unit == local_unit()), |
1707 | + the ingress-address is the IP address that is advertised to remote |
1708 | + units on this relation. Remote units need to use this address to |
1709 | + reach the local provided service on this unit. |
1710 | + |
1711 | + Note that charms may document some other method to use in |
1712 | + preference to the ingress_address(), such as an address provided |
1713 | + on a different relation attribute or a service discovery mechanism. |
1714 | + This allows charms to redirect inbound connections to their peers |
1715 | + or different applications such as load balancers. |
1716 | + |
1717 | + Usage: |
1718 | + addresses = [ingress_address(rid=u.rid, unit=u.unit) |
1719 | + for u in iter_units_for_relation_name(relation_name)] |
1720 | + |
1721 | + :param rid: string relation id |
1722 | + :param unit: string unit name |
1723 | + :side effect: calls relation_get |
1724 | + :return: string IP address |
1725 | + """ |
1726 | + settings = relation_get(rid=rid, unit=unit) |
1727 | + return (settings.get('ingress-address') or |
1728 | + settings.get('private-address')) |
1729 | + |
1730 | + |
1731 | +def egress_subnets(rid=None, unit=None): |
1732 | + """ |
1733 | + Retrieve the egress-subnets from a relation. |
1734 | + |
1735 | + This function is to be used on the providing side of the |
1736 | + relation, and provides the ranges of addresses that client |
1737 | + connections may come from. The result is uninteresting on |
1738 | + the consuming side of a relation (unit == local_unit()). |
1739 | + |
1740 | + Returns a stable list of subnets in CIDR format. |
1741 | + eg. ['192.168.1.0/24', '2001::F00F/128'] |
1742 | + |
1743 | + If egress-subnets is not available, falls back to using the published |
1744 | + ingress-address, or finally private-address. |
1745 | + |
1746 | + :param rid: string relation id |
1747 | + :param unit: string unit name |
1748 | + :side effect: calls relation_get |
1749 | + :return: list of subnets in CIDR format. eg. ['192.168.1.0/24', '2001::F00F/128'] |
1750 | + """ |
1751 | + def _to_range(addr): |
1752 | + if re.search(r'^(?:\d{1,3}\.){3}\d{1,3}$', addr) is not None: |
1753 | + addr += '/32' |
1754 | + elif ':' in addr and '/' not in addr: # IPv6 |
1755 | + addr += '/128' |
1756 | + return addr |
1757 | + |
1758 | + settings = relation_get(rid=rid, unit=unit) |
1759 | + if 'egress-subnets' in settings: |
1760 | + return [n.strip() for n in settings['egress-subnets'].split(',') if n.strip()] |
1761 | + if 'ingress-address' in settings: |
1762 | + return [_to_range(settings['ingress-address'])] |
1763 | + if 'private-address' in settings: |
1764 | + return [_to_range(settings['private-address'])] |
1765 | + return [] # Should never happen |
1766 | + |
1767 | + |
1768 | +def unit_doomed(unit=None): |
1769 | + """Determines if the unit is being removed from the model |
1770 | + |
1771 | + Requires Juju 2.4.1. |
1772 | + |
1773 | + :param unit: string unit name, defaults to local_unit |
1774 | + :side effect: calls goal_state |
1775 | + :side effect: calls local_unit |
1776 | + :side effect: calls has_juju_version |
1777 | + :return: True if the unit is being removed, already gone, or never existed |
1778 | + """ |
1779 | + if not has_juju_version("2.4.1"): |
1780 | + # We cannot risk blindly returning False for 'we don't know', |
1781 | + # because that could cause data loss; if call sites don't |
1782 | + # need an accurate answer, they likely don't need this helper |
1783 | + # at all. |
1784 | + # goal-state existed in 2.4.0, but did not handle removals |
1785 | + # correctly until 2.4.1. |
1786 | + raise NotImplementedError("is_doomed") |
1787 | + if unit is None: |
1788 | + unit = local_unit() |
1789 | + gs = goal_state() |
1790 | + units = gs.get('units', {}) |
1791 | + if unit not in units: |
1792 | + return True |
1793 | + # I don't think 'dead' units ever show up in the goal-state, but |
1794 | + # check anyway in addition to 'dying'. |
1795 | + return units[unit]['status'] in ('dying', 'dead') |
1796 | + |
1797 | + |
1798 | +def env_proxy_settings(selected_settings=None): |
1799 | + """Get proxy settings from process environment variables. |
1800 | + |
1801 | + Get charm proxy settings from environment variables that correspond to |
1802 | + juju-http-proxy, juju-https-proxy, juju-no-proxy (available as of 2.4.2, see |
1803 | + lp:1782236) and juju-ftp-proxy in a format suitable for passing to an |
1804 | + application that reacts to proxy settings passed as environment variables. |
1805 | + Some applications support lowercase or uppercase notation (e.g. curl), some |
1806 | + support only lowercase (e.g. wget), there are also subjectively rare cases |
1807 | + of only uppercase notation support. no_proxy CIDR and wildcard support also |
1808 | + varies between runtimes and applications as there is no enforced standard. |
1809 | + |
1810 | + Some applications may connect to multiple destinations and expose config |
1811 | + options that would affect only proxy settings for a specific destination; |
1812 | + these should be handled in charms in an application-specific manner. |
1813 | + |
1814 | + :param selected_settings: format only a subset of possible settings |
1815 | + :type selected_settings: list |
1816 | + :rtype: Option(None, dict[str, str]) |
1817 | + """ |
1818 | + SUPPORTED_SETTINGS = { |
1819 | + 'http': 'HTTP_PROXY', |
1820 | + 'https': 'HTTPS_PROXY', |
1821 | + 'no_proxy': 'NO_PROXY', |
1822 | + 'ftp': 'FTP_PROXY' |
1823 | + } |
1824 | + if selected_settings is None: |
1825 | + selected_settings = SUPPORTED_SETTINGS |
1826 | + |
1827 | + selected_vars = [v for k, v in SUPPORTED_SETTINGS.items() |
1828 | + if k in selected_settings] |
1829 | + proxy_settings = {} |
1830 | + for var in selected_vars: |
1831 | + var_val = os.getenv(var) |
1832 | + if var_val: |
1833 | + proxy_settings[var] = var_val |
1834 | + proxy_settings[var.lower()] = var_val |
1835 | + # Now handle juju-prefixed environment variables. The legacy vs new |
1836 | + # environment variable usage is mutually exclusive |
1837 | + charm_var_val = os.getenv('JUJU_CHARM_{}'.format(var)) |
1838 | + if charm_var_val: |
1839 | + proxy_settings[var] = charm_var_val |
1840 | + proxy_settings[var.lower()] = charm_var_val |
1841 | + if 'no_proxy' in proxy_settings: |
1842 | + if _contains_range(proxy_settings['no_proxy']): |
1843 | + log(RANGE_WARNING, level=WARNING) |
1844 | + return proxy_settings if proxy_settings else None |
1845 | + |
1846 | + |
1847 | +def _contains_range(addresses): |
1848 | + """Check for cidr or wildcard domain in a string. |
1849 | + |
1850 | + Given a string comprising a comma separated list of ip addresses |
1851 | + and domain names, determine whether the string contains IP ranges |
1852 | + or wildcard domains. |
1853 | + |
1854 | + :param addresses: comma separated list of domains and ip addresses. |
1855 | + :type addresses: str |
1856 | + """ |
1857 | + return ( |
1858 | + # Test for cidr (e.g. 10.20.20.0/24) |
1859 | + "/" in addresses or |
1860 | + # Test for wildcard domains (*.foo.com or .foo.com) |
1861 | + "*" in addresses or |
1862 | + addresses.startswith(".") or |
1863 | + ",." in addresses or |
1864 | + " ." in addresses) |
1865 | + |
1866 | + |
1867 | +def is_subordinate(): |
1868 | + """Check whether charm is subordinate in unit metadata. |
1869 | + |
1870 | + :returns: True if unit is subordinate, False otherwise. |
1871 | + :rtype: bool |
1872 | + """ |
1873 | + return metadata().get('subordinate') is True |
1874 | |
1875 | === modified file 'hooks/charmhelpers/core/host.py' |
1876 | --- hooks/charmhelpers/core/host.py 2017-01-16 16:28:40 +0000 |
1877 | +++ hooks/charmhelpers/core/host.py 2023-06-30 13:58:42 +0000 |
1878 | @@ -1,4 +1,4 @@ |
1879 | -# Copyright 2014-2015 Canonical Limited. |
1880 | +# Copyright 2014-2021 Canonical Limited. |
1881 | # |
1882 | # Licensed under the Apache License, Version 2.0 (the "License"); |
1883 | # you may not use this file except in compliance with the License. |
1884 | @@ -19,6 +19,7 @@ |
1885 | # Nick Moffitt <nick.moffitt@canonical.com> |
1886 | # Matthew Wedgwood <matthew.wedgwood@canonical.com> |
1887 | |
1888 | +import errno |
1889 | import os |
1890 | import re |
1891 | import pwd |
1892 | @@ -30,66 +31,199 @@ |
1893 | import hashlib |
1894 | import functools |
1895 | import itertools |
1896 | -import six |
1897 | |
1898 | from contextlib import contextmanager |
1899 | -from collections import OrderedDict |
1900 | -from .hookenv import log |
1901 | +from collections import OrderedDict, defaultdict |
1902 | +from .hookenv import log, INFO, DEBUG, local_unit, charm_name |
1903 | from .fstab import Fstab |
1904 | from charmhelpers.osplatform import get_platform |
1905 | |
1906 | __platform__ = get_platform() |
1907 | if __platform__ == "ubuntu": |
1908 | - from charmhelpers.core.host_factory.ubuntu import ( |
1909 | + from charmhelpers.core.host_factory.ubuntu import ( # NOQA:F401 |
1910 | service_available, |
1911 | add_new_group, |
1912 | lsb_release, |
1913 | cmp_pkgrevno, |
1914 | + CompareHostReleases, |
1915 | + get_distrib_codename, |
1916 | + arch |
1917 | ) # flake8: noqa -- ignore F401 for this import |
1918 | elif __platform__ == "centos": |
1919 | - from charmhelpers.core.host_factory.centos import ( |
1920 | + from charmhelpers.core.host_factory.centos import ( # NOQA:F401 |
1921 | service_available, |
1922 | add_new_group, |
1923 | lsb_release, |
1924 | cmp_pkgrevno, |
1925 | + CompareHostReleases, |
1926 | ) # flake8: noqa -- ignore F401 for this import |
1927 | |
1928 | - |
1929 | -def service_start(service_name): |
1930 | - """Start a system service""" |
1931 | - return service('start', service_name) |
1932 | - |
1933 | - |
1934 | -def service_stop(service_name): |
1935 | - """Stop a system service""" |
1936 | - return service('stop', service_name) |
1937 | - |
1938 | - |
1939 | -def service_restart(service_name): |
1940 | - """Restart a system service""" |
1941 | +UPDATEDB_PATH = '/etc/updatedb.conf' |
1942 | +CA_CERT_DIR = '/usr/local/share/ca-certificates' |
1943 | + |
1944 | + |
1945 | +def service_start(service_name, **kwargs): |
1946 | + """Start a system service. |
1947 | + |
1948 | + The specified service name is managed via the system level init system. |
1949 | + Some init systems (e.g. upstart) require that additional arguments be |
1950 | + provided in order to directly control service instances whereas other init |
1951 | + systems allow for addressing instances of a service directly by name (e.g. |
1952 | + systemd). |
1953 | + |
1954 | + The kwargs allow for the additional parameters to be passed to underlying |
1955 | + init systems for those systems which require/allow for them. For example, |
1956 | + the ceph-osd upstart script requires the id parameter to be passed along |
1957 | + in order to identify which running daemon should be reloaded. The follow- |
1958 | + ing example stops the ceph-osd service for instance id=4: |
1959 | + |
1960 | + service_stop('ceph-osd', id=4) |
1961 | + |
1962 | + :param service_name: the name of the service to stop |
1963 | + :param **kwargs: additional parameters to pass to the init system when |
1964 | + managing services. These will be passed as key=value |
1965 | + parameters to the init system's commandline. kwargs |
1966 | + are ignored for systemd enabled systems. |
1967 | + """ |
1968 | + return service('start', service_name, **kwargs) |
1969 | + |
1970 | + |
1971 | +def service_stop(service_name, **kwargs): |
1972 | + """Stop a system service. |
1973 | + |
1974 | + The specified service name is managed via the system level init system. |
1975 | + Some init systems (e.g. upstart) require that additional arguments be |
1976 | + provided in order to directly control service instances whereas other init |
1977 | + systems allow for addressing instances of a service directly by name (e.g. |
1978 | + systemd). |
1979 | + |
1980 | + The kwargs allow for the additional parameters to be passed to underlying |
1981 | + init systems for those systems which require/allow for them. For example, |
1982 | + the ceph-osd upstart script requires the id parameter to be passed along |
1983 | + in order to identify which running daemon should be reloaded. The follow- |
1984 | + ing example stops the ceph-osd service for instance id=4: |
1985 | + |
1986 | + service_stop('ceph-osd', id=4) |
1987 | + |
1988 | + :param service_name: the name of the service to stop |
1989 | + :param **kwargs: additional parameters to pass to the init system when |
1990 | + managing services. These will be passed as key=value |
1991 | + parameters to the init system's commandline. kwargs |
1992 | + are ignored for systemd enabled systems. |
1993 | + """ |
1994 | + return service('stop', service_name, **kwargs) |
1995 | + |
1996 | + |
1997 | +def service_enable(service_name, **kwargs): |
1998 | + """Enable a system service. |
1999 | + |
2000 | + The specified service name is managed via the system level init system. |
2001 | + Some init systems (e.g. upstart) require that additional arguments be |
2002 | + provided in order to directly control service instances whereas other init |
2003 | + systems allow for addressing instances of a service directly by name (e.g. |
2004 | + systemd). |
2005 | + |
2006 | + The kwargs allow for the additional parameters to be passed to underlying |
2007 | + init systems for those systems which require/allow for them. For example, |
2008 | + the ceph-osd upstart script requires the id parameter to be passed along |
2009 | + in order to identify which running daemon should be restarted. The follow- |
2010 | + ing example restarts the ceph-osd service for instance id=4: |
2011 | + |
2012 | + service_enable('ceph-osd', id=4) |
2013 | + |
2014 | + :param service_name: the name of the service to enable |
2015 | + :param **kwargs: additional parameters to pass to the init system when |
2016 | + managing services. These will be passed as key=value |
2017 | + parameters to the init system's commandline. kwargs |
2018 | + are ignored for init systems not allowing additional |
2019 | + parameters via the commandline (systemd). |
2020 | + """ |
2021 | + return service('enable', service_name, **kwargs) |
2022 | + |
2023 | + |
2024 | +def service_restart(service_name, **kwargs): |
2025 | + """Restart a system service. |
2026 | + |
2027 | + The specified service name is managed via the system level init system. |
2028 | + Some init systems (e.g. upstart) require that additional arguments be |
2029 | + provided in order to directly control service instances whereas other init |
2030 | + systems allow for addressing instances of a service directly by name (e.g. |
2031 | + systemd). |
2032 | + |
2033 | + The kwargs allow for the additional parameters to be passed to underlying |
2034 | + init systems for those systems which require/allow for them. For example, |
2035 | + the ceph-osd upstart script requires the id parameter to be passed along |
2036 | + in order to identify which running daemon should be restarted. The follow- |
2037 | + ing example restarts the ceph-osd service for instance id=4: |
2038 | + |
2039 | + service_restart('ceph-osd', id=4) |
2040 | + |
2041 | + :param service_name: the name of the service to restart |
2042 | + :param **kwargs: additional parameters to pass to the init system when |
2043 | + managing services. These will be passed as key=value |
2044 | + parameters to the init system's commandline. kwargs |
2045 | + are ignored for init systems not allowing additional |
2046 | + parameters via the commandline (systemd). |
2047 | + """ |
2048 | return service('restart', service_name) |
2049 | |
2050 | |
2051 | -def service_reload(service_name, restart_on_failure=False): |
2052 | +def service_reload(service_name, restart_on_failure=False, **kwargs): |
2053 | """Reload a system service, optionally falling back to restart if |
2054 | - reload fails""" |
2055 | - service_result = service('reload', service_name) |
2056 | + reload fails. |
2057 | + |
2058 | + The specified service name is managed via the system level init system. |
2059 | + Some init systems (e.g. upstart) require that additional arguments be |
2060 | + provided in order to directly control service instances whereas other init |
2061 | + systems allow for addressing instances of a service directly by name (e.g. |
2062 | + systemd). |
2063 | + |
2064 | + The kwargs allow for the additional parameters to be passed to underlying |
2065 | + init systems for those systems which require/allow for them. For example, |
2066 | + the ceph-osd upstart script requires the id parameter to be passed along |
2067 | + in order to identify which running daemon should be reloaded. The follow- |
2068 | + ing example restarts the ceph-osd service for instance id=4: |
2069 | + |
2070 | + service_reload('ceph-osd', id=4) |
2071 | + |
2072 | + :param service_name: the name of the service to reload |
2073 | + :param restart_on_failure: boolean indicating whether to fallback to a |
2074 | + restart if the reload fails. |
2075 | + :param **kwargs: additional parameters to pass to the init system when |
2076 | + managing services. These will be passed as key=value |
2077 | + parameters to the init system's commandline. kwargs |
2078 | + are ignored for init systems not allowing additional |
2079 | + parameters via the commandline (systemd). |
2080 | + """ |
2081 | + service_result = service('reload', service_name, **kwargs) |
2082 | if not service_result and restart_on_failure: |
2083 | - service_result = service('restart', service_name) |
2084 | + service_result = service('restart', service_name, **kwargs) |
2085 | return service_result |
2086 | |
2087 | |
2088 | -def service_pause(service_name, init_dir="/etc/init", initd_dir="/etc/init.d"): |
2089 | +def service_pause(service_name, init_dir="/etc/init", initd_dir="/etc/init.d", |
2090 | + **kwargs): |
2091 | """Pause a system service. |
2092 | |
2093 | - Stop it, and prevent it from starting again at boot.""" |
2094 | + Stop it, and prevent it from starting again at boot. |
2095 | + |
2096 | + :param service_name: the name of the service to pause |
2097 | + :param init_dir: path to the upstart init directory |
2098 | + :param initd_dir: path to the sysv init directory |
2099 | + :param **kwargs: additional parameters to pass to the init system when |
2100 | + managing services. These will be passed as key=value |
2101 | + parameters to the init system's commandline. kwargs |
2102 | + are ignored for init systems which do not support |
2103 | + key=value arguments via the commandline. |
2104 | + """ |
2105 | stopped = True |
2106 | - if service_running(service_name): |
2107 | - stopped = service_stop(service_name) |
2108 | + if service_running(service_name, **kwargs): |
2109 | + stopped = service_stop(service_name, **kwargs) |
2110 | upstart_file = os.path.join(init_dir, "{}.conf".format(service_name)) |
2111 | sysv_file = os.path.join(initd_dir, service_name) |
2112 | - if init_is_systemd(): |
2113 | + if init_is_systemd(service_name=service_name): |
2114 | service('disable', service_name) |
2115 | + service('mask', service_name) |
2116 | elif os.path.exists(upstart_file): |
2117 | override_path = os.path.join( |
2118 | init_dir, '{}.override'.format(service_name)) |
2119 | @@ -106,13 +240,23 @@ |
2120 | |
2121 | |
2122 | def service_resume(service_name, init_dir="/etc/init", |
2123 | - initd_dir="/etc/init.d"): |
2124 | + initd_dir="/etc/init.d", **kwargs): |
2125 | """Resume a system service. |
2126 | |
2127 | - Reenable starting again at boot. Start the service""" |
2128 | + Re-enable starting again at boot. Start the service. |
2129 | + |
2130 | + :param service_name: the name of the service to resume |
2131 | + :param init_dir: the path to the init dir |
2132 | + :param initd_dir: the path to the initd dir |
2133 | + :param **kwargs: additional parameters to pass to the init system when |
2134 | + managing services. These will be passed as key=value |
2135 | + parameters to the init system's commandline. kwargs |
2136 | + are ignored for systemd enabled systems. |
2137 | + """ |
2138 | upstart_file = os.path.join(init_dir, "{}.conf".format(service_name)) |
2139 | sysv_file = os.path.join(initd_dir, service_name) |
2140 | - if init_is_systemd(): |
2141 | + if init_is_systemd(service_name=service_name): |
2142 | + service('unmask', service_name) |
2143 | service('enable', service_name) |
2144 | elif os.path.exists(upstart_file): |
2145 | override_path = os.path.join( |
2146 | @@ -126,19 +270,30 @@ |
2147 | "Unable to detect {0} as SystemD, Upstart {1} or" |
2148 | " SysV {2}".format( |
2149 | service_name, upstart_file, sysv_file)) |
2150 | + started = service_running(service_name, **kwargs) |
2151 | |
2152 | - started = service_running(service_name) |
2153 | if not started: |
2154 | - started = service_start(service_name) |
2155 | + started = service_start(service_name, **kwargs) |
2156 | return started |
2157 | |
2158 | |
2159 | -def service(action, service_name): |
2160 | - """Control a system service""" |
2161 | - if init_is_systemd(): |
2162 | - cmd = ['systemctl', action, service_name] |
2163 | +def service(action, service_name=None, **kwargs): |
2164 | + """Control a system service. |
2165 | + |
2166 | + :param action: the action to take on the service |
2167 | + :param service_name: the name of the service to perform the action on |
2168 | + :param **kwargs: additional params to be passed to the service command in |
2169 | + the form of key=value. |
2170 | + """ |
2171 | + if init_is_systemd(service_name=service_name): |
2172 | + cmd = ['systemctl', action] |
2173 | + if service_name is not None: |
2174 | + cmd.append(service_name) |
2175 | else: |
2176 | cmd = ['service', service_name, action] |
2177 | + for key, value in kwargs.items(): |
2178 | + parameter = '%s=%s' % (key, value) |
2179 | + cmd.append(parameter) |
2180 | return subprocess.call(cmd) == 0 |
2181 | |
2182 | |
2183 | @@ -146,16 +301,27 @@ |
2184 | _INIT_D_CONF = "/etc/init.d/{}" |
2185 | |
2186 | |
2187 | -def service_running(service_name): |
2188 | - """Determine whether a system service is running""" |
2189 | - if init_is_systemd(): |
2190 | +def service_running(service_name, **kwargs): |
2191 | + """Determine whether a system service is running. |
2192 | + |
2193 | + :param service_name: the name of the service |
2194 | + :param **kwargs: additional args to pass to the service command. This is |
2195 | + used to pass additional key=value arguments to the |
2196 | + service command line for managing specific instance |
2197 | + units (e.g. service ceph-osd status id=2). The kwargs |
2198 | + are ignored in systemd services. |
2199 | + """ |
2200 | + if init_is_systemd(service_name=service_name): |
2201 | return service('is-active', service_name) |
2202 | else: |
2203 | if os.path.exists(_UPSTART_CONF.format(service_name)): |
2204 | try: |
2205 | + cmd = ['status', service_name] |
2206 | + for key, value in kwargs.items(): |
2207 | + parameter = '%s=%s' % (key, value) |
2208 | + cmd.append(parameter) |
2209 | output = subprocess.check_output( |
2210 | - ['status', service_name], |
2211 | - stderr=subprocess.STDOUT).decode('UTF-8') |
2212 | + cmd, stderr=subprocess.STDOUT).decode('UTF-8') |
2213 | except subprocess.CalledProcessError: |
2214 | return False |
2215 | else: |
2216 | @@ -175,8 +341,16 @@ |
2217 | SYSTEMD_SYSTEM = '/run/systemd/system' |
2218 | |
2219 | |
2220 | -def init_is_systemd(): |
2221 | - """Return True if the host system uses systemd, False otherwise.""" |
2222 | +def init_is_systemd(service_name=None): |
2223 | + """ |
2224 | + Returns whether the host uses systemd for the specified service. |
2225 | + |
2226 | + @param Optional[str] service_name: specific name of service |
2227 | + """ |
2228 | + if str(service_name).startswith("snap."): |
2229 | + return True |
2230 | + if lsb_release()['DISTRIB_CODENAME'] == 'trusty': |
2231 | + return False |
2232 | return os.path.isdir(SYSTEMD_SYSTEM) |
2233 | |
2234 | |
2235 | @@ -306,6 +480,51 @@ |
2236 | subprocess.check_call(cmd) |
2237 | |
2238 | |
2239 | +def chage(username, lastday=None, expiredate=None, inactive=None, |
2240 | + mindays=None, maxdays=None, root=None, warndays=None): |
2241 | + """Change user password expiry information |
2242 | + |
2243 | + :param str username: User to update |
2244 | + :param str lastday: Set when password was changed in YYYY-MM-DD format |
2245 | + :param str expiredate: Set when user's account will no longer be |
2246 | + accessible in YYYY-MM-DD format. |
2247 | + -1 will remove an account expiration date. |
2248 | + :param str inactive: Set the number of days of inactivity after a password |
2249 | + has expired before the account is locked. |
2250 | + -1 will remove an account's inactivity. |
2251 | + :param str mindays: Set the minimum number of days between password |
2252 | + changes to MIN_DAYS. |
2253 | + 0 indicates the password can be changed anytime. |
2254 | + :param str maxdays: Set the maximum number of days during which a |
2255 | + password is valid. |
2256 | + -1 as MAX_DAYS will remove checking maxdays |
2257 | + :param str root: Apply changes in the CHROOT_DIR directory |
2258 | + :param str warndays: Set the number of days of warning before a password |
2259 | + change is required |
2260 | + :raises subprocess.CalledProcessError: if call to chage fails |
2261 | + """ |
2262 | + cmd = ['chage'] |
2263 | + if root: |
2264 | + cmd.extend(['--root', root]) |
2265 | + if lastday: |
2266 | + cmd.extend(['--lastday', lastday]) |
2267 | + if expiredate: |
2268 | + cmd.extend(['--expiredate', expiredate]) |
2269 | + if inactive: |
2270 | + cmd.extend(['--inactive', inactive]) |
2271 | + if mindays: |
2272 | + cmd.extend(['--mindays', mindays]) |
2273 | + if maxdays: |
2274 | + cmd.extend(['--maxdays', maxdays]) |
2275 | + if warndays: |
2276 | + cmd.extend(['--warndays', warndays]) |
2277 | + cmd.append(username) |
2278 | + subprocess.check_call(cmd) |
2279 | + |
2280 | + |
2281 | +remove_password_expiry = functools.partial(chage, expiredate='-1', inactive='-1', mindays='0', maxdays='-1') |
2282 | + |
2283 | + |
2284 | def rsync(from_path, to_path, flags='-r', options=None, timeout=None): |
2285 | """Replicate the contents of a path""" |
2286 | options = options or ['--delete', '--executability'] |
2287 | @@ -352,13 +571,45 @@ |
2288 | |
2289 | def write_file(path, content, owner='root', group='root', perms=0o444): |
2290 | """Create or overwrite a file with the contents of a byte string.""" |
2291 | - log("Writing file {} {}:{} {:o}".format(path, owner, group, perms)) |
2292 | uid = pwd.getpwnam(owner).pw_uid |
2293 | gid = grp.getgrnam(group).gr_gid |
2294 | - with open(path, 'wb') as target: |
2295 | - os.fchown(target.fileno(), uid, gid) |
2296 | - os.fchmod(target.fileno(), perms) |
2297 | - target.write(content) |
2298 | + # let's see if we can grab the file and compare the contents, to avoid doing |
2299 | + # a write. |
2300 | + existing_content = None |
2301 | + existing_uid, existing_gid, existing_perms = None, None, None |
2302 | + try: |
2303 | + with open(path, 'rb') as target: |
2304 | + existing_content = target.read() |
2305 | + stat = os.stat(path) |
2306 | + existing_uid, existing_gid, existing_perms = ( |
2307 | + stat.st_uid, stat.st_gid, stat.st_mode |
2308 | + ) |
2309 | + except Exception: |
2310 | + pass |
2311 | + if content != existing_content: |
2312 | + log("Writing file {} {}:{} {:o}".format(path, owner, group, perms), |
2313 | + level=DEBUG) |
2314 | + with open(path, 'wb') as target: |
2315 | + os.fchown(target.fileno(), uid, gid) |
2316 | + os.fchmod(target.fileno(), perms) |
2317 | + if isinstance(content, str): |
2318 | + content = content.encode('UTF-8') |
2319 | + target.write(content) |
2320 | + return |
2321 | + # the contents were the same, but we might still need to change the |
2322 | + # ownership or permissions. |
2323 | + if existing_uid != uid: |
2324 | + log("Changing uid on already existing content: {} -> {}" |
2325 | + .format(existing_uid, uid), level=DEBUG) |
2326 | + os.chown(path, uid, -1) |
2327 | + if existing_gid != gid: |
2328 | + log("Changing gid on already existing content: {} -> {}" |
2329 | + .format(existing_gid, gid), level=DEBUG) |
2330 | + os.chown(path, -1, gid) |
2331 | + if existing_perms != perms: |
2332 | + log("Changing permissions on existing content: {} -> {}" |
2333 | + .format(existing_perms, perms), level=DEBUG) |
2334 | + os.chmod(path, perms) |
2335 | |
2336 | |
2337 | def fstab_remove(mp): |
2338 | @@ -456,7 +707,7 @@ |
2339 | |
2340 | :param str checksum: Value of the checksum used to validate the file. |
2341 | :param str hash_type: Hash algorithm used to generate `checksum`. |
2342 | - Can be any hash alrgorithm supported by :mod:`hashlib`, |
2343 | + Can be any hash algorithm supported by :mod:`hashlib`, |
2344 | such as md5, sha1, sha256, sha512, etc. |
2345 | :raises ChecksumError: If the file fails the checksum |
2346 | |
2347 | @@ -471,78 +722,227 @@ |
2348 | pass |
2349 | |
2350 | |
2351 | -def restart_on_change(restart_map, stopstart=False, restart_functions=None): |
2352 | - """Restart services based on configuration files changing |
2353 | - |
2354 | - This function is used a decorator, for example:: |
2355 | - |
2356 | - @restart_on_change({ |
2357 | - '/etc/ceph/ceph.conf': [ 'cinder-api', 'cinder-volume' ] |
2358 | - '/etc/apache/sites-enabled/*': [ 'apache2' ] |
2359 | - }) |
2360 | - def config_changed(): |
2361 | - pass # your code here |
2362 | - |
2363 | - In this example, the cinder-api and cinder-volume services |
2364 | - would be restarted if /etc/ceph/ceph.conf is changed by the |
2365 | - ceph_client_changed function. The apache2 service would be |
2366 | - restarted if any file matching the pattern got changed, created |
2367 | - or removed. Standard wildcards are supported, see documentation |
2368 | - for the 'glob' module for more information. |
2369 | - |
2370 | - @param restart_map: {path_file_name: [service_name, ...] |
2371 | - @param stopstart: DEFAULT false; whether to stop, start OR restart |
2372 | - @param restart_functions: nonstandard functions to use to restart services |
2373 | - {svc: func, ...} |
2374 | - @returns result from decorated function |
2375 | +class restart_on_change(object): |
2376 | + """Decorator and context manager to handle restarts. |
2377 | + |
2378 | + Usage: |
2379 | + |
2380 | + @restart_on_change(restart_map, ...) |
2381 | + def function_that_might_trigger_a_restart(...) |
2382 | + ... |
2383 | + |
2384 | + Or: |
2385 | + |
2386 | + with restart_on_change(restart_map, ...): |
2387 | + do_stuff_that_might_trigger_a_restart() |
2388 | + ... |
2389 | """ |
2390 | - def wrap(f): |
2391 | + |
2392 | + def __init__(self, restart_map, stopstart=False, restart_functions=None, |
2393 | + can_restart_now_f=None, post_svc_restart_f=None, |
2394 | + pre_restarts_wait_f=None): |
2395 | + """ |
2396 | + :param restart_map: {file: [service, ...]} |
2397 | + :type restart_map: Dict[str, List[str,]] |
2398 | + :param stopstart: whether to stop, start or restart a service |
2399 | + :type stopstart: boolean
2400 | + :param restart_functions: nonstandard functions to use to restart |
2401 | + services {svc: func, ...} |
2402 | + :type restart_functions: Dict[str, Callable[[str], None]] |
2403 | + :param can_restart_now_f: A function used to check if the restart is |
2404 | + permitted. |
2405 | + :type can_restart_now_f: Callable[[str, List[str]], boolean] |
2406 | + :param post_svc_restart_f: A function run after a service has |
2407 | + restarted. |
2408 | + :type post_svc_restart_f: Callable[[str], None] |
2409 | + :param pre_restarts_wait_f: A function called before any restarts. |
2410 | + :type pre_restarts_wait_f: Callable[[], None]
2411 | + """ |
2412 | + self.restart_map = restart_map |
2413 | + self.stopstart = stopstart |
2414 | + self.restart_functions = restart_functions |
2415 | + self.can_restart_now_f = can_restart_now_f |
2416 | + self.post_svc_restart_f = post_svc_restart_f |
2417 | + self.pre_restarts_wait_f = pre_restarts_wait_f |
2418 | + |
2419 | + def __call__(self, f): |
2420 | + """Work like a decorator. |
2421 | + |
2422 | + Returns a wrapped function that performs the restart if triggered. |
2423 | + |
2424 | + :param f: The function that is being wrapped. |
2425 | + :type f: Callable[[Any], Any] |
2426 | + :returns: the wrapped function |
2427 | + :rtype: Callable[[Any], Any] |
2428 | + """ |
2429 | @functools.wraps(f) |
2430 | def wrapped_f(*args, **kwargs): |
2431 | return restart_on_change_helper( |
2432 | - (lambda: f(*args, **kwargs)), restart_map, stopstart, |
2433 | - restart_functions) |
2434 | + (lambda: f(*args, **kwargs)), |
2435 | + self.restart_map, |
2436 | + stopstart=self.stopstart, |
2437 | + restart_functions=self.restart_functions, |
2438 | + can_restart_now_f=self.can_restart_now_f, |
2439 | + post_svc_restart_f=self.post_svc_restart_f, |
2440 | + pre_restarts_wait_f=self.pre_restarts_wait_f) |
2441 | return wrapped_f |
2442 | - return wrap |
2443 | + |
2444 | + def __enter__(self): |
2445 | + """Enter the runtime context related to this object. """ |
2446 | + self.checksums = _pre_restart_on_change_helper(self.restart_map) |
2447 | + |
2448 | + def __exit__(self, exc_type, exc_val, exc_tb): |
2449 | + """Exit the runtime context related to this object. |
2450 | + |
2451 | + The parameters describe the exception that caused the context to be |
2452 | + exited. If the context was exited without an exception, all three |
2453 | + arguments will be None. |
2454 | + """ |
2455 | + if exc_type is None: |
2456 | + _post_restart_on_change_helper( |
2457 | + self.checksums, |
2458 | + self.restart_map, |
2459 | + stopstart=self.stopstart, |
2460 | + restart_functions=self.restart_functions, |
2461 | + can_restart_now_f=self.can_restart_now_f, |
2462 | + post_svc_restart_f=self.post_svc_restart_f, |
2463 | + pre_restarts_wait_f=self.pre_restarts_wait_f) |
2464 | + # All is good, so return False; any exceptions will propagate. |
2465 | + return False |
2466 | |
2467 | |
2468 | def restart_on_change_helper(lambda_f, restart_map, stopstart=False, |
2469 | - restart_functions=None): |
2470 | + restart_functions=None, |
2471 | + can_restart_now_f=None, |
2472 | + post_svc_restart_f=None, |
2473 | + pre_restarts_wait_f=None): |
2474 | """Helper function to perform the restart_on_change function. |
2475 | |
2476 | This is provided for decorators to restart services if files described |
2477 | in the restart_map have changed after an invocation of lambda_f(). |
2478 | |
2479 | - @param lambda_f: function to call. |
2480 | - @param restart_map: {file: [service, ...]} |
2481 | - @param stopstart: whether to stop, start or restart a service |
2482 | - @param restart_functions: nonstandard functions to use to restart services |
2483 | - {svc: func, ...} |
2484 | - @returns result of lambda_f() |
2485 | + This function allows for a number of helper functions to be passed.
2486 | + |
2487 | + `restart_functions` is a map with a service as the key and the |
2488 | + corresponding value being the function to call to restart the service. For |
2489 | + example if `restart_functions={'some-service': my_restart_func}` then |
2490 | + `my_restart_func` should a function which takes one argument which is the |
2491 | + service name to be retstarted. |
2492 | + |
2493 | + `can_restart_now_f` is a function which checks that a restart is permitted. |
2494 | + It should return a bool which indicates if a restart is allowed and should |
2495 | + take a service name (str) and a list of changed files (List[str]) as |
2496 | + arguments. |
2497 | + |
2498 | + `post_svc_restart_f` is a function which runs after a service has been |
2499 | + restarted. It takes the service name that was restarted as an argument. |
2500 | + |
2501 | + `pre_restarts_wait_f` is a function which is called before any restarts |
2502 | + occur. The use case for this is an application which wants to try and |
2503 | + stagger restarts between units. |
2504 | + |
2505 | + :param lambda_f: function to call. |
2506 | + :type lambda_f: Callable[[], ANY] |
2507 | + :param restart_map: {file: [service, ...]} |
2508 | + :type restart_map: Dict[str, List[str,]] |
2509 | + :param stopstart: whether to stop, start or restart a service |
2510 | + :type stopstart: boolean
2511 | + :param restart_functions: nonstandard functions to use to restart services |
2512 | + {svc: func, ...} |
2513 | + :type restart_functions: Dict[str, Callable[[str], None]] |
2514 | + :param can_restart_now_f: A function used to check if the restart is |
2515 | + permitted. |
2516 | + :type can_restart_now_f: Callable[[str, List[str]], boolean] |
2517 | + :param post_svc_restart_f: A function run after a service has |
2518 | + restarted. |
2519 | + :type post_svc_restart_f: Callable[[str], None] |
2520 | + :param pre_restarts_wait_f: A function called before any restarts. |
2521 | + :type pre_restarts_wait_f: Callable[[], None]
2522 | + :returns: result of lambda_f() |
2523 | + :rtype: ANY |
2524 | + """ |
2525 | + checksums = _pre_restart_on_change_helper(restart_map) |
2526 | + r = lambda_f() |
2527 | + _post_restart_on_change_helper(checksums, |
2528 | + restart_map, |
2529 | + stopstart, |
2530 | + restart_functions, |
2531 | + can_restart_now_f, |
2532 | + post_svc_restart_f, |
2533 | + pre_restarts_wait_f) |
2534 | + return r |
2535 | + |
2536 | + |
2537 | +def _pre_restart_on_change_helper(restart_map): |
2538 | + """Take a snapshot of file hashes. |
2539 | + |
2540 | + :param restart_map: {file: [service, ...]} |
2541 | + :type restart_map: Dict[str, List[str,]] |
2542 | + :returns: Dictionary of file paths and the files checksum. |
2543 | + :rtype: Dict[str, str] |
2544 | + """ |
2545 | + return {path: path_hash(path) for path in restart_map} |
2546 | + |
2547 | + |
2548 | +def _post_restart_on_change_helper(checksums, |
2549 | + restart_map, |
2550 | + stopstart=False, |
2551 | + restart_functions=None, |
2552 | + can_restart_now_f=None, |
2553 | + post_svc_restart_f=None, |
2554 | + pre_restarts_wait_f=None): |
2555 | + """Check whether files have changed. |
2556 | + |
2557 | + :param checksums: Dictionary of file paths and the files checksum. |
2558 | + :type checksums: Dict[str, str] |
2559 | + :param restart_map: {file: [service, ...]} |
2560 | + :type restart_map: Dict[str, List[str,]] |
2561 | + :param stopstart: whether to stop, start or restart a service |
2562 | + :type stopstart: boolean
2563 | + :param restart_functions: nonstandard functions to use to restart services |
2564 | + {svc: func, ...} |
2565 | + :type restart_functions: Dict[str, Callable[[str], None]] |
2566 | + :param can_restart_now_f: A function used to check if the restart is |
2567 | + permitted. |
2568 | + :type can_restart_now_f: Callable[[str, List[str]], boolean] |
2569 | + :param post_svc_restart_f: A function run after a service has |
2570 | + restarted. |
2571 | + :type post_svc_restart_f: Callable[[str], None] |
2572 | + :param pre_restarts_wait_f: A function called before any restarts. |
2573 | + :type pre_restarts_wait_f: Callable[[], None]
2574 | """ |
2575 | if restart_functions is None: |
2576 | restart_functions = {} |
2577 | - checksums = {path: path_hash(path) for path in restart_map} |
2578 | - r = lambda_f() |
2579 | + changed_files = defaultdict(list) |
2580 | + restarts = [] |
2581 | # create a list of lists of the services to restart |
2582 | - restarts = [restart_map[path] |
2583 | - for path in restart_map |
2584 | - if path_hash(path) != checksums[path]] |
2585 | + for path, services in restart_map.items(): |
2586 | + if path_hash(path) != checksums[path]: |
2587 | + restarts.append(services) |
2588 | + for svc in services: |
2589 | + changed_files[svc].append(path) |
2590 | # create a flat list of ordered services without duplicates from lists |
2591 | services_list = list(OrderedDict.fromkeys(itertools.chain(*restarts))) |
2592 | if services_list: |
2593 | + if pre_restarts_wait_f: |
2594 | + pre_restarts_wait_f() |
2595 | actions = ('stop', 'start') if stopstart else ('restart',) |
2596 | for service_name in services_list: |
2597 | + if can_restart_now_f: |
2598 | + if not can_restart_now_f(service_name, |
2599 | + changed_files[service_name]): |
2600 | + continue |
2601 | if service_name in restart_functions: |
2602 | restart_functions[service_name](service_name) |
2603 | else: |
2604 | for action in actions: |
2605 | service(action, service_name) |
2606 | - return r |
2607 | + if post_svc_restart_f: |
2608 | + post_svc_restart_f(service_name) |
2609 | |
2610 | |
2611 | def pwgen(length=None): |
2612 | - """Generate a random pasword.""" |
2613 | + """Generate a random password.""" |
2614 | if length is None: |
2615 | # A random length is ok to use a weak PRNG |
2616 | length = random.choice(range(35, 45)) |
2617 | @@ -554,7 +954,7 @@ |
2618 | random_generator = random.SystemRandom() |
2619 | random_chars = [ |
2620 | random_generator.choice(alphanumeric_chars) for _ in range(length)] |
2621 | - return(''.join(random_chars)) |
2622 | + return ''.join(random_chars) |
2623 | |
2624 | |
2625 | def is_phy_iface(interface): |
2626 | @@ -595,7 +995,7 @@ |
2627 | |
2628 | def list_nics(nic_type=None): |
2629 | """Return a list of nics of given type(s)""" |
2630 | - if isinstance(nic_type, six.string_types): |
2631 | + if isinstance(nic_type, str): |
2632 | int_types = [nic_type] |
2633 | else: |
2634 | int_types = nic_type |
2635 | @@ -604,7 +1004,8 @@ |
2636 | if nic_type: |
2637 | for int_type in int_types: |
2638 | cmd = ['ip', 'addr', 'show', 'label', int_type + '*'] |
2639 | - ip_output = subprocess.check_output(cmd).decode('UTF-8') |
2640 | + ip_output = subprocess.check_output( |
2641 | + cmd).decode('UTF-8', errors='replace') |
2642 | ip_output = ip_output.split('\n') |
2643 | ip_output = (line for line in ip_output if line) |
2644 | for line in ip_output: |
2645 | @@ -620,10 +1021,11 @@ |
2646 | interfaces.append(iface) |
2647 | else: |
2648 | cmd = ['ip', 'a'] |
2649 | - ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n') |
2650 | + ip_output = subprocess.check_output( |
2651 | + cmd).decode('UTF-8', errors='replace').split('\n') |
2652 | ip_output = (line.strip() for line in ip_output if line) |
2653 | |
2654 | - key = re.compile('^[0-9]+:\s+(.+):') |
2655 | + key = re.compile(r'^[0-9]+:\s+(.+):') |
2656 | for line in ip_output: |
2657 | matched = re.search(key, line) |
2658 | if matched: |
2659 | @@ -644,7 +1046,8 @@ |
2660 | def get_nic_mtu(nic): |
2661 | """Return the Maximum Transmission Unit (MTU) for a network interface.""" |
2662 | cmd = ['ip', 'addr', 'show', nic] |
2663 | - ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n') |
2664 | + ip_output = subprocess.check_output( |
2665 | + cmd).decode('UTF-8', errors='replace').split('\n') |
2666 | mtu = "" |
2667 | for line in ip_output: |
2668 | words = line.split() |
2669 | @@ -656,7 +1059,7 @@ |
2670 | def get_nic_hwaddr(nic): |
2671 | """Return the Media Access Control (MAC) for a network interface.""" |
2672 | cmd = ['ip', '-o', '-0', 'addr', 'show', nic] |
2673 | - ip_output = subprocess.check_output(cmd).decode('UTF-8') |
2674 | + ip_output = subprocess.check_output(cmd).decode('UTF-8', errors='replace') |
2675 | hwaddr = "" |
2676 | words = ip_output.split() |
2677 | if 'link/ether' in words: |
2678 | @@ -668,7 +1071,7 @@ |
2679 | def chdir(directory): |
2680 | """Change the current working directory to a different directory for a code |
2681 | block and return the previous directory after the block exits. Useful to |
2682 | - run commands from a specificed directory. |
2683 | + run commands from a specified directory. |
2684 | |
2685 | :param str directory: The directory path to change to for this context. |
2686 | """ |
2687 | @@ -703,9 +1106,12 @@ |
2688 | for root, dirs, files in os.walk(path, followlinks=follow_links): |
2689 | for name in dirs + files: |
2690 | full = os.path.join(root, name) |
2691 | - broken_symlink = os.path.lexists(full) and not os.path.exists(full) |
2692 | - if not broken_symlink: |
2693 | + try: |
2694 | chown(full, uid, gid) |
2695 | + except (IOError, OSError) as e: |
2696 | + # Intended to ignore "file not found". |
2697 | + if e.errno == errno.ENOENT: |
2698 | + pass |
2699 | |
2700 | |
2701 | def lchownr(path, owner, group): |
2702 | @@ -720,6 +1126,20 @@ |
2703 | chownr(path, owner, group, follow_links=False) |
2704 | |
2705 | |
2706 | +def owner(path): |
2707 | + """Returns a tuple containing the username & groupname owning the path. |
2708 | + |
2709 | + :param str path: the string path to retrieve the ownership |
2710 | + :return tuple(str, str): A (username, groupname) tuple containing the |
2711 | + name of the user and group owning the path. |
2712 | + :raises OSError: if the specified path does not exist |
2713 | + """ |
2714 | + stat = os.stat(path) |
2715 | + username = pwd.getpwuid(stat.st_uid)[0] |
2716 | + groupname = grp.getgrgid(stat.st_gid)[0] |
2717 | + return username, groupname |
2718 | + |
2719 | + |
2720 | def get_total_ram(): |
2721 | """The total amount of system RAM in bytes. |
2722 | |
2723 | @@ -751,3 +1171,136 @@ |
2724 | else: |
2725 | # Detect using upstart container file marker |
2726 | return os.path.exists(UPSTART_CONTAINER_TYPE) |
2727 | + |
2728 | + |
2729 | +def add_to_updatedb_prunepath(path, updatedb_path=UPDATEDB_PATH): |
2730 | + """Adds the specified path to the mlocate's udpatedb.conf PRUNEPATH list. |
2731 | + |
2732 | + This method has no effect if the path specified by updatedb_path does not |
2733 | + exist or is not a file. |
2734 | + |
2735 | + @param path: string the path to add to the updatedb.conf PRUNEPATHS value |
2736 | + @param updatedb_path: the path to the updatedb.conf file
2737 | + """ |
2738 | + if not os.path.exists(updatedb_path) or os.path.isdir(updatedb_path): |
2739 | + # If the updatedb.conf file doesn't exist then don't attempt to update |
2740 | + # the file as the package providing mlocate may not be installed on |
2741 | + # the local system |
2742 | + return |
2743 | + |
2744 | + with open(updatedb_path, 'r+') as f_id: |
2745 | + updatedb_text = f_id.read() |
2746 | + output = updatedb(updatedb_text, path) |
2747 | + f_id.seek(0) |
2748 | + f_id.write(output) |
2749 | + f_id.truncate() |
2750 | + |
2751 | + |
2752 | +def updatedb(updatedb_text, new_path): |
2753 | + lines = [line for line in updatedb_text.split("\n")] |
2754 | + for i, line in enumerate(lines): |
2755 | + if line.startswith("PRUNEPATHS="): |
2756 | + paths_line = line.split("=")[1].replace('"', '') |
2757 | + paths = paths_line.split(" ") |
2758 | + if new_path not in paths: |
2759 | + paths.append(new_path) |
2760 | + lines[i] = 'PRUNEPATHS="{}"'.format(' '.join(paths)) |
2761 | + output = "\n".join(lines) |
2762 | + return output |
2763 | + |
2764 | + |
2765 | +def modulo_distribution(modulo=3, wait=30, non_zero_wait=False): |
2766 | + """ Modulo distribution |
2767 | + |
2768 | + This helper uses the unit number, a modulo value and a constant wait time |
2769 | + to produce a calculated wait time distribution. This is useful in large |
2770 | + scale deployments to distribute load during an expensive operation such as |
2771 | + service restarts. |
2772 | + |
2773 | + If you have 1000 nodes that need to restart 100 at a time 1 minute at a |
2774 | + time: |
2775 | + |
2776 | + time.wait(modulo_distribution(modulo=100, wait=60)) |
2777 | + restart() |
2778 | + |
2779 | + If you need restarts to happen serially set modulo to the exact number of |
2780 | + nodes and set a high constant wait time: |
2781 | + |
2782 | + time.wait(modulo_distribution(modulo=10, wait=120)) |
2783 | + restart() |
2784 | + |
2785 | + @param modulo: int The modulo number creates the group distribution |
2786 | + @param wait: int The constant time wait value |
2787 | + @param non_zero_wait: boolean Override unit % modulo == 0, |
2788 | + return modulo * wait. Used to avoid collisions with |
2789 | + leader nodes which are often given priority. |
2790 | + @return: int Calculated time to wait for unit operation |
2791 | + """ |
2792 | + unit_number = int(local_unit().split('/')[1]) |
2793 | + calculated_wait_time = (unit_number % modulo) * wait |
2794 | + if non_zero_wait and calculated_wait_time == 0: |
2795 | + return modulo * wait |
2796 | + else: |
2797 | + return calculated_wait_time |
2798 | + |
2799 | + |
2800 | +def ca_cert_absolute_path(basename_without_extension): |
2801 | + """Returns absolute path to CA certificate. |
2802 | + |
2803 | + :param basename_without_extension: Filename without extension |
2804 | + :type basename_without_extension: str |
2805 | + :returns: Absolute full path |
2806 | + :rtype: str |
2807 | + """ |
2808 | + return '{}/{}.crt'.format(CA_CERT_DIR, basename_without_extension) |
2809 | + |
2810 | + |
2811 | +def install_ca_cert(ca_cert, name=None): |
2812 | + """ |
2813 | + Install the given cert as a trusted CA. |
2814 | + |
2815 | + The ``name`` is the stem of the filename where the cert is written, and if |
2816 | + not provided, it will default to ``juju-{charm_name}``. |
2817 | + |
2818 | + If the cert is empty or None, or is unchanged, nothing is done. |
2819 | + """ |
2820 | + if not ca_cert: |
2821 | + return |
2822 | + if not isinstance(ca_cert, bytes): |
2823 | + ca_cert = ca_cert.encode('utf8') |
2824 | + if not name: |
2825 | + name = 'juju-{}'.format(charm_name()) |
2826 | + cert_file = ca_cert_absolute_path(name) |
2827 | + new_hash = hashlib.md5(ca_cert).hexdigest() |
2828 | + if file_hash(cert_file) == new_hash: |
2829 | + return |
2830 | + log("Installing new CA cert at: {}".format(cert_file), level=INFO) |
2831 | + write_file(cert_file, ca_cert) |
2832 | + subprocess.check_call(['update-ca-certificates', '--fresh']) |
2833 | + |
2834 | + |
2835 | +def get_system_env(key, default=None): |
2836 | + """Get data from system environment as represented in ``/etc/environment``. |
2837 | + |
2838 | + :param key: Key to look up |
2839 | + :type key: str |
2840 | + :param default: Value to return if key is not found |
2841 | + :type default: any |
2842 | + :returns: Value for key if found or contents of default parameter |
2843 | + :rtype: any |
2844 | + :raises: subprocess.CalledProcessError |
2845 | + """ |
2846 | + env_file = '/etc/environment' |
2847 | + # use the shell and env(1) to parse the global environments file. This is |
2848 | + # done to get the correct result even if the user has shell variable |
2849 | + # substitutions or other shell logic in that file. |
2850 | + output = subprocess.check_output( |
2851 | + ['env', '-i', '/bin/bash', '-c', |
2852 | + 'set -a && source {} && env'.format(env_file)], |
2853 | + universal_newlines=True) |
2854 | + for k, v in (line.split('=', 1) |
2855 | + for line in output.splitlines() if '=' in line): |
2856 | + if k == key: |
2857 | + return v |
2858 | + else: |
2859 | + return default |
2860 | |
2861 | === modified file 'hooks/charmhelpers/core/host_factory/centos.py' |
2862 | --- hooks/charmhelpers/core/host_factory/centos.py 2016-12-20 20:15:28 +0000 |
2863 | +++ hooks/charmhelpers/core/host_factory/centos.py 2023-06-30 13:58:42 +0000 |
2864 | @@ -2,6 +2,22 @@ |
2865 | import yum |
2866 | import os |
2867 | |
2868 | +from charmhelpers.core.strutils import BasicStringComparator |
2869 | + |
2870 | + |
2871 | +class CompareHostReleases(BasicStringComparator): |
2872 | + """Provide comparisons of Host releases. |
2873 | + |
2874 | + Use in the form of |
2875 | + |
2876 | + if CompareHostReleases(release) > 'trusty': |
2877 | + # do something with mitaka |
2878 | + """ |
2879 | + |
2880 | + def __init__(self, item): |
2881 | + raise NotImplementedError( |
2882 | + "CompareHostReleases() is not implemented for CentOS") |
2883 | + |
2884 | |
2885 | def service_available(service_name): |
2886 | # """Determine whether a system service is available.""" |
2887 | |
2888 | === modified file 'hooks/charmhelpers/core/host_factory/ubuntu.py' |
2889 | --- hooks/charmhelpers/core/host_factory/ubuntu.py 2016-12-20 20:15:28 +0000 |
2890 | +++ hooks/charmhelpers/core/host_factory/ubuntu.py 2023-06-30 13:58:42 +0000 |
2891 | @@ -1,5 +1,50 @@ |
2892 | import subprocess |
2893 | |
2894 | +from charmhelpers.core.hookenv import cached |
2895 | +from charmhelpers.core.strutils import BasicStringComparator |
2896 | + |
2897 | + |
2898 | +UBUNTU_RELEASES = ( |
2899 | + 'lucid', |
2900 | + 'maverick', |
2901 | + 'natty', |
2902 | + 'oneiric', |
2903 | + 'precise', |
2904 | + 'quantal', |
2905 | + 'raring', |
2906 | + 'saucy', |
2907 | + 'trusty', |
2908 | + 'utopic', |
2909 | + 'vivid', |
2910 | + 'wily', |
2911 | + 'xenial', |
2912 | + 'yakkety', |
2913 | + 'zesty', |
2914 | + 'artful', |
2915 | + 'bionic', |
2916 | + 'cosmic', |
2917 | + 'disco', |
2918 | + 'eoan', |
2919 | + 'focal', |
2920 | + 'groovy', |
2921 | + 'hirsute', |
2922 | + 'impish', |
2923 | + 'jammy', |
2924 | + 'kinetic', |
2925 | + 'lunar', |
2926 | +) |
2927 | + |
2928 | + |
2929 | +class CompareHostReleases(BasicStringComparator): |
2930 | + """Provide comparisons of Ubuntu releases. |
2931 | + |
2932 | + Use in the form of |
2933 | + |
2934 | + if CompareHostReleases(release) > 'trusty': |
2935 | + # do something with mitaka |
2936 | + """ |
2937 | + _list = UBUNTU_RELEASES |
2938 | + |
2939 | |
2940 | def service_available(service_name): |
2941 | """Determine whether a system service is available""" |
2942 | @@ -37,6 +82,14 @@ |
2943 | return d |
2944 | |
2945 | |
2946 | +def get_distrib_codename(): |
2947 | + """Return the codename of the distribution |
2948 | + :returns: The codename |
2949 | + :rtype: str |
2950 | + """ |
2951 | + return lsb_release()['DISTRIB_CODENAME'].lower() |
2952 | + |
2953 | + |
2954 | def cmp_pkgrevno(package, revno, pkgcache=None): |
2955 | """Compare supplied revno with the revno of the installed package. |
2956 | |
2957 | @@ -48,9 +101,24 @@ |
2958 | the pkgcache argument is None. Be sure to add charmhelpers.fetch if |
2959 | you call this function, or pass an apt_pkg.Cache() instance. |
2960 | """ |
2961 | - import apt_pkg |
2962 | + from charmhelpers.fetch import apt_pkg, get_installed_version |
2963 | if not pkgcache: |
2964 | - from charmhelpers.fetch import apt_cache |
2965 | - pkgcache = apt_cache() |
2966 | - pkg = pkgcache[package] |
2967 | - return apt_pkg.version_compare(pkg.current_ver.ver_str, revno) |
2968 | + current_ver = get_installed_version(package) |
2969 | + else: |
2970 | + pkg = pkgcache[package] |
2971 | + current_ver = pkg.current_ver |
2972 | + |
2973 | + return apt_pkg.version_compare(current_ver.ver_str, revno) |
2974 | + |
2975 | + |
2976 | +@cached |
2977 | +def arch(): |
2978 | + """Return the package architecture as a string. |
2979 | + |
2980 | + :returns: the architecture |
2981 | + :rtype: str |
2982 | + :raises: subprocess.CalledProcessError if dpkg command fails |
2983 | + """ |
2984 | + return subprocess.check_output( |
2985 | + ['dpkg', '--print-architecture'] |
2986 | + ).rstrip().decode('UTF-8') |
2987 | |
2988 | === modified file 'hooks/charmhelpers/core/kernel.py' |
2989 | --- hooks/charmhelpers/core/kernel.py 2016-12-20 20:15:28 +0000 |
2990 | +++ hooks/charmhelpers/core/kernel.py 2023-06-30 13:58:42 +0000 |
2991 | @@ -26,12 +26,12 @@ |
2992 | |
2993 | __platform__ = get_platform() |
2994 | if __platform__ == "ubuntu": |
2995 | - from charmhelpers.core.kernel_factory.ubuntu import ( |
2996 | + from charmhelpers.core.kernel_factory.ubuntu import ( # NOQA:F401 |
2997 | persistent_modprobe, |
2998 | update_initramfs, |
2999 | ) # flake8: noqa -- ignore F401 for this import |
3000 | elif __platform__ == "centos": |
3001 | - from charmhelpers.core.kernel_factory.centos import ( |
3002 | + from charmhelpers.core.kernel_factory.centos import ( # NOQA:F401 |
3003 | persistent_modprobe, |
3004 | update_initramfs, |
3005 | ) # flake8: noqa -- ignore F401 for this import |
3006 | |
3007 | === modified file 'hooks/charmhelpers/core/services/base.py' |
3008 | --- hooks/charmhelpers/core/services/base.py 2016-12-20 14:35:00 +0000 |
3009 | +++ hooks/charmhelpers/core/services/base.py 2023-06-30 13:58:42 +0000 |
3010 | @@ -14,8 +14,9 @@ |
3011 | |
3012 | import os |
3013 | import json |
3014 | -from inspect import getargspec |
3015 | -from collections import Iterable, OrderedDict |
3016 | +import inspect |
3017 | +from collections import OrderedDict |
3018 | +from collections.abc import Iterable |
3019 | |
3020 | from charmhelpers.core import host |
3021 | from charmhelpers.core import hookenv |
3022 | @@ -169,7 +170,7 @@ |
3023 | if not units: |
3024 | continue |
3025 | remote_service = units[0].split('/')[0] |
3026 | - argspec = getargspec(provider.provide_data) |
3027 | + argspec = inspect.getfullargspec(provider.provide_data) |
3028 | if len(argspec.args) > 1: |
3029 | data = provider.provide_data(remote_service, service_ready) |
3030 | else: |
3031 | @@ -307,23 +308,34 @@ |
3032 | """ |
3033 | def __call__(self, manager, service_name, event_name): |
3034 | service = manager.get_service(service_name) |
3035 | - new_ports = service.get('ports', []) |
3036 | + # turn this generator into a list, |
3037 | + # as we'll be going over it multiple times |
3038 | + new_ports = list(service.get('ports', [])) |
3039 | port_file = os.path.join(hookenv.charm_dir(), '.{}.ports'.format(service_name)) |
3040 | if os.path.exists(port_file): |
3041 | with open(port_file) as fp: |
3042 | old_ports = fp.read().split(',') |
3043 | for old_port in old_ports: |
3044 | - if bool(old_port): |
3045 | - old_port = int(old_port) |
3046 | - if old_port not in new_ports: |
3047 | - hookenv.close_port(old_port) |
3048 | + if bool(old_port) and not self.ports_contains(old_port, new_ports): |
3049 | + hookenv.close_port(old_port) |
3050 | with open(port_file, 'w') as fp: |
3051 | fp.write(','.join(str(port) for port in new_ports)) |
3052 | for port in new_ports: |
3053 | + # A port is either a number or 'ICMP' |
3054 | + protocol = 'TCP' |
3055 | + if str(port).upper() == 'ICMP': |
3056 | + protocol = 'ICMP' |
3057 | if event_name == 'start': |
3058 | - hookenv.open_port(port) |
3059 | + hookenv.open_port(port, protocol) |
3060 | elif event_name == 'stop': |
3061 | - hookenv.close_port(port) |
3062 | + hookenv.close_port(port, protocol) |
3063 | + |
3064 | + def ports_contains(self, port, ports): |
3065 | + if not bool(port): |
3066 | + return False |
3067 | + if str(port).upper() != 'ICMP': |
3068 | + port = int(port) |
3069 | + return port in ports |
3070 | |
3071 | |
3072 | def service_stop(service_name): |
3073 | |
3074 | === modified file 'hooks/charmhelpers/core/services/helpers.py' |
3075 | --- hooks/charmhelpers/core/services/helpers.py 2016-12-20 14:35:00 +0000 |
3076 | +++ hooks/charmhelpers/core/services/helpers.py 2023-06-30 13:58:42 +0000 |
3077 | @@ -179,7 +179,7 @@ |
3078 | self.required_options = args |
3079 | self['config'] = hookenv.config() |
3080 | with open(os.path.join(hookenv.charm_dir(), 'config.yaml')) as fp: |
3081 | - self.config = yaml.load(fp).get('options', {}) |
3082 | + self.config = yaml.safe_load(fp).get('options', {}) |
3083 | |
3084 | def __bool__(self): |
3085 | for option in self.required_options: |
3086 | @@ -227,7 +227,7 @@ |
3087 | if not os.path.isabs(file_name): |
3088 | file_name = os.path.join(hookenv.charm_dir(), file_name) |
3089 | with open(file_name, 'r') as file_stream: |
3090 | - data = yaml.load(file_stream) |
3091 | + data = yaml.safe_load(file_stream) |
3092 | if not data: |
3093 | raise OSError("%s is empty" % file_name) |
3094 | return data |
3095 | |
3096 | === modified file 'hooks/charmhelpers/core/strutils.py' |
3097 | --- hooks/charmhelpers/core/strutils.py 2016-12-20 14:35:00 +0000 |
3098 | +++ hooks/charmhelpers/core/strutils.py 2023-06-30 13:58:42 +0000 |
3099 | @@ -15,26 +15,28 @@ |
3100 | # See the License for the specific language governing permissions and |
3101 | # limitations under the License. |
3102 | |
3103 | -import six |
3104 | import re |
3105 | |
3106 | - |
3107 | -def bool_from_string(value): |
3108 | +TRUTHY_STRINGS = {'y', 'yes', 'true', 't', 'on'} |
3109 | +FALSEY_STRINGS = {'n', 'no', 'false', 'f', 'off'} |
3110 | + |
3111 | + |
3112 | +def bool_from_string(value, truthy_strings=TRUTHY_STRINGS, falsey_strings=FALSEY_STRINGS, assume_false=False): |
3113 | """Interpret string value as boolean. |
3114 | |
3115 | Returns True if value translates to True otherwise False. |
3116 | """ |
3117 | - if isinstance(value, six.string_types): |
3118 | - value = six.text_type(value) |
3119 | + if isinstance(value, str): |
3120 | + value = str(value) |
3121 | else: |
3122 | msg = "Unable to interpret non-string value '%s' as boolean" % (value) |
3123 | raise ValueError(msg) |
3124 | |
3125 | value = value.strip().lower() |
3126 | |
3127 | - if value in ['y', 'yes', 'true', 't', 'on']: |
3128 | + if value in truthy_strings: |
3129 | return True |
3130 | - elif value in ['n', 'no', 'false', 'f', 'off']: |
3131 | + elif value in falsey_strings or assume_false: |
3132 | return False |
3133 | |
3134 | msg = "Unable to interpret string value '%s' as boolean" % (value) |
3135 | @@ -58,13 +60,72 @@ |
3136 | 'P': 5, |
3137 | 'PB': 5, |
3138 | } |
3139 | - if isinstance(value, six.string_types): |
3140 | - value = six.text_type(value) |
3141 | + if isinstance(value, str): |
3142 | + value = str(value) |
3143 | else: |
3144 | - msg = "Unable to interpret non-string value '%s' as boolean" % (value) |
3145 | + msg = "Unable to interpret non-string value '%s' as bytes" % (value) |
3146 | raise ValueError(msg) |
3147 | matches = re.match("([0-9]+)([a-zA-Z]+)", value) |
3148 | - if not matches: |
3149 | - msg = "Unable to interpret string value '%s' as bytes" % (value) |
3150 | - raise ValueError(msg) |
3151 | - return int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)]) |
3152 | + if matches: |
3153 | + size = int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)]) |
3154 | + else: |
3155 | + # Assume that value passed in is bytes |
3156 | + try: |
3157 | + size = int(value) |
3158 | + except ValueError: |
3159 | + msg = "Unable to interpret string value '%s' as bytes" % (value) |
3160 | + raise ValueError(msg) |
3161 | + return size |
3162 | + |
3163 | + |
3164 | +class BasicStringComparator(object): |
3165 | + """Provides a class that will compare strings from an iterator type object. |
3166 | + Used to provide > and < comparisons on strings that may not necessarily be |
3167 | + alphanumerically ordered. e.g. OpenStack or Ubuntu releases AFTER the |
3168 | + z-wrap. |
3169 | + """ |
3170 | + |
3171 | + _list = None |
3172 | + |
3173 | + def __init__(self, item): |
3174 | + if self._list is None: |
3175 | + raise Exception("Must define the _list in the class definition!") |
3176 | + try: |
3177 | + self.index = self._list.index(item) |
3178 | + except Exception: |
3179 | + raise KeyError("Item '{}' is not in list '{}'" |
3180 | + .format(item, self._list)) |
3181 | + |
3182 | + def __eq__(self, other): |
3183 | + assert isinstance(other, str) or isinstance(other, self.__class__) |
3184 | + return self.index == self._list.index(other) |
3185 | + |
3186 | + def __ne__(self, other): |
3187 | + return not self.__eq__(other) |
3188 | + |
3189 | + def __lt__(self, other): |
3190 | + assert isinstance(other, str) or isinstance(other, self.__class__) |
3191 | + return self.index < self._list.index(other) |
3192 | + |
3193 | + def __ge__(self, other): |
3194 | + return not self.__lt__(other) |
3195 | + |
3196 | + def __gt__(self, other): |
3197 | + assert isinstance(other, str) or isinstance(other, self.__class__) |
3198 | + return self.index > self._list.index(other) |
3199 | + |
3200 | + def __le__(self, other): |
3201 | + return not self.__gt__(other) |
3202 | + |
3203 | + def __str__(self): |
3204 | + """Always give back the item at the index so it can be used in |
3205 | + comparisons like: |
3206 | + |
3207 | + s_mitaka = CompareOpenStack('mitaka') |
3208 | +          s_newton = CompareOpenStack('newton')
3209 | + |
3210 | + assert s_newton > s_mitaka |
3211 | + |
3212 | + @returns: <string> |
3213 | + """ |
3214 | + return self._list[self.index] |
3215 | |
3216 | === modified file 'hooks/charmhelpers/core/sysctl.py' |
3217 | --- hooks/charmhelpers/core/sysctl.py 2016-12-20 14:35:00 +0000 |
3218 | +++ hooks/charmhelpers/core/sysctl.py 2023-06-30 13:58:42 +0000 |
3219 | @@ -17,38 +17,59 @@ |
3220 | |
3221 | import yaml |
3222 | |
3223 | -from subprocess import check_call |
3224 | +from subprocess import check_call, CalledProcessError |
3225 | |
3226 | from charmhelpers.core.hookenv import ( |
3227 | log, |
3228 | DEBUG, |
3229 | ERROR, |
3230 | + WARNING, |
3231 | ) |
3232 | |
3233 | +from charmhelpers.core.host import is_container |
3234 | + |
3235 | __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>' |
3236 | |
3237 | |
3238 | -def create(sysctl_dict, sysctl_file): |
3239 | +def create(sysctl_dict, sysctl_file, ignore=False): |
3240 | """Creates a sysctl.conf file from a YAML associative array |
3241 | |
3242 | - :param sysctl_dict: a YAML-formatted string of sysctl options eg "{ 'kernel.max_pid': 1337 }" |
3243 | + :param sysctl_dict: a dict or YAML-formatted string of sysctl |
3244 | + options eg "{ 'kernel.max_pid': 1337 }" |
3245 | :type sysctl_dict: str |
3246 | :param sysctl_file: path to the sysctl file to be saved |
3247 | :type sysctl_file: str or unicode |
3248 | + :param ignore: If True, ignore "unknown variable" errors. |
3249 | + :type ignore: bool |
3250 | :returns: None |
3251 | """ |
3252 | - try: |
3253 | - sysctl_dict_parsed = yaml.safe_load(sysctl_dict) |
3254 | - except yaml.YAMLError: |
3255 | - log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict), |
3256 | - level=ERROR) |
3257 | - return |
3258 | + if type(sysctl_dict) is not dict: |
3259 | + try: |
3260 | + sysctl_dict_parsed = yaml.safe_load(sysctl_dict) |
3261 | + except yaml.YAMLError: |
3262 | + log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict), |
3263 | + level=ERROR) |
3264 | + return |
3265 | + else: |
3266 | + sysctl_dict_parsed = sysctl_dict |
3267 | |
3268 | with open(sysctl_file, "w") as fd: |
3269 | for key, value in sysctl_dict_parsed.items(): |
3270 | fd.write("{}={}\n".format(key, value)) |
3271 | |
3272 | - log("Updating sysctl_file: %s values: %s" % (sysctl_file, sysctl_dict_parsed), |
3273 | + log("Updating sysctl_file: {} values: {}".format(sysctl_file, |
3274 | + sysctl_dict_parsed), |
3275 | level=DEBUG) |
3276 | |
3277 | - check_call(["sysctl", "-p", sysctl_file]) |
3278 | + call = ["sysctl", "-p", sysctl_file] |
3279 | + if ignore: |
3280 | + call.append("-e") |
3281 | + |
3282 | + try: |
3283 | + check_call(call) |
3284 | + except CalledProcessError as e: |
3285 | + if is_container(): |
3286 | + log("Error setting some sysctl keys in this container: {}".format(e.output), |
3287 | + level=WARNING) |
3288 | + else: |
3289 | + raise e |
3290 | |
3291 | === modified file 'hooks/charmhelpers/core/templating.py' |
3292 | --- hooks/charmhelpers/core/templating.py 2016-12-20 14:35:00 +0000 |
3293 | +++ hooks/charmhelpers/core/templating.py 2023-06-30 13:58:42 +0000 |
3294 | @@ -13,14 +13,14 @@ |
3295 | # limitations under the License. |
3296 | |
3297 | import os |
3298 | -import sys |
3299 | |
3300 | from charmhelpers.core import host |
3301 | from charmhelpers.core import hookenv |
3302 | |
3303 | |
3304 | def render(source, target, context, owner='root', group='root', |
3305 | - perms=0o444, templates_dir=None, encoding='UTF-8', template_loader=None): |
3306 | + perms=0o444, templates_dir=None, encoding='UTF-8', |
3307 | + template_loader=None, config_template=None): |
3308 | """ |
3309 | Render a template. |
3310 | |
3311 | @@ -32,6 +32,9 @@ |
3312 | The context should be a dict containing the values to be replaced in the |
3313 | template. |
3314 | |
3315 | + config_template may be provided to render from a provided template instead |
3316 | + of loading from a file. |
3317 | + |
3318 | The `owner`, `group`, and `perms` options will be passed to `write_file`. |
3319 | |
3320 | If omitted, `templates_dir` defaults to the `templates` folder in the charm. |
3321 | @@ -39,9 +42,8 @@ |
3322 | The rendered template will be written to the file as well as being returned |
3323 | as a string. |
3324 | |
3325 | - Note: Using this requires python-jinja2 or python3-jinja2; if it is not |
3326 | - installed, calling this will attempt to use charmhelpers.fetch.apt_install |
3327 | - to install it. |
3328 | + Note: Using this requires python3-jinja2; if it is not installed, calling |
3329 | + this will attempt to use charmhelpers.fetch.apt_install to install it. |
3330 | """ |
3331 | try: |
3332 | from jinja2 import FileSystemLoader, Environment, exceptions |
3333 | @@ -53,10 +55,7 @@ |
3334 | 'charmhelpers.fetch to install it', |
3335 | level=hookenv.ERROR) |
3336 | raise |
3337 | - if sys.version_info.major == 2: |
3338 | - apt_install('python-jinja2', fatal=True) |
3339 | - else: |
3340 | - apt_install('python3-jinja2', fatal=True) |
3341 | + apt_install('python3-jinja2', fatal=True) |
3342 | from jinja2 import FileSystemLoader, Environment, exceptions |
3343 | |
3344 | if template_loader: |
3345 | @@ -65,14 +64,19 @@ |
3346 | if templates_dir is None: |
3347 | templates_dir = os.path.join(hookenv.charm_dir(), 'templates') |
3348 | template_env = Environment(loader=FileSystemLoader(templates_dir)) |
3349 | - try: |
3350 | - source = source |
3351 | - template = template_env.get_template(source) |
3352 | - except exceptions.TemplateNotFound as e: |
3353 | - hookenv.log('Could not load template %s from %s.' % |
3354 | - (source, templates_dir), |
3355 | - level=hookenv.ERROR) |
3356 | - raise e |
3357 | + |
3358 | + # load from a string if provided explicitly |
3359 | + if config_template is not None: |
3360 | + template = template_env.from_string(config_template) |
3361 | + else: |
3362 | + try: |
3363 | + source = source |
3364 | + template = template_env.get_template(source) |
3365 | + except exceptions.TemplateNotFound as e: |
3366 | + hookenv.log('Could not load template %s from %s.' % |
3367 | + (source, templates_dir), |
3368 | + level=hookenv.ERROR) |
3369 | + raise e |
3370 | content = template.render(context) |
3371 | if target is not None: |
3372 | target_dir = os.path.dirname(target) |
3373 | |
3374 | === modified file 'hooks/charmhelpers/core/unitdata.py' |
3375 | --- hooks/charmhelpers/core/unitdata.py 2016-12-20 14:35:00 +0000 |
3376 | +++ hooks/charmhelpers/core/unitdata.py 2023-06-30 13:58:42 +0000 |
3377 | @@ -1,7 +1,7 @@ |
3378 | #!/usr/bin/env python |
3379 | # -*- coding: utf-8 -*- |
3380 | # |
3381 | -# Copyright 2014-2015 Canonical Limited. |
3382 | +# Copyright 2014-2021 Canonical Limited. |
3383 | # |
3384 | # Licensed under the Apache License, Version 2.0 (the "License"); |
3385 | # you may not use this file except in compliance with the License. |
3386 | @@ -61,7 +61,7 @@ |
3387 | 'previous value', prev, |
3388 | 'current value', cur) |
3389 | |
3390 | - # Get some unit specific bookeeping |
3391 | + # Get some unit specific bookkeeping |
3392 | if not db.get('pkg_key'): |
3393 | key = urllib.urlopen('https://example.com/pkg_key').read() |
3394 | db.set('pkg_key', key) |
3395 | @@ -166,15 +166,23 @@ |
3396 | |
3397 | To support dicts, lists, integer, floats, and booleans values |
3398 | are automatically json encoded/decoded. |
3399 | + |
3400 | + Note: to facilitate unit testing, ':memory:' can be passed as the |
3401 | + path parameter which causes sqlite3 to only build the db in memory. |
3402 | + This should only be used for testing purposes. |
3403 | """ |
3404 | - def __init__(self, path=None): |
3405 | + def __init__(self, path=None, keep_revisions=False): |
3406 | self.db_path = path |
3407 | + self.keep_revisions = keep_revisions |
3408 | if path is None: |
3409 | if 'UNIT_STATE_DB' in os.environ: |
3410 | self.db_path = os.environ['UNIT_STATE_DB'] |
3411 | else: |
3412 | self.db_path = os.path.join( |
3413 | os.environ.get('CHARM_DIR', ''), '.unit-state.db') |
3414 | + if self.db_path != ':memory:': |
3415 | + with open(self.db_path, 'a') as f: |
3416 | + os.fchmod(f.fileno(), 0o600) |
3417 | self.conn = sqlite3.connect('%s' % self.db_path) |
3418 | self.cursor = self.conn.cursor() |
3419 | self.revision = None |
3420 | @@ -235,7 +243,7 @@ |
3421 | Remove a key from the database entirely. |
3422 | """ |
3423 | self.cursor.execute('delete from kv where key=?', [key]) |
3424 | - if self.revision and self.cursor.rowcount: |
3425 | + if self.keep_revisions and self.revision and self.cursor.rowcount: |
3426 | self.cursor.execute( |
3427 | 'insert into kv_revisions values (?, ?, ?)', |
3428 | [key, self.revision, json.dumps('DELETED')]) |
3429 | @@ -252,14 +260,14 @@ |
3430 | if keys is not None: |
3431 | keys = ['%s%s' % (prefix, key) for key in keys] |
3432 | self.cursor.execute('delete from kv where key in (%s)' % ','.join(['?'] * len(keys)), keys) |
3433 | - if self.revision and self.cursor.rowcount: |
3434 | + if self.keep_revisions and self.revision and self.cursor.rowcount: |
3435 | self.cursor.execute( |
3436 | 'insert into kv_revisions values %s' % ','.join(['(?, ?, ?)'] * len(keys)), |
3437 | list(itertools.chain.from_iterable((key, self.revision, json.dumps('DELETED')) for key in keys))) |
3438 | else: |
3439 | self.cursor.execute('delete from kv where key like ?', |
3440 | ['%s%%' % prefix]) |
3441 | - if self.revision and self.cursor.rowcount: |
3442 | + if self.keep_revisions and self.revision and self.cursor.rowcount: |
3443 | self.cursor.execute( |
3444 | 'insert into kv_revisions values (?, ?, ?)', |
3445 | ['%s%%' % prefix, self.revision, json.dumps('DELETED')]) |
3446 | @@ -292,7 +300,7 @@ |
3447 | where key = ?''', [serialized, key]) |
3448 | |
3449 | # Save |
3450 | - if not self.revision: |
3451 | + if (not self.keep_revisions) or (not self.revision): |
3452 | return value |
3453 | |
3454 | self.cursor.execute( |
3455 | @@ -358,7 +366,7 @@ |
3456 | try: |
3457 | yield self.revision |
3458 | self.revision = None |
3459 | - except: |
3460 | + except Exception: |
3461 | self.flush(False) |
3462 | self.revision = None |
3463 | raise |
3464 | @@ -442,7 +450,7 @@ |
3465 | 'previous value', prev, |
3466 | 'current value', cur) |
3467 | |
3468 | - # Get some unit specific bookeeping |
3469 | + # Get some unit specific bookkeeping |
3470 | if not db.get('pkg_key'): |
3471 | key = urllib.urlopen('https://example.com/pkg_key').read() |
3472 | db.set('pkg_key', key) |
3473 | |
3474 | === modified file 'hooks/charmhelpers/fetch/__init__.py' |
3475 | --- hooks/charmhelpers/fetch/__init__.py 2016-12-20 14:35:00 +0000 |
3476 | +++ hooks/charmhelpers/fetch/__init__.py 2023-06-30 13:58:42 +0000 |
3477 | @@ -1,4 +1,4 @@ |
3478 | -# Copyright 2014-2015 Canonical Limited. |
3479 | +# Copyright 2014-2021 Canonical Limited. |
3480 | # |
3481 | # Licensed under the Apache License, Version 2.0 (the "License"); |
3482 | # you may not use this file except in compliance with the License. |
3483 | @@ -20,11 +20,7 @@ |
3484 | log, |
3485 | ) |
3486 | |
3487 | -import six |
3488 | -if six.PY3: |
3489 | - from urllib.parse import urlparse, urlunparse |
3490 | -else: |
3491 | - from urlparse import urlparse, urlunparse |
3492 | +from urllib.parse import urlparse, urlunparse |
3493 | |
3494 | |
3495 | # The order of this list is very important. Handlers should be listed in from |
3496 | @@ -48,6 +44,13 @@ |
3497 | pass |
3498 | |
3499 | |
3500 | +class GPGKeyError(Exception): |
3501 | + """Exception occurs when a GPG key cannot be fetched or used. The message |
3502 | + indicates what the problem is. |
3503 | + """ |
3504 | + pass |
3505 | + |
3506 | + |
3507 | class BaseFetchHandler(object): |
3508 | |
3509 | """Base class for FetchHandler implementations in fetch plugins""" |
3510 | @@ -77,22 +80,30 @@ |
3511 | fetch = importlib.import_module(module) |
3512 | |
3513 | filter_installed_packages = fetch.filter_installed_packages |
3514 | -install = fetch.install |
3515 | -upgrade = fetch.upgrade |
3516 | -update = fetch.update |
3517 | -purge = fetch.purge |
3518 | +filter_missing_packages = fetch.filter_missing_packages |
3519 | +install = fetch.apt_install |
3520 | +upgrade = fetch.apt_upgrade |
3521 | +update = _fetch_update = fetch.apt_update |
3522 | +purge = fetch.apt_purge |
3523 | add_source = fetch.add_source |
3524 | |
3525 | if __platform__ == "ubuntu": |
3526 | apt_cache = fetch.apt_cache |
3527 | - apt_install = fetch.install |
3528 | - apt_update = fetch.update |
3529 | - apt_upgrade = fetch.upgrade |
3530 | - apt_purge = fetch.purge |
3531 | + apt_install = fetch.apt_install |
3532 | + apt_update = fetch.apt_update |
3533 | + apt_upgrade = fetch.apt_upgrade |
3534 | + apt_purge = fetch.apt_purge |
3535 | + apt_autoremove = fetch.apt_autoremove |
3536 | apt_mark = fetch.apt_mark |
3537 | apt_hold = fetch.apt_hold |
3538 | apt_unhold = fetch.apt_unhold |
3539 | + import_key = fetch.import_key |
3540 | get_upstream_version = fetch.get_upstream_version |
3541 | + apt_pkg = fetch.ubuntu_apt_pkg |
3542 | + get_apt_dpkg_env = fetch.get_apt_dpkg_env |
3543 | + get_installed_version = fetch.get_installed_version |
3544 | + OPENSTACK_RELEASES = fetch.OPENSTACK_RELEASES |
3545 | + UBUNTU_OPENSTACK_RELEASE = fetch.UBUNTU_OPENSTACK_RELEASE |
3546 | elif __platform__ == "centos": |
3547 | yum_search = fetch.yum_search |
3548 | |
3549 | @@ -119,14 +130,14 @@ |
3550 | sources = safe_load((config(sources_var) or '').strip()) or [] |
3551 | keys = safe_load((config(keys_var) or '').strip()) or None |
3552 | |
3553 | - if isinstance(sources, six.string_types): |
3554 | + if isinstance(sources, str): |
3555 | sources = [sources] |
3556 | |
3557 | if keys is None: |
3558 | for source in sources: |
3559 | add_source(source, None) |
3560 | else: |
3561 | - if isinstance(keys, six.string_types): |
3562 | + if isinstance(keys, str): |
3563 | keys = [keys] |
3564 | |
3565 | if len(sources) != len(keys): |
3566 | @@ -135,7 +146,7 @@ |
3567 | for source, key in zip(sources, keys): |
3568 | add_source(source, key) |
3569 | if update: |
3570 | - fetch.update(fatal=True) |
3571 | + _fetch_update(fatal=True) |
3572 | |
3573 | |
3574 | def install_remote(source, *args, **kwargs): |
3575 | @@ -190,7 +201,7 @@ |
3576 | classname) |
3577 | plugin_list.append(handler_class()) |
3578 | except NotImplementedError: |
3579 | - # Skip missing plugins so that they can be ommitted from |
3580 | + # Skip missing plugins so that they can be omitted from |
3581 | # installation if desired |
3582 | log("FetchHandler {} not found, skipping plugin".format( |
3583 | handler_name)) |
3584 | |
3585 | === modified file 'hooks/charmhelpers/fetch/archiveurl.py' |
3586 | --- hooks/charmhelpers/fetch/archiveurl.py 2016-12-20 14:35:00 +0000 |
3587 | +++ hooks/charmhelpers/fetch/archiveurl.py 2023-06-30 13:58:42 +0000 |
3588 | @@ -12,6 +12,7 @@ |
3589 | # See the License for the specific language governing permissions and |
3590 | # limitations under the License. |
3591 | |
3592 | +import contextlib |
3593 | import os |
3594 | import hashlib |
3595 | import re |
3596 | @@ -24,28 +25,21 @@ |
3597 | get_archive_handler, |
3598 | extract, |
3599 | ) |
3600 | +from charmhelpers.core.hookenv import ( |
3601 | + env_proxy_settings, |
3602 | +) |
3603 | from charmhelpers.core.host import mkdir, check_hash |
3604 | |
3605 | -import six |
3606 | -if six.PY3: |
3607 | - from urllib.request import ( |
3608 | - build_opener, install_opener, urlopen, urlretrieve, |
3609 | - HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler, |
3610 | - ) |
3611 | - from urllib.parse import urlparse, urlunparse, parse_qs |
3612 | - from urllib.error import URLError |
3613 | -else: |
3614 | - from urllib import urlretrieve |
3615 | - from urllib2 import ( |
3616 | - build_opener, install_opener, urlopen, |
3617 | - HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler, |
3618 | - URLError |
3619 | - ) |
3620 | - from urlparse import urlparse, urlunparse, parse_qs |
3621 | +from urllib.request import ( |
3622 | + build_opener, install_opener, urlopen, urlretrieve, |
3623 | + HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler, |
3624 | + ProxyHandler |
3625 | +) |
3626 | +from urllib.parse import urlparse, urlunparse, parse_qs |
3627 | +from urllib.error import URLError |
3628 | |
3629 | |
3630 | def splituser(host): |
3631 | - '''urllib.splituser(), but six's support of this seems broken''' |
3632 | _userprog = re.compile('^(.*)@(.*)$') |
3633 | match = _userprog.match(host) |
3634 | if match: |
3635 | @@ -54,7 +48,6 @@ |
3636 | |
3637 | |
3638 | def splitpasswd(user): |
3639 | - '''urllib.splitpasswd(), but six's support of this is missing''' |
3640 | _passwdprog = re.compile('^([^:]*):(.*)$', re.S) |
3641 | match = _passwdprog.match(user) |
3642 | if match: |
3643 | @@ -62,6 +55,20 @@ |
3644 | return user, None |
3645 | |
3646 | |
3647 | +@contextlib.contextmanager |
3648 | +def proxy_env(): |
3649 | + """ |
3650 | + Creates a context which temporarily modifies the proxy settings in os.environ. |
3651 | + """ |
3652 | + restore = {**os.environ} # Copy the current os.environ |
3653 | + juju_proxies = env_proxy_settings() or {} |
3654 | + os.environ.update(**juju_proxies) # Insert or Update the os.environ |
3655 | + yield os.environ |
3656 | + for key in juju_proxies: |
3657 | + del os.environ[key] # remove any keys which were added or updated |
3658 | + os.environ.update(**restore) # restore any original values |
3659 | + |
3660 | + |
3661 | class ArchiveUrlFetchHandler(BaseFetchHandler): |
3662 | """ |
3663 | Handler to download archive files from arbitrary URLs. |
3664 | @@ -89,9 +96,10 @@ |
3665 | :param str source: URL pointing to an archive file. |
3666 | :param str dest: Local path location to download archive file to. |
3667 | """ |
3668 | - # propogate all exceptions |
3669 | + # propagate all exceptions |
3670 | # URLError, OSError, etc |
3671 | proto, netloc, path, params, query, fragment = urlparse(source) |
3672 | + handlers = [] |
3673 | if proto in ('http', 'https'): |
3674 | auth, barehost = splituser(netloc) |
3675 | if auth is not None: |
3676 | @@ -101,10 +109,13 @@ |
3677 | # Realm is set to None in add_password to force the username and password |
3678 | # to be used whatever the realm |
3679 | passman.add_password(None, source, username, password) |
3680 | - authhandler = HTTPBasicAuthHandler(passman) |
3681 | - opener = build_opener(authhandler) |
3682 | - install_opener(opener) |
3683 | - response = urlopen(source) |
3684 | + handlers.append(HTTPBasicAuthHandler(passman)) |
3685 | + |
3686 | + with proxy_env(): |
3687 | + handlers.append(ProxyHandler()) |
3688 | + opener = build_opener(*handlers) |
3689 | + install_opener(opener) |
3690 | + response = urlopen(source) |
3691 | try: |
3692 | with open(dest, 'wb') as dest_file: |
3693 | dest_file.write(response.read()) |
3694 | @@ -150,10 +161,7 @@ |
3695 | raise UnhandledSource(e.strerror) |
3696 | options = parse_qs(url_parts.fragment) |
3697 | for key, value in options.items(): |
3698 | - if not six.PY3: |
3699 | - algorithms = hashlib.algorithms |
3700 | - else: |
3701 | - algorithms = hashlib.algorithms_available |
3702 | + algorithms = hashlib.algorithms_available |
3703 | if key in algorithms: |
3704 | if len(value) != 1: |
3705 | raise TypeError( |
3706 | |
3707 | === modified file 'hooks/charmhelpers/fetch/bzrurl.py' |
3708 | --- hooks/charmhelpers/fetch/bzrurl.py 2016-12-20 14:35:00 +0000 |
3709 | +++ hooks/charmhelpers/fetch/bzrurl.py 2023-06-30 13:58:42 +0000 |
3710 | @@ -13,7 +13,7 @@ |
3711 | # limitations under the License. |
3712 | |
3713 | import os |
3714 | -from subprocess import check_call |
3715 | +from subprocess import STDOUT, check_output |
3716 | from charmhelpers.fetch import ( |
3717 | BaseFetchHandler, |
3718 | UnhandledSource, |
3719 | @@ -55,7 +55,7 @@ |
3720 | cmd = ['bzr', 'branch'] |
3721 | cmd += cmd_opts |
3722 | cmd += [source, dest] |
3723 | - check_call(cmd) |
3724 | + check_output(cmd, stderr=STDOUT) |
3725 | |
3726 | def install(self, source, dest=None, revno=None): |
3727 | url_parts = self.parse_url(source) |
3728 | |
3729 | === modified file 'hooks/charmhelpers/fetch/centos.py' |
3730 | --- hooks/charmhelpers/fetch/centos.py 2016-12-20 20:15:28 +0000 |
3731 | +++ hooks/charmhelpers/fetch/centos.py 2023-06-30 13:58:42 +0000 |
3732 | @@ -15,7 +15,6 @@ |
3733 | import subprocess |
3734 | import os |
3735 | import time |
3736 | -import six |
3737 | import yum |
3738 | |
3739 | from tempfile import NamedTemporaryFile |
3740 | @@ -42,7 +41,7 @@ |
3741 | if options is not None: |
3742 | cmd.extend(options) |
3743 | cmd.append('install') |
3744 | - if isinstance(packages, six.string_types): |
3745 | + if isinstance(packages, str): |
3746 | cmd.append(packages) |
3747 | else: |
3748 | cmd.extend(packages) |
3749 | @@ -71,7 +70,7 @@ |
3750 | def purge(packages, fatal=False): |
3751 | """Purge one or more packages.""" |
3752 | cmd = ['yum', '--assumeyes', 'remove'] |
3753 | - if isinstance(packages, six.string_types): |
3754 | + if isinstance(packages, str): |
3755 | cmd.append(packages) |
3756 | else: |
3757 | cmd.extend(packages) |
3758 | @@ -83,7 +82,7 @@ |
3759 | """Search for a package.""" |
3760 | output = {} |
3761 | cmd = ['yum', 'search'] |
3762 | - if isinstance(packages, six.string_types): |
3763 | + if isinstance(packages, str): |
3764 | cmd.append(packages) |
3765 | else: |
3766 | cmd.extend(packages) |
3767 | @@ -132,7 +131,7 @@ |
3768 | key_file.write(key) |
3769 | key_file.flush() |
3770 | key_file.seek(0) |
3771 | - subprocess.check_call(['rpm', '--import', key_file]) |
3772 | + subprocess.check_call(['rpm', '--import', key_file.name]) |
3773 | else: |
3774 | subprocess.check_call(['rpm', '--import', key]) |
3775 | |
3776 | |
3777 | === modified file 'hooks/charmhelpers/fetch/giturl.py' |
3778 | --- hooks/charmhelpers/fetch/giturl.py 2016-12-20 14:35:00 +0000 |
3779 | +++ hooks/charmhelpers/fetch/giturl.py 2023-06-30 13:58:42 +0000 |
3780 | @@ -13,7 +13,7 @@ |
3781 | # limitations under the License. |
3782 | |
3783 | import os |
3784 | -from subprocess import check_call, CalledProcessError |
3785 | +from subprocess import check_output, CalledProcessError, STDOUT |
3786 | from charmhelpers.fetch import ( |
3787 | BaseFetchHandler, |
3788 | UnhandledSource, |
3789 | @@ -50,7 +50,7 @@ |
3790 | cmd = ['git', 'clone', source, dest, '--branch', branch] |
3791 | if depth: |
3792 | cmd.extend(['--depth', depth]) |
3793 | - check_call(cmd) |
3794 | + check_output(cmd, stderr=STDOUT) |
3795 | |
3796 | def install(self, source, branch="master", dest=None, depth=None): |
3797 | url_parts = self.parse_url(source) |
3798 | |
3799 | === added directory 'hooks/charmhelpers/fetch/python' |
3800 | === added file 'hooks/charmhelpers/fetch/python/__init__.py' |
3801 | --- hooks/charmhelpers/fetch/python/__init__.py 1970-01-01 00:00:00 +0000 |
3802 | +++ hooks/charmhelpers/fetch/python/__init__.py 2023-06-30 13:58:42 +0000 |
3803 | @@ -0,0 +1,13 @@ |
3804 | +# Copyright 2014-2019 Canonical Limited. |
3805 | +# |
3806 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
3807 | +# you may not use this file except in compliance with the License. |
3808 | +# You may obtain a copy of the License at |
3809 | +# |
3810 | +# http://www.apache.org/licenses/LICENSE-2.0 |
3811 | +# |
3812 | +# Unless required by applicable law or agreed to in writing, software |
3813 | +# distributed under the License is distributed on an "AS IS" BASIS, |
3814 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
3815 | +# See the License for the specific language governing permissions and |
3816 | +# limitations under the License. |
3817 | |
3818 | === added file 'hooks/charmhelpers/fetch/python/debug.py' |
3819 | --- hooks/charmhelpers/fetch/python/debug.py 1970-01-01 00:00:00 +0000 |
3820 | +++ hooks/charmhelpers/fetch/python/debug.py 2023-06-30 13:58:42 +0000 |
3821 | @@ -0,0 +1,52 @@ |
3822 | +#!/usr/bin/env python |
3823 | +# coding: utf-8 |
3824 | + |
3825 | +# Copyright 2014-2015 Canonical Limited. |
3826 | +# |
3827 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
3828 | +# you may not use this file except in compliance with the License. |
3829 | +# You may obtain a copy of the License at |
3830 | +# |
3831 | +# http://www.apache.org/licenses/LICENSE-2.0 |
3832 | +# |
3833 | +# Unless required by applicable law or agreed to in writing, software |
3834 | +# distributed under the License is distributed on an "AS IS" BASIS, |
3835 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
3836 | +# See the License for the specific language governing permissions and |
3837 | +# limitations under the License. |
3838 | + |
3839 | +import atexit |
3840 | +import sys |
3841 | + |
3842 | +from charmhelpers.fetch.python.rpdb import Rpdb |
3843 | +from charmhelpers.core.hookenv import ( |
3844 | + open_port, |
3845 | + close_port, |
3846 | + ERROR, |
3847 | + log |
3848 | +) |
3849 | + |
3850 | +__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" |
3851 | + |
3852 | +DEFAULT_ADDR = "0.0.0.0" |
3853 | +DEFAULT_PORT = 4444 |
3854 | + |
3855 | + |
3856 | +def _error(message): |
3857 | + log(message, level=ERROR) |
3858 | + |
3859 | + |
3860 | +def set_trace(addr=DEFAULT_ADDR, port=DEFAULT_PORT): |
3861 | + """ |
3862 | + Set a trace point using the remote debugger |
3863 | + """ |
3864 | + atexit.register(close_port, port) |
3865 | + try: |
3866 | + log("Starting a remote python debugger session on %s:%s" % (addr, |
3867 | + port)) |
3868 | + open_port(port) |
3869 | + debugger = Rpdb(addr=addr, port=port) |
3870 | + debugger.set_trace(sys._getframe().f_back) |
3871 | + except Exception: |
3872 | + _error("Cannot start a remote debug session on %s:%s" % (addr, |
3873 | + port)) |
3874 | |
3875 | === added file 'hooks/charmhelpers/fetch/python/packages.py' |
3876 | --- hooks/charmhelpers/fetch/python/packages.py 1970-01-01 00:00:00 +0000 |
3877 | +++ hooks/charmhelpers/fetch/python/packages.py 2023-06-30 13:58:42 +0000 |
3878 | @@ -0,0 +1,148 @@ |
3879 | +#!/usr/bin/env python |
3880 | +# coding: utf-8 |
3881 | + |
3882 | +# Copyright 2014-2021 Canonical Limited. |
3883 | +# |
3884 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
3885 | +# you may not use this file except in compliance with the License. |
3886 | +# You may obtain a copy of the License at |
3887 | +# |
3888 | +# http://www.apache.org/licenses/LICENSE-2.0 |
3889 | +# |
3890 | +# Unless required by applicable law or agreed to in writing, software |
3891 | +# distributed under the License is distributed on an "AS IS" BASIS, |
3892 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
3893 | +# See the License for the specific language governing permissions and |
3894 | +# limitations under the License. |
3895 | + |
3896 | +import os |
3897 | +import subprocess |
3898 | +import sys |
3899 | + |
3900 | +from charmhelpers.fetch import apt_install, apt_update |
3901 | +from charmhelpers.core.hookenv import charm_dir, log |
3902 | + |
3903 | +__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" |
3904 | + |
3905 | + |
3906 | +def pip_execute(*args, **kwargs): |
3907 | + """Overridden pip_execute() to stop sys.path being changed. |
3908 | + |
3909 | +    The act of importing main from the pip module seems to add wheels
3910 | +    from /usr/share/python-wheels (installed by various tools) to sys.path.
3911 | + This function ensures that sys.path remains the same after the call is |
3912 | + executed. |
3913 | + """ |
3914 | + try: |
3915 | + _path = sys.path |
3916 | + try: |
3917 | + from pip import main as _pip_execute |
3918 | + except ImportError: |
3919 | + apt_update() |
3920 | + apt_install('python3-pip') |
3921 | + from pip import main as _pip_execute |
3922 | + _pip_execute(*args, **kwargs) |
3923 | + finally: |
3924 | + sys.path = _path |
3925 | + |
3926 | + |
3927 | +def parse_options(given, available): |
3928 | + """Given a set of options, check if available""" |
3929 | + for key, value in sorted(given.items()): |
3930 | + if not value: |
3931 | + continue |
3932 | + if key in available: |
3933 | + yield "--{0}={1}".format(key, value) |
3934 | + |
3935 | + |
def pip_install_requirements(requirements, constraints=None, **options):
    """Install a requirements file.

    :param constraints: Path to pip constraints file.
    http://pip.readthedocs.org/en/stable/user_guide/#constraints-files
    """
    # Only a whitelisted subset of options is forwarded to pip.
    command = ["install"]
    command.extend(parse_options(options, ('proxy', 'src', 'log')))

    command.append("-r {0}".format(requirements))
    if constraints:
        command.append("-c {0}".format(constraints))
        log("Installing from file: {} with constraints {} "
            "and options: {}".format(requirements, constraints, command))
    else:
        log("Installing from file: {} with options: {}".format(requirements,
                                                               command))
    pip_execute(command)
3957 | + |
3958 | + |
def pip_install(package, fatal=False, upgrade=False, venv=None,
                constraints=None, **options):
    """Install a python package"""
    # Inside a virtualenv we invoke that venv's own pip binary directly;
    # otherwise the in-process pip_execute() wrapper is used.
    if venv:
        command = [os.path.join(venv, 'bin/pip'), "install"]
    else:
        command = ["install"]

    command.extend(parse_options(options, ('proxy', 'src', 'log',
                                           'index-url')))

    if upgrade:
        command.append('--upgrade')
    if constraints:
        command.extend(['-c', constraints])

    # Accept either a single package name or a list of them.
    pkgs = package if isinstance(package, list) else [package]
    command.extend(pkgs)

    log("Installing {} package with options: {}".format(package,
                                                        command))
    if venv:
        subprocess.check_call(command)
    else:
        pip_execute(command)
3989 | + |
3990 | + |
def pip_uninstall(package, **options):
    """Uninstall a python package"""
    # -q/-y: quiet, non-interactive removal.
    command = ["uninstall", "-q", "-y"]
    command.extend(parse_options(options, ('proxy', 'log')))

    # Accept either a single package name or a list of them.
    pkgs = package if isinstance(package, list) else [package]
    command.extend(pkgs)

    log("Uninstalling {} package with options: {}".format(package,
                                                          command))
    pip_execute(command)
4007 | + |
4008 | + |
def pip_list():
    """Return the list of currently installed python packages."""
    command = ["list"]
    return pip_execute(command)
4013 | + |
4014 | + |
def pip_create_virtualenv(path=None):
    """Create an isolated Python environment."""
    # virtualenv tool plus its python3 backing package.
    apt_install(['python3-virtualenv', 'virtualenv'])

    venv_path = path if path else os.path.join(charm_dir(), 'venv')

    # Creating the environment is idempotent: skip if it already exists.
    if not os.path.exists(venv_path):
        subprocess.check_call(['virtualenv', venv_path, '--python=python3'])
4027 | |
4028 | === added file 'hooks/charmhelpers/fetch/python/rpdb.py' |
4029 | --- hooks/charmhelpers/fetch/python/rpdb.py 1970-01-01 00:00:00 +0000 |
4030 | +++ hooks/charmhelpers/fetch/python/rpdb.py 2023-06-30 13:58:42 +0000 |
4031 | @@ -0,0 +1,56 @@ |
4032 | +# Copyright 2014-2015 Canonical Limited. |
4033 | +# |
4034 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
4035 | +# you may not use this file except in compliance with the License. |
4036 | +# You may obtain a copy of the License at |
4037 | +# |
4038 | +# http://www.apache.org/licenses/LICENSE-2.0 |
4039 | +# |
4040 | +# Unless required by applicable law or agreed to in writing, software |
4041 | +# distributed under the License is distributed on an "AS IS" BASIS, |
4042 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
4043 | +# See the License for the specific language governing permissions and |
4044 | +# limitations under the License. |
4045 | + |
4046 | +"""Remote Python Debugger (pdb wrapper).""" |
4047 | + |
4048 | +import pdb |
4049 | +import socket |
4050 | +import sys |
4051 | + |
4052 | +__author__ = "Bertrand Janin <b@janin.com>" |
4053 | +__version__ = "0.1.3" |
4054 | + |
4055 | + |
class Rpdb(pdb.Pdb):
    """A pdb subclass that serves the debugger session over a TCP socket.

    Instantiating it blocks until a client connects (e.g. with telnet or
    netcat); the debugger then reads commands from and writes output to
    that connection instead of the process's own stdin/stdout.
    """

    def __init__(self, addr="127.0.0.1", port=4444):
        """Initialize the socket and initialize pdb."""

        # Backup stdin and stdout before replacing them by the socket handle
        self.old_stdout = sys.stdout
        self.old_stdin = sys.stdin

        # Open a 'reusable' socket to let the webapp reload on the same port
        self.skt = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True)
        self.skt.bind((addr, port))
        self.skt.listen(1)
        # NOTE: blocks here until a debugger client connects.
        (clientsocket, address) = self.skt.accept()
        handle = clientsocket.makefile('rw')
        # Route all pdb I/O through the connected client's file handle.
        pdb.Pdb.__init__(self, completekey='tab', stdin=handle, stdout=handle)
        sys.stdout = sys.stdin = handle

    def shutdown(self):
        """Revert stdin and stdout, close the socket."""
        sys.stdout = self.old_stdout
        sys.stdin = self.old_stdin
        self.skt.close()
        self.set_continue()

    def do_continue(self, arg):
        """Stop all operation on ``continue``."""
        self.shutdown()
        return 1

    # Every session-ending pdb command tears the connection down the
    # same way as ``continue``.
    do_EOF = do_quit = do_exit = do_c = do_cont = do_continue
4088 | |
4089 | === added file 'hooks/charmhelpers/fetch/python/version.py' |
4090 | --- hooks/charmhelpers/fetch/python/version.py 1970-01-01 00:00:00 +0000 |
4091 | +++ hooks/charmhelpers/fetch/python/version.py 2023-06-30 13:58:42 +0000 |
4092 | @@ -0,0 +1,32 @@ |
4093 | +#!/usr/bin/env python |
4094 | +# coding: utf-8 |
4095 | + |
4096 | +# Copyright 2014-2015 Canonical Limited. |
4097 | +# |
4098 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
4099 | +# you may not use this file except in compliance with the License. |
4100 | +# You may obtain a copy of the License at |
4101 | +# |
4102 | +# http://www.apache.org/licenses/LICENSE-2.0 |
4103 | +# |
4104 | +# Unless required by applicable law or agreed to in writing, software |
4105 | +# distributed under the License is distributed on an "AS IS" BASIS, |
4106 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
4107 | +# See the License for the specific language governing permissions and |
4108 | +# limitations under the License. |
4109 | + |
4110 | +import sys |
4111 | + |
4112 | +__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" |
4113 | + |
4114 | + |
def current_version():
    """Return the running interpreter's version as ``sys.version_info``."""
    info = sys.version_info
    return info
4118 | + |
4119 | + |
def current_version_string():
    """Current system python version as string major.minor.micro"""
    vi = sys.version_info
    return "{0}.{1}.{2}".format(vi.major, vi.minor, vi.micro)
4125 | |
4126 | === added file 'hooks/charmhelpers/fetch/snap.py' |
4127 | --- hooks/charmhelpers/fetch/snap.py 1970-01-01 00:00:00 +0000 |
4128 | +++ hooks/charmhelpers/fetch/snap.py 2023-06-30 13:58:42 +0000 |
4129 | @@ -0,0 +1,150 @@ |
4130 | +# Copyright 2014-2021 Canonical Limited. |
4131 | +# |
4132 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
4133 | +# you may not use this file except in compliance with the License. |
4134 | +# You may obtain a copy of the License at |
4135 | +# |
4136 | +# http://www.apache.org/licenses/LICENSE-2.0 |
4137 | +# |
4138 | +# Unless required by applicable law or agreed to in writing, software |
4139 | +# distributed under the License is distributed on an "AS IS" BASIS, |
4140 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
4141 | +# See the License for the specific language governing permissions and |
4142 | +# limitations under the License. |
4143 | +""" |
4144 | +Charm helpers snap for classic charms. |
4145 | + |
4146 | +If writing reactive charms, use the snap layer: |
4147 | +https://lists.ubuntu.com/archives/snapcraft/2016-September/001114.html |
4148 | +""" |
4149 | +import subprocess |
4150 | +import os |
4151 | +from time import sleep |
4152 | +from charmhelpers.core.hookenv import log |
4153 | + |
4154 | +__author__ = 'Joseph Borg <joseph.borg@canonical.com>' |
4155 | + |
4156 | +# The return code for "couldn't acquire lock" in Snap |
4157 | +# (hopefully this will be improved). |
4158 | +SNAP_NO_LOCK = 1 |
4159 | +SNAP_NO_LOCK_RETRY_DELAY = 10 # Wait X seconds between Snap lock checks. |
4160 | +SNAP_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times. |
4161 | +SNAP_CHANNELS = [ |
4162 | + 'edge', |
4163 | + 'beta', |
4164 | + 'candidate', |
4165 | + 'stable', |
4166 | +] |
4167 | + |
4168 | + |
class CouldNotAcquireLockException(Exception):
    """Raised when snapd's state lock stays busy past the retry budget."""
4171 | + |
4172 | + |
class InvalidSnapChannel(Exception):
    """Raised for a snap channel name outside the known channel set."""
4175 | + |
4176 | + |
def _snap_exec(commands):
    """
    Execute snap commands, retrying while snapd's state lock is held.

    :param commands: List commands
    :return: Integer exit code
    :raises CouldNotAcquireLockException: if the lock is still held after
        SNAP_NO_LOCK_RETRY_COUNT attempts.
    """
    assert isinstance(commands, list)

    retry_count = 0
    return_code = None

    while return_code is None or return_code == SNAP_NO_LOCK:
        try:
            return_code = subprocess.check_call(['snap'] + commands,
                                                env=os.environ)
        except subprocess.CalledProcessError as e:
            return_code = e.returncode
            if return_code != SNAP_NO_LOCK:
                # Not lock contention: return the exit code immediately
                # instead of logging a misleading lock-retry message and
                # sleeping for SNAP_NO_LOCK_RETRY_DELAY seconds.
                break
            retry_count += 1
            if retry_count > SNAP_NO_LOCK_RETRY_COUNT:
                raise CouldNotAcquireLockException(
                    'Could not acquire lock after {} attempts'
                    .format(SNAP_NO_LOCK_RETRY_COUNT))
            log('Snap failed to acquire lock, trying again in {} seconds.'
                .format(SNAP_NO_LOCK_RETRY_DELAY), level='WARN')
            sleep(SNAP_NO_LOCK_RETRY_DELAY)

    return return_code
4205 | + |
4206 | + |
def snap_install(packages, *flags):
    """
    Install a snap package.

    :param packages: String or List String package name
    :param flags: List String flags to pass to install command
    :return: Integer return code from snap
    """
    # Normalise a single package name into a list.
    if type(packages) is not list:
        packages = [packages]
    flags = list(flags)

    parts = ['Installing snap(s) "%s"' % ', '.join(packages)]
    if flags:
        parts.append(' with option(s) "%s"' % ', '.join(flags))
    log(''.join(parts), level='INFO')

    return _snap_exec(['install'] + flags + packages)
4226 | + |
4227 | + |
def snap_remove(packages, *flags):
    """
    Remove a snap package.

    :param packages: String or List String package name
    :param flags: List String flags to pass to remove command
    :return: Integer return code from snap
    """
    # Normalise a single package name into a list.
    if type(packages) is not list:
        packages = [packages]
    flags = list(flags)

    parts = ['Removing snap(s) "%s"' % ', '.join(packages)]
    if flags:
        parts.append(' with options "%s"' % ', '.join(flags))
    log(''.join(parts), level='INFO')

    return _snap_exec(['remove'] + flags + packages)
4247 | + |
4248 | + |
def snap_refresh(packages, *flags):
    """
    Refresh / Update snap package.

    :param packages: String or List String package name
    :param flags: List String flags to pass to refresh command
    :return: Integer return code from snap
    """
    # Normalise a single package name into a list.
    if type(packages) is not list:
        packages = [packages]
    flags = list(flags)

    parts = ['Refreshing snap(s) "%s"' % ', '.join(packages)]
    if flags:
        parts.append(' with options "%s"' % ', '.join(flags))
    log(''.join(parts), level='INFO')

    return _snap_exec(['refresh'] + flags + packages)
4268 | + |
4269 | + |
def valid_snap_channel(channel):
    """Validate that a snap channel exists.

    :raises InvalidSnapChannel: When channel does not exist
    :return: Boolean
    """
    # Channel comparison is case-insensitive.
    if channel.lower() not in SNAP_CHANNELS:
        raise InvalidSnapChannel("Invalid Snap Channel: {}".format(channel))
    return True
4280 | |
4281 | === modified file 'hooks/charmhelpers/fetch/ubuntu.py' |
4282 | --- hooks/charmhelpers/fetch/ubuntu.py 2016-12-20 20:15:28 +0000 |
4283 | +++ hooks/charmhelpers/fetch/ubuntu.py 2023-06-30 13:58:42 +0000 |
4284 | @@ -1,4 +1,4 @@ |
4285 | -# Copyright 2014-2015 Canonical Limited. |
4286 | +# Copyright 2014-2021 Canonical Limited. |
4287 | # |
4288 | # Licensed under the Apache License, Version 2.0 (the "License"); |
4289 | # you may not use this file except in compliance with the License. |
4290 | @@ -12,29 +12,49 @@ |
4291 | # See the License for the specific language governing permissions and |
4292 | # limitations under the License. |
4293 | |
4294 | -import os |
4295 | -import six |
4296 | +from collections import OrderedDict |
4297 | +import platform |
4298 | +import re |
4299 | +import subprocess |
4300 | +import sys |
4301 | import time |
4302 | -import subprocess |
4303 | - |
4304 | -from tempfile import NamedTemporaryFile |
4305 | -from charmhelpers.core.host import ( |
4306 | - lsb_release |
4307 | + |
4308 | +from charmhelpers import deprecate |
4309 | +from charmhelpers.core.host import get_distrib_codename, get_system_env |
4310 | + |
4311 | +from charmhelpers.core.hookenv import ( |
4312 | + log, |
4313 | + DEBUG, |
4314 | + WARNING, |
4315 | + env_proxy_settings, |
4316 | ) |
4317 | -from charmhelpers.core.hookenv import log |
4318 | -from charmhelpers.fetch import SourceConfigError |
4319 | +from charmhelpers.fetch import SourceConfigError, GPGKeyError |
4320 | +from charmhelpers.fetch import ubuntu_apt_pkg |
4321 | |
4322 | +PROPOSED_POCKET = ( |
4323 | + "# Proposed\n" |
4324 | + "deb http://archive.ubuntu.com/ubuntu {}-proposed main universe " |
4325 | + "multiverse restricted\n") |
4326 | +PROPOSED_PORTS_POCKET = ( |
4327 | + "# Proposed\n" |
4328 | + "deb http://ports.ubuntu.com/ubuntu-ports {}-proposed main universe " |
4329 | + "multiverse restricted\n") |
4330 | +# Only supports 64bit and ppc64 at the moment. |
4331 | +ARCH_TO_PROPOSED_POCKET = { |
4332 | + 'x86_64': PROPOSED_POCKET, |
4333 | + 'ppc64le': PROPOSED_PORTS_POCKET, |
4334 | + 'aarch64': PROPOSED_PORTS_POCKET, |
4335 | + 's390x': PROPOSED_PORTS_POCKET, |
4336 | +} |
4337 | +CLOUD_ARCHIVE_URL = "http://ubuntu-cloud.archive.canonical.com/ubuntu" |
4338 | +CLOUD_ARCHIVE_KEY_ID = '5EDB1B62EC4926EA' |
4339 | CLOUD_ARCHIVE = """# Ubuntu Cloud Archive |
4340 | deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main |
4341 | """ |
4342 | - |
4343 | -PROPOSED_POCKET = """# Proposed |
4344 | -deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted |
4345 | -""" |
4346 | - |
4347 | CLOUD_ARCHIVE_POCKETS = { |
4348 | # Folsom |
4349 | 'folsom': 'precise-updates/folsom', |
4350 | + 'folsom/updates': 'precise-updates/folsom', |
4351 | 'precise-folsom': 'precise-updates/folsom', |
4352 | 'precise-folsom/updates': 'precise-updates/folsom', |
4353 | 'precise-updates/folsom': 'precise-updates/folsom', |
4354 | @@ -43,6 +63,7 @@ |
4355 | 'precise-proposed/folsom': 'precise-proposed/folsom', |
4356 | # Grizzly |
4357 | 'grizzly': 'precise-updates/grizzly', |
4358 | + 'grizzly/updates': 'precise-updates/grizzly', |
4359 | 'precise-grizzly': 'precise-updates/grizzly', |
4360 | 'precise-grizzly/updates': 'precise-updates/grizzly', |
4361 | 'precise-updates/grizzly': 'precise-updates/grizzly', |
4362 | @@ -51,6 +72,7 @@ |
4363 | 'precise-proposed/grizzly': 'precise-proposed/grizzly', |
4364 | # Havana |
4365 | 'havana': 'precise-updates/havana', |
4366 | + 'havana/updates': 'precise-updates/havana', |
4367 | 'precise-havana': 'precise-updates/havana', |
4368 | 'precise-havana/updates': 'precise-updates/havana', |
4369 | 'precise-updates/havana': 'precise-updates/havana', |
4370 | @@ -59,6 +81,7 @@ |
4371 | 'precise-proposed/havana': 'precise-proposed/havana', |
4372 | # Icehouse |
4373 | 'icehouse': 'precise-updates/icehouse', |
4374 | + 'icehouse/updates': 'precise-updates/icehouse', |
4375 | 'precise-icehouse': 'precise-updates/icehouse', |
4376 | 'precise-icehouse/updates': 'precise-updates/icehouse', |
4377 | 'precise-updates/icehouse': 'precise-updates/icehouse', |
4378 | @@ -67,6 +90,7 @@ |
4379 | 'precise-proposed/icehouse': 'precise-proposed/icehouse', |
4380 | # Juno |
4381 | 'juno': 'trusty-updates/juno', |
4382 | + 'juno/updates': 'trusty-updates/juno', |
4383 | 'trusty-juno': 'trusty-updates/juno', |
4384 | 'trusty-juno/updates': 'trusty-updates/juno', |
4385 | 'trusty-updates/juno': 'trusty-updates/juno', |
4386 | @@ -75,6 +99,7 @@ |
4387 | 'trusty-proposed/juno': 'trusty-proposed/juno', |
4388 | # Kilo |
4389 | 'kilo': 'trusty-updates/kilo', |
4390 | + 'kilo/updates': 'trusty-updates/kilo', |
4391 | 'trusty-kilo': 'trusty-updates/kilo', |
4392 | 'trusty-kilo/updates': 'trusty-updates/kilo', |
4393 | 'trusty-updates/kilo': 'trusty-updates/kilo', |
4394 | @@ -83,6 +108,7 @@ |
4395 | 'trusty-proposed/kilo': 'trusty-proposed/kilo', |
4396 | # Liberty |
4397 | 'liberty': 'trusty-updates/liberty', |
4398 | + 'liberty/updates': 'trusty-updates/liberty', |
4399 | 'trusty-liberty': 'trusty-updates/liberty', |
4400 | 'trusty-liberty/updates': 'trusty-updates/liberty', |
4401 | 'trusty-updates/liberty': 'trusty-updates/liberty', |
4402 | @@ -91,6 +117,7 @@ |
4403 | 'trusty-proposed/liberty': 'trusty-proposed/liberty', |
4404 | # Mitaka |
4405 | 'mitaka': 'trusty-updates/mitaka', |
4406 | + 'mitaka/updates': 'trusty-updates/mitaka', |
4407 | 'trusty-mitaka': 'trusty-updates/mitaka', |
4408 | 'trusty-mitaka/updates': 'trusty-updates/mitaka', |
4409 | 'trusty-updates/mitaka': 'trusty-updates/mitaka', |
4410 | @@ -99,6 +126,7 @@ |
4411 | 'trusty-proposed/mitaka': 'trusty-proposed/mitaka', |
4412 | # Newton |
4413 | 'newton': 'xenial-updates/newton', |
4414 | + 'newton/updates': 'xenial-updates/newton', |
4415 | 'xenial-newton': 'xenial-updates/newton', |
4416 | 'xenial-newton/updates': 'xenial-updates/newton', |
4417 | 'xenial-updates/newton': 'xenial-updates/newton', |
4418 | @@ -107,17 +135,175 @@ |
4419 | 'xenial-proposed/newton': 'xenial-proposed/newton', |
4420 | # Ocata |
4421 | 'ocata': 'xenial-updates/ocata', |
4422 | + 'ocata/updates': 'xenial-updates/ocata', |
4423 | 'xenial-ocata': 'xenial-updates/ocata', |
4424 | 'xenial-ocata/updates': 'xenial-updates/ocata', |
4425 | 'xenial-updates/ocata': 'xenial-updates/ocata', |
4426 | 'ocata/proposed': 'xenial-proposed/ocata', |
4427 | 'xenial-ocata/proposed': 'xenial-proposed/ocata', |
4428 | - 'xenial-ocata/newton': 'xenial-proposed/ocata', |
4429 | + 'xenial-proposed/ocata': 'xenial-proposed/ocata', |
4430 | + # Pike |
4431 | + 'pike': 'xenial-updates/pike', |
4432 | + 'xenial-pike': 'xenial-updates/pike', |
4433 | + 'xenial-pike/updates': 'xenial-updates/pike', |
4434 | + 'xenial-updates/pike': 'xenial-updates/pike', |
4435 | + 'pike/proposed': 'xenial-proposed/pike', |
4436 | + 'xenial-pike/proposed': 'xenial-proposed/pike', |
4437 | + 'xenial-proposed/pike': 'xenial-proposed/pike', |
4438 | + # Queens |
4439 | + 'queens': 'xenial-updates/queens', |
4440 | + 'xenial-queens': 'xenial-updates/queens', |
4441 | + 'xenial-queens/updates': 'xenial-updates/queens', |
4442 | + 'xenial-updates/queens': 'xenial-updates/queens', |
4443 | + 'queens/proposed': 'xenial-proposed/queens', |
4444 | + 'xenial-queens/proposed': 'xenial-proposed/queens', |
4445 | + 'xenial-proposed/queens': 'xenial-proposed/queens', |
4446 | + # Rocky |
4447 | + 'rocky': 'bionic-updates/rocky', |
4448 | + 'bionic-rocky': 'bionic-updates/rocky', |
4449 | + 'bionic-rocky/updates': 'bionic-updates/rocky', |
4450 | + 'bionic-updates/rocky': 'bionic-updates/rocky', |
4451 | + 'rocky/proposed': 'bionic-proposed/rocky', |
4452 | + 'bionic-rocky/proposed': 'bionic-proposed/rocky', |
4453 | + 'bionic-proposed/rocky': 'bionic-proposed/rocky', |
4454 | + # Stein |
4455 | + 'stein': 'bionic-updates/stein', |
4456 | + 'bionic-stein': 'bionic-updates/stein', |
4457 | + 'bionic-stein/updates': 'bionic-updates/stein', |
4458 | + 'bionic-updates/stein': 'bionic-updates/stein', |
4459 | + 'stein/proposed': 'bionic-proposed/stein', |
4460 | + 'bionic-stein/proposed': 'bionic-proposed/stein', |
4461 | + 'bionic-proposed/stein': 'bionic-proposed/stein', |
4462 | + # Train |
4463 | + 'train': 'bionic-updates/train', |
4464 | + 'bionic-train': 'bionic-updates/train', |
4465 | + 'bionic-train/updates': 'bionic-updates/train', |
4466 | + 'bionic-updates/train': 'bionic-updates/train', |
4467 | + 'train/proposed': 'bionic-proposed/train', |
4468 | + 'bionic-train/proposed': 'bionic-proposed/train', |
4469 | + 'bionic-proposed/train': 'bionic-proposed/train', |
4470 | + # Ussuri |
4471 | + 'ussuri': 'bionic-updates/ussuri', |
4472 | + 'bionic-ussuri': 'bionic-updates/ussuri', |
4473 | + 'bionic-ussuri/updates': 'bionic-updates/ussuri', |
4474 | + 'bionic-updates/ussuri': 'bionic-updates/ussuri', |
4475 | + 'ussuri/proposed': 'bionic-proposed/ussuri', |
4476 | + 'bionic-ussuri/proposed': 'bionic-proposed/ussuri', |
4477 | + 'bionic-proposed/ussuri': 'bionic-proposed/ussuri', |
4478 | + # Victoria |
4479 | + 'victoria': 'focal-updates/victoria', |
4480 | + 'focal-victoria': 'focal-updates/victoria', |
4481 | + 'focal-victoria/updates': 'focal-updates/victoria', |
4482 | + 'focal-updates/victoria': 'focal-updates/victoria', |
4483 | + 'victoria/proposed': 'focal-proposed/victoria', |
4484 | + 'focal-victoria/proposed': 'focal-proposed/victoria', |
4485 | + 'focal-proposed/victoria': 'focal-proposed/victoria', |
4486 | + # Wallaby |
4487 | + 'wallaby': 'focal-updates/wallaby', |
4488 | + 'focal-wallaby': 'focal-updates/wallaby', |
4489 | + 'focal-wallaby/updates': 'focal-updates/wallaby', |
4490 | + 'focal-updates/wallaby': 'focal-updates/wallaby', |
4491 | + 'wallaby/proposed': 'focal-proposed/wallaby', |
4492 | + 'focal-wallaby/proposed': 'focal-proposed/wallaby', |
4493 | + 'focal-proposed/wallaby': 'focal-proposed/wallaby', |
4494 | + # Xena |
4495 | + 'xena': 'focal-updates/xena', |
4496 | + 'focal-xena': 'focal-updates/xena', |
4497 | + 'focal-xena/updates': 'focal-updates/xena', |
4498 | + 'focal-updates/xena': 'focal-updates/xena', |
4499 | + 'xena/proposed': 'focal-proposed/xena', |
4500 | + 'focal-xena/proposed': 'focal-proposed/xena', |
4501 | + 'focal-proposed/xena': 'focal-proposed/xena', |
4502 | + # Yoga |
4503 | + 'yoga': 'focal-updates/yoga', |
4504 | + 'focal-yoga': 'focal-updates/yoga', |
4505 | + 'focal-yoga/updates': 'focal-updates/yoga', |
4506 | + 'focal-updates/yoga': 'focal-updates/yoga', |
4507 | + 'yoga/proposed': 'focal-proposed/yoga', |
4508 | + 'focal-yoga/proposed': 'focal-proposed/yoga', |
4509 | + 'focal-proposed/yoga': 'focal-proposed/yoga', |
4510 | + # Zed |
4511 | + 'zed': 'jammy-updates/zed', |
4512 | + 'jammy-zed': 'jammy-updates/zed', |
4513 | + 'jammy-zed/updates': 'jammy-updates/zed', |
4514 | + 'jammy-updates/zed': 'jammy-updates/zed', |
4515 | + 'zed/proposed': 'jammy-proposed/zed', |
4516 | + 'jammy-zed/proposed': 'jammy-proposed/zed', |
4517 | + 'jammy-proposed/zed': 'jammy-proposed/zed', |
4518 | + # antelope |
4519 | + 'antelope': 'jammy-updates/antelope', |
4520 | + 'jammy-antelope': 'jammy-updates/antelope', |
4521 | + 'jammy-antelope/updates': 'jammy-updates/antelope', |
4522 | + 'jammy-updates/antelope': 'jammy-updates/antelope', |
4523 | + 'antelope/proposed': 'jammy-proposed/antelope', |
4524 | + 'jammy-antelope/proposed': 'jammy-proposed/antelope', |
4525 | + 'jammy-proposed/antelope': 'jammy-proposed/antelope', |
4526 | + |
4527 | + # OVN |
4528 | + 'focal-ovn-22.03': 'focal-updates/ovn-22.03', |
4529 | + 'focal-ovn-22.03/proposed': 'focal-proposed/ovn-22.03', |
4530 | } |
4531 | |
4532 | + |
4533 | +OPENSTACK_RELEASES = ( |
4534 | + 'diablo', |
4535 | + 'essex', |
4536 | + 'folsom', |
4537 | + 'grizzly', |
4538 | + 'havana', |
4539 | + 'icehouse', |
4540 | + 'juno', |
4541 | + 'kilo', |
4542 | + 'liberty', |
4543 | + 'mitaka', |
4544 | + 'newton', |
4545 | + 'ocata', |
4546 | + 'pike', |
4547 | + 'queens', |
4548 | + 'rocky', |
4549 | + 'stein', |
4550 | + 'train', |
4551 | + 'ussuri', |
4552 | + 'victoria', |
4553 | + 'wallaby', |
4554 | + 'xena', |
4555 | + 'yoga', |
4556 | + 'zed', |
4557 | + 'antelope', |
4558 | +) |
4559 | + |
4560 | + |
4561 | +UBUNTU_OPENSTACK_RELEASE = OrderedDict([ |
4562 | + ('oneiric', 'diablo'), |
4563 | + ('precise', 'essex'), |
4564 | + ('quantal', 'folsom'), |
4565 | + ('raring', 'grizzly'), |
4566 | + ('saucy', 'havana'), |
4567 | + ('trusty', 'icehouse'), |
4568 | + ('utopic', 'juno'), |
4569 | + ('vivid', 'kilo'), |
4570 | + ('wily', 'liberty'), |
4571 | + ('xenial', 'mitaka'), |
4572 | + ('yakkety', 'newton'), |
4573 | + ('zesty', 'ocata'), |
4574 | + ('artful', 'pike'), |
4575 | + ('bionic', 'queens'), |
4576 | + ('cosmic', 'rocky'), |
4577 | + ('disco', 'stein'), |
4578 | + ('eoan', 'train'), |
4579 | + ('focal', 'ussuri'), |
4580 | + ('groovy', 'victoria'), |
4581 | + ('hirsute', 'wallaby'), |
4582 | + ('impish', 'xena'), |
4583 | + ('jammy', 'yoga'), |
4584 | + ('kinetic', 'zed'), |
4585 | + ('lunar', 'antelope'), |
4586 | +]) |
4587 | + |
4588 | + |
4589 | APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT. |
4590 | -APT_NO_LOCK_RETRY_DELAY = 10 # Wait 10 seconds between apt lock checks. |
4591 | -APT_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times. |
4592 | +CMD_RETRY_DELAY = 10 # Wait 10 seconds between command retries. |
4593 | +CMD_RETRY_COUNT = 10 # Retry a failing fatal command X times. |
4594 | |
4595 | |
4596 | def filter_installed_packages(packages): |
4597 | @@ -135,35 +321,93 @@ |
4598 | return _pkgs |
4599 | |
4600 | |
4601 | -def apt_cache(in_memory=True, progress=None): |
4602 | - """Build and return an apt cache.""" |
4603 | - from apt import apt_pkg |
4604 | - apt_pkg.init() |
4605 | - if in_memory: |
4606 | - apt_pkg.config.set("Dir::Cache::pkgcache", "") |
4607 | - apt_pkg.config.set("Dir::Cache::srcpkgcache", "") |
4608 | - return apt_pkg.Cache(progress) |
4609 | - |
4610 | - |
4611 | -def install(packages, options=None, fatal=False): |
4612 | - """Install one or more packages.""" |
def filter_missing_packages(packages):
    """Return a list of packages that are installed.

    :param packages: list of packages to evaluate.
    :returns list: Packages that are installed.
    """
    # Everything requested minus what filter_installed_packages() reports
    # as *not* installed leaves the already-installed subset.
    not_installed = set(filter_installed_packages(packages))
    return list(set(packages) - not_installed)
4623 | + |
4624 | + |
def apt_cache(*_, **__):
    """Shim returning an object simulating the apt_pkg Cache.

    :param _: Accept arguments for compatibility, not used.
    :type _: any
    :param __: Accept keyword arguments for compatibility, not used.
    :type __: any
    :returns:Object used to interrogate the system apt and dpkg databases.
    :rtype:ubuntu_apt_pkg.Cache
    """
    if 'apt_pkg' in sys.modules:
        # NOTE(fnordahl): When our consumer use the upstream ``apt_pkg`` module
        # in conjunction with the apt_cache helper function, they may expect us
        # to call ``apt_pkg.init()`` for them.
        #
        # Detect this situation, log a warning and make the call to
        # ``apt_pkg.init()`` to avoid the consumer Python interpreter from
        # crashing with a segmentation fault.
        # The deprecation warning is routed through a decorated no-op so
        # @deprecate controls how often it is emitted.
        @deprecate(
            'Support for use of upstream ``apt_pkg`` module in conjunction'
            'with charm-helpers is deprecated since 2019-06-25',
            date=None, log=lambda x: log(x, level=WARNING))
        def one_shot_log():
            pass

        one_shot_log()
        sys.modules['apt_pkg'].init()
    return ubuntu_apt_pkg.Cache()
4653 | + |
4654 | + |
def apt_install(packages, options=None, fatal=False, quiet=False):
    """Install one or more packages.

    :param packages: Package(s) to install
    :type packages: Option[str, List[str]]
    :param options: Options to pass on to apt-get
    :type options: Option[None, List[str]]
    :param fatal: Whether the command's output should be checked and
        retried.
    :type fatal: bool
    :param quiet: if True (default), suppress log message to stdout/stderr
    :type quiet: bool
    :raises: subprocess.CalledProcessError
    """
    # An empty package list is a no-op rather than an error.
    if not packages:
        log("Nothing to install", level=DEBUG)
        return

    if options is None:
        options = ['--option=Dpkg::Options::=--force-confold']

    cmd = ['apt-get', '--assume-yes'] + list(options) + ['install']
    if isinstance(packages, str):
        cmd.append(packages)
    else:
        cmd.extend(packages)

    if not quiet:
        log("Installing {} with options: {}"
            .format(packages, options))
    _run_apt_command(cmd, fatal, quiet=quiet)
4694 | + |
4695 | + |
4696 | +def apt_upgrade(options=None, fatal=False, dist=False): |
4697 | + """Upgrade all packages. |
4698 | + |
4699 | + :param options: Options to pass on to apt-get |
4700 | + :type options: Option[None, List[str]] |
4701 | + :param fatal: Whether the command's output should be checked and |
4702 | + retried. |
4703 | + :type fatal: bool |
4704 | + :param dist: Whether ``dist-upgrade`` should be used over ``upgrade`` |
4705 | + :type dist: bool |
4706 | + :raises: subprocess.CalledProcessError |
4707 | + """ |
4708 | if options is None: |
4709 | options = ['--option=Dpkg::Options::=--force-confold'] |
4710 | |
4711 | @@ -177,16 +421,24 @@ |
4712 | _run_apt_command(cmd, fatal) |
4713 | |
4714 | |
def apt_update(fatal=False):
    """Update local apt cache.

    :param fatal: Whether the command's output should be checked and
        retried.
    :type fatal: bool
    """
    _run_apt_command(['apt-get', 'update'], fatal)
4720 | |
4721 | |
4722 | -def purge(packages, fatal=False): |
4723 | - """Purge one or more packages.""" |
4724 | +def apt_purge(packages, fatal=False): |
4725 | + """Purge one or more packages. |
4726 | + |
4727 | + :param packages: Package(s) to install |
4728 | + :type packages: Option[str, List[str]] |
4729 | + :param fatal: Whether the command's output should be checked and |
4730 | + retried. |
4731 | + :type fatal: bool |
4732 | + :raises: subprocess.CalledProcessError |
4733 | + """ |
4734 | cmd = ['apt-get', '--assume-yes', 'purge'] |
4735 | - if isinstance(packages, six.string_types): |
4736 | + if isinstance(packages, str): |
4737 | cmd.append(packages) |
4738 | else: |
4739 | cmd.extend(packages) |
4740 | @@ -194,11 +446,26 @@ |
4741 | _run_apt_command(cmd, fatal) |
4742 | |
4743 | |
4744 | +def apt_autoremove(purge=True, fatal=False): |
4745 | + """Remove automatically installed packages that are no longer required. |
4746 | + :param purge: Whether the ``--purge`` option should be passed on or not. |
4747 | + :type purge: bool |
4748 | + :param fatal: Whether the command's output should be checked and |
4749 | + retried. |
4750 | + :type fatal: bool |
4751 | + :raises: subprocess.CalledProcessError |
4752 | + """ |
4753 | + cmd = ['apt-get', '--assume-yes', 'autoremove'] |
4754 | + if purge: |
4755 | + cmd.append('--purge') |
4756 | + _run_apt_command(cmd, fatal) |
4757 | + |
4758 | + |
4759 | def apt_mark(packages, mark, fatal=False): |
4760 | """Flag one or more packages using apt-mark.""" |
4761 | log("Marking {} as {}".format(packages, mark)) |
4762 | cmd = ['apt-mark', mark] |
4763 | - if isinstance(packages, six.string_types): |
4764 | + if isinstance(packages, str): |
4765 | cmd.append(packages) |
4766 | else: |
4767 | cmd.extend(packages) |
4768 | @@ -217,7 +484,154 @@ |
4769 | return apt_mark(packages, 'unhold', fatal=fatal) |
4770 | |
4771 | |
4772 | -def add_source(source, key=None): |
4773 | +def import_key(key): |
4774 | + """Import an ASCII Armor key. |
4775 | + |
4776 | + A Radix64 format keyid is also supported for backwards |
4777 | + compatibility. In this case Ubuntu keyserver will be |
4778 | + queried for a key via HTTPS by its keyid. This method |
4779 | + is less preferable because https proxy servers may |
4780 | + require traffic decryption which is equivalent to a |
4781 | + man-in-the-middle attack (a proxy server impersonates |
4782 | + keyserver TLS certificates and has to be explicitly |
4783 | + trusted by the system). |
4784 | + |
4785 | + :param key: A GPG key in ASCII armor format, |
4786 | + including BEGIN and END markers or a keyid. |
4787 | + :type key: (bytes, str) |
4788 | + :raises: GPGKeyError if the key could not be imported |
4789 | + """ |
4790 | + key = key.strip() |
4791 | + if '-' in key or '\n' in key: |
4792 | + # Send everything not obviously a keyid to GPG to import, as |
4793 | + # we trust its validation better than our own. eg. handling |
4794 | + # comments before the key. |
4795 | + log("PGP key found (looks like ASCII Armor format)", level=DEBUG) |
4796 | + if ('-----BEGIN PGP PUBLIC KEY BLOCK-----' in key and |
4797 | + '-----END PGP PUBLIC KEY BLOCK-----' in key): |
4798 | + log("Writing provided PGP key in the binary format", level=DEBUG) |
4799 | + key_bytes = key.encode('utf-8') |
4800 | + key_name = _get_keyid_by_gpg_key(key_bytes) |
4801 | + key_gpg = _dearmor_gpg_key(key_bytes) |
4802 | + _write_apt_gpg_keyfile(key_name=key_name, key_material=key_gpg) |
4803 | + else: |
4804 | + raise GPGKeyError("ASCII armor markers missing from GPG key") |
4805 | + else: |
4806 | + log("PGP key found (looks like Radix64 format)", level=WARNING) |
4807 | + log("SECURELY importing PGP key from keyserver; " |
4808 | + "full key not provided.", level=WARNING) |
4809 | + # as of bionic add-apt-repository uses curl with an HTTPS keyserver URL |
4810 | + # to retrieve GPG keys. `apt-key adv` command is deprecated as is |
4811 | + # apt-key in general as noted in its manpage. See lp:1433761 for more |
4812 | + # history. Instead, /etc/apt/trusted.gpg.d is used directly to drop |
4813 | + # gpg |
4814 | + key_asc = _get_key_by_keyid(key) |
4815 | + # write the key in GPG format so that apt-key list shows it |
4816 | + key_gpg = _dearmor_gpg_key(key_asc) |
4817 | + _write_apt_gpg_keyfile(key_name=key, key_material=key_gpg) |
4818 | + |
4819 | + |
4820 | +def _get_keyid_by_gpg_key(key_material): |
4821 | + """Get a GPG key fingerprint by GPG key material. |
4822 | + Gets a GPG key fingerprint (40-digit, 160-bit) by the ASCII armor-encoded |
4823 | + or binary GPG key material. Can be used, for example, to generate file |
4824 | + names for keys passed via charm options. |
4825 | + |
4826 | + :param key_material: ASCII armor-encoded or binary GPG key material |
4827 | + :type key_material: bytes |
4828 | + :raises: GPGKeyError if invalid key material has been provided |
4829 | + :returns: A GPG key fingerprint |
4830 | + :rtype: str |
4831 | + """ |
4832 | + # Use the same gpg command for both Xenial and Bionic |
4833 | + cmd = 'gpg --with-colons --with-fingerprint' |
4834 | + ps = subprocess.Popen(cmd.split(), |
4835 | + stdout=subprocess.PIPE, |
4836 | + stderr=subprocess.PIPE, |
4837 | + stdin=subprocess.PIPE) |
4838 | + out, err = ps.communicate(input=key_material) |
4839 | + out = out.decode('utf-8') |
4840 | + err = err.decode('utf-8') |
4841 | + if 'gpg: no valid OpenPGP data found.' in err: |
4842 | + raise GPGKeyError('Invalid GPG key material provided') |
4843 | + # from gnupg2 docs: fpr :: Fingerprint (fingerprint is in field 10) |
4844 | + return re.search(r"^fpr:{9}([0-9A-F]{40}):$", out, re.MULTILINE).group(1) |
4845 | + |
4846 | + |
4847 | +def _get_key_by_keyid(keyid): |
4848 | + """Get a key via HTTPS from the Ubuntu keyserver. |
4849 | + Different key ID formats are supported by SKS keyservers (the longer ones |
4850 | + are more secure, see "dead beef attack" and https://evil32.com/). Since |
4851 | + HTTPS is used, if SSLBump-like HTTPS proxies are in place, they will |
4852 | + impersonate keyserver.ubuntu.com and generate a certificate with |
4853 | + keyserver.ubuntu.com in the CN field or in SubjAltName fields of a |
4854 | + certificate. If such proxy behavior is expected it is necessary to add the |
4855 | + CA certificate chain containing the intermediate CA of the SSLBump proxy to |
4856 | + every machine that this code runs on via ca-certs cloud-init directive (via |
4857 | + cloudinit-userdata model-config) or via other means (such as through a |
4858 | + custom charm option). Also note that DNS resolution for the hostname in a |
4859 | + URL is done at a proxy server - not at the client side. |
4860 | + |
4861 | + 8-digit (32 bit) key ID |
4862 | + https://keyserver.ubuntu.com/pks/lookup?search=0x4652B4E6 |
4863 | + 16-digit (64 bit) key ID |
4864 | + https://keyserver.ubuntu.com/pks/lookup?search=0x6E85A86E4652B4E6 |
4865 | + 40-digit key ID: |
4866 | + https://keyserver.ubuntu.com/pks/lookup?search=0x35F77D63B5CEC106C577ED856E85A86E4652B4E6 |
4867 | + |
4868 | + :param keyid: An 8, 16 or 40 hex digit keyid to find a key for |
4869 | + :type keyid: (bytes, str) |
4870 | + :returns: A key material for the specified GPG key id |
4871 | + :rtype: (str, bytes) |
4872 | + :raises: subprocess.CalledProcessError |
4873 | + """ |
4874 | + # options=mr - machine-readable output (disables html wrappers) |
4875 | + keyserver_url = ('https://keyserver.ubuntu.com' |
4876 | + '/pks/lookup?op=get&options=mr&exact=on&search=0x{}') |
4877 | + curl_cmd = ['curl', keyserver_url.format(keyid)] |
4878 | + # use proxy server settings in order to retrieve the key |
4879 | + return subprocess.check_output(curl_cmd, |
4880 | + env=env_proxy_settings(['https', 'no_proxy'])) |
4881 | + |
4882 | + |
4883 | +def _dearmor_gpg_key(key_asc): |
4884 | + """Converts a GPG key in the ASCII armor format to the binary format. |
4885 | + |
4886 | + :param key_asc: A GPG key in ASCII armor format. |
4887 | + :type key_asc: (str, bytes) |
4888 | + :returns: A GPG key in binary format |
4889 | + :rtype: (str, bytes) |
4890 | + :raises: GPGKeyError |
4891 | + """ |
4892 | + ps = subprocess.Popen(['gpg', '--dearmor'], |
4893 | + stdout=subprocess.PIPE, |
4894 | + stderr=subprocess.PIPE, |
4895 | + stdin=subprocess.PIPE) |
4896 | + out, err = ps.communicate(input=key_asc) |
4897 | + # no need to decode output as it is binary (invalid utf-8), only error |
4898 | + err = err.decode('utf-8') |
4899 | + if 'gpg: no valid OpenPGP data found.' in err: |
4900 | + raise GPGKeyError('Invalid GPG key material. Check your network setup' |
4901 | + ' (MTU, routing, DNS) and/or proxy server settings' |
4902 | + ' as well as destination keyserver status.') |
4903 | + else: |
4904 | + return out |
4905 | + |
4906 | + |
4907 | +def _write_apt_gpg_keyfile(key_name, key_material): |
4908 | + """Writes GPG key material into a file at a provided path. |
4909 | + |
4910 | + :param key_name: A key name to use for a key file (could be a fingerprint) |
4911 | + :type key_name: str |
4912 | + :param key_material: A GPG key material (binary) |
4913 | + :type key_material: (str, bytes) |
4914 | + """ |
4915 | + with open('/etc/apt/trusted.gpg.d/{}.gpg'.format(key_name), |
4916 | + 'wb') as keyf: |
4917 | + keyf.write(key_material) |
4918 | + |
4919 | + |
4920 | +def add_source(source, key=None, fail_invalid=False): |
4921 | """Add a package source to this system. |
4922 | |
4923 | @param source: a URL or sources.list entry, as supported by |
4924 | @@ -233,95 +647,349 @@ |
4925 | such as 'cloud:icehouse' |
4926 | 'distro' may be used as a noop |
4927 | |
4928 | + Full list of source specifications supported by the function are: |
4929 | + |
4930 | + 'distro': A NOP; i.e. it has no effect. |
4931 | + 'proposed': the proposed deb spec [2] is written to |
4932 | + /etc/apt/sources.list/proposed |
4933 | + 'distro-proposed': adds <version>-proposed to the debs [2] |
4934 | + 'ppa:<ppa-name>': add-apt-repository --yes <ppa_name> |
4935 | + 'deb <deb-spec>': add-apt-repository --yes deb <deb-spec> |
4936 | + 'http://....': add-apt-repository --yes http://... |
4937 | + 'cloud-archive:<spec>': add-apt-repository --yes cloud-archive:<spec> |
4938 | + 'cloud:<release>[-staging]': specify a Cloud Archive pocket <release> with |
4939 | + optional staging version. If staging is used then the staging PPA [2] |
4940 | + with be used. If staging is NOT used then the cloud archive [3] will be |
4941 | + added, and the 'ubuntu-cloud-keyring' package will be added for the |
4942 | + current distro. |
4943 | + '<openstack-version>': translate to cloud:<release> based on the current |
4944 | + distro version (i.e. for 'ussuri' this will either be 'bionic-ussuri' or |
4945 | + 'distro'). |
4946 | + '<openstack-version>/proposed': as above, but for proposed. |
4947 | + |
4948 | + Otherwise the source is not recognised and this is logged to the juju log. |
4949 | + However, no error is raised, unless fail_invalid is True. |
4950 | + |
4951 | + [1] deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main |
4952 | + where {} is replaced with the derived pocket name. |
4953 | + [2] deb http://archive.ubuntu.com/ubuntu {}-proposed \ |
4954 | + main universe multiverse restricted |
4955 | + where {} is replaced with the lsb_release codename (e.g. xenial) |
4956 | + [3] deb http://ubuntu-cloud.archive.canonical.com/ubuntu <pocket> |
4957 | + to /etc/apt/sources.list.d/cloud-archive-list |
4958 | + |
4959 | @param key: A key to be added to the system's APT keyring and used |
4960 | to verify the signatures on packages. Ideally, this should be an |
4961 | ASCII format GPG public key including the block headers. A GPG key |
4962 | id may also be used, but be aware that only insecure protocols are |
4963 | available to retrieve the actual public key from a public keyserver |
4964 | placing your Juju environment at risk. ppa and cloud archive keys |
4965 | - are securely added automtically, so sould not be provided. |
4966 | + are securely added automatically, so should not be provided. |
4967 | + |
4968 | + @param fail_invalid: (boolean) if True, then the function raises a |
4969 | + SourceConfigError if there is no matching installation source. |
4970 | + |
4971 | + @raises SourceConfigError() if for cloud:<pocket>, the <pocket> is not a |
4972 | + valid pocket in CLOUD_ARCHIVE_POCKETS |
4973 | """ |
4974 | + # extract the OpenStack versions from the CLOUD_ARCHIVE_POCKETS; can't use |
4975 | + # the list in contrib.openstack.utils as it might not be included in |
4976 | + # classic charms and would break everything. Having OpenStack specific |
4977 | + # code in this file is a bit of an antipattern, anyway. |
4978 | + os_versions_regex = "({})".format("|".join(OPENSTACK_RELEASES)) |
4979 | + |
4980 | + _mapping = OrderedDict([ |
4981 | + (r"^distro$", lambda: None), # This is a NOP |
4982 | + (r"^(?:proposed|distro-proposed)$", _add_proposed), |
4983 | + (r"^cloud-archive:(.*)$", _add_apt_repository), |
4984 | + (r"^((?:deb |http:|https:|ppa:).*)$", _add_apt_repository), |
4985 | + (r"^cloud:(.*)-(.*)\/staging$", _add_cloud_staging), |
4986 | + (r"^cloud:(.*)-(ovn-.*)$", _add_cloud_distro_check), |
4987 | + (r"^cloud:(.*)-(.*)$", _add_cloud_distro_check), |
4988 | + (r"^cloud:(.*)$", _add_cloud_pocket), |
4989 | + (r"^snap:.*-(.*)-(.*)$", _add_cloud_distro_check), |
4990 | + (r"^{}\/proposed$".format(os_versions_regex), |
4991 | + _add_bare_openstack_proposed), |
4992 | + (r"^{}$".format(os_versions_regex), _add_bare_openstack), |
4993 | + ]) |
4994 | if source is None: |
4995 | - log('Source is not present. Skipping') |
4996 | - return |
4997 | - |
4998 | - if (source.startswith('ppa:') or |
4999 | - source.startswith('http') or |
5000 | - source.startswith('deb ') or |
The diff has been truncated for viewing.
lgtm thanks!