Merge lp:~woutervb/charms/trusty/conn-check/update_charmhelpers into lp:~ubuntuone-hackers/charms/trusty/conn-check/trunk
- Trusty Tahr (14.04)
- update_charmhelpers
- Merge into trunk
Proposed by
Wouter van Bommel
Status: | Rejected |
---|---|
Rejected by: | Wouter van Bommel |
Proposed branch: | lp:~woutervb/charms/trusty/conn-check/update_charmhelpers |
Merge into: | lp:~ubuntuone-hackers/charms/trusty/conn-check/trunk |
Diff against target: |
33431 lines (+31025/-441) 173 files modified
hooks/charmhelpers/__init__.py (+67/-4) hooks/charmhelpers/cli/README.rst (+57/-0) hooks/charmhelpers/cli/__init__.py (+196/-0) hooks/charmhelpers/cli/benchmark.py (+34/-0) hooks/charmhelpers/cli/commands.py (+30/-0) hooks/charmhelpers/cli/hookenv.py (+21/-0) hooks/charmhelpers/cli/host.py (+29/-0) hooks/charmhelpers/cli/unitdata.py (+46/-0) hooks/charmhelpers/context.py (+205/-0) hooks/charmhelpers/contrib/amulet/__init__.py (+13/-0) hooks/charmhelpers/contrib/amulet/deployment.py (+99/-0) hooks/charmhelpers/contrib/amulet/utils.py (+820/-0) hooks/charmhelpers/contrib/ansible/__init__.py (+142/-88) hooks/charmhelpers/contrib/benchmark/__init__.py (+124/-0) hooks/charmhelpers/contrib/charmhelpers/IMPORT (+4/-0) hooks/charmhelpers/contrib/charmhelpers/__init__.py (+203/-0) hooks/charmhelpers/contrib/charmsupport/IMPORT (+14/-0) hooks/charmhelpers/contrib/charmsupport/nrpe.py (+131/-28) hooks/charmhelpers/contrib/charmsupport/volumes.py (+173/-0) hooks/charmhelpers/contrib/database/__init__.py (+11/-0) hooks/charmhelpers/contrib/database/mysql.py (+840/-0) hooks/charmhelpers/contrib/hahelpers/__init__.py (+13/-0) hooks/charmhelpers/contrib/hahelpers/apache.py (+90/-0) hooks/charmhelpers/contrib/hahelpers/cluster.py (+451/-0) hooks/charmhelpers/contrib/hardening/README.hardening.md (+38/-0) hooks/charmhelpers/contrib/hardening/__init__.py (+13/-0) hooks/charmhelpers/contrib/hardening/apache/__init__.py (+17/-0) hooks/charmhelpers/contrib/hardening/apache/checks/__init__.py (+29/-0) hooks/charmhelpers/contrib/hardening/apache/checks/config.py (+104/-0) hooks/charmhelpers/contrib/hardening/apache/templates/99-hardening.conf (+32/-0) hooks/charmhelpers/contrib/hardening/apache/templates/alias.conf (+31/-0) hooks/charmhelpers/contrib/hardening/audits/__init__.py (+54/-0) hooks/charmhelpers/contrib/hardening/audits/apache.py (+105/-0) hooks/charmhelpers/contrib/hardening/audits/apt.py (+104/-0) hooks/charmhelpers/contrib/hardening/audits/file.py (+550/-0) 
hooks/charmhelpers/contrib/hardening/defaults/apache.yaml (+16/-0) hooks/charmhelpers/contrib/hardening/defaults/apache.yaml.schema (+12/-0) hooks/charmhelpers/contrib/hardening/defaults/mysql.yaml (+38/-0) hooks/charmhelpers/contrib/hardening/defaults/mysql.yaml.schema (+15/-0) hooks/charmhelpers/contrib/hardening/defaults/os.yaml (+68/-0) hooks/charmhelpers/contrib/hardening/defaults/os.yaml.schema (+43/-0) hooks/charmhelpers/contrib/hardening/defaults/ssh.yaml (+49/-0) hooks/charmhelpers/contrib/hardening/defaults/ssh.yaml.schema (+42/-0) hooks/charmhelpers/contrib/hardening/harden.py (+96/-0) hooks/charmhelpers/contrib/hardening/host/__init__.py (+17/-0) hooks/charmhelpers/contrib/hardening/host/checks/__init__.py (+48/-0) hooks/charmhelpers/contrib/hardening/host/checks/apt.py (+37/-0) hooks/charmhelpers/contrib/hardening/host/checks/limits.py (+53/-0) hooks/charmhelpers/contrib/hardening/host/checks/login.py (+65/-0) hooks/charmhelpers/contrib/hardening/host/checks/minimize_access.py (+50/-0) hooks/charmhelpers/contrib/hardening/host/checks/pam.py (+132/-0) hooks/charmhelpers/contrib/hardening/host/checks/profile.py (+49/-0) hooks/charmhelpers/contrib/hardening/host/checks/securetty.py (+37/-0) hooks/charmhelpers/contrib/hardening/host/checks/suid_sgid.py (+129/-0) hooks/charmhelpers/contrib/hardening/host/checks/sysctl.py (+209/-0) hooks/charmhelpers/contrib/hardening/host/templates/10.hardcore.conf (+8/-0) hooks/charmhelpers/contrib/hardening/host/templates/99-hardening.sh (+5/-0) hooks/charmhelpers/contrib/hardening/host/templates/99-juju-hardening.conf (+7/-0) hooks/charmhelpers/contrib/hardening/host/templates/login.defs (+349/-0) hooks/charmhelpers/contrib/hardening/host/templates/modules (+117/-0) hooks/charmhelpers/contrib/hardening/host/templates/passwdqc.conf (+11/-0) hooks/charmhelpers/contrib/hardening/host/templates/pinerolo_profile.sh (+8/-0) hooks/charmhelpers/contrib/hardening/host/templates/securetty (+11/-0) 
hooks/charmhelpers/contrib/hardening/host/templates/tally2 (+14/-0) hooks/charmhelpers/contrib/hardening/mysql/__init__.py (+17/-0) hooks/charmhelpers/contrib/hardening/mysql/checks/__init__.py (+29/-0) hooks/charmhelpers/contrib/hardening/mysql/checks/config.py (+87/-0) hooks/charmhelpers/contrib/hardening/mysql/templates/hardening.cnf (+12/-0) hooks/charmhelpers/contrib/hardening/ssh/__init__.py (+17/-0) hooks/charmhelpers/contrib/hardening/ssh/checks/__init__.py (+29/-0) hooks/charmhelpers/contrib/hardening/ssh/checks/config.py (+435/-0) hooks/charmhelpers/contrib/hardening/ssh/templates/ssh_config (+70/-0) hooks/charmhelpers/contrib/hardening/ssh/templates/sshd_config (+159/-0) hooks/charmhelpers/contrib/hardening/templating.py (+73/-0) hooks/charmhelpers/contrib/hardening/utils.py (+155/-0) hooks/charmhelpers/contrib/mellanox/__init__.py (+13/-0) hooks/charmhelpers/contrib/mellanox/infiniband.py (+153/-0) hooks/charmhelpers/contrib/network/__init__.py (+13/-0) hooks/charmhelpers/contrib/network/ip.py (+603/-0) hooks/charmhelpers/contrib/network/ovs/__init__.py (+693/-0) hooks/charmhelpers/contrib/network/ovs/ovn.py (+233/-0) hooks/charmhelpers/contrib/network/ovs/ovsdb.py (+246/-0) hooks/charmhelpers/contrib/network/ovs/utils.py (+26/-0) hooks/charmhelpers/contrib/network/ufw.py (+386/-0) hooks/charmhelpers/contrib/openstack/__init__.py (+13/-0) hooks/charmhelpers/contrib/openstack/alternatives.py (+44/-0) hooks/charmhelpers/contrib/openstack/amulet/__init__.py (+13/-0) hooks/charmhelpers/contrib/openstack/amulet/deployment.py (+387/-0) hooks/charmhelpers/contrib/openstack/amulet/utils.py (+1595/-0) hooks/charmhelpers/contrib/openstack/audits/__init__.py (+212/-0) hooks/charmhelpers/contrib/openstack/audits/openstack_security_guide.py (+270/-0) hooks/charmhelpers/contrib/openstack/cert_utils.py (+443/-0) hooks/charmhelpers/contrib/openstack/context.py (+3313/-0) hooks/charmhelpers/contrib/openstack/deferred_events.py (+416/-0) 
hooks/charmhelpers/contrib/openstack/exceptions.py (+26/-0) hooks/charmhelpers/contrib/openstack/files/__init__.py (+16/-0) hooks/charmhelpers/contrib/openstack/files/check_haproxy.sh (+34/-0) hooks/charmhelpers/contrib/openstack/files/check_haproxy_queue_depth.sh (+30/-0) hooks/charmhelpers/contrib/openstack/files/policy_rc_d_script.py (+196/-0) hooks/charmhelpers/contrib/openstack/ha/__init__.py (+13/-0) hooks/charmhelpers/contrib/openstack/ha/utils.py (+348/-0) hooks/charmhelpers/contrib/openstack/ip.py (+235/-0) hooks/charmhelpers/contrib/openstack/keystone.py (+178/-0) hooks/charmhelpers/contrib/openstack/neutron.py (+359/-0) hooks/charmhelpers/contrib/openstack/policy_rcd.py (+173/-0) hooks/charmhelpers/contrib/openstack/policyd.py (+801/-0) hooks/charmhelpers/contrib/openstack/ssh_migrations.py (+412/-0) hooks/charmhelpers/contrib/openstack/templates/__init__.py (+16/-0) hooks/charmhelpers/contrib/openstack/templates/ceph.conf (+28/-0) hooks/charmhelpers/contrib/openstack/templates/git.upstart (+17/-0) hooks/charmhelpers/contrib/openstack/templates/haproxy.cfg (+77/-0) hooks/charmhelpers/contrib/openstack/templates/logrotate (+9/-0) hooks/charmhelpers/contrib/openstack/templates/memcached.conf (+53/-0) hooks/charmhelpers/contrib/openstack/templates/openstack_https_frontend (+35/-0) hooks/charmhelpers/contrib/openstack/templates/section-ceph-bluestore-compression (+28/-0) hooks/charmhelpers/contrib/openstack/templates/section-keystone-authtoken (+12/-0) hooks/charmhelpers/contrib/openstack/templates/section-keystone-authtoken-legacy (+10/-0) hooks/charmhelpers/contrib/openstack/templates/section-keystone-authtoken-mitaka (+22/-0) hooks/charmhelpers/contrib/openstack/templates/section-keystone-authtoken-v3only (+9/-0) hooks/charmhelpers/contrib/openstack/templates/section-oslo-cache (+6/-0) hooks/charmhelpers/contrib/openstack/templates/section-oslo-messaging-rabbit (+10/-0) hooks/charmhelpers/contrib/openstack/templates/section-oslo-messaging-rabbit-ocata 
(+10/-0) hooks/charmhelpers/contrib/openstack/templates/section-oslo-middleware (+5/-0) hooks/charmhelpers/contrib/openstack/templates/section-oslo-notifications (+15/-0) hooks/charmhelpers/contrib/openstack/templates/section-placement (+20/-0) hooks/charmhelpers/contrib/openstack/templates/section-rabbitmq-oslo (+22/-0) hooks/charmhelpers/contrib/openstack/templates/section-zeromq (+14/-0) hooks/charmhelpers/contrib/openstack/templates/vendor_data.json (+1/-0) hooks/charmhelpers/contrib/openstack/templates/wsgi-openstack-api.conf (+91/-0) hooks/charmhelpers/contrib/openstack/templating.py (+379/-0) hooks/charmhelpers/contrib/openstack/utils.py (+2684/-0) hooks/charmhelpers/contrib/openstack/vaultlocker.py (+179/-0) hooks/charmhelpers/contrib/peerstorage/__init__.py (+267/-0) hooks/charmhelpers/contrib/python.py (+21/-0) hooks/charmhelpers/contrib/saltstack/__init__.py (+116/-0) hooks/charmhelpers/contrib/ssl/__init__.py (+92/-0) hooks/charmhelpers/contrib/ssl/service.py (+277/-0) hooks/charmhelpers/contrib/storage/__init__.py (+13/-0) hooks/charmhelpers/contrib/storage/linux/__init__.py (+13/-0) hooks/charmhelpers/contrib/storage/linux/bcache.py (+74/-0) hooks/charmhelpers/contrib/storage/linux/ceph.py (+2381/-0) hooks/charmhelpers/contrib/storage/linux/loopback.py (+92/-0) hooks/charmhelpers/contrib/storage/linux/lvm.py (+182/-0) hooks/charmhelpers/contrib/storage/linux/utils.py (+128/-0) hooks/charmhelpers/contrib/templating/jinja.py (+51/-0) hooks/charmhelpers/contrib/templating/pyformat.py (+27/-0) hooks/charmhelpers/contrib/unison/__init__.py (+316/-0) hooks/charmhelpers/coordinator.py (+606/-0) hooks/charmhelpers/core/decorators.py (+38/-0) hooks/charmhelpers/core/hookenv.py (+651/-48) hooks/charmhelpers/core/host.py (+622/-96) hooks/charmhelpers/core/host_factory/centos.py (+16/-0) hooks/charmhelpers/core/host_factory/ubuntu.py (+69/-5) hooks/charmhelpers/core/kernel.py (+2/-2) hooks/charmhelpers/core/services/base.py (+25/-9) 
hooks/charmhelpers/core/strutils.py (+64/-5) hooks/charmhelpers/core/sysctl.py (+32/-11) hooks/charmhelpers/core/templating.py (+18/-9) hooks/charmhelpers/core/unitdata.py (+8/-1) hooks/charmhelpers/fetch/__init__.py (+22/-9) hooks/charmhelpers/fetch/archiveurl.py (+1/-1) hooks/charmhelpers/fetch/bzrurl.py (+2/-2) hooks/charmhelpers/fetch/centos.py (+1/-1) hooks/charmhelpers/fetch/giturl.py (+2/-2) hooks/charmhelpers/fetch/python/__init__.py (+13/-0) hooks/charmhelpers/fetch/python/debug.py (+54/-0) hooks/charmhelpers/fetch/python/packages.py (+154/-0) hooks/charmhelpers/fetch/python/rpdb.py (+56/-0) hooks/charmhelpers/fetch/python/version.py (+32/-0) hooks/charmhelpers/fetch/snap.py (+150/-0) hooks/charmhelpers/fetch/ubuntu.py (+643/-118) hooks/charmhelpers/fetch/ubuntu_apt_pkg.py (+312/-0) hooks/charmhelpers/osplatform.py (+29/-2) |
To merge this branch: | bzr merge lp:~woutervb/charms/trusty/conn-check/update_charmhelpers |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Ubuntu One hackers | Pending | ||
Review via email: mp+402614@code.launchpad.net |
Commit message
Description of the change
To post a comment you must log in.
Unmerged revisions
- 58. By Wouter van Bommel
-
Updated charmhelpers to revision 4b8c496 from repo https://github.com/juju/charm-helpers.git
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'hooks/charmhelpers/__init__.py' | |||
2 | --- hooks/charmhelpers/__init__.py 2016-12-20 14:35:00 +0000 | |||
3 | +++ hooks/charmhelpers/__init__.py 2021-05-12 04:07:51 +0000 | |||
4 | @@ -14,23 +14,86 @@ | |||
5 | 14 | 14 | ||
6 | 15 | # Bootstrap charm-helpers, installing its dependencies if necessary using | 15 | # Bootstrap charm-helpers, installing its dependencies if necessary using |
7 | 16 | # only standard libraries. | 16 | # only standard libraries. |
8 | 17 | from __future__ import print_function | ||
9 | 18 | from __future__ import absolute_import | ||
10 | 19 | |||
11 | 20 | import functools | ||
12 | 21 | import inspect | ||
13 | 17 | import subprocess | 22 | import subprocess |
14 | 18 | import sys | 23 | import sys |
15 | 19 | 24 | ||
16 | 20 | try: | 25 | try: |
18 | 21 | import six # flake8: noqa | 26 | import six # NOQA:F401 |
19 | 22 | except ImportError: | 27 | except ImportError: |
20 | 23 | if sys.version_info.major == 2: | 28 | if sys.version_info.major == 2: |
21 | 24 | subprocess.check_call(['apt-get', 'install', '-y', 'python-six']) | 29 | subprocess.check_call(['apt-get', 'install', '-y', 'python-six']) |
22 | 25 | else: | 30 | else: |
23 | 26 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-six']) | 31 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-six']) |
25 | 27 | import six # flake8: noqa | 32 | import six # NOQA:F401 |
26 | 28 | 33 | ||
27 | 29 | try: | 34 | try: |
29 | 30 | import yaml # flake8: noqa | 35 | import yaml # NOQA:F401 |
30 | 31 | except ImportError: | 36 | except ImportError: |
31 | 32 | if sys.version_info.major == 2: | 37 | if sys.version_info.major == 2: |
32 | 33 | subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml']) | 38 | subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml']) |
33 | 34 | else: | 39 | else: |
34 | 35 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml']) | 40 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml']) |
36 | 36 | import yaml # flake8: noqa | 41 | import yaml # NOQA:F401 |
37 | 42 | |||
38 | 43 | |||
39 | 44 | # Holds a list of mapping of mangled function names that have been deprecated | ||
40 | 45 | # using the @deprecate decorator below. This is so that the warning is only | ||
41 | 46 | # printed once for each usage of the function. | ||
42 | 47 | __deprecated_functions = {} | ||
43 | 48 | |||
44 | 49 | |||
45 | 50 | def deprecate(warning, date=None, log=None): | ||
46 | 51 | """Add a deprecation warning the first time the function is used. | ||
47 | 52 | |||
48 | 53 | The date which is a string in semi-ISO8660 format indicates the year-month | ||
49 | 54 | that the function is officially going to be removed. | ||
50 | 55 | |||
51 | 56 | usage: | ||
52 | 57 | |||
53 | 58 | @deprecate('use core/fetch/add_source() instead', '2017-04') | ||
54 | 59 | def contributed_add_source_thing(...): | ||
55 | 60 | ... | ||
56 | 61 | |||
57 | 62 | And it then prints to the log ONCE that the function is deprecated. | ||
58 | 63 | The reason for passing the logging function (log) is so that hookenv.log | ||
59 | 64 | can be used for a charm if needed. | ||
60 | 65 | |||
61 | 66 | :param warning: String to indicate what is to be used instead. | ||
62 | 67 | :param date: Optional string in YYYY-MM format to indicate when the | ||
63 | 68 | function will definitely (probably) be removed. | ||
64 | 69 | :param log: The log function to call in order to log. If None, logs to | ||
65 | 70 | stdout | ||
66 | 71 | """ | ||
67 | 72 | def wrap(f): | ||
68 | 73 | |||
69 | 74 | @functools.wraps(f) | ||
70 | 75 | def wrapped_f(*args, **kwargs): | ||
71 | 76 | try: | ||
72 | 77 | module = inspect.getmodule(f) | ||
73 | 78 | file = inspect.getsourcefile(f) | ||
74 | 79 | lines = inspect.getsourcelines(f) | ||
75 | 80 | f_name = "{}-{}-{}..{}-{}".format( | ||
76 | 81 | module.__name__, file, lines[0], lines[-1], f.__name__) | ||
77 | 82 | except (IOError, TypeError): | ||
78 | 83 | # assume it was local, so just use the name of the function | ||
79 | 84 | f_name = f.__name__ | ||
80 | 85 | if f_name not in __deprecated_functions: | ||
81 | 86 | __deprecated_functions[f_name] = True | ||
82 | 87 | s = "DEPRECATION WARNING: Function {} is being removed".format( | ||
83 | 88 | f.__name__) | ||
84 | 89 | if date: | ||
85 | 90 | s = "{} on/around {}".format(s, date) | ||
86 | 91 | if warning: | ||
87 | 92 | s = "{} : {}".format(s, warning) | ||
88 | 93 | if log: | ||
89 | 94 | log(s) | ||
90 | 95 | else: | ||
91 | 96 | print(s) | ||
92 | 97 | return f(*args, **kwargs) | ||
93 | 98 | return wrapped_f | ||
94 | 99 | return wrap | ||
95 | 37 | 100 | ||
96 | === added directory 'hooks/charmhelpers/cli' | |||
97 | === added file 'hooks/charmhelpers/cli/README.rst' | |||
98 | --- hooks/charmhelpers/cli/README.rst 1970-01-01 00:00:00 +0000 | |||
99 | +++ hooks/charmhelpers/cli/README.rst 2021-05-12 04:07:51 +0000 | |||
100 | @@ -0,0 +1,57 @@ | |||
101 | 1 | ========== | ||
102 | 2 | Commandant | ||
103 | 3 | ========== | ||
104 | 4 | |||
105 | 5 | ----------------------------------------------------- | ||
106 | 6 | Automatic command-line interfaces to Python functions | ||
107 | 7 | ----------------------------------------------------- | ||
108 | 8 | |||
109 | 9 | One of the benefits of ``libvirt`` is the uniformity of the interface: the C API (as well as the bindings in other languages) is a set of functions that accept parameters that are nearly identical to the command-line arguments. If you run ``virsh``, you get an interactive command prompt that supports all of the same commands that your shell scripts use as ``virsh`` subcommands. | ||
110 | 10 | |||
111 | 11 | Command execution and stdio manipulation is the greatest common factor across all development systems in the POSIX environment. By exposing your functions as commands that manipulate streams of text, you can make life easier for all the Ruby and Erlang and Go programmers in your life. | ||
112 | 12 | |||
113 | 13 | Goals | ||
114 | 14 | ===== | ||
115 | 15 | |||
116 | 16 | * Single decorator to expose a function as a command. | ||
117 | 17 | * now two decorators - one "automatic" and one that allows authors to manipulate the arguments for fine-grained control.(MW) | ||
118 | 18 | * Automatic analysis of function signature through ``inspect.getargspec()`` on python 2 or ``inspect.getfullargspec()`` on python 3 | ||
119 | 19 | * Command argument parser built automatically with ``argparse`` | ||
120 | 20 | * Interactive interpreter loop object made with ``Cmd`` | ||
121 | 21 | * Options to output structured return value data via ``pprint``, ``yaml`` or ``json`` dumps. | ||
122 | 22 | |||
123 | 23 | Other Important Features that need writing | ||
124 | 24 | ------------------------------------------ | ||
125 | 25 | |||
126 | 26 | * Help and Usage documentation can be automatically generated, but it will be important to let users override this behaviour | ||
127 | 27 | * The decorator should allow specifying further parameters to the parser's add_argument() calls, to specify types or to make arguments behave as boolean flags, etc. | ||
128 | 28 | - Filename arguments are important, as good practice is for functions to accept file objects as parameters. | ||
129 | 29 | - choices arguments help to limit bad input before the function is called | ||
130 | 30 | * Some automatic behaviour could make for better defaults, once the user can override them. | ||
131 | 31 | - We could automatically detect arguments that default to False or True, and automatically support --no-foo for foo=True. | ||
132 | 32 | - We could automatically support hyphens as alternates for underscores | ||
133 | 33 | - Arguments defaulting to sequence types could support the ``append`` action. | ||
134 | 34 | |||
135 | 35 | |||
136 | 36 | ----------------------------------------------------- | ||
137 | 37 | Implementing subcommands | ||
138 | 38 | ----------------------------------------------------- | ||
139 | 39 | |||
140 | 40 | (WIP) | ||
141 | 41 | |||
142 | 42 | So as to avoid dependencies on the cli module, subcommands should be defined separately from their implementations. The recommmendation would be to place definitions into separate modules near the implementations which they expose. | ||
143 | 43 | |||
144 | 44 | Some examples:: | ||
145 | 45 | |||
146 | 46 | from charmhelpers.cli import CommandLine | ||
147 | 47 | from charmhelpers.payload import execd | ||
148 | 48 | from charmhelpers.foo import bar | ||
149 | 49 | |||
150 | 50 | cli = CommandLine() | ||
151 | 51 | |||
152 | 52 | cli.subcommand(execd.execd_run) | ||
153 | 53 | |||
154 | 54 | @cli.subcommand_builder("bar", help="Bar baz qux") | ||
155 | 55 | def barcmd_builder(subparser): | ||
156 | 56 | subparser.add_argument('argument1', help="yackety") | ||
157 | 57 | return bar | ||
158 | 0 | 58 | ||
159 | === added file 'hooks/charmhelpers/cli/__init__.py' | |||
160 | --- hooks/charmhelpers/cli/__init__.py 1970-01-01 00:00:00 +0000 | |||
161 | +++ hooks/charmhelpers/cli/__init__.py 2021-05-12 04:07:51 +0000 | |||
162 | @@ -0,0 +1,196 @@ | |||
163 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
164 | 2 | # | ||
165 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
166 | 4 | # you may not use this file except in compliance with the License. | ||
167 | 5 | # You may obtain a copy of the License at | ||
168 | 6 | # | ||
169 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
170 | 8 | # | ||
171 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
172 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
173 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
174 | 12 | # See the License for the specific language governing permissions and | ||
175 | 13 | # limitations under the License. | ||
176 | 14 | |||
177 | 15 | import inspect | ||
178 | 16 | import argparse | ||
179 | 17 | import sys | ||
180 | 18 | |||
181 | 19 | import six | ||
182 | 20 | from six.moves import zip | ||
183 | 21 | |||
184 | 22 | import charmhelpers.core.unitdata | ||
185 | 23 | |||
186 | 24 | |||
187 | 25 | class OutputFormatter(object): | ||
188 | 26 | def __init__(self, outfile=sys.stdout): | ||
189 | 27 | self.formats = ( | ||
190 | 28 | "raw", | ||
191 | 29 | "json", | ||
192 | 30 | "py", | ||
193 | 31 | "yaml", | ||
194 | 32 | "csv", | ||
195 | 33 | "tab", | ||
196 | 34 | ) | ||
197 | 35 | self.outfile = outfile | ||
198 | 36 | |||
199 | 37 | def add_arguments(self, argument_parser): | ||
200 | 38 | formatgroup = argument_parser.add_mutually_exclusive_group() | ||
201 | 39 | choices = self.supported_formats | ||
202 | 40 | formatgroup.add_argument("--format", metavar='FMT', | ||
203 | 41 | help="Select output format for returned data, " | ||
204 | 42 | "where FMT is one of: {}".format(choices), | ||
205 | 43 | choices=choices, default='raw') | ||
206 | 44 | for fmt in self.formats: | ||
207 | 45 | fmtfunc = getattr(self, fmt) | ||
208 | 46 | formatgroup.add_argument("-{}".format(fmt[0]), | ||
209 | 47 | "--{}".format(fmt), action='store_const', | ||
210 | 48 | const=fmt, dest='format', | ||
211 | 49 | help=fmtfunc.__doc__) | ||
212 | 50 | |||
213 | 51 | @property | ||
214 | 52 | def supported_formats(self): | ||
215 | 53 | return self.formats | ||
216 | 54 | |||
217 | 55 | def raw(self, output): | ||
218 | 56 | """Output data as raw string (default)""" | ||
219 | 57 | if isinstance(output, (list, tuple)): | ||
220 | 58 | output = '\n'.join(map(str, output)) | ||
221 | 59 | self.outfile.write(str(output)) | ||
222 | 60 | |||
223 | 61 | def py(self, output): | ||
224 | 62 | """Output data as a nicely-formatted python data structure""" | ||
225 | 63 | import pprint | ||
226 | 64 | pprint.pprint(output, stream=self.outfile) | ||
227 | 65 | |||
228 | 66 | def json(self, output): | ||
229 | 67 | """Output data in JSON format""" | ||
230 | 68 | import json | ||
231 | 69 | json.dump(output, self.outfile) | ||
232 | 70 | |||
233 | 71 | def yaml(self, output): | ||
234 | 72 | """Output data in YAML format""" | ||
235 | 73 | import yaml | ||
236 | 74 | yaml.safe_dump(output, self.outfile) | ||
237 | 75 | |||
238 | 76 | def csv(self, output): | ||
239 | 77 | """Output data as excel-compatible CSV""" | ||
240 | 78 | import csv | ||
241 | 79 | csvwriter = csv.writer(self.outfile) | ||
242 | 80 | csvwriter.writerows(output) | ||
243 | 81 | |||
244 | 82 | def tab(self, output): | ||
245 | 83 | """Output data in excel-compatible tab-delimited format""" | ||
246 | 84 | import csv | ||
247 | 85 | csvwriter = csv.writer(self.outfile, dialect=csv.excel_tab) | ||
248 | 86 | csvwriter.writerows(output) | ||
249 | 87 | |||
250 | 88 | def format_output(self, output, fmt='raw'): | ||
251 | 89 | fmtfunc = getattr(self, fmt) | ||
252 | 90 | fmtfunc(output) | ||
253 | 91 | |||
254 | 92 | |||
255 | 93 | class CommandLine(object): | ||
256 | 94 | argument_parser = None | ||
257 | 95 | subparsers = None | ||
258 | 96 | formatter = None | ||
259 | 97 | exit_code = 0 | ||
260 | 98 | |||
261 | 99 | def __init__(self): | ||
262 | 100 | if not self.argument_parser: | ||
263 | 101 | self.argument_parser = argparse.ArgumentParser(description='Perform common charm tasks') | ||
264 | 102 | if not self.formatter: | ||
265 | 103 | self.formatter = OutputFormatter() | ||
266 | 104 | self.formatter.add_arguments(self.argument_parser) | ||
267 | 105 | if not self.subparsers: | ||
268 | 106 | self.subparsers = self.argument_parser.add_subparsers(help='Commands') | ||
269 | 107 | |||
270 | 108 | def subcommand(self, command_name=None): | ||
271 | 109 | """ | ||
272 | 110 | Decorate a function as a subcommand. Use its arguments as the | ||
273 | 111 | command-line arguments""" | ||
274 | 112 | def wrapper(decorated): | ||
275 | 113 | cmd_name = command_name or decorated.__name__ | ||
276 | 114 | subparser = self.subparsers.add_parser(cmd_name, | ||
277 | 115 | description=decorated.__doc__) | ||
278 | 116 | for args, kwargs in describe_arguments(decorated): | ||
279 | 117 | subparser.add_argument(*args, **kwargs) | ||
280 | 118 | subparser.set_defaults(func=decorated) | ||
281 | 119 | return decorated | ||
282 | 120 | return wrapper | ||
283 | 121 | |||
284 | 122 | def test_command(self, decorated): | ||
285 | 123 | """ | ||
286 | 124 | Subcommand is a boolean test function, so bool return values should be | ||
287 | 125 | converted to a 0/1 exit code. | ||
288 | 126 | """ | ||
289 | 127 | decorated._cli_test_command = True | ||
290 | 128 | return decorated | ||
291 | 129 | |||
292 | 130 | def no_output(self, decorated): | ||
293 | 131 | """ | ||
294 | 132 | Subcommand is not expected to return a value, so don't print a spurious None. | ||
295 | 133 | """ | ||
296 | 134 | decorated._cli_no_output = True | ||
297 | 135 | return decorated | ||
298 | 136 | |||
299 | 137 | def subcommand_builder(self, command_name, description=None): | ||
300 | 138 | """ | ||
301 | 139 | Decorate a function that builds a subcommand. Builders should accept a | ||
302 | 140 | single argument (the subparser instance) and return the function to be | ||
303 | 141 | run as the command.""" | ||
304 | 142 | def wrapper(decorated): | ||
305 | 143 | subparser = self.subparsers.add_parser(command_name) | ||
306 | 144 | func = decorated(subparser) | ||
307 | 145 | subparser.set_defaults(func=func) | ||
308 | 146 | subparser.description = description or func.__doc__ | ||
309 | 147 | return wrapper | ||
310 | 148 | |||
311 | 149 | def run(self): | ||
312 | 150 | "Run cli, processing arguments and executing subcommands." | ||
313 | 151 | arguments = self.argument_parser.parse_args() | ||
314 | 152 | if six.PY2: | ||
315 | 153 | argspec = inspect.getargspec(arguments.func) | ||
316 | 154 | else: | ||
317 | 155 | argspec = inspect.getfullargspec(arguments.func) | ||
318 | 156 | vargs = [] | ||
319 | 157 | for arg in argspec.args: | ||
320 | 158 | vargs.append(getattr(arguments, arg)) | ||
321 | 159 | if argspec.varargs: | ||
322 | 160 | vargs.extend(getattr(arguments, argspec.varargs)) | ||
323 | 161 | output = arguments.func(*vargs) | ||
324 | 162 | if getattr(arguments.func, '_cli_test_command', False): | ||
325 | 163 | self.exit_code = 0 if output else 1 | ||
326 | 164 | output = '' | ||
327 | 165 | if getattr(arguments.func, '_cli_no_output', False): | ||
328 | 166 | output = '' | ||
329 | 167 | self.formatter.format_output(output, arguments.format) | ||
330 | 168 | if charmhelpers.core.unitdata._KV: | ||
331 | 169 | charmhelpers.core.unitdata._KV.flush() | ||
332 | 170 | |||
333 | 171 | |||
334 | 172 | cmdline = CommandLine() | ||
335 | 173 | |||
336 | 174 | |||
337 | 175 | def describe_arguments(func): | ||
338 | 176 | """ | ||
339 | 177 | Analyze a function's signature and return a data structure suitable for | ||
340 | 178 | passing in as arguments to an argparse parser's add_argument() method.""" | ||
341 | 179 | |||
342 | 180 | if six.PY2: | ||
343 | 181 | argspec = inspect.getargspec(func) | ||
344 | 182 | else: | ||
345 | 183 | argspec = inspect.getfullargspec(func) | ||
346 | 184 | # we should probably raise an exception somewhere if func includes **kwargs | ||
347 | 185 | if argspec.defaults: | ||
348 | 186 | positional_args = argspec.args[:-len(argspec.defaults)] | ||
349 | 187 | keyword_names = argspec.args[-len(argspec.defaults):] | ||
350 | 188 | for arg, default in zip(keyword_names, argspec.defaults): | ||
351 | 189 | yield ('--{}'.format(arg),), {'default': default} | ||
352 | 190 | else: | ||
353 | 191 | positional_args = argspec.args | ||
354 | 192 | |||
355 | 193 | for arg in positional_args: | ||
356 | 194 | yield (arg,), {} | ||
357 | 195 | if argspec.varargs: | ||
358 | 196 | yield (argspec.varargs,), {'nargs': '*'} | ||
359 | 0 | 197 | ||
360 | === added file 'hooks/charmhelpers/cli/benchmark.py' | |||
361 | --- hooks/charmhelpers/cli/benchmark.py 1970-01-01 00:00:00 +0000 | |||
362 | +++ hooks/charmhelpers/cli/benchmark.py 2021-05-12 04:07:51 +0000 | |||
363 | @@ -0,0 +1,34 @@ | |||
364 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
365 | 2 | # | ||
366 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
367 | 4 | # you may not use this file except in compliance with the License. | ||
368 | 5 | # You may obtain a copy of the License at | ||
369 | 6 | # | ||
370 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
371 | 8 | # | ||
372 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
373 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
374 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
375 | 12 | # See the License for the specific language governing permissions and | ||
376 | 13 | # limitations under the License. | ||
377 | 14 | |||
378 | 15 | from . import cmdline | ||
379 | 16 | from charmhelpers.contrib.benchmark import Benchmark | ||
380 | 17 | |||
381 | 18 | |||
382 | 19 | @cmdline.subcommand(command_name='benchmark-start') | ||
383 | 20 | def start(): | ||
384 | 21 | Benchmark.start() | ||
385 | 22 | |||
386 | 23 | |||
387 | 24 | @cmdline.subcommand(command_name='benchmark-finish') | ||
388 | 25 | def finish(): | ||
389 | 26 | Benchmark.finish() | ||
390 | 27 | |||
391 | 28 | |||
392 | 29 | @cmdline.subcommand_builder('benchmark-composite', description="Set the benchmark composite score") | ||
393 | 30 | def service(subparser): | ||
394 | 31 | subparser.add_argument("value", help="The composite score.") | ||
395 | 32 | subparser.add_argument("units", help="The units the composite score represents, i.e., 'reads/sec'.") | ||
396 | 33 | subparser.add_argument("direction", help="'asc' if a lower score is better, 'desc' if a higher score is better.") | ||
397 | 34 | return Benchmark.set_composite_score | ||
398 | 0 | 35 | ||
399 | === added file 'hooks/charmhelpers/cli/commands.py' | |||
400 | --- hooks/charmhelpers/cli/commands.py 1970-01-01 00:00:00 +0000 | |||
401 | +++ hooks/charmhelpers/cli/commands.py 2021-05-12 04:07:51 +0000 | |||
402 | @@ -0,0 +1,30 @@ | |||
403 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
404 | 2 | # | ||
405 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
406 | 4 | # you may not use this file except in compliance with the License. | ||
407 | 5 | # You may obtain a copy of the License at | ||
408 | 6 | # | ||
409 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
410 | 8 | # | ||
411 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
412 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
413 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
414 | 12 | # See the License for the specific language governing permissions and | ||
415 | 13 | # limitations under the License. | ||
416 | 14 | |||
417 | 15 | """ | ||
418 | 16 | This module loads sub-modules into the python runtime so they can be | ||
419 | 17 | discovered via the inspect module. In order to prevent flake8 from (rightfully) | ||
420 | 18 | telling us these are unused modules, throw a ' # noqa' at the end of each import | ||
421 | 19 | so that the warning is suppressed. | ||
422 | 20 | """ | ||
423 | 21 | |||
424 | 22 | from . import CommandLine # noqa | ||
425 | 23 | |||
426 | 24 | """ | ||
427 | 25 | Import the sub-modules which have decorated subcommands to register with chlp. | ||
428 | 26 | """ | ||
429 | 27 | from . import host # noqa | ||
430 | 28 | from . import benchmark # noqa | ||
431 | 29 | from . import unitdata # noqa | ||
432 | 30 | from . import hookenv # noqa | ||
433 | 0 | 31 | ||
434 | === added file 'hooks/charmhelpers/cli/hookenv.py' | |||
435 | --- hooks/charmhelpers/cli/hookenv.py 1970-01-01 00:00:00 +0000 | |||
436 | +++ hooks/charmhelpers/cli/hookenv.py 2021-05-12 04:07:51 +0000 | |||
437 | @@ -0,0 +1,21 @@ | |||
438 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
439 | 2 | # | ||
440 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
441 | 4 | # you may not use this file except in compliance with the License. | ||
442 | 5 | # You may obtain a copy of the License at | ||
443 | 6 | # | ||
444 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
445 | 8 | # | ||
446 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
447 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
448 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
449 | 12 | # See the License for the specific language governing permissions and | ||
450 | 13 | # limitations under the License. | ||
451 | 14 | |||
452 | 15 | from . import cmdline | ||
453 | 16 | from charmhelpers.core import hookenv | ||
454 | 17 | |||
455 | 18 | |||
# Register selected charmhelpers.core.hookenv helpers as chlp sub-commands.
# NOTE(review): relation_id and remote_service_name are registered via their
# ``_wrapped`` attribute -- presumably the undecorated callables behind a
# caching decorator in hookenv; confirm against charmhelpers.core.hookenv.
cmdline.subcommand('relation-id')(hookenv.relation_id._wrapped)
cmdline.subcommand('service-name')(hookenv.service_name)
cmdline.subcommand('remote-service-name')(hookenv.remote_service_name._wrapped)
459 | 0 | 22 | ||
460 | === added file 'hooks/charmhelpers/cli/host.py' | |||
461 | --- hooks/charmhelpers/cli/host.py 1970-01-01 00:00:00 +0000 | |||
462 | +++ hooks/charmhelpers/cli/host.py 2021-05-12 04:07:51 +0000 | |||
463 | @@ -0,0 +1,29 @@ | |||
464 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
465 | 2 | # | ||
466 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
467 | 4 | # you may not use this file except in compliance with the License. | ||
468 | 5 | # You may obtain a copy of the License at | ||
469 | 6 | # | ||
470 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
471 | 8 | # | ||
472 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
473 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
474 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
475 | 12 | # See the License for the specific language governing permissions and | ||
476 | 13 | # limitations under the License. | ||
477 | 14 | |||
478 | 15 | from . import cmdline | ||
479 | 16 | from charmhelpers.core import host | ||
480 | 17 | |||
481 | 18 | |||
@cmdline.subcommand()
def mounts():
    "List mounts"
    # Expose charmhelpers.core.host.mounts() as the 'mounts' chlp
    # sub-command; the docstring above doubles as the command summary.
    return host.mounts()
486 | 23 | |||
487 | 24 | |||
@cmdline.subcommand_builder('service', description="Control system services")
def service(subparser):
    # Declare the positional arguments for the 'service' sub-command
    # (action first, then the service name) and return the host.service
    # callable that performs the requested action.
    arguments = (
        ("action", "The action to perform (start, stop, etc...)"),
        ("service_name", "Name of the service to control"),
    )
    for arg_name, arg_help in arguments:
        subparser.add_argument(arg_name, help=arg_help)
    return host.service
493 | 0 | 30 | ||
494 | === added file 'hooks/charmhelpers/cli/unitdata.py' | |||
495 | --- hooks/charmhelpers/cli/unitdata.py 1970-01-01 00:00:00 +0000 | |||
496 | +++ hooks/charmhelpers/cli/unitdata.py 2021-05-12 04:07:51 +0000 | |||
497 | @@ -0,0 +1,46 @@ | |||
498 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
499 | 2 | # | ||
500 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
501 | 4 | # you may not use this file except in compliance with the License. | ||
502 | 5 | # You may obtain a copy of the License at | ||
503 | 6 | # | ||
504 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
505 | 8 | # | ||
506 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
507 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
508 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
509 | 12 | # See the License for the specific language governing permissions and | ||
510 | 13 | # limitations under the License. | ||
511 | 14 | |||
512 | 15 | from . import cmdline | ||
513 | 16 | from charmhelpers.core import unitdata | ||
514 | 17 | |||
515 | 18 | |||
@cmdline.subcommand_builder('unitdata', description="Store and retrieve data")
def unitdata_cmd(subparser):
    # Build the 'get', 'getrange' and 'set' sub-parsers of the 'unitdata'
    # chlp sub-command, then return the handler that routes the parsed
    # arguments to the unit's key/value store.
    nested = subparser.add_subparsers()

    parser_get = nested.add_parser('get', help='Retrieve data')
    parser_get.add_argument('key', help='Key to retrieve the value of')
    parser_get.set_defaults(action='get', value=None)

    parser_getrange = nested.add_parser('getrange', help='Retrieve multiple data')
    parser_getrange.add_argument('key', metavar='prefix',
                                 help='Prefix of the keys to retrieve')
    parser_getrange.set_defaults(action='getrange', value=None)

    parser_set = nested.add_parser('set', help='Store data')
    parser_set.add_argument('key', help='Key to set')
    parser_set.add_argument('value', help='Value to store')
    parser_set.set_defaults(action='set')

    def _dispatch(action, key, value):
        # Route to the kv store; 'set' flushes immediately so the value
        # survives the current process, and returns '' (not None) so the
        # CLI prints nothing rather than 'None'.
        if action == 'get':
            return unitdata.kv().get(key)
        if action == 'getrange':
            return unitdata.kv().getrange(key)
        if action == 'set':
            unitdata.kv().set(key, value)
            unitdata.kv().flush()
            return ''
    return _dispatch
544 | 0 | 47 | ||
545 | === added file 'hooks/charmhelpers/context.py' | |||
546 | --- hooks/charmhelpers/context.py 1970-01-01 00:00:00 +0000 | |||
547 | +++ hooks/charmhelpers/context.py 2021-05-12 04:07:51 +0000 | |||
548 | @@ -0,0 +1,205 @@ | |||
549 | 1 | # Copyright 2015 Canonical Limited. | ||
550 | 2 | # | ||
551 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
552 | 4 | # you may not use this file except in compliance with the License. | ||
553 | 5 | # You may obtain a copy of the License at | ||
554 | 6 | # | ||
555 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
556 | 8 | # | ||
557 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
558 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
559 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
560 | 12 | # See the License for the specific language governing permissions and | ||
561 | 13 | # limitations under the License. | ||
562 | 14 | |||
563 | 15 | ''' | ||
564 | 16 | A Pythonic API to interact with the charm hook environment. | ||
565 | 17 | |||
566 | 18 | :author: Stuart Bishop <stuart.bishop@canonical.com> | ||
567 | 19 | ''' | ||
568 | 20 | |||
569 | 21 | import six | ||
570 | 22 | |||
571 | 23 | from charmhelpers.core import hookenv | ||
572 | 24 | |||
573 | 25 | from collections import OrderedDict | ||
574 | 26 | if six.PY3: | ||
575 | 27 | from collections import UserDict # pragma: nocover | ||
576 | 28 | else: | ||
577 | 29 | from UserDict import IterableUserDict as UserDict # pragma: nocover | ||
578 | 30 | |||
579 | 31 | |||
class Relations(OrderedDict):
    '''Mapping relation name -> relation id -> Relation.

    Builds the full two-level map of every relation this unit
    participates in, ordered by relation name and, within a name,
    numerically by relation id.

    >>> rels = Relations()
    >>> rels['sprog']['sprog:12']['client/6']['widget']
    'remote widget'
    >>> rels['sprog']['sprog:12'].local['widget'] = 'local widget'
    >>> rels['sprog']['sprog:12'].local['widget']
    'local widget'
    >>> rels.peer.local['widget']
    'local widget on the peer relation'
    '''
    def __init__(self):
        super(Relations, self).__init__()
        for relname in sorted(hookenv.relation_types()):
            # Sort ids numerically ('foo:10' after 'foo:9', not before).
            by_id = OrderedDict()
            ordered_ids = sorted(hookenv.relation_ids(relname),
                                 key=lambda rid: int(rid.split(':', 1)[-1]))
            for relid in ordered_ids:
                by_id[relid] = Relation(relid)
            self[relname] = by_id

    @property
    def peer(self):
        # The Relation whose id matches the unit's peer relation, if any.
        peer_relid = hookenv.peer_relation_id()
        for candidates in self.values():
            if peer_relid in candidates:
                return candidates[peer_relid]
        return None
607 | 59 | |||
608 | 60 | |||
class Relation(OrderedDict):
    '''Mapping of unit -> remote RelationInfo for a relation.

    This is an OrderedDict mapping, ordered numerically by unit number.

    Also provides access to the local RelationInfo, and peer RelationInfo
    instances by the 'local' and 'peers' attributes.

    >>> r = Relation('sprog:12')
    >>> r.keys()
    ['client/9', 'client/10']  # Ordered numerically
    >>> r['client/10']['widget']  # A remote RelationInfo setting
    'remote widget'
    >>> r.local['widget']  # The local RelationInfo setting
    'local widget'
    '''
    relid = None    # The relation id.
    relname = None  # The relation name (also known as relation type).
    service = None  # The remote service name, if known.
    local = None    # The local end's RelationInfo.
    peers = None    # Map of peer -> RelationInfo. None if no peer relation.

    def __init__(self, relid):
        # Remote units sorted numerically by unit number so iteration
        # order is stable and 'client/10' follows 'client/9'.
        remote_units = hookenv.related_units(relid)
        remote_units.sort(key=lambda u: int(u.split('/', 1)[-1]))
        super(Relation, self).__init__((unit, RelationInfo(relid, unit))
                                       for unit in remote_units)

        self.relname = relid.split(':', 1)[0]
        self.relid = relid
        self.local = RelationInfo(relid, hookenv.local_unit())

        # Every remote unit on one relation belongs to the same service,
        # so the first RelationInfo (if any) determines the service name.
        for relinfo in self.values():
            self.service = relinfo.service
            break

        # If we have peers, and they have joined both the provided peer
        # relation and this relation, we can peek at their data too.
        # This is useful for creating consensus without leadership.
        peer_relid = hookenv.peer_relation_id()
        if peer_relid and peer_relid != relid:
            peers = hookenv.related_units(peer_relid)
            if peers:
                peers.sort(key=lambda u: int(u.split('/', 1)[-1]))
                # NOTE(review): each peer's data is read from *this*
                # relation (relid), not from the peer relation -- only
                # peers that also joined this relation expose data here.
                self.peers = OrderedDict((peer, RelationInfo(relid, peer))
                                         for peer in peers)
            else:
                self.peers = OrderedDict()
        else:
            # No peer relation declared (or this *is* the peer relation).
            self.peers = None

    def __str__(self):
        return '{} ({})'.format(self.relid, self.service)
663 | 115 | |||
664 | 116 | |||
class RelationInfo(UserDict):
    '''The bag of data at one end of a relation.

    Each unit participating in a relation exposes exactly one bag of
    settings on that relation; this class is that bag.

    Only the local unit's bag may be written to.  Remote bags are
    immutable and stay fixed for the duration of the hook.  Writes to
    the local bag become visible to other units only once the hook
    finishes successfully; a failed hook rolls them back.

    Setting an item to None deletes it (Juju semantics), unlike a
    standard Python mapping:

    >>> relinfo = RelationInfo('db:12')  # Default is the local unit.
    >>> relinfo['user'] = 'fred'
    >>> relinfo['user']
    'fred'
    >>> relinfo['user'] = None
    >>> 'fred' in relinfo
    False

    Reads and writes go straight through hookenv.relation_get and
    hookenv.relation_set; caching and synchronization are left entirely
    to those two functions, and data is fetched on demand.
    '''
    relid = None    # The relation id.
    relname = None  # The relation name (also know as the relation type).
    unit = None     # The unit id.
    number = None   # The unit number (integer).
    service = None  # The service name.

    def __init__(self, relid, unit):
        self.relid = relid
        self.relname = relid.split(':', 1)[0]
        self.unit = unit
        # 'client/7' -> service 'client', unit number 7.
        self.service, num = self.unit.split('/', 1)
        self.number = int(num)

    def __str__(self):
        return '{} ({})'.format(self.relid, self.unit)

    @property
    def data(self):
        # Fetched fresh on every access; hookenv handles any caching.
        return hookenv.relation_get(rid=self.relid, unit=self.unit)

    def __setitem__(self, key, value):
        if self.unit != hookenv.local_unit():
            # Remote bags are read-only.
            raise TypeError('Attempting to set {} on remote unit {}'
                            ''.format(key, self.unit))
        if value is not None and not isinstance(value, six.string_types):
            # We don't do implicit casting. This would cause simple
            # types like integers to be read back as strings in subsequent
            # hooks, and mutable types would require a lot of wrapping
            # to ensure relation-set gets called when they are mutated.
            raise ValueError('Only string values allowed')
        hookenv.relation_set(self.relid, {key: value})

    def __delitem__(self, key):
        # Deleting a key and setting it to null is the same thing in
        # Juju relations.
        self[key] = None
729 | 181 | |||
730 | 182 | |||
class Leader(UserDict):
    '''Dict-like view of the Juju leadership settings.

    Reads go through hookenv.leader_get on every access; writes are only
    permitted on the leader unit and must be strings (or None, which
    deletes the key, matching Juju semantics).
    '''
    def __init__(self):
        # Deliberately skip UserDict.__init__: it would assign a plain
        # dict to self.data, shadowing the read-through property below.
        pass

    @property
    def data(self):
        return hookenv.leader_get()

    def __setitem__(self, key, value):
        # Leadership check first: a non-leader gets TypeError regardless
        # of the value it tried to store.
        if not hookenv.is_leader():
            raise TypeError('Not the leader. Cannot change leader settings.')
        if value is not None and not isinstance(value, six.string_types):
            # We don't do implicit casting. This would cause simple
            # types like integers to be read back as strings in subsequent
            # hooks, and mutable types would require a lot of wrapping
            # to ensure leader-set gets called when they are mutated.
            raise ValueError('Only string values allowed')
        hookenv.leader_set({key: value})

    def __delitem__(self, key):
        # Deleting a key and setting it to null is the same thing in
        # Juju leadership settings.
        self[key] = None
754 | 0 | 206 | ||
755 | === added directory 'hooks/charmhelpers/contrib/amulet' | |||
756 | === added file 'hooks/charmhelpers/contrib/amulet/__init__.py' | |||
757 | --- hooks/charmhelpers/contrib/amulet/__init__.py 1970-01-01 00:00:00 +0000 | |||
758 | +++ hooks/charmhelpers/contrib/amulet/__init__.py 2021-05-12 04:07:51 +0000 | |||
759 | @@ -0,0 +1,13 @@ | |||
760 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
761 | 2 | # | ||
762 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
763 | 4 | # you may not use this file except in compliance with the License. | ||
764 | 5 | # You may obtain a copy of the License at | ||
765 | 6 | # | ||
766 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
767 | 8 | # | ||
768 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
769 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
770 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
771 | 12 | # See the License for the specific language governing permissions and | ||
772 | 13 | # limitations under the License. | ||
773 | 0 | 14 | ||
774 | === added file 'hooks/charmhelpers/contrib/amulet/deployment.py' | |||
775 | --- hooks/charmhelpers/contrib/amulet/deployment.py 1970-01-01 00:00:00 +0000 | |||
776 | +++ hooks/charmhelpers/contrib/amulet/deployment.py 2021-05-12 04:07:51 +0000 | |||
777 | @@ -0,0 +1,99 @@ | |||
778 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
779 | 2 | # | ||
780 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
781 | 4 | # you may not use this file except in compliance with the License. | ||
782 | 5 | # You may obtain a copy of the License at | ||
783 | 6 | # | ||
784 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
785 | 8 | # | ||
786 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
787 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
788 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
789 | 12 | # See the License for the specific language governing permissions and | ||
790 | 13 | # limitations under the License. | ||
791 | 14 | |||
792 | 15 | import amulet | ||
793 | 16 | import os | ||
794 | 17 | import six | ||
795 | 18 | |||
796 | 19 | |||
class AmuletDeployment(object):
    """Amulet deployment.

    This class provides generic Amulet deployment and test runner
    methods.
    """

    def __init__(self, series=None):
        """Initialize the deployment environment.

        :param series: optional Ubuntu series codename (e.g. 'trusty');
            when given it is passed through to amulet.Deployment and used
            to build charm-store locations for other services.
        """
        self.series = None

        if series:
            self.series = series
            self.d = amulet.Deployment(series=self.series)
        else:
            self.d = amulet.Deployment()

    def _add_services(self, this_service, other_services):
        """Add services.

        Add services to the deployment where this_service is the local charm
        that we're testing and other_services are the other services that
        are being used in the local amulet tests.

        :param this_service: dict with at least a 'name' key; 'units',
            'constraints' and 'storage' are optional.
        :param other_services: list of dicts shaped like this_service,
            each optionally carrying a 'location' (explicit charm source).
        """
        # Amulet deploys the local charm from the cwd, whose directory
        # name must match the charm name.
        if this_service['name'] != os.path.basename(os.getcwd()):
            s = this_service['name']
            msg = "The charm's root directory name needs to be {}".format(s)
            amulet.raise_status(amulet.FAIL, msg=msg)

        if 'units' not in this_service:
            this_service['units'] = 1

        self.d.add(this_service['name'], units=this_service['units'],
                   constraints=this_service.get('constraints'),
                   storage=this_service.get('storage'))

        for svc in other_services:
            if 'location' in svc:
                branch_location = svc['location']
            elif self.series:
                # BUG FIX: the original line ended with a stray comma,
                # turning branch_location into a 1-tuple instead of the
                # charm-store URL string amulet expects.
                branch_location = 'cs:{}/{}'.format(self.series, svc['name'])
            else:
                branch_location = None

            if 'units' not in svc:
                svc['units'] = 1

            self.d.add(svc['name'], charm=branch_location, units=svc['units'],
                       constraints=svc.get('constraints'),
                       storage=svc.get('storage'))

    def _add_relations(self, relations):
        """Add all of the relations for the services.

        :param relations: dict mapping 'service:interface' endpoint pairs.
        """
        for k, v in six.iteritems(relations):
            self.d.relate(k, v)

    def _configure_services(self, configs):
        """Configure all of the services.

        :param configs: dict mapping service name -> config dict.
        """
        for service, config in six.iteritems(configs):
            self.d.configure(service, config)

    def _deploy(self):
        """Deploy environment and wait for all hooks to finish executing.

        The timeout (seconds) is taken from $AMULET_SETUP_TIMEOUT,
        defaulting to 900.
        """
        timeout = int(os.environ.get('AMULET_SETUP_TIMEOUT', 900))
        try:
            self.d.setup(timeout=timeout)
            self.d.sentry.wait(timeout=timeout)
        except amulet.helpers.TimeoutError:
            # Translate the timeout into an amulet FAIL status so the
            # test harness reports it cleanly.
            amulet.raise_status(
                amulet.FAIL,
                msg="Deployment timed out ({}s)".format(timeout)
            )
        # NOTE: the original also had `except Exception: raise`, a no-op
        # re-raise, removed here; other exceptions propagate unchanged.

    def run_tests(self):
        """Run all of the methods that are prefixed with 'test_'."""
        for test in dir(self):
            if test.startswith('test_'):
                getattr(self, test)()
877 | 0 | 100 | ||
878 | === added file 'hooks/charmhelpers/contrib/amulet/utils.py' | |||
879 | --- hooks/charmhelpers/contrib/amulet/utils.py 1970-01-01 00:00:00 +0000 | |||
880 | +++ hooks/charmhelpers/contrib/amulet/utils.py 2021-05-12 04:07:51 +0000 | |||
881 | @@ -0,0 +1,820 @@ | |||
882 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
883 | 2 | # | ||
884 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
885 | 4 | # you may not use this file except in compliance with the License. | ||
886 | 5 | # You may obtain a copy of the License at | ||
887 | 6 | # | ||
888 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
889 | 8 | # | ||
890 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
891 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
892 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
893 | 12 | # See the License for the specific language governing permissions and | ||
894 | 13 | # limitations under the License. | ||
895 | 14 | |||
896 | 15 | import io | ||
897 | 16 | import json | ||
898 | 17 | import logging | ||
899 | 18 | import os | ||
900 | 19 | import re | ||
901 | 20 | import socket | ||
902 | 21 | import subprocess | ||
903 | 22 | import sys | ||
904 | 23 | import time | ||
905 | 24 | import uuid | ||
906 | 25 | |||
907 | 26 | import amulet | ||
908 | 27 | import distro_info | ||
909 | 28 | import six | ||
910 | 29 | from six.moves import configparser | ||
911 | 30 | if six.PY3: | ||
912 | 31 | from urllib import parse as urlparse | ||
913 | 32 | else: | ||
914 | 33 | import urlparse | ||
915 | 34 | |||
916 | 35 | |||
917 | 36 | class AmuletUtils(object): | ||
918 | 37 | """Amulet utilities. | ||
919 | 38 | |||
920 | 39 | This class provides common utility functions that are used by Amulet | ||
921 | 40 | tests. | ||
922 | 41 | """ | ||
923 | 42 | |||
    def __init__(self, log_level=logging.ERROR):
        # Shared logger for all helper methods; defaults to ERROR so test
        # runs stay quiet unless a caller asks for more verbosity.
        self.log = self.get_logger(level=log_level)
        # Ordered sequence of Ubuntu release codenames used for release
        # comparisons (e.g. init-system detection in
        # validate_services_by_name).
        # NOTE(review): get_ubuntu_releases() is defined outside this
        # chunk -- presumably built from distro_info; confirm ordering is
        # oldest-first, as the index comparisons below assume.
        self.ubuntu_releases = self.get_ubuntu_releases()
927 | 46 | |||
928 | 47 | def get_logger(self, name="amulet-logger", level=logging.DEBUG): | ||
929 | 48 | """Get a logger object that will log to stdout.""" | ||
930 | 49 | log = logging | ||
931 | 50 | logger = log.getLogger(name) | ||
932 | 51 | fmt = log.Formatter("%(asctime)s %(funcName)s " | ||
933 | 52 | "%(levelname)s: %(message)s") | ||
934 | 53 | |||
935 | 54 | handler = log.StreamHandler(stream=sys.stdout) | ||
936 | 55 | handler.setLevel(level) | ||
937 | 56 | handler.setFormatter(fmt) | ||
938 | 57 | |||
939 | 58 | logger.addHandler(handler) | ||
940 | 59 | logger.setLevel(level) | ||
941 | 60 | |||
942 | 61 | return logger | ||
943 | 62 | |||
944 | 63 | def valid_ip(self, ip): | ||
945 | 64 | if re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", ip): | ||
946 | 65 | return True | ||
947 | 66 | else: | ||
948 | 67 | return False | ||
949 | 68 | |||
950 | 69 | def valid_url(self, url): | ||
951 | 70 | p = re.compile( | ||
952 | 71 | r'^(?:http|ftp)s?://' | ||
953 | 72 | r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # noqa | ||
954 | 73 | r'localhost|' | ||
955 | 74 | r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' | ||
956 | 75 | r'(?::\d+)?' | ||
957 | 76 | r'(?:/?|[/?]\S+)$', | ||
958 | 77 | re.IGNORECASE) | ||
959 | 78 | if p.match(url): | ||
960 | 79 | return True | ||
961 | 80 | else: | ||
962 | 81 | return False | ||
963 | 82 | |||
    def get_ubuntu_release_from_sentry(self, sentry_unit):
        """Get Ubuntu release codename from sentry unit.

        Runs ``lsb_release -cs`` on the unit over ssh and sanity-checks
        the result against the known Ubuntu release list.

        :param sentry_unit: amulet sentry/service unit pointer
        :returns: tuple of (release codename, failure message or None)
        """
        msg = None
        cmd = 'lsb_release -cs'
        release, code = sentry_unit.ssh(cmd)
        if code == 0:
            self.log.debug('{} lsb_release: {}'.format(
                sentry_unit.info['unit_name'], release))
        else:
            msg = ('{} `{}` returned {} '
                   '{}'.format(sentry_unit.info['unit_name'],
                               cmd, release, code))
        # NOTE(review): this membership check runs even when ssh failed
        # above, and its message *overwrites* the ssh-failure message, so
        # the caller only sees the last error recorded.
        if release not in self.ubuntu_releases:
            msg = ("Release ({}) not found in Ubuntu releases "
                   "({})".format(release, self.ubuntu_releases))
        return release, msg
984 | 103 | |||
    def validate_services(self, commands):
        """Validate that lists of commands succeed on service units. Can be
        used to verify system services are running on the corresponding
        service units.

        :param commands: dict with sentry keys and arbitrary command list vals
        :returns: None if successful, Failure string message otherwise
        """
        self.log.debug('Checking status of system services...')

        # /!\ DEPRECATION WARNING (beisner):
        # New and existing tests should be rewritten to use
        # validate_services_by_name() as it is aware of init systems.
        self.log.warn('DEPRECATION WARNING: use '
                      'validate_services_by_name instead of validate_services '
                      'due to init system differences.')

        # Run every command on its sentry unit; the first non-zero exit
        # status short-circuits with a failure message.
        for k, v in six.iteritems(commands):
            for cmd in v:
                output, code = k.run(cmd)
                self.log.debug('{} `{}` returned '
                               '{}'.format(k.info['unit_name'],
                                           cmd, code))
                if code != 0:
                    return "command `{}` returned {}".format(cmd, str(code))
        return None
1011 | 130 | |||
    def validate_services_by_name(self, sentry_services):
        """Validate system service status by service name, automatically
        detecting init system based on Ubuntu release codename.

        :param sentry_services: dict with sentry keys and svc list values
        :returns: None if successful, Failure string message otherwise
        """
        self.log.debug('Checking status of system services...')

        # Point at which systemd became a thing
        systemd_switch = self.ubuntu_releases.index('vivid')

        for sentry_unit, services_list in six.iteritems(sentry_services):
            # Get lsb_release codename from unit
            release, ret = self.get_ubuntu_release_from_sentry(sentry_unit)
            if ret:
                return ret

            for service_name in services_list:
                # NOTE(review): rabbitmq-server/apache2/memcached take the
                # `service ... status` path even on pre-systemd releases --
                # presumably because they ship sysv init scripts there;
                # confirm before relying on this list.
                if (self.ubuntu_releases.index(release) >= systemd_switch or
                        service_name in ['rabbitmq-server', 'apache2',
                                         'memcached']):
                    # init is systemd (or regular sysv)
                    cmd = 'sudo service {} status'.format(service_name)
                    output, code = sentry_unit.run(cmd)
                    service_running = code == 0
                elif self.ubuntu_releases.index(release) < systemd_switch:
                    # init is upstart
                    cmd = 'sudo status {}'.format(service_name)
                    output, code = sentry_unit.run(cmd)
                    # upstart's exit code is 0 even for stopped jobs, so
                    # the output text must be checked as well.
                    service_running = code == 0 and "start/running" in output

                self.log.debug('{} `{}` returned '
                               '{}'.format(sentry_unit.info['unit_name'],
                                           cmd, code))
                if not service_running:
                    return u"command `{}` returned {} {}".format(
                        cmd, output, str(code))
        return None
1051 | 170 | |||
    def _get_config(self, unit, filename):
        """Get a ConfigParser object for parsing a unit's config file.

        :param unit: amulet sentry unit providing file_contents()
        :param filename: path of the config file on the unit
        :returns: ConfigParser loaded with the file's contents
        """
        file_contents = unit.file_contents(filename)

        # NOTE(beisner): by default, ConfigParser does not handle options
        # with no value, such as the flags used in the mysql my.cnf file.
        # https://bugs.python.org/issue7005
        # NOTE(review): readfp() is deprecated on Python 3 in favour of
        # read_string(), but read_string() does not exist on the Python 2
        # ConfigParser that six.moves resolves to, so readfp is kept for
        # py2/py3 compatibility.
        config = configparser.ConfigParser(allow_no_value=True)
        config.readfp(io.StringIO(file_contents))
        return config
1062 | 181 | |||
    def validate_config_data(self, sentry_unit, config_file, section,
                             expected):
        """Validate config file data.

        Verify that the specified section of the config file contains
        the expected option key:value pairs.

        Compare expected dictionary data vs actual dictionary data.
        The values in the 'expected' dictionary can be strings, bools, ints,
        longs, or can be a function that evaluates a variable and returns a
        bool.

        :param sentry_unit: amulet sentry unit holding the config file
        :param config_file: path of the config file on the unit
        :param section: section name, or 'DEFAULT' for the default section
        :param expected: dict of option -> expected value (or predicate)
        :returns: None if successful, failure string message otherwise
        """
        self.log.debug('Validating config file data ({} in {} on {})'
                       '...'.format(section, config_file,
                                    sentry_unit.info['unit_name']))
        config = self._get_config(sentry_unit, config_file)

        # 'DEFAULT' is implicit in ConfigParser and never listed as a
        # real section, so it is exempt from the existence check.
        if section != 'DEFAULT' and not config.has_section(section):
            return "section [{}] does not exist".format(section)

        for k in expected.keys():
            if not config.has_option(section, k):
                return "section [{}] is missing option {}".format(section, k)

            actual = config.get(section, k)
            v = expected[k]
            if (isinstance(v, six.string_types) or
                    isinstance(v, bool) or
                    isinstance(v, six.integer_types)):
                # handle explicit values
                if actual != v:
                    return "section [{}] {}:{} != expected {}:{}".format(
                        section, k, actual, k, expected[k])
            # handle function pointers, such as not_null or valid_ip
            elif not v(actual):
                return "section [{}] {}:{} != expected {}:{}".format(
                    section, k, actual, k, expected[k])
        return None
1101 | 220 | |||
1102 | 221 | def _validate_dict_data(self, expected, actual): | ||
1103 | 222 | """Validate dictionary data. | ||
1104 | 223 | |||
1105 | 224 | Compare expected dictionary data vs actual dictionary data. | ||
1106 | 225 | The values in the 'expected' dictionary can be strings, bools, ints, | ||
1107 | 226 | longs, or can be a function that evaluates a variable and returns a | ||
1108 | 227 | bool. | ||
1109 | 228 | """ | ||
1110 | 229 | self.log.debug('actual: {}'.format(repr(actual))) | ||
1111 | 230 | self.log.debug('expected: {}'.format(repr(expected))) | ||
1112 | 231 | |||
1113 | 232 | for k, v in six.iteritems(expected): | ||
1114 | 233 | if k in actual: | ||
1115 | 234 | if (isinstance(v, six.string_types) or | ||
1116 | 235 | isinstance(v, bool) or | ||
1117 | 236 | isinstance(v, six.integer_types)): | ||
1118 | 237 | # handle explicit values | ||
1119 | 238 | if v != actual[k]: | ||
1120 | 239 | return "{}:{}".format(k, actual[k]) | ||
1121 | 240 | # handle function pointers, such as not_null or valid_ip | ||
1122 | 241 | elif not v(actual[k]): | ||
1123 | 242 | return "{}:{}".format(k, actual[k]) | ||
1124 | 243 | else: | ||
1125 | 244 | return "key '{}' does not exist".format(k) | ||
1126 | 245 | return None | ||
1127 | 246 | |||
1128 | 247 | def validate_relation_data(self, sentry_unit, relation, expected): | ||
1129 | 248 | """Validate actual relation data based on expected relation data.""" | ||
1130 | 249 | actual = sentry_unit.relation(relation[0], relation[1]) | ||
1131 | 250 | return self._validate_dict_data(expected, actual) | ||
1132 | 251 | |||
1133 | 252 | def _validate_list_data(self, expected, actual): | ||
1134 | 253 | """Compare expected list vs actual list data.""" | ||
1135 | 254 | for e in expected: | ||
1136 | 255 | if e not in actual: | ||
1137 | 256 | return "expected item {} not found in actual list".format(e) | ||
1138 | 257 | return None | ||
1139 | 258 | |||
1140 | 259 | def not_null(self, string): | ||
1141 | 260 | if string is not None: | ||
1142 | 261 | return True | ||
1143 | 262 | else: | ||
1144 | 263 | return False | ||
1145 | 264 | |||
1146 | 265 | def _get_file_mtime(self, sentry_unit, filename): | ||
1147 | 266 | """Get last modification time of file.""" | ||
1148 | 267 | return sentry_unit.file_stat(filename)['mtime'] | ||
1149 | 268 | |||
1150 | 269 | def _get_dir_mtime(self, sentry_unit, directory): | ||
1151 | 270 | """Get last modification time of directory.""" | ||
1152 | 271 | return sentry_unit.directory_stat(directory)['mtime'] | ||
1153 | 272 | |||
    def _get_proc_start_time(self, sentry_unit, service, pgrep_full=None):
        """Get start time of a process based on the last modification time
        of the /proc/pid directory.

        :param sentry_unit: The sentry unit to check for the service on
        :param service: service name to look for in process table
        :param pgrep_full: [Deprecated] Use full command line search mode
                           with pgrep
        :returns: epoch time of service process start (mtime of /proc/<pid>)
        """
        # get_process_id_list raises amulet.FAIL when the process is not
        # found, so pid_list is non-empty here; use the first PID only.
        pid_list = self.get_process_id_list(
            sentry_unit, service, pgrep_full=pgrep_full)
        pid = pid_list[0]
        proc_dir = '/proc/{}'.format(pid)
        self.log.debug('Pid for {} on {}: {}'.format(
            service, sentry_unit.info['unit_name'], pid))

        return self._get_dir_mtime(sentry_unit, proc_dir)
1174 | 293 | |||
1175 | 294 | def service_restarted(self, sentry_unit, service, filename, | ||
1176 | 295 | pgrep_full=None, sleep_time=20): | ||
1177 | 296 | """Check if service was restarted. | ||
1178 | 297 | |||
1179 | 298 | Compare a service's start time vs a file's last modification time | ||
1180 | 299 | (such as a config file for that service) to determine if the service | ||
1181 | 300 | has been restarted. | ||
1182 | 301 | """ | ||
1183 | 302 | # /!\ DEPRECATION WARNING (beisner): | ||
1184 | 303 | # This method is prone to races in that no before-time is known. | ||
1185 | 304 | # Use validate_service_config_changed instead. | ||
1186 | 305 | |||
1187 | 306 | # NOTE(beisner) pgrep_full is no longer implemented, as pidof is now | ||
1188 | 307 | # used instead of pgrep. pgrep_full is still passed through to ensure | ||
1189 | 308 | # deprecation WARNS. lp1474030 | ||
1190 | 309 | self.log.warn('DEPRECATION WARNING: use ' | ||
1191 | 310 | 'validate_service_config_changed instead of ' | ||
1192 | 311 | 'service_restarted due to known races.') | ||
1193 | 312 | |||
1194 | 313 | time.sleep(sleep_time) | ||
1195 | 314 | if (self._get_proc_start_time(sentry_unit, service, pgrep_full) >= | ||
1196 | 315 | self._get_file_mtime(sentry_unit, filename)): | ||
1197 | 316 | return True | ||
1198 | 317 | else: | ||
1199 | 318 | return False | ||
1200 | 319 | |||
1201 | 320 | def service_restarted_since(self, sentry_unit, mtime, service, | ||
1202 | 321 | pgrep_full=None, sleep_time=20, | ||
1203 | 322 | retry_count=30, retry_sleep_time=10): | ||
1204 | 323 | """Check if service was been started after a given time. | ||
1205 | 324 | |||
1206 | 325 | Args: | ||
1207 | 326 | sentry_unit (sentry): The sentry unit to check for the service on | ||
1208 | 327 | mtime (float): The epoch time to check against | ||
1209 | 328 | service (string): service name to look for in process table | ||
1210 | 329 | pgrep_full: [Deprecated] Use full command line search mode with pgrep | ||
1211 | 330 | sleep_time (int): Initial sleep time (s) before looking for file | ||
1212 | 331 | retry_sleep_time (int): Time (s) to sleep between retries | ||
1213 | 332 | retry_count (int): If file is not found, how many times to retry | ||
1214 | 333 | |||
1215 | 334 | Returns: | ||
1216 | 335 | bool: True if service found and its start time it newer than mtime, | ||
1217 | 336 | False if service is older than mtime or if service was | ||
1218 | 337 | not found. | ||
1219 | 338 | """ | ||
1220 | 339 | # NOTE(beisner) pgrep_full is no longer implemented, as pidof is now | ||
1221 | 340 | # used instead of pgrep. pgrep_full is still passed through to ensure | ||
1222 | 341 | # deprecation WARNS. lp1474030 | ||
1223 | 342 | |||
1224 | 343 | unit_name = sentry_unit.info['unit_name'] | ||
1225 | 344 | self.log.debug('Checking that %s service restarted since %s on ' | ||
1226 | 345 | '%s' % (service, mtime, unit_name)) | ||
1227 | 346 | time.sleep(sleep_time) | ||
1228 | 347 | proc_start_time = None | ||
1229 | 348 | tries = 0 | ||
1230 | 349 | while tries <= retry_count and not proc_start_time: | ||
1231 | 350 | try: | ||
1232 | 351 | proc_start_time = self._get_proc_start_time(sentry_unit, | ||
1233 | 352 | service, | ||
1234 | 353 | pgrep_full) | ||
1235 | 354 | self.log.debug('Attempt {} to get {} proc start time on {} ' | ||
1236 | 355 | 'OK'.format(tries, service, unit_name)) | ||
1237 | 356 | except IOError as e: | ||
1238 | 357 | # NOTE(beisner) - race avoidance, proc may not exist yet. | ||
1239 | 358 | # https://bugs.launchpad.net/charm-helpers/+bug/1474030 | ||
1240 | 359 | self.log.debug('Attempt {} to get {} proc start time on {} ' | ||
1241 | 360 | 'failed\n{}'.format(tries, service, | ||
1242 | 361 | unit_name, e)) | ||
1243 | 362 | time.sleep(retry_sleep_time) | ||
1244 | 363 | tries += 1 | ||
1245 | 364 | |||
1246 | 365 | if not proc_start_time: | ||
1247 | 366 | self.log.warn('No proc start time found, assuming service did ' | ||
1248 | 367 | 'not start') | ||
1249 | 368 | return False | ||
1250 | 369 | if proc_start_time >= mtime: | ||
1251 | 370 | self.log.debug('Proc start time is newer than provided mtime' | ||
1252 | 371 | '(%s >= %s) on %s (OK)' % (proc_start_time, | ||
1253 | 372 | mtime, unit_name)) | ||
1254 | 373 | return True | ||
1255 | 374 | else: | ||
1256 | 375 | self.log.warn('Proc start time (%s) is older than provided mtime ' | ||
1257 | 376 | '(%s) on %s, service did not ' | ||
1258 | 377 | 'restart' % (proc_start_time, mtime, unit_name)) | ||
1259 | 378 | return False | ||
1260 | 379 | |||
1261 | 380 | def config_updated_since(self, sentry_unit, filename, mtime, | ||
1262 | 381 | sleep_time=20, retry_count=30, | ||
1263 | 382 | retry_sleep_time=10): | ||
1264 | 383 | """Check if file was modified after a given time. | ||
1265 | 384 | |||
1266 | 385 | Args: | ||
1267 | 386 | sentry_unit (sentry): The sentry unit to check the file mtime on | ||
1268 | 387 | filename (string): The file to check mtime of | ||
1269 | 388 | mtime (float): The epoch time to check against | ||
1270 | 389 | sleep_time (int): Initial sleep time (s) before looking for file | ||
1271 | 390 | retry_sleep_time (int): Time (s) to sleep between retries | ||
1272 | 391 | retry_count (int): If file is not found, how many times to retry | ||
1273 | 392 | |||
1274 | 393 | Returns: | ||
1275 | 394 | bool: True if file was modified more recently than mtime, False if | ||
1276 | 395 | file was modified before mtime, or if file not found. | ||
1277 | 396 | """ | ||
1278 | 397 | unit_name = sentry_unit.info['unit_name'] | ||
1279 | 398 | self.log.debug('Checking that %s updated since %s on ' | ||
1280 | 399 | '%s' % (filename, mtime, unit_name)) | ||
1281 | 400 | time.sleep(sleep_time) | ||
1282 | 401 | file_mtime = None | ||
1283 | 402 | tries = 0 | ||
1284 | 403 | while tries <= retry_count and not file_mtime: | ||
1285 | 404 | try: | ||
1286 | 405 | file_mtime = self._get_file_mtime(sentry_unit, filename) | ||
1287 | 406 | self.log.debug('Attempt {} to get {} file mtime on {} ' | ||
1288 | 407 | 'OK'.format(tries, filename, unit_name)) | ||
1289 | 408 | except IOError as e: | ||
1290 | 409 | # NOTE(beisner) - race avoidance, file may not exist yet. | ||
1291 | 410 | # https://bugs.launchpad.net/charm-helpers/+bug/1474030 | ||
1292 | 411 | self.log.debug('Attempt {} to get {} file mtime on {} ' | ||
1293 | 412 | 'failed\n{}'.format(tries, filename, | ||
1294 | 413 | unit_name, e)) | ||
1295 | 414 | time.sleep(retry_sleep_time) | ||
1296 | 415 | tries += 1 | ||
1297 | 416 | |||
1298 | 417 | if not file_mtime: | ||
1299 | 418 | self.log.warn('Could not determine file mtime, assuming ' | ||
1300 | 419 | 'file does not exist') | ||
1301 | 420 | return False | ||
1302 | 421 | |||
1303 | 422 | if file_mtime >= mtime: | ||
1304 | 423 | self.log.debug('File mtime is newer than provided mtime ' | ||
1305 | 424 | '(%s >= %s) on %s (OK)' % (file_mtime, | ||
1306 | 425 | mtime, unit_name)) | ||
1307 | 426 | return True | ||
1308 | 427 | else: | ||
1309 | 428 | self.log.warn('File mtime is older than provided mtime' | ||
1310 | 429 | '(%s < on %s) on %s' % (file_mtime, | ||
1311 | 430 | mtime, unit_name)) | ||
1312 | 431 | return False | ||
1313 | 432 | |||
1314 | 433 | def validate_service_config_changed(self, sentry_unit, mtime, service, | ||
1315 | 434 | filename, pgrep_full=None, | ||
1316 | 435 | sleep_time=20, retry_count=30, | ||
1317 | 436 | retry_sleep_time=10): | ||
1318 | 437 | """Check service and file were updated after mtime | ||
1319 | 438 | |||
1320 | 439 | Args: | ||
1321 | 440 | sentry_unit (sentry): The sentry unit to check for the service on | ||
1322 | 441 | mtime (float): The epoch time to check against | ||
1323 | 442 | service (string): service name to look for in process table | ||
1324 | 443 | filename (string): The file to check mtime of | ||
1325 | 444 | pgrep_full: [Deprecated] Use full command line search mode with pgrep | ||
1326 | 445 | sleep_time (int): Initial sleep in seconds to pass to test helpers | ||
1327 | 446 | retry_count (int): If service is not found, how many times to retry | ||
1328 | 447 | retry_sleep_time (int): Time in seconds to wait between retries | ||
1329 | 448 | |||
1330 | 449 | Typical Usage: | ||
1331 | 450 | u = OpenStackAmuletUtils(ERROR) | ||
1332 | 451 | ... | ||
1333 | 452 | mtime = u.get_sentry_time(self.cinder_sentry) | ||
1334 | 453 | self.d.configure('cinder', {'verbose': 'True', 'debug': 'True'}) | ||
1335 | 454 | if not u.validate_service_config_changed(self.cinder_sentry, | ||
1336 | 455 | mtime, | ||
1337 | 456 | 'cinder-api', | ||
1338 | 457 | '/etc/cinder/cinder.conf') | ||
1339 | 458 | amulet.raise_status(amulet.FAIL, msg='update failed') | ||
1340 | 459 | Returns: | ||
1341 | 460 | bool: True if both service and file where updated/restarted after | ||
1342 | 461 | mtime, False if service is older than mtime or if service was | ||
1343 | 462 | not found or if filename was modified before mtime. | ||
1344 | 463 | """ | ||
1345 | 464 | |||
1346 | 465 | # NOTE(beisner) pgrep_full is no longer implemented, as pidof is now | ||
1347 | 466 | # used instead of pgrep. pgrep_full is still passed through to ensure | ||
1348 | 467 | # deprecation WARNS. lp1474030 | ||
1349 | 468 | |||
1350 | 469 | service_restart = self.service_restarted_since( | ||
1351 | 470 | sentry_unit, mtime, | ||
1352 | 471 | service, | ||
1353 | 472 | pgrep_full=pgrep_full, | ||
1354 | 473 | sleep_time=sleep_time, | ||
1355 | 474 | retry_count=retry_count, | ||
1356 | 475 | retry_sleep_time=retry_sleep_time) | ||
1357 | 476 | |||
1358 | 477 | config_update = self.config_updated_since( | ||
1359 | 478 | sentry_unit, | ||
1360 | 479 | filename, | ||
1361 | 480 | mtime, | ||
1362 | 481 | sleep_time=sleep_time, | ||
1363 | 482 | retry_count=retry_count, | ||
1364 | 483 | retry_sleep_time=retry_sleep_time) | ||
1365 | 484 | |||
1366 | 485 | return service_restart and config_update | ||
1367 | 486 | |||
1368 | 487 | def get_sentry_time(self, sentry_unit): | ||
1369 | 488 | """Return current epoch time on a sentry""" | ||
1370 | 489 | cmd = "date +'%s'" | ||
1371 | 490 | return float(sentry_unit.run(cmd)[0]) | ||
1372 | 491 | |||
1373 | 492 | def relation_error(self, name, data): | ||
1374 | 493 | return 'unexpected relation data in {} - {}'.format(name, data) | ||
1375 | 494 | |||
1376 | 495 | def endpoint_error(self, name, data): | ||
1377 | 496 | return 'unexpected endpoint data in {} - {}'.format(name, data) | ||
1378 | 497 | |||
1379 | 498 | def get_ubuntu_releases(self): | ||
1380 | 499 | """Return a list of all Ubuntu releases in order of release.""" | ||
1381 | 500 | _d = distro_info.UbuntuDistroInfo() | ||
1382 | 501 | _release_list = _d.all | ||
1383 | 502 | return _release_list | ||
1384 | 503 | |||
1385 | 504 | def file_to_url(self, file_rel_path): | ||
1386 | 505 | """Convert a relative file path to a file URL.""" | ||
1387 | 506 | _abs_path = os.path.abspath(file_rel_path) | ||
1388 | 507 | return urlparse.urlparse(_abs_path, scheme='file').geturl() | ||
1389 | 508 | |||
1390 | 509 | def check_commands_on_units(self, commands, sentry_units): | ||
1391 | 510 | """Check that all commands in a list exit zero on all | ||
1392 | 511 | sentry units in a list. | ||
1393 | 512 | |||
1394 | 513 | :param commands: list of bash commands | ||
1395 | 514 | :param sentry_units: list of sentry unit pointers | ||
1396 | 515 | :returns: None if successful; Failure message otherwise | ||
1397 | 516 | """ | ||
1398 | 517 | self.log.debug('Checking exit codes for {} commands on {} ' | ||
1399 | 518 | 'sentry units...'.format(len(commands), | ||
1400 | 519 | len(sentry_units))) | ||
1401 | 520 | for sentry_unit in sentry_units: | ||
1402 | 521 | for cmd in commands: | ||
1403 | 522 | output, code = sentry_unit.run(cmd) | ||
1404 | 523 | if code == 0: | ||
1405 | 524 | self.log.debug('{} `{}` returned {} ' | ||
1406 | 525 | '(OK)'.format(sentry_unit.info['unit_name'], | ||
1407 | 526 | cmd, code)) | ||
1408 | 527 | else: | ||
1409 | 528 | return ('{} `{}` returned {} ' | ||
1410 | 529 | '{}'.format(sentry_unit.info['unit_name'], | ||
1411 | 530 | cmd, code, output)) | ||
1412 | 531 | return None | ||
1413 | 532 | |||
1414 | 533 | def get_process_id_list(self, sentry_unit, process_name, | ||
1415 | 534 | expect_success=True, pgrep_full=False): | ||
1416 | 535 | """Get a list of process ID(s) from a single sentry juju unit | ||
1417 | 536 | for a single process name. | ||
1418 | 537 | |||
1419 | 538 | :param sentry_unit: Amulet sentry instance (juju unit) | ||
1420 | 539 | :param process_name: Process name | ||
1421 | 540 | :param expect_success: If False, expect the PID to be missing, | ||
1422 | 541 | raise if it is present. | ||
1423 | 542 | :returns: List of process IDs | ||
1424 | 543 | """ | ||
1425 | 544 | if pgrep_full: | ||
1426 | 545 | cmd = 'pgrep -f "{}"'.format(process_name) | ||
1427 | 546 | else: | ||
1428 | 547 | cmd = 'pidof -x "{}"'.format(process_name) | ||
1429 | 548 | if not expect_success: | ||
1430 | 549 | cmd += " || exit 0 && exit 1" | ||
1431 | 550 | output, code = sentry_unit.run(cmd) | ||
1432 | 551 | if code != 0: | ||
1433 | 552 | msg = ('{} `{}` returned {} ' | ||
1434 | 553 | '{}'.format(sentry_unit.info['unit_name'], | ||
1435 | 554 | cmd, code, output)) | ||
1436 | 555 | amulet.raise_status(amulet.FAIL, msg=msg) | ||
1437 | 556 | return str(output).split() | ||
1438 | 557 | |||
1439 | 558 | def get_unit_process_ids( | ||
1440 | 559 | self, unit_processes, expect_success=True, pgrep_full=False): | ||
1441 | 560 | """Construct a dict containing unit sentries, process names, and | ||
1442 | 561 | process IDs. | ||
1443 | 562 | |||
1444 | 563 | :param unit_processes: A dictionary of Amulet sentry instance | ||
1445 | 564 | to list of process names. | ||
1446 | 565 | :param expect_success: if False expect the processes to not be | ||
1447 | 566 | running, raise if they are. | ||
1448 | 567 | :returns: Dictionary of Amulet sentry instance to dictionary | ||
1449 | 568 | of process names to PIDs. | ||
1450 | 569 | """ | ||
1451 | 570 | pid_dict = {} | ||
1452 | 571 | for sentry_unit, process_list in six.iteritems(unit_processes): | ||
1453 | 572 | pid_dict[sentry_unit] = {} | ||
1454 | 573 | for process in process_list: | ||
1455 | 574 | pids = self.get_process_id_list( | ||
1456 | 575 | sentry_unit, process, expect_success=expect_success, | ||
1457 | 576 | pgrep_full=pgrep_full) | ||
1458 | 577 | pid_dict[sentry_unit].update({process: pids}) | ||
1459 | 578 | return pid_dict | ||
1460 | 579 | |||
    def validate_unit_process_ids(self, expected, actual):
        """Validate process id quantities for services on units.

        :param expected: dict of sentry unit -> dict of process name ->
            expected PID count. The count may be an int (exact match),
            a list of ints (any one count may match), True (one or more
            PIDs must exist), or False (zero PIDs must exist).
        :param actual: dict of sentry unit -> dict of process name ->
            list of actual PIDs (as produced by get_unit_process_ids)
        :returns: None if all checks pass; failure message otherwise
        """
        self.log.debug('Checking units for running processes...')
        self.log.debug('Expected PIDs: {}'.format(expected))
        self.log.debug('Actual PIDs: {}'.format(actual))

        if len(actual) != len(expected):
            return ('Unit count mismatch. expected, actual: {}, '
                    '{} '.format(len(expected), len(actual)))

        for (e_sentry, e_proc_names) in six.iteritems(expected):
            e_sentry_name = e_sentry.info['unit_name']
            if e_sentry in actual.keys():
                a_proc_names = actual[e_sentry]
            else:
                return ('Expected sentry ({}) not found in actual dict data.'
                        '{}'.format(e_sentry_name, e_sentry))

            if len(e_proc_names.keys()) != len(a_proc_names.keys()):
                return ('Process name count mismatch. expected, actual: {}, '
                        '{}'.format(len(expected), len(actual)))

            # NOTE(review): zip of .items() assumes both dicts iterate in
            # the same key order — TODO confirm callers build them that way.
            for (e_proc_name, e_pids), (a_proc_name, a_pids) in \
                    zip(e_proc_names.items(), a_proc_names.items()):
                if e_proc_name != a_proc_name:
                    return ('Process name mismatch. expected, actual: {}, '
                            '{}'.format(e_proc_name, a_proc_name))

                a_pids_length = len(a_pids)
                fail_msg = ('PID count mismatch. {} ({}) expected, actual: '
                            '{}, {} ({})'.format(e_sentry_name, e_proc_name,
                                                 e_pids, a_pids_length,
                                                 a_pids))

                # The isinstance(bool) guards below matter because bool is a
                # subclass of int: True/False must not hit the int branch.
                # If expected is a list, ensure at least one PID quantity match
                if isinstance(e_pids, list) and \
                        a_pids_length not in e_pids:
                    return fail_msg
                # If expected is not bool and not list,
                # ensure PID quantities match
                elif not isinstance(e_pids, bool) and \
                        not isinstance(e_pids, list) and \
                        a_pids_length != e_pids:
                    return fail_msg
                # If expected is bool True, ensure 1 or more PIDs exist
                elif isinstance(e_pids, bool) and \
                        e_pids is True and a_pids_length < 1:
                    return fail_msg
                # If expected is bool False, ensure 0 PIDs exist
                elif isinstance(e_pids, bool) and \
                        e_pids is False and a_pids_length != 0:
                    return fail_msg
                else:
                    self.log.debug('PID check OK: {} {} {}: '
                                   '{}'.format(e_sentry_name, e_proc_name,
                                               e_pids, a_pids))
        return None
1518 | 637 | |||
1519 | 638 | def validate_list_of_identical_dicts(self, list_of_dicts): | ||
1520 | 639 | """Check that all dicts within a list are identical.""" | ||
1521 | 640 | hashes = [] | ||
1522 | 641 | for _dict in list_of_dicts: | ||
1523 | 642 | hashes.append(hash(frozenset(_dict.items()))) | ||
1524 | 643 | |||
1525 | 644 | self.log.debug('Hashes: {}'.format(hashes)) | ||
1526 | 645 | if len(set(hashes)) == 1: | ||
1527 | 646 | self.log.debug('Dicts within list are identical') | ||
1528 | 647 | else: | ||
1529 | 648 | return 'Dicts within list are not identical' | ||
1530 | 649 | |||
1531 | 650 | return None | ||
1532 | 651 | |||
1533 | 652 | def validate_sectionless_conf(self, file_contents, expected): | ||
1534 | 653 | """A crude conf parser. Useful to inspect configuration files which | ||
1535 | 654 | do not have section headers (as would be necessary in order to use | ||
1536 | 655 | the configparser). Such as openstack-dashboard or rabbitmq confs.""" | ||
1537 | 656 | for line in file_contents.split('\n'): | ||
1538 | 657 | if '=' in line: | ||
1539 | 658 | args = line.split('=') | ||
1540 | 659 | if len(args) <= 1: | ||
1541 | 660 | continue | ||
1542 | 661 | key = args[0].strip() | ||
1543 | 662 | value = args[1].strip() | ||
1544 | 663 | if key in expected.keys(): | ||
1545 | 664 | if expected[key] != value: | ||
1546 | 665 | msg = ('Config mismatch. Expected, actual: {}, ' | ||
1547 | 666 | '{}'.format(expected[key], value)) | ||
1548 | 667 | amulet.raise_status(amulet.FAIL, msg=msg) | ||
1549 | 668 | |||
1550 | 669 | def get_unit_hostnames(self, units): | ||
1551 | 670 | """Return a dict of juju unit names to hostnames.""" | ||
1552 | 671 | host_names = {} | ||
1553 | 672 | for unit in units: | ||
1554 | 673 | host_names[unit.info['unit_name']] = \ | ||
1555 | 674 | str(unit.file_contents('/etc/hostname').strip()) | ||
1556 | 675 | self.log.debug('Unit host names: {}'.format(host_names)) | ||
1557 | 676 | return host_names | ||
1558 | 677 | |||
1559 | 678 | def run_cmd_unit(self, sentry_unit, cmd): | ||
1560 | 679 | """Run a command on a unit, return the output and exit code.""" | ||
1561 | 680 | output, code = sentry_unit.run(cmd) | ||
1562 | 681 | if code == 0: | ||
1563 | 682 | self.log.debug('{} `{}` command returned {} ' | ||
1564 | 683 | '(OK)'.format(sentry_unit.info['unit_name'], | ||
1565 | 684 | cmd, code)) | ||
1566 | 685 | else: | ||
1567 | 686 | msg = ('{} `{}` command returned {} ' | ||
1568 | 687 | '{}'.format(sentry_unit.info['unit_name'], | ||
1569 | 688 | cmd, code, output)) | ||
1570 | 689 | amulet.raise_status(amulet.FAIL, msg=msg) | ||
1571 | 690 | return str(output), code | ||
1572 | 691 | |||
1573 | 692 | def file_exists_on_unit(self, sentry_unit, file_name): | ||
1574 | 693 | """Check if a file exists on a unit.""" | ||
1575 | 694 | try: | ||
1576 | 695 | sentry_unit.file_stat(file_name) | ||
1577 | 696 | return True | ||
1578 | 697 | except IOError: | ||
1579 | 698 | return False | ||
1580 | 699 | except Exception as e: | ||
1581 | 700 | msg = 'Error checking file {}: {}'.format(file_name, e) | ||
1582 | 701 | amulet.raise_status(amulet.FAIL, msg=msg) | ||
1583 | 702 | |||
1584 | 703 | def file_contents_safe(self, sentry_unit, file_name, | ||
1585 | 704 | max_wait=60, fatal=False): | ||
1586 | 705 | """Get file contents from a sentry unit. Wrap amulet file_contents | ||
1587 | 706 | with retry logic to address races where a file checks as existing, | ||
1588 | 707 | but no longer exists by the time file_contents is called. | ||
1589 | 708 | Return None if file not found. Optionally raise if fatal is True.""" | ||
1590 | 709 | unit_name = sentry_unit.info['unit_name'] | ||
1591 | 710 | file_contents = False | ||
1592 | 711 | tries = 0 | ||
1593 | 712 | while not file_contents and tries < (max_wait / 4): | ||
1594 | 713 | try: | ||
1595 | 714 | file_contents = sentry_unit.file_contents(file_name) | ||
1596 | 715 | except IOError: | ||
1597 | 716 | self.log.debug('Attempt {} to open file {} from {} ' | ||
1598 | 717 | 'failed'.format(tries, file_name, | ||
1599 | 718 | unit_name)) | ||
1600 | 719 | time.sleep(4) | ||
1601 | 720 | tries += 1 | ||
1602 | 721 | |||
1603 | 722 | if file_contents: | ||
1604 | 723 | return file_contents | ||
1605 | 724 | elif not fatal: | ||
1606 | 725 | return None | ||
1607 | 726 | elif fatal: | ||
1608 | 727 | msg = 'Failed to get file contents from unit.' | ||
1609 | 728 | amulet.raise_status(amulet.FAIL, msg) | ||
1610 | 729 | |||
1611 | 730 | def port_knock_tcp(self, host="localhost", port=22, timeout=15): | ||
1612 | 731 | """Open a TCP socket to check for a listening sevice on a host. | ||
1613 | 732 | |||
1614 | 733 | :param host: host name or IP address, default to localhost | ||
1615 | 734 | :param port: TCP port number, default to 22 | ||
1616 | 735 | :param timeout: Connect timeout, default to 15 seconds | ||
1617 | 736 | :returns: True if successful, False if connect failed | ||
1618 | 737 | """ | ||
1619 | 738 | |||
1620 | 739 | # Resolve host name if possible | ||
1621 | 740 | try: | ||
1622 | 741 | connect_host = socket.gethostbyname(host) | ||
1623 | 742 | host_human = "{} ({})".format(connect_host, host) | ||
1624 | 743 | except socket.error as e: | ||
1625 | 744 | self.log.warn('Unable to resolve address: ' | ||
1626 | 745 | '{} ({}) Trying anyway!'.format(host, e)) | ||
1627 | 746 | connect_host = host | ||
1628 | 747 | host_human = connect_host | ||
1629 | 748 | |||
1630 | 749 | # Attempt socket connection | ||
1631 | 750 | try: | ||
1632 | 751 | knock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) | ||
1633 | 752 | knock.settimeout(timeout) | ||
1634 | 753 | knock.connect((connect_host, port)) | ||
1635 | 754 | knock.close() | ||
1636 | 755 | self.log.debug('Socket connect OK for host ' | ||
1637 | 756 | '{} on port {}.'.format(host_human, port)) | ||
1638 | 757 | return True | ||
1639 | 758 | except socket.error as e: | ||
1640 | 759 | self.log.debug('Socket connect FAIL for' | ||
1641 | 760 | ' {} port {} ({})'.format(host_human, port, e)) | ||
1642 | 761 | return False | ||
1643 | 762 | |||
1644 | 763 | def port_knock_units(self, sentry_units, port=22, | ||
1645 | 764 | timeout=15, expect_success=True): | ||
1646 | 765 | """Open a TCP socket to check for a listening sevice on each | ||
1647 | 766 | listed juju unit. | ||
1648 | 767 | |||
1649 | 768 | :param sentry_units: list of sentry unit pointers | ||
1650 | 769 | :param port: TCP port number, default to 22 | ||
1651 | 770 | :param timeout: Connect timeout, default to 15 seconds | ||
1652 | 771 | :expect_success: True by default, set False to invert logic | ||
1653 | 772 | :returns: None if successful, Failure message otherwise | ||
1654 | 773 | """ | ||
1655 | 774 | for unit in sentry_units: | ||
1656 | 775 | host = unit.info['public-address'] | ||
1657 | 776 | connected = self.port_knock_tcp(host, port, timeout) | ||
1658 | 777 | if not connected and expect_success: | ||
1659 | 778 | return 'Socket connect failed.' | ||
1660 | 779 | elif connected and not expect_success: | ||
1661 | 780 | return 'Socket connected unexpectedly.' | ||
1662 | 781 | |||
1663 | 782 | def get_uuid_epoch_stamp(self): | ||
1664 | 783 | """Returns a stamp string based on uuid4 and epoch time. Useful in | ||
1665 | 784 | generating test messages which need to be unique-ish.""" | ||
1666 | 785 | return '[{}-{}]'.format(uuid.uuid4(), time.time()) | ||
1667 | 786 | |||
1668 | 787 | # amulet juju action helpers: | ||
1669 | 788 | def run_action(self, unit_sentry, action, | ||
1670 | 789 | _check_output=subprocess.check_output, | ||
1671 | 790 | params=None): | ||
1672 | 791 | """Translate to amulet's built in run_action(). Deprecated. | ||
1673 | 792 | |||
1674 | 793 | Run the named action on a given unit sentry. | ||
1675 | 794 | |||
1676 | 795 | params a dict of parameters to use | ||
1677 | 796 | _check_output parameter is no longer used | ||
1678 | 797 | |||
1679 | 798 | @return action_id. | ||
1680 | 799 | """ | ||
1681 | 800 | self.log.warn('charmhelpers.contrib.amulet.utils.run_action has been ' | ||
1682 | 801 | 'deprecated for amulet.run_action') | ||
1683 | 802 | return unit_sentry.run_action(action, action_args=params) | ||
1684 | 803 | |||
1685 | 804 | def wait_on_action(self, action_id, _check_output=subprocess.check_output): | ||
1686 | 805 | """Wait for a given action, returning if it completed or not. | ||
1687 | 806 | |||
1688 | 807 | action_id a string action uuid | ||
1689 | 808 | _check_output parameter is no longer used | ||
1690 | 809 | """ | ||
1691 | 810 | data = amulet.actions.get_action_output(action_id, full_output=True) | ||
1692 | 811 | return data.get(u"status") == "completed" | ||
1693 | 812 | |||
1694 | 813 | def status_get(self, unit): | ||
1695 | 814 | """Return the current service status of this unit.""" | ||
1696 | 815 | raw_status, return_code = unit.run( | ||
1697 | 816 | "status-get --format=json --include-data") | ||
1698 | 817 | if return_code != 0: | ||
1699 | 818 | return ("unknown", "") | ||
1700 | 819 | status = json.loads(raw_status) | ||
1701 | 820 | return (status["status"], status["message"]) | ||
1702 | 0 | 821 | ||
1703 | === modified file 'hooks/charmhelpers/contrib/ansible/__init__.py' | |||
1704 | --- hooks/charmhelpers/contrib/ansible/__init__.py 2016-12-20 14:35:00 +0000 | |||
1705 | +++ hooks/charmhelpers/contrib/ansible/__init__.py 2021-05-12 04:07:51 +0000 | |||
1706 | @@ -16,90 +16,107 @@ | |||
1707 | 16 | # | 16 | # |
1708 | 17 | # Authors: | 17 | # Authors: |
1709 | 18 | # Charm Helpers Developers <juju@lists.ubuntu.com> | 18 | # Charm Helpers Developers <juju@lists.ubuntu.com> |
1720 | 19 | """Charm Helpers ansible - declare the state of your machines. | 19 | """ |
1721 | 20 | 20 | The ansible package enables you to easily use the configuration management | |
1722 | 21 | This helper enables you to declare your machine state, rather than | 21 | tool `Ansible`_ to setup and configure your charm. All of your charm |
1723 | 22 | program it procedurally (and have to test each change to your procedures). | 22 | configuration options and relation-data are available as regular Ansible |
1724 | 23 | Your install hook can be as simple as:: | 23 | variables which can be used in your playbooks and templates. |
1725 | 24 | 24 | ||
1726 | 25 | {{{ | 25 | .. _Ansible: https://www.ansible.com/ |
1727 | 26 | import charmhelpers.contrib.ansible | 26 | |
1728 | 27 | 27 | Usage | |
1729 | 28 | 28 | ===== | |
1730 | 29 | |||
1731 | 30 | Here is an example directory structure for a charm to get you started:: | ||
1732 | 31 | |||
1733 | 32 | charm-ansible-example/ | ||
1734 | 33 | |-- ansible | ||
1735 | 34 | | |-- playbook.yaml | ||
1736 | 35 | | `-- templates | ||
1737 | 36 | | `-- example.j2 | ||
1738 | 37 | |-- config.yaml | ||
1739 | 38 | |-- copyright | ||
1740 | 39 | |-- icon.svg | ||
1741 | 40 | |-- layer.yaml | ||
1742 | 41 | |-- metadata.yaml | ||
1743 | 42 | |-- reactive | ||
1744 | 43 | | `-- example.py | ||
1745 | 44 | |-- README.md | ||
1746 | 45 | |||
1747 | 46 | Running a playbook called ``playbook.yaml`` when the ``install`` hook is run | ||
1748 | 47 | can be as simple as:: | ||
1749 | 48 | |||
1750 | 49 | from charmhelpers.contrib import ansible | ||
1751 | 50 | from charms.reactive import hook | ||
1752 | 51 | |||
1753 | 52 | @hook('install') | ||
1754 | 29 | def install(): | 53 | def install(): |
1767 | 30 | charmhelpers.contrib.ansible.install_ansible_support() | 54 | ansible.install_ansible_support() |
1768 | 31 | charmhelpers.contrib.ansible.apply_playbook('playbooks/install.yaml') | 55 | ansible.apply_playbook('ansible/playbook.yaml') |
1769 | 32 | }}} | 56 | |
1770 | 33 | 57 | Here is an example playbook that uses the ``template`` module to template the | |
1771 | 34 | and won't need to change (nor will its tests) when you change the machine | 58 | file ``example.j2`` to the charm host and then uses the ``debug`` module to |
1772 | 35 | state. | 59 | print out all the host and Juju variables that you can use in your playbooks. |
1773 | 36 | 60 | Note that you must target ``localhost`` as the playbook is run locally on the | |
1774 | 37 | All of your juju config and relation-data are available as template | 61 | charm host:: |
1775 | 38 | variables within your playbooks and templates. An install playbook looks | 62 | |
1764 | 39 | something like:: | ||
1765 | 40 | |||
1766 | 41 | {{{ | ||
1776 | 42 | --- | 63 | --- |
1777 | 43 | - hosts: localhost | 64 | - hosts: localhost |
1778 | 44 | user: root | ||
1779 | 45 | |||
1780 | 46 | tasks: | 65 | tasks: |
1782 | 47 | - name: Add private repositories. | 66 | - name: Template a file |
1783 | 48 | template: | 67 | template: |
1812 | 49 | src: ../templates/private-repositories.list.jinja2 | 68 | src: templates/example.j2 |
1813 | 50 | dest: /etc/apt/sources.list.d/private.list | 69 | dest: /tmp/example.j2 |
1814 | 51 | 70 | ||
1815 | 52 | - name: Update the cache. | 71 | - name: Print all variables available to Ansible |
1816 | 53 | apt: update_cache=yes | 72 | debug: |
1817 | 54 | 73 | var: vars | |
1818 | 55 | - name: Install dependencies. | 74 | |
1819 | 56 | apt: pkg={{ item }} | 75 | Read more online about `playbooks`_ and standard Ansible `modules`_. |
1820 | 57 | with_items: | 76 | |
1821 | 58 | - python-mimeparse | 77 | .. _playbooks: https://docs.ansible.com/ansible/latest/user_guide/playbooks.html |
1822 | 59 | - python-webob | 78 | .. _modules: https://docs.ansible.com/ansible/latest/user_guide/modules.html |
1823 | 60 | - sunburnt | 79 | |
1824 | 61 | 80 | A further feature of the Ansible hooks is to provide a light weight "action" | |
1797 | 62 | - name: Setup groups. | ||
1798 | 63 | group: name={{ item.name }} gid={{ item.gid }} | ||
1799 | 64 | with_items: | ||
1800 | 65 | - { name: 'deploy_user', gid: 1800 } | ||
1801 | 66 | - { name: 'service_user', gid: 1500 } | ||
1802 | 67 | |||
1803 | 68 | ... | ||
1804 | 69 | }}} | ||
1805 | 70 | |||
1806 | 71 | Read more online about `playbooks`_ and standard ansible `modules`_. | ||
1807 | 72 | |||
1808 | 73 | .. _playbooks: http://www.ansibleworks.com/docs/playbooks.html | ||
1809 | 74 | .. _modules: http://www.ansibleworks.com/docs/modules.html | ||
1810 | 75 | |||
1811 | 76 | A further feature of the ansible hooks is to provide a light weight "action" | ||
1825 | 77 | scripting tool. This is a decorator that you apply to a function, and that | 81 | scripting tool. This is a decorator that you apply to a function, and that |
1841 | 78 | function can now receive cli args, and can pass extra args to the playbook. | 82 | function can now receive cli args, and can pass extra args to the playbook:: |
1842 | 79 | 83 | ||
1843 | 80 | e.g. | 84 | @hooks.action() |
1844 | 81 | 85 | def some_action(amount, force="False"): | |
1845 | 82 | 86 | "Usage: some-action AMOUNT [force=True]" # <-- shown on error | |
1846 | 83 | @hooks.action() | 87 | # process the arguments |
1847 | 84 | def some_action(amount, force="False"): | 88 | # do some calls |
1848 | 85 | "Usage: some-action AMOUNT [force=True]" # <-- shown on error | 89 | # return extra-vars to be passed to ansible-playbook |
1849 | 86 | # process the arguments | 90 | return { |
1850 | 87 | # do some calls | 91 | 'amount': int(amount), |
1851 | 88 | # return extra-vars to be passed to ansible-playbook | 92 | 'type': force, |
1852 | 89 | return { | 93 | } |
1838 | 90 | 'amount': int(amount), | ||
1839 | 91 | 'type': force, | ||
1840 | 92 | } | ||
1853 | 93 | 94 | ||
1854 | 94 | You can now create a symlink to hooks.py that can be invoked like a hook, but | 95 | You can now create a symlink to hooks.py that can be invoked like a hook, but |
1860 | 95 | with cli params: | 96 | with cli params:: |
1861 | 96 | 97 | ||
1862 | 97 | # link actions/some-action to hooks/hooks.py | 98 | # link actions/some-action to hooks/hooks.py |
1863 | 98 | 99 | ||
1864 | 99 | actions/some-action amount=10 force=true | 100 | actions/some-action amount=10 force=true |
1865 | 101 | |||
1866 | 102 | Install Ansible via pip | ||
1867 | 103 | ======================= | ||
1868 | 104 | |||
1869 | 105 | If you want to install a specific version of Ansible via pip instead of | ||
1870 | 106 | ``install_ansible_support`` which uses APT, consider using the layer options | ||
1871 | 107 | of `layer-basic`_ to install Ansible in a virtualenv:: | ||
1872 | 108 | |||
1873 | 109 | options: | ||
1874 | 110 | basic: | ||
1875 | 111 | python_packages: ['ansible==2.9.0'] | ||
1876 | 112 | include_system_packages: true | ||
1877 | 113 | use_venv: true | ||
1878 | 114 | |||
1879 | 115 | .. _layer-basic: https://charmsreactive.readthedocs.io/en/latest/layer-basic.html#layer-configuration | ||
1880 | 100 | 116 | ||
1881 | 101 | """ | 117 | """ |
1882 | 102 | import os | 118 | import os |
1883 | 119 | import json | ||
1884 | 103 | import stat | 120 | import stat |
1885 | 104 | import subprocess | 121 | import subprocess |
1886 | 105 | import functools | 122 | import functools |
1887 | @@ -117,27 +134,63 @@ | |||
1888 | 117 | ansible_vars_path = '/etc/ansible/host_vars/localhost' | 134 | ansible_vars_path = '/etc/ansible/host_vars/localhost' |
1889 | 118 | 135 | ||
1890 | 119 | 136 | ||
1898 | 120 | def install_ansible_support(from_ppa=True, ppa_location='ppa:rquillo/ansible'): | 137 | def install_ansible_support(from_ppa=True, ppa_location='ppa:ansible/ansible'): |
1899 | 121 | """Installs the ansible package. | 138 | """Installs Ansible via APT. |
1900 | 122 | 139 | ||
1901 | 123 | By default it is installed from the `PPA`_ linked from | 140 | By default this installs Ansible from the `PPA`_ linked from |
1902 | 124 | the ansible `website`_ or from a ppa specified by a charm config.. | 141 | the Ansible `website`_ or from a PPA set in ``ppa_location``. |
1903 | 125 | 142 | ||
1904 | 126 | .. _PPA: https://launchpad.net/~rquillo/+archive/ansible | 143 | .. _PPA: https://launchpad.net/~ansible/+archive/ubuntu/ansible |
1905 | 127 | .. _website: http://docs.ansible.com/intro_installation.html#latest-releases-via-apt-ubuntu | 144 | .. _website: http://docs.ansible.com/intro_installation.html#latest-releases-via-apt-ubuntu |
1906 | 128 | 145 | ||
1909 | 129 | If from_ppa is empty, you must ensure that the package is available | 146 | If ``from_ppa`` is ``False``, then Ansible will be installed from |
1910 | 130 | from a configured repository. | 147 | Ubuntu's Universe repositories. |
1911 | 131 | """ | 148 | """ |
1912 | 132 | if from_ppa: | 149 | if from_ppa: |
1913 | 133 | charmhelpers.fetch.add_source(ppa_location) | 150 | charmhelpers.fetch.add_source(ppa_location) |
1914 | 134 | charmhelpers.fetch.apt_update(fatal=True) | 151 | charmhelpers.fetch.apt_update(fatal=True) |
1915 | 135 | charmhelpers.fetch.apt_install('ansible') | 152 | charmhelpers.fetch.apt_install('ansible') |
1916 | 136 | with open(ansible_hosts_path, 'w+') as hosts_file: | 153 | with open(ansible_hosts_path, 'w+') as hosts_file: |
1918 | 137 | hosts_file.write('localhost ansible_connection=local') | 154 | hosts_file.write('localhost ansible_connection=local ansible_remote_tmp=/root/.ansible/tmp') |
1919 | 138 | 155 | ||
1920 | 139 | 156 | ||
1921 | 140 | def apply_playbook(playbook, tags=None, extra_vars=None): | 157 | def apply_playbook(playbook, tags=None, extra_vars=None): |
1922 | 158 | """Run a playbook. | ||
1923 | 159 | |||
1924 | 160 | This helper runs a playbook with juju state variables as context, | ||
1925 | 161 | therefore variables set in application config can be used directly. | ||
1926 | 162 | List of tags (--tags) and dictionary with extra_vars (--extra-vars) | ||
1927 | 163 | can be passed as additional parameters. | ||
1928 | 164 | |||
1929 | 165 | Read more about playbook `variables`_ online. | ||
1930 | 166 | |||
1931 | 167 | .. _variables: https://docs.ansible.com/ansible/latest/user_guide/playbooks_variables.html | ||
1932 | 168 | |||
1933 | 169 | Example:: | ||
1934 | 170 | |||
1935 | 171 | # Run ansible/playbook.yaml with tag install and pass extra | ||
1936 | 172 | # variables var_a and var_b | ||
1937 | 173 | apply_playbook( | ||
1938 | 174 | playbook='ansible/playbook.yaml', | ||
1939 | 175 | tags=['install'], | ||
1940 | 176 | extra_vars={'var_a': 'val_a', 'var_b': 'val_b'} | ||
1941 | 177 | ) | ||
1942 | 178 | |||
1943 | 179 | # Run ansible/playbook.yaml with tag config and extra variable nested, | ||
1944 | 180 | # which is passed as json and can be used as dictionary in playbook | ||
1945 | 181 | apply_playbook( | ||
1946 | 182 | playbook='ansible/playbook.yaml', | ||
1947 | 183 | tags=['config'], | ||
1948 | 184 | extra_vars={'nested': {'a': 'value1', 'b': 'value2'}} | ||
1949 | 185 | ) | ||
1950 | 186 | |||
1951 | 187 | # Custom config file can be passed within extra_vars | ||
1952 | 188 | apply_playbook( | ||
1953 | 189 | playbook='ansible/playbook.yaml', | ||
1954 | 190 | extra_vars="@some_file.json" | ||
1955 | 191 | ) | ||
1956 | 192 | |||
1957 | 193 | """ | ||
1958 | 141 | tags = tags or [] | 194 | tags = tags or [] |
1959 | 142 | tags = ",".join(tags) | 195 | tags = ",".join(tags) |
1960 | 143 | charmhelpers.contrib.templating.contexts.juju_state_to_yaml( | 196 | charmhelpers.contrib.templating.contexts.juju_state_to_yaml( |
1961 | @@ -146,6 +199,9 @@ | |||
1962 | 146 | 199 | ||
1963 | 147 | # we want ansible's log output to be unbuffered | 200 | # we want ansible's log output to be unbuffered |
1964 | 148 | env = os.environ.copy() | 201 | env = os.environ.copy() |
1965 | 202 | proxy_settings = charmhelpers.core.hookenv.env_proxy_settings() | ||
1966 | 203 | if proxy_settings: | ||
1967 | 204 | env.update(proxy_settings) | ||
1968 | 149 | env['PYTHONUNBUFFERED'] = "1" | 205 | env['PYTHONUNBUFFERED'] = "1" |
1969 | 150 | call = [ | 206 | call = [ |
1970 | 151 | 'ansible-playbook', | 207 | 'ansible-playbook', |
1971 | @@ -156,8 +212,7 @@ | |||
1972 | 156 | if tags: | 212 | if tags: |
1973 | 157 | call.extend(['--tags', '{}'.format(tags)]) | 213 | call.extend(['--tags', '{}'.format(tags)]) |
1974 | 158 | if extra_vars: | 214 | if extra_vars: |
1977 | 159 | extra = ["%s=%s" % (k, v) for k, v in extra_vars.items()] | 215 | call.extend(['--extra-vars', json.dumps(extra_vars)]) |
1976 | 160 | call.extend(['--extra-vars', " ".join(extra)]) | ||
1978 | 161 | subprocess.check_call(call, env=env) | 216 | subprocess.check_call(call, env=env) |
1979 | 162 | 217 | ||
1980 | 163 | 218 | ||
1981 | @@ -170,7 +225,7 @@ | |||
1982 | 170 | 225 | ||
1983 | 171 | Example:: | 226 | Example:: |
1984 | 172 | 227 | ||
1986 | 173 | hooks = AnsibleHooks(playbook_path='playbooks/my_machine_state.yaml') | 228 | hooks = AnsibleHooks(playbook_path='ansible/my_machine_state.yaml') |
1987 | 174 | 229 | ||
1988 | 175 | # All the tasks within my_machine_state.yaml tagged with 'install' | 230 | # All the tasks within my_machine_state.yaml tagged with 'install' |
1989 | 176 | # will be run automatically after do_custom_work() | 231 | # will be run automatically after do_custom_work() |
1990 | @@ -188,13 +243,12 @@ | |||
1991 | 188 | # the hooks which are handled by ansible-only and they'll be registered | 243 | # the hooks which are handled by ansible-only and they'll be registered |
1992 | 189 | # for you: | 244 | # for you: |
1993 | 190 | # hooks = AnsibleHooks( | 245 | # hooks = AnsibleHooks( |
1995 | 191 | # 'playbooks/my_machine_state.yaml', | 246 | # 'ansible/my_machine_state.yaml', |
1996 | 192 | # default_hooks=['config-changed', 'start', 'stop']) | 247 | # default_hooks=['config-changed', 'start', 'stop']) |
1997 | 193 | 248 | ||
1998 | 194 | if __name__ == "__main__": | 249 | if __name__ == "__main__": |
1999 | 195 | # execute a hook based on the name the program is called by | 250 | # execute a hook based on the name the program is called by |
2000 | 196 | hooks.execute(sys.argv) | 251 | hooks.execute(sys.argv) |
2001 | 197 | |||
2002 | 198 | """ | 252 | """ |
2003 | 199 | 253 | ||
2004 | 200 | def __init__(self, playbook_path, default_hooks=None): | 254 | def __init__(self, playbook_path, default_hooks=None): |
2005 | 201 | 255 | ||
2006 | === added directory 'hooks/charmhelpers/contrib/benchmark' | |||
2007 | === added file 'hooks/charmhelpers/contrib/benchmark/__init__.py' | |||
2008 | --- hooks/charmhelpers/contrib/benchmark/__init__.py 1970-01-01 00:00:00 +0000 | |||
2009 | +++ hooks/charmhelpers/contrib/benchmark/__init__.py 2021-05-12 04:07:51 +0000 | |||
2010 | @@ -0,0 +1,124 @@ | |||
2011 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
2012 | 2 | # | ||
2013 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
2014 | 4 | # you may not use this file except in compliance with the License. | ||
2015 | 5 | # You may obtain a copy of the License at | ||
2016 | 6 | # | ||
2017 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
2018 | 8 | # | ||
2019 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
2020 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
2021 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
2022 | 12 | # See the License for the specific language governing permissions and | ||
2023 | 13 | # limitations under the License. | ||
2024 | 14 | |||
2025 | 15 | import subprocess | ||
2026 | 16 | import time | ||
2027 | 17 | import os | ||
2028 | 18 | from distutils.spawn import find_executable | ||
2029 | 19 | |||
2030 | 20 | from charmhelpers.core.hookenv import ( | ||
2031 | 21 | in_relation_hook, | ||
2032 | 22 | relation_ids, | ||
2033 | 23 | relation_set, | ||
2034 | 24 | relation_get, | ||
2035 | 25 | ) | ||
2036 | 26 | |||
2037 | 27 | |||
2038 | 28 | def action_set(key, val): | ||
2039 | 29 | if find_executable('action-set'): | ||
2040 | 30 | action_cmd = ['action-set'] | ||
2041 | 31 | |||
2042 | 32 | if isinstance(val, dict): | ||
2043 | 33 | for k, v in iter(val.items()): | ||
2044 | 34 | action_set('%s.%s' % (key, k), v) | ||
2045 | 35 | return True | ||
2046 | 36 | |||
2047 | 37 | action_cmd.append('%s=%s' % (key, val)) | ||
2048 | 38 | subprocess.check_call(action_cmd) | ||
2049 | 39 | return True | ||
2050 | 40 | return False | ||
2051 | 41 | |||
2052 | 42 | |||
2053 | 43 | class Benchmark(): | ||
2054 | 44 | """ | ||
2055 | 45 | Helper class for the `benchmark` interface. | ||
2056 | 46 | |||
2057 | 47 | :param list actions: Define the actions that are also benchmarks | ||
2058 | 48 | |||
2059 | 49 | From inside the benchmark-relation-changed hook, you would | ||
2060 | 50 | Benchmark(['memory', 'cpu', 'disk', 'smoke', 'custom']) | ||
2061 | 51 | |||
2062 | 52 | Examples: | ||
2063 | 53 | |||
2064 | 54 | siege = Benchmark(['siege']) | ||
2065 | 55 | siege.start() | ||
2066 | 56 | [... run siege ...] | ||
2067 | 57 | # The higher the score, the better the benchmark | ||
2068 | 58 | siege.set_composite_score(16.70, 'trans/sec', 'desc') | ||
2069 | 59 | siege.finish() | ||
2070 | 60 | |||
2071 | 61 | |||
2072 | 62 | """ | ||
2073 | 63 | |||
2074 | 64 | BENCHMARK_CONF = '/etc/benchmark.conf' # Replaced in testing | ||
2075 | 65 | |||
2076 | 66 | required_keys = [ | ||
2077 | 67 | 'hostname', | ||
2078 | 68 | 'port', | ||
2079 | 69 | 'graphite_port', | ||
2080 | 70 | 'graphite_endpoint', | ||
2081 | 71 | 'api_port' | ||
2082 | 72 | ] | ||
2083 | 73 | |||
2084 | 74 | def __init__(self, benchmarks=None): | ||
2085 | 75 | if in_relation_hook(): | ||
2086 | 76 | if benchmarks is not None: | ||
2087 | 77 | for rid in sorted(relation_ids('benchmark')): | ||
2088 | 78 | relation_set(relation_id=rid, relation_settings={ | ||
2089 | 79 | 'benchmarks': ",".join(benchmarks) | ||
2090 | 80 | }) | ||
2091 | 81 | |||
2092 | 82 | # Check the relation data | ||
2093 | 83 | config = {} | ||
2094 | 84 | for key in self.required_keys: | ||
2095 | 85 | val = relation_get(key) | ||
2096 | 86 | if val is not None: | ||
2097 | 87 | config[key] = val | ||
2098 | 88 | else: | ||
2099 | 89 | # We don't have all of the required keys | ||
2100 | 90 | config = {} | ||
2101 | 91 | break | ||
2102 | 92 | |||
2103 | 93 | if len(config): | ||
2104 | 94 | with open(self.BENCHMARK_CONF, 'w') as f: | ||
2105 | 95 | for key, val in iter(config.items()): | ||
2106 | 96 | f.write("%s=%s\n" % (key, val)) | ||
2107 | 97 | |||
2108 | 98 | @staticmethod | ||
2109 | 99 | def start(): | ||
2110 | 100 | action_set('meta.start', time.strftime('%Y-%m-%dT%H:%M:%SZ')) | ||
2111 | 101 | |||
2112 | 102 | """ | ||
2113 | 103 | If the collectd charm is also installed, tell it to send a snapshot | ||
2114 | 104 | of the current profile data. | ||
2115 | 105 | """ | ||
2116 | 106 | COLLECT_PROFILE_DATA = '/usr/local/bin/collect-profile-data' | ||
2117 | 107 | if os.path.exists(COLLECT_PROFILE_DATA): | ||
2118 | 108 | subprocess.check_output([COLLECT_PROFILE_DATA]) | ||
2119 | 109 | |||
2120 | 110 | @staticmethod | ||
2121 | 111 | def finish(): | ||
2122 | 112 | action_set('meta.stop', time.strftime('%Y-%m-%dT%H:%M:%SZ')) | ||
2123 | 113 | |||
2124 | 114 | @staticmethod | ||
2125 | 115 | def set_composite_score(value, units, direction='asc'): | ||
2126 | 116 | """ | ||
2127 | 117 | Set the composite score for a benchmark run. This is a single number | ||
2128 | 118 | representative of the benchmark results. This could be the most | ||
2129 | 119 | important metric, or an amalgamation of metric scores. | ||
2130 | 120 | """ | ||
2131 | 121 | return action_set( | ||
2132 | 122 | "meta.composite", | ||
2133 | 123 | {'value': value, 'units': units, 'direction': direction} | ||
2134 | 124 | ) | ||
2135 | 0 | 125 | ||
2136 | === added directory 'hooks/charmhelpers/contrib/charmhelpers' | |||
2137 | === added file 'hooks/charmhelpers/contrib/charmhelpers/IMPORT' | |||
2138 | --- hooks/charmhelpers/contrib/charmhelpers/IMPORT 1970-01-01 00:00:00 +0000 | |||
2139 | +++ hooks/charmhelpers/contrib/charmhelpers/IMPORT 2021-05-12 04:07:51 +0000 | |||
2140 | @@ -0,0 +1,4 @@ | |||
2141 | 1 | Source lp:charm-tools/trunk | ||
2142 | 2 | |||
2143 | 3 | charm-tools/helpers/python/charmhelpers/__init__.py -> charmhelpers/charmhelpers/contrib/charmhelpers/__init__.py | ||
2144 | 4 | charm-tools/helpers/python/charmhelpers/tests/test_charmhelpers.py -> charmhelpers/tests/contrib/charmhelpers/test_charmhelpers.py | ||
2145 | 0 | 5 | ||
2146 | === added file 'hooks/charmhelpers/contrib/charmhelpers/__init__.py' | |||
2147 | --- hooks/charmhelpers/contrib/charmhelpers/__init__.py 1970-01-01 00:00:00 +0000 | |||
2148 | +++ hooks/charmhelpers/contrib/charmhelpers/__init__.py 2021-05-12 04:07:51 +0000 | |||
2149 | @@ -0,0 +1,203 @@ | |||
2150 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
2151 | 2 | # | ||
2152 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
2153 | 4 | # you may not use this file except in compliance with the License. | ||
2154 | 5 | # You may obtain a copy of the License at | ||
2155 | 6 | # | ||
2156 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
2157 | 8 | # | ||
2158 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
2159 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
2160 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
2161 | 12 | # See the License for the specific language governing permissions and | ||
2162 | 13 | # limitations under the License. | ||
2163 | 14 | |||
2164 | 15 | import warnings | ||
2165 | 16 | warnings.warn("contrib.charmhelpers is deprecated", DeprecationWarning) # noqa | ||
2166 | 17 | |||
2167 | 18 | import operator | ||
2168 | 19 | import tempfile | ||
2169 | 20 | import time | ||
2170 | 21 | import yaml | ||
2171 | 22 | import subprocess | ||
2172 | 23 | |||
2173 | 24 | import six | ||
2174 | 25 | if six.PY3: | ||
2175 | 26 | from urllib.request import urlopen | ||
2176 | 27 | from urllib.error import (HTTPError, URLError) | ||
2177 | 28 | else: | ||
2178 | 29 | from urllib2 import (urlopen, HTTPError, URLError) | ||
2179 | 30 | |||
2180 | 31 | """Helper functions for writing Juju charms in Python.""" | ||
2181 | 32 | |||
2182 | 33 | __metaclass__ = type | ||
2183 | 34 | __all__ = [ | ||
2184 | 35 | # 'get_config', # core.hookenv.config() | ||
2185 | 36 | # 'log', # core.hookenv.log() | ||
2186 | 37 | # 'log_entry', # core.hookenv.log() | ||
2187 | 38 | # 'log_exit', # core.hookenv.log() | ||
2188 | 39 | # 'relation_get', # core.hookenv.relation_get() | ||
2189 | 40 | # 'relation_set', # core.hookenv.relation_set() | ||
2190 | 41 | # 'relation_ids', # core.hookenv.relation_ids() | ||
2191 | 42 | # 'relation_list', # core.hookenv.relation_units() | ||
2192 | 43 | # 'config_get', # core.hookenv.config() | ||
2193 | 44 | # 'unit_get', # core.hookenv.unit_get() | ||
2194 | 45 | # 'open_port', # core.hookenv.open_port() | ||
2195 | 46 | # 'close_port', # core.hookenv.close_port() | ||
2196 | 47 | # 'service_control', # core.host.service() | ||
2197 | 48 | 'unit_info', # client-side, NOT IMPLEMENTED | ||
2198 | 49 | 'wait_for_machine', # client-side, NOT IMPLEMENTED | ||
2199 | 50 | 'wait_for_page_contents', # client-side, NOT IMPLEMENTED | ||
2200 | 51 | 'wait_for_relation', # client-side, NOT IMPLEMENTED | ||
2201 | 52 | 'wait_for_unit', # client-side, NOT IMPLEMENTED | ||
2202 | 53 | ] | ||
2203 | 54 | |||
2204 | 55 | |||
2205 | 56 | SLEEP_AMOUNT = 0.1 | ||
2206 | 57 | |||
2207 | 58 | |||
2208 | 59 | # We create a juju_status Command here because it makes testing much, | ||
2209 | 60 | # much easier. | ||
2210 | 61 | def juju_status(): | ||
2211 | 62 | subprocess.check_call(['juju', 'status']) | ||
2212 | 63 | |||
2213 | 64 | # re-implemented as charmhelpers.fetch.configure_sources() | ||
2214 | 65 | # def configure_source(update=False): | ||
2215 | 66 | # source = config_get('source') | ||
2216 | 67 | # if ((source.startswith('ppa:') or | ||
2217 | 68 | # source.startswith('cloud:') or | ||
2218 | 69 | # source.startswith('http:'))): | ||
2219 | 70 | # run('add-apt-repository', source) | ||
2220 | 71 | # if source.startswith("http:"): | ||
2221 | 72 | # run('apt-key', 'import', config_get('key')) | ||
2222 | 73 | # if update: | ||
2223 | 74 | # run('apt-get', 'update') | ||
2224 | 75 | |||
2225 | 76 | |||
2226 | 77 | # DEPRECATED: client-side only | ||
2227 | 78 | def make_charm_config_file(charm_config): | ||
2228 | 79 | charm_config_file = tempfile.NamedTemporaryFile(mode='w+') | ||
2229 | 80 | charm_config_file.write(yaml.dump(charm_config)) | ||
2230 | 81 | charm_config_file.flush() | ||
2231 | 82 | # The NamedTemporaryFile instance is returned instead of just the name | ||
2232 | 83 | # because we want to take advantage of garbage collection-triggered | ||
2233 | 84 | # deletion of the temp file when it goes out of scope in the caller. | ||
2234 | 85 | return charm_config_file | ||
2235 | 86 | |||
2236 | 87 | |||
2237 | 88 | # DEPRECATED: client-side only | ||
2238 | 89 | def unit_info(service_name, item_name, data=None, unit=None): | ||
2239 | 90 | if data is None: | ||
2240 | 91 | data = yaml.safe_load(juju_status()) | ||
2241 | 92 | service = data['services'].get(service_name) | ||
2242 | 93 | if service is None: | ||
2243 | 94 | # XXX 2012-02-08 gmb: | ||
2244 | 95 | # This allows us to cope with the race condition that we | ||
2245 | 96 | # have between deploying a service and having it come up in | ||
2246 | 97 | # `juju status`. We could probably do with cleaning it up so | ||
2247 | 98 | # that it fails a bit more noisily after a while. | ||
2248 | 99 | return '' | ||
2249 | 100 | units = service['units'] | ||
2250 | 101 | if unit is not None: | ||
2251 | 102 | item = units[unit][item_name] | ||
2252 | 103 | else: | ||
2253 | 104 | # It might seem odd to sort the units here, but we do it to | ||
2254 | 105 | # ensure that when no unit is specified, the first unit for the | ||
2255 | 106 | # service (or at least the one with the lowest number) is the | ||
2256 | 107 | # one whose data gets returned. | ||
2257 | 108 | sorted_unit_names = sorted(units.keys()) | ||
2258 | 109 | item = units[sorted_unit_names[0]][item_name] | ||
2259 | 110 | return item | ||
2260 | 111 | |||
2261 | 112 | |||
2262 | 113 | # DEPRECATED: client-side only | ||
2263 | 114 | def get_machine_data(): | ||
2264 | 115 | return yaml.safe_load(juju_status())['machines'] | ||
2265 | 116 | |||
2266 | 117 | |||
2267 | 118 | # DEPRECATED: client-side only | ||
2268 | 119 | def wait_for_machine(num_machines=1, timeout=300): | ||
2269 | 120 | """Wait `timeout` seconds for `num_machines` machines to come up. | ||
2270 | 121 | |||
2271 | 122 | This wait_for... function can be called by other wait_for functions | ||
2272 | 123 | whose timeouts might be too short in situations where only a bare | ||
2273 | 124 | Juju setup has been bootstrapped. | ||
2274 | 125 | |||
2275 | 126 | :return: A tuple of (num_machines, time_taken). This is used for | ||
2276 | 127 | testing. | ||
2277 | 128 | """ | ||
2278 | 129 | # You may think this is a hack, and you'd be right. The easiest way | ||
2279 | 130 | # to tell what environment we're working in (LXC vs EC2) is to check | ||
2280 | 131 | # the dns-name of the first machine. If it's localhost we're in LXC | ||
2281 | 132 | # and we can just return here. | ||
2282 | 133 | if get_machine_data()[0]['dns-name'] == 'localhost': | ||
2283 | 134 | return 1, 0 | ||
2284 | 135 | start_time = time.time() | ||
2285 | 136 | while True: | ||
2286 | 137 | # Drop the first machine, since it's the Zookeeper and that's | ||
2287 | 138 | # not a machine that we need to wait for. This will only work | ||
2288 | 139 | # for EC2 environments, which is why we return early above if | ||
2289 | 140 | # we're in LXC. | ||
2290 | 141 | machine_data = get_machine_data() | ||
2291 | 142 | non_zookeeper_machines = [ | ||
2292 | 143 | machine_data[key] for key in list(machine_data.keys())[1:]] | ||
2293 | 144 | if len(non_zookeeper_machines) >= num_machines: | ||
2294 | 145 | all_machines_running = True | ||
2295 | 146 | for machine in non_zookeeper_machines: | ||
2296 | 147 | if machine.get('instance-state') != 'running': | ||
2297 | 148 | all_machines_running = False | ||
2298 | 149 | break | ||
2299 | 150 | if all_machines_running: | ||
2300 | 151 | break | ||
2301 | 152 | if time.time() - start_time >= timeout: | ||
2302 | 153 | raise RuntimeError('timeout waiting for service to start') | ||
2303 | 154 | time.sleep(SLEEP_AMOUNT) | ||
2304 | 155 | return num_machines, time.time() - start_time | ||
2305 | 156 | |||
2306 | 157 | |||
2307 | 158 | # DEPRECATED: client-side only | ||
2308 | 159 | def wait_for_unit(service_name, timeout=480): | ||
2309 | 160 | """Wait `timeout` seconds for a given service name to come up.""" | ||
2310 | 161 | wait_for_machine(num_machines=1) | ||
2311 | 162 | start_time = time.time() | ||
2312 | 163 | while True: | ||
2313 | 164 | state = unit_info(service_name, 'agent-state') | ||
2314 | 165 | if 'error' in state or state == 'started': | ||
2315 | 166 | break | ||
2316 | 167 | if time.time() - start_time >= timeout: | ||
2317 | 168 | raise RuntimeError('timeout waiting for service to start') | ||
2318 | 169 | time.sleep(SLEEP_AMOUNT) | ||
2319 | 170 | if state != 'started': | ||
2320 | 171 | raise RuntimeError('unit did not start, agent-state: ' + state) | ||
2321 | 172 | |||
2322 | 173 | |||
2323 | 174 | # DEPRECATED: client-side only | ||
2324 | 175 | def wait_for_relation(service_name, relation_name, timeout=120): | ||
2325 | 176 | """Wait `timeout` seconds for a given relation to come up.""" | ||
2326 | 177 | start_time = time.time() | ||
2327 | 178 | while True: | ||
2328 | 179 | relation = unit_info(service_name, 'relations').get(relation_name) | ||
2329 | 180 | if relation is not None and relation['state'] == 'up': | ||
2330 | 181 | break | ||
2331 | 182 | if time.time() - start_time >= timeout: | ||
2332 | 183 | raise RuntimeError('timeout waiting for relation to be up') | ||
2333 | 184 | time.sleep(SLEEP_AMOUNT) | ||
2334 | 185 | |||
2335 | 186 | |||
2336 | 187 | # DEPRECATED: client-side only | ||
2337 | 188 | def wait_for_page_contents(url, contents, timeout=120, validate=None): | ||
2338 | 189 | if validate is None: | ||
2339 | 190 | validate = operator.contains | ||
2340 | 191 | start_time = time.time() | ||
2341 | 192 | while True: | ||
2342 | 193 | try: | ||
2343 | 194 | stream = urlopen(url) | ||
2344 | 195 | except (HTTPError, URLError): | ||
2345 | 196 | pass | ||
2346 | 197 | else: | ||
2347 | 198 | page = stream.read() | ||
2348 | 199 | if validate(page, contents): | ||
2349 | 200 | return page | ||
2350 | 201 | if time.time() - start_time >= timeout: | ||
2351 | 202 | raise RuntimeError('timeout waiting for contents of ' + url) | ||
2352 | 203 | time.sleep(SLEEP_AMOUNT) | ||
2353 | 0 | 204 | ||
2354 | === added file 'hooks/charmhelpers/contrib/charmsupport/IMPORT' | |||
2355 | --- hooks/charmhelpers/contrib/charmsupport/IMPORT 1970-01-01 00:00:00 +0000 | |||
2356 | +++ hooks/charmhelpers/contrib/charmsupport/IMPORT 2021-05-12 04:07:51 +0000 | |||
2357 | @@ -0,0 +1,14 @@ | |||
2358 | 1 | Source: lp:charmsupport/trunk | ||
2359 | 2 | |||
2360 | 3 | charmsupport/charmsupport/execd.py -> charm-helpers/charmhelpers/contrib/charmsupport/execd.py | ||
2361 | 4 | charmsupport/charmsupport/hookenv.py -> charm-helpers/charmhelpers/contrib/charmsupport/hookenv.py | ||
2362 | 5 | charmsupport/charmsupport/host.py -> charm-helpers/charmhelpers/contrib/charmsupport/host.py | ||
2363 | 6 | charmsupport/charmsupport/nrpe.py -> charm-helpers/charmhelpers/contrib/charmsupport/nrpe.py | ||
2364 | 7 | charmsupport/charmsupport/volumes.py -> charm-helpers/charmhelpers/contrib/charmsupport/volumes.py | ||
2365 | 8 | |||
2366 | 9 | charmsupport/tests/test_execd.py -> charm-helpers/tests/contrib/charmsupport/test_execd.py | ||
2367 | 10 | charmsupport/tests/test_hookenv.py -> charm-helpers/tests/contrib/charmsupport/test_hookenv.py | ||
2368 | 11 | charmsupport/tests/test_host.py -> charm-helpers/tests/contrib/charmsupport/test_host.py | ||
2369 | 12 | charmsupport/tests/test_nrpe.py -> charm-helpers/tests/contrib/charmsupport/test_nrpe.py | ||
2370 | 13 | |||
2371 | 14 | charmsupport/bin/charmsupport -> charm-helpers/bin/contrib/charmsupport/charmsupport | ||
2372 | 0 | 15 | ||
2373 | === modified file 'hooks/charmhelpers/contrib/charmsupport/nrpe.py' | |||
2374 | --- hooks/charmhelpers/contrib/charmsupport/nrpe.py 2016-12-20 14:35:00 +0000 | |||
2375 | +++ hooks/charmhelpers/contrib/charmsupport/nrpe.py 2021-05-12 04:07:51 +0000 | |||
2376 | @@ -18,20 +18,22 @@ | |||
2377 | 18 | # Authors: | 18 | # Authors: |
2378 | 19 | # Matthew Wedgwood <matthew.wedgwood@canonical.com> | 19 | # Matthew Wedgwood <matthew.wedgwood@canonical.com> |
2379 | 20 | 20 | ||
2382 | 21 | import subprocess | 21 | import glob |
2381 | 22 | import pwd | ||
2383 | 23 | import grp | 22 | import grp |
2384 | 24 | import os | 23 | import os |
2387 | 25 | import glob | 24 | import pwd |
2386 | 26 | import shutil | ||
2388 | 27 | import re | 25 | import re |
2389 | 28 | import shlex | 26 | import shlex |
2390 | 27 | import shutil | ||
2391 | 28 | import subprocess | ||
2392 | 29 | import yaml | 29 | import yaml |
2393 | 30 | 30 | ||
2394 | 31 | from charmhelpers.core.hookenv import ( | 31 | from charmhelpers.core.hookenv import ( |
2395 | 32 | config, | 32 | config, |
2396 | 33 | hook_name, | ||
2397 | 33 | local_unit, | 34 | local_unit, |
2398 | 34 | log, | 35 | log, |
2399 | 36 | relation_get, | ||
2400 | 35 | relation_ids, | 37 | relation_ids, |
2401 | 36 | relation_set, | 38 | relation_set, |
2402 | 37 | relations_of_type, | 39 | relations_of_type, |
2403 | @@ -125,7 +127,7 @@ | |||
2404 | 125 | 127 | ||
2405 | 126 | 128 | ||
2406 | 127 | class Check(object): | 129 | class Check(object): |
2408 | 128 | shortname_re = '[A-Za-z0-9-_]+$' | 130 | shortname_re = '[A-Za-z0-9-_.@]+$' |
2409 | 129 | service_template = (""" | 131 | service_template = (""" |
2410 | 130 | #--------------------------------------------------- | 132 | #--------------------------------------------------- |
2411 | 131 | # This file is Juju managed | 133 | # This file is Juju managed |
2412 | @@ -137,10 +139,11 @@ | |||
2413 | 137 | """{description} | 139 | """{description} |
2414 | 138 | check_command check_nrpe!{command} | 140 | check_command check_nrpe!{command} |
2415 | 139 | servicegroups {nagios_servicegroup} | 141 | servicegroups {nagios_servicegroup} |
2416 | 142 | {service_config_overrides} | ||
2417 | 140 | }} | 143 | }} |
2418 | 141 | """) | 144 | """) |
2419 | 142 | 145 | ||
2421 | 143 | def __init__(self, shortname, description, check_cmd): | 146 | def __init__(self, shortname, description, check_cmd, max_check_attempts=None): |
2422 | 144 | super(Check, self).__init__() | 147 | super(Check, self).__init__() |
2423 | 145 | # XXX: could be better to calculate this from the service name | 148 | # XXX: could be better to calculate this from the service name |
2424 | 146 | if not re.match(self.shortname_re, shortname): | 149 | if not re.match(self.shortname_re, shortname): |
2425 | @@ -153,6 +156,7 @@ | |||
2426 | 153 | # The default is: illegal_object_name_chars=`~!$%^&*"|'<>?,()= | 156 | # The default is: illegal_object_name_chars=`~!$%^&*"|'<>?,()= |
2427 | 154 | self.description = description | 157 | self.description = description |
2428 | 155 | self.check_cmd = self._locate_cmd(check_cmd) | 158 | self.check_cmd = self._locate_cmd(check_cmd) |
2429 | 159 | self.max_check_attempts = max_check_attempts | ||
2430 | 156 | 160 | ||
2431 | 157 | def _get_check_filename(self): | 161 | def _get_check_filename(self): |
2432 | 158 | return os.path.join(NRPE.nrpe_confdir, '{}.cfg'.format(self.command)) | 162 | return os.path.join(NRPE.nrpe_confdir, '{}.cfg'.format(self.command)) |
2433 | @@ -193,6 +197,13 @@ | |||
2434 | 193 | nrpe_check_file = self._get_check_filename() | 197 | nrpe_check_file = self._get_check_filename() |
2435 | 194 | with open(nrpe_check_file, 'w') as nrpe_check_config: | 198 | with open(nrpe_check_file, 'w') as nrpe_check_config: |
2436 | 195 | nrpe_check_config.write("# check {}\n".format(self.shortname)) | 199 | nrpe_check_config.write("# check {}\n".format(self.shortname)) |
2437 | 200 | if nagios_servicegroups: | ||
2438 | 201 | nrpe_check_config.write( | ||
2439 | 202 | "# The following header was added automatically by juju\n") | ||
2440 | 203 | nrpe_check_config.write( | ||
2441 | 204 | "# Modifying it will affect nagios monitoring and alerting\n") | ||
2442 | 205 | nrpe_check_config.write( | ||
2443 | 206 | "# servicegroups: {}\n".format(nagios_servicegroups)) | ||
2444 | 196 | nrpe_check_config.write("command[{}]={}\n".format( | 207 | nrpe_check_config.write("command[{}]={}\n".format( |
2445 | 197 | self.command, self.check_cmd)) | 208 | self.command, self.check_cmd)) |
2446 | 198 | 209 | ||
2447 | @@ -207,12 +218,19 @@ | |||
2448 | 207 | nagios_servicegroups): | 218 | nagios_servicegroups): |
2449 | 208 | self._remove_service_files() | 219 | self._remove_service_files() |
2450 | 209 | 220 | ||
2451 | 221 | if self.max_check_attempts: | ||
2452 | 222 | service_config_overrides = ' max_check_attempts {}'.format( | ||
2453 | 223 | self.max_check_attempts | ||
2454 | 224 | ) # Note indentation is here rather than in the template to avoid trailing spaces | ||
2455 | 225 | else: | ||
2456 | 226 | service_config_overrides = '' # empty string to avoid printing 'None' | ||
2457 | 210 | templ_vars = { | 227 | templ_vars = { |
2458 | 211 | 'nagios_hostname': hostname, | 228 | 'nagios_hostname': hostname, |
2459 | 212 | 'nagios_servicegroup': nagios_servicegroups, | 229 | 'nagios_servicegroup': nagios_servicegroups, |
2460 | 213 | 'description': self.description, | 230 | 'description': self.description, |
2461 | 214 | 'shortname': self.shortname, | 231 | 'shortname': self.shortname, |
2462 | 215 | 'command': self.command, | 232 | 'command': self.command, |
2463 | 233 | 'service_config_overrides': service_config_overrides, | ||
2464 | 216 | } | 234 | } |
2465 | 217 | nrpe_service_text = Check.service_template.format(**templ_vars) | 235 | nrpe_service_text = Check.service_template.format(**templ_vars) |
2466 | 218 | nrpe_service_file = self._get_service_filename(hostname) | 236 | nrpe_service_file = self._get_service_filename(hostname) |
2467 | @@ -227,6 +245,7 @@ | |||
2468 | 227 | nagios_logdir = '/var/log/nagios' | 245 | nagios_logdir = '/var/log/nagios' |
2469 | 228 | nagios_exportdir = '/var/lib/nagios/export' | 246 | nagios_exportdir = '/var/lib/nagios/export' |
2470 | 229 | nrpe_confdir = '/etc/nagios/nrpe.d' | 247 | nrpe_confdir = '/etc/nagios/nrpe.d' |
2471 | 248 | homedir = '/var/lib/nagios' # home dir provided by nagios-nrpe-server | ||
2472 | 230 | 249 | ||
2473 | 231 | def __init__(self, hostname=None, primary=True): | 250 | def __init__(self, hostname=None, primary=True): |
2474 | 232 | super(NRPE, self).__init__() | 251 | super(NRPE, self).__init__() |
2475 | @@ -251,11 +270,28 @@ | |||
2476 | 251 | relation = relation_ids('nrpe-external-master') | 270 | relation = relation_ids('nrpe-external-master') |
2477 | 252 | if relation: | 271 | if relation: |
2478 | 253 | log("Setting charm primary status {}".format(primary)) | 272 | log("Setting charm primary status {}".format(primary)) |
2480 | 254 | for rid in relation_ids('nrpe-external-master'): | 273 | for rid in relation: |
2481 | 255 | relation_set(relation_id=rid, relation_settings={'primary': self.primary}) | 274 | relation_set(relation_id=rid, relation_settings={'primary': self.primary}) |
2482 | 275 | self.remove_check_queue = set() | ||
2483 | 276 | |||
2484 | 277 | @classmethod | ||
2485 | 278 | def does_nrpe_conf_dir_exist(cls): | ||
2486 | 279 | """Return True if th nrpe_confdif directory exists.""" | ||
2487 | 280 | return os.path.isdir(cls.nrpe_confdir) | ||
2488 | 256 | 281 | ||
2489 | 257 | def add_check(self, *args, **kwargs): | 282 | def add_check(self, *args, **kwargs): |
2490 | 283 | shortname = None | ||
2491 | 284 | if kwargs.get('shortname') is None: | ||
2492 | 285 | if len(args) > 0: | ||
2493 | 286 | shortname = args[0] | ||
2494 | 287 | else: | ||
2495 | 288 | shortname = kwargs['shortname'] | ||
2496 | 289 | |||
2497 | 258 | self.checks.append(Check(*args, **kwargs)) | 290 | self.checks.append(Check(*args, **kwargs)) |
2498 | 291 | try: | ||
2499 | 292 | self.remove_check_queue.remove(shortname) | ||
2500 | 293 | except KeyError: | ||
2501 | 294 | pass | ||
2502 | 259 | 295 | ||
2503 | 260 | def remove_check(self, *args, **kwargs): | 296 | def remove_check(self, *args, **kwargs): |
2504 | 261 | if kwargs.get('shortname') is None: | 297 | if kwargs.get('shortname') is None: |
2505 | @@ -272,12 +308,13 @@ | |||
2506 | 272 | 308 | ||
2507 | 273 | check = Check(*args, **kwargs) | 309 | check = Check(*args, **kwargs) |
2508 | 274 | check.remove(self.hostname) | 310 | check.remove(self.hostname) |
2509 | 311 | self.remove_check_queue.add(kwargs['shortname']) | ||
2510 | 275 | 312 | ||
2511 | 276 | def write(self): | 313 | def write(self): |
2512 | 277 | try: | 314 | try: |
2513 | 278 | nagios_uid = pwd.getpwnam('nagios').pw_uid | 315 | nagios_uid = pwd.getpwnam('nagios').pw_uid |
2514 | 279 | nagios_gid = grp.getgrnam('nagios').gr_gid | 316 | nagios_gid = grp.getgrnam('nagios').gr_gid |
2516 | 280 | except: | 317 | except Exception: |
2517 | 281 | log("Nagios user not set up, nrpe checks not updated") | 318 | log("Nagios user not set up, nrpe checks not updated") |
2518 | 282 | return | 319 | return |
2519 | 283 | 320 | ||
2520 | @@ -287,19 +324,50 @@ | |||
2521 | 287 | 324 | ||
2522 | 288 | nrpe_monitors = {} | 325 | nrpe_monitors = {} |
2523 | 289 | monitors = {"monitors": {"remote": {"nrpe": nrpe_monitors}}} | 326 | monitors = {"monitors": {"remote": {"nrpe": nrpe_monitors}}} |
2524 | 327 | |||
2525 | 328 | # check that the charm can write to the conf dir. If not, then nagios | ||
2526 | 329 | # probably isn't installed, and we can defer. | ||
2527 | 330 | if not self.does_nrpe_conf_dir_exist(): | ||
2528 | 331 | return | ||
2529 | 332 | |||
2530 | 290 | for nrpecheck in self.checks: | 333 | for nrpecheck in self.checks: |
2531 | 291 | nrpecheck.write(self.nagios_context, self.hostname, | 334 | nrpecheck.write(self.nagios_context, self.hostname, |
2532 | 292 | self.nagios_servicegroups) | 335 | self.nagios_servicegroups) |
2533 | 293 | nrpe_monitors[nrpecheck.shortname] = { | 336 | nrpe_monitors[nrpecheck.shortname] = { |
2534 | 294 | "command": nrpecheck.command, | 337 | "command": nrpecheck.command, |
2535 | 295 | } | 338 | } |
2536 | 339 | # If we were passed max_check_attempts, add that to the relation data | ||
2537 | 340 | if nrpecheck.max_check_attempts is not None: | ||
2538 | 341 | nrpe_monitors[nrpecheck.shortname]['max_check_attempts'] = nrpecheck.max_check_attempts | ||
2539 | 296 | 342 | ||
2541 | 297 | service('restart', 'nagios-nrpe-server') | 343 | # update-status hooks are configured to firing every 5 minutes by |
2542 | 344 | # default. When nagios-nrpe-server is restarted, the nagios server | ||
2543 | 345 | # reports checks failing causing unnecessary alerts. Let's not restart | ||
2544 | 346 | # on update-status hooks. | ||
2545 | 347 | if not hook_name() == 'update-status': | ||
2546 | 348 | service('restart', 'nagios-nrpe-server') | ||
2547 | 298 | 349 | ||
2548 | 299 | monitor_ids = relation_ids("local-monitors") + \ | 350 | monitor_ids = relation_ids("local-monitors") + \ |
2549 | 300 | relation_ids("nrpe-external-master") | 351 | relation_ids("nrpe-external-master") |
2550 | 301 | for rid in monitor_ids: | 352 | for rid in monitor_ids: |
2552 | 302 | relation_set(relation_id=rid, monitors=yaml.dump(monitors)) | 353 | reldata = relation_get(unit=local_unit(), rid=rid) |
2553 | 354 | if 'monitors' in reldata: | ||
2554 | 355 | # update the existing set of monitors with the new data | ||
2555 | 356 | old_monitors = yaml.safe_load(reldata['monitors']) | ||
2556 | 357 | old_nrpe_monitors = old_monitors['monitors']['remote']['nrpe'] | ||
2557 | 358 | # remove keys that are in the remove_check_queue | ||
2558 | 359 | old_nrpe_monitors = {k: v for k, v in old_nrpe_monitors.items() | ||
2559 | 360 | if k not in self.remove_check_queue} | ||
2560 | 361 | # update/add nrpe_monitors | ||
2561 | 362 | old_nrpe_monitors.update(nrpe_monitors) | ||
2562 | 363 | old_monitors['monitors']['remote']['nrpe'] = old_nrpe_monitors | ||
2563 | 364 | # write back to the relation | ||
2564 | 365 | relation_set(relation_id=rid, monitors=yaml.dump(old_monitors)) | ||
2565 | 366 | else: | ||
2566 | 367 | # write a brand new set of monitors, as no existing ones. | ||
2567 | 368 | relation_set(relation_id=rid, monitors=yaml.dump(monitors)) | ||
2568 | 369 | |||
2569 | 370 | self.remove_check_queue.clear() | ||
2570 | 303 | 371 | ||
2571 | 304 | 372 | ||
2572 | 305 | def get_nagios_hostcontext(relation_name='nrpe-external-master'): | 373 | def get_nagios_hostcontext(relation_name='nrpe-external-master'): |
2573 | @@ -338,13 +406,14 @@ | |||
2574 | 338 | return unit | 406 | return unit |
2575 | 339 | 407 | ||
2576 | 340 | 408 | ||
2578 | 341 | def add_init_service_checks(nrpe, services, unit_name): | 409 | def add_init_service_checks(nrpe, services, unit_name, immediate_check=True): |
2579 | 342 | """ | 410 | """ |
2580 | 343 | Add checks for each service in list | 411 | Add checks for each service in list |
2581 | 344 | 412 | ||
2582 | 345 | :param NRPE nrpe: NRPE object to add check to | 413 | :param NRPE nrpe: NRPE object to add check to |
2583 | 346 | :param list services: List of services to check | 414 | :param list services: List of services to check |
2584 | 347 | :param str unit_name: Unit name to use in check description | 415 | :param str unit_name: Unit name to use in check description |
2585 | 416 | :param bool immediate_check: For sysv init, run the service check immediately | ||
2586 | 348 | """ | 417 | """ |
2587 | 349 | for svc in services: | 418 | for svc in services: |
2588 | 350 | # Don't add a check for these services from neutron-gateway | 419 | # Don't add a check for these services from neutron-gateway |
2589 | @@ -354,7 +423,7 @@ | |||
2590 | 354 | upstart_init = '/etc/init/%s.conf' % svc | 423 | upstart_init = '/etc/init/%s.conf' % svc |
2591 | 355 | sysv_init = '/etc/init.d/%s' % svc | 424 | sysv_init = '/etc/init.d/%s' % svc |
2592 | 356 | 425 | ||
2594 | 357 | if host.init_is_systemd(): | 426 | if host.init_is_systemd(service_name=svc): |
2595 | 358 | nrpe.add_check( | 427 | nrpe.add_check( |
2596 | 359 | shortname=svc, | 428 | shortname=svc, |
2597 | 360 | description='process check {%s}' % unit_name, | 429 | description='process check {%s}' % unit_name, |
2598 | @@ -368,33 +437,53 @@ | |||
2599 | 368 | ) | 437 | ) |
2600 | 369 | elif os.path.exists(sysv_init): | 438 | elif os.path.exists(sysv_init): |
2601 | 370 | cronpath = '/etc/cron.d/nagios-service-check-%s' % svc | 439 | cronpath = '/etc/cron.d/nagios-service-check-%s' % svc |
2608 | 371 | cron_file = ('*/5 * * * * root ' | 440 | checkpath = '%s/service-check-%s.txt' % (nrpe.homedir, svc) |
2609 | 372 | '/usr/local/lib/nagios/plugins/check_exit_status.pl ' | 441 | croncmd = ( |
2610 | 373 | '-s /etc/init.d/%s status > ' | 442 | '/usr/local/lib/nagios/plugins/check_exit_status.pl ' |
2611 | 374 | '/var/lib/nagios/service-check-%s.txt\n' % (svc, | 443 | '-e -s /etc/init.d/%s status' % svc |
2612 | 375 | svc) | 444 | ) |
2613 | 376 | ) | 445 | cron_file = '*/5 * * * * root %s > %s\n' % (croncmd, checkpath) |
2614 | 377 | f = open(cronpath, 'w') | 446 | f = open(cronpath, 'w') |
2615 | 378 | f.write(cron_file) | 447 | f.write(cron_file) |
2616 | 379 | f.close() | 448 | f.close() |
2617 | 380 | nrpe.add_check( | 449 | nrpe.add_check( |
2618 | 381 | shortname=svc, | 450 | shortname=svc, |
2622 | 382 | description='process check {%s}' % unit_name, | 451 | description='service check {%s}' % unit_name, |
2623 | 383 | check_cmd='check_status_file.py -f ' | 452 | check_cmd='check_status_file.py -f %s' % checkpath, |
2621 | 384 | '/var/lib/nagios/service-check-%s.txt' % svc, | ||
2624 | 385 | ) | 453 | ) |
2628 | 386 | 454 | # if /var/lib/nagios doesn't exist open(checkpath, 'w') will fail | |
2629 | 387 | 455 | # (LP: #1670223). | |
2630 | 388 | def copy_nrpe_checks(): | 456 | if immediate_check and os.path.isdir(nrpe.homedir): |
2631 | 457 | f = open(checkpath, 'w') | ||
2632 | 458 | subprocess.call( | ||
2633 | 459 | croncmd.split(), | ||
2634 | 460 | stdout=f, | ||
2635 | 461 | stderr=subprocess.STDOUT | ||
2636 | 462 | ) | ||
2637 | 463 | f.close() | ||
2638 | 464 | os.chmod(checkpath, 0o644) | ||
2639 | 465 | |||
2640 | 466 | |||
2641 | 467 | def copy_nrpe_checks(nrpe_files_dir=None): | ||
2642 | 389 | """ | 468 | """ |
2643 | 390 | Copy the nrpe checks into place | 469 | Copy the nrpe checks into place |
2644 | 391 | 470 | ||
2645 | 392 | """ | 471 | """ |
2646 | 393 | NAGIOS_PLUGINS = '/usr/local/lib/nagios/plugins' | 472 | NAGIOS_PLUGINS = '/usr/local/lib/nagios/plugins' |
2651 | 394 | nrpe_files_dir = os.path.join(os.getenv('CHARM_DIR'), 'hooks', | 473 | if nrpe_files_dir is None: |
2652 | 395 | 'charmhelpers', 'contrib', 'openstack', | 474 | # determine if "charmhelpers" is in CHARMDIR or CHARMDIR/hooks |
2653 | 396 | 'files') | 475 | for segment in ['.', 'hooks']: |
2654 | 397 | 476 | nrpe_files_dir = os.path.abspath(os.path.join( | |
2655 | 477 | os.getenv('CHARM_DIR'), | ||
2656 | 478 | segment, | ||
2657 | 479 | 'charmhelpers', | ||
2658 | 480 | 'contrib', | ||
2659 | 481 | 'openstack', | ||
2660 | 482 | 'files')) | ||
2661 | 483 | if os.path.isdir(nrpe_files_dir): | ||
2662 | 484 | break | ||
2663 | 485 | else: | ||
2664 | 486 | raise RuntimeError("Couldn't find charmhelpers directory") | ||
2665 | 398 | if not os.path.exists(NAGIOS_PLUGINS): | 487 | if not os.path.exists(NAGIOS_PLUGINS): |
2666 | 399 | os.makedirs(NAGIOS_PLUGINS) | 488 | os.makedirs(NAGIOS_PLUGINS) |
2667 | 400 | for fname in glob.glob(os.path.join(nrpe_files_dir, "check_*")): | 489 | for fname in glob.glob(os.path.join(nrpe_files_dir, "check_*")): |
2668 | @@ -418,3 +507,17 @@ | |||
2669 | 418 | shortname='haproxy_queue', | 507 | shortname='haproxy_queue', |
2670 | 419 | description='Check HAProxy queue depth {%s}' % unit_name, | 508 | description='Check HAProxy queue depth {%s}' % unit_name, |
2671 | 420 | check_cmd='check_haproxy_queue_depth.sh') | 509 | check_cmd='check_haproxy_queue_depth.sh') |
2672 | 510 | |||
2673 | 511 | |||
2674 | 512 | def remove_deprecated_check(nrpe, deprecated_services): | ||
2675 | 513 | """ | ||
2676 | 514 | Remove checks fro deprecated services in list | ||
2677 | 515 | |||
2678 | 516 | :param nrpe: NRPE object to remove check from | ||
2679 | 517 | :type nrpe: NRPE | ||
2680 | 518 | :param deprecated_services: List of deprecated services that are removed | ||
2681 | 519 | :type deprecated_services: list | ||
2682 | 520 | """ | ||
2683 | 521 | for dep_svc in deprecated_services: | ||
2684 | 522 | log('Deprecated service: {}'.format(dep_svc)) | ||
2685 | 523 | nrpe.remove_check(shortname=dep_svc) | ||
2686 | 421 | 524 | ||
2687 | === added file 'hooks/charmhelpers/contrib/charmsupport/volumes.py' | |||
2688 | --- hooks/charmhelpers/contrib/charmsupport/volumes.py 1970-01-01 00:00:00 +0000 | |||
2689 | +++ hooks/charmhelpers/contrib/charmsupport/volumes.py 2021-05-12 04:07:51 +0000 | |||
2690 | @@ -0,0 +1,173 @@ | |||
2691 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
2692 | 2 | # | ||
2693 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
2694 | 4 | # you may not use this file except in compliance with the License. | ||
2695 | 5 | # You may obtain a copy of the License at | ||
2696 | 6 | # | ||
2697 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
2698 | 8 | # | ||
2699 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
2700 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
2701 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
2702 | 12 | # See the License for the specific language governing permissions and | ||
2703 | 13 | # limitations under the License. | ||
2704 | 14 | |||
2705 | 15 | ''' | ||
2706 | 16 | Functions for managing volumes in juju units. One volume is supported per unit. | ||
2707 | 17 | Subordinates may have their own storage, provided it is on its own partition. | ||
2708 | 18 | |||
2709 | 19 | Configuration stanzas:: | ||
2710 | 20 | |||
2711 | 21 | volume-ephemeral: | ||
2712 | 22 | type: boolean | ||
2713 | 23 | default: true | ||
2714 | 24 | description: > | ||
2715 | 25 | If false, a volume is mounted as sepecified in "volume-map" | ||
2716 | 26 | If true, ephemeral storage will be used, meaning that log data | ||
2717 | 27 | will only exist as long as the machine. YOU HAVE BEEN WARNED. | ||
2718 | 28 | volume-map: | ||
2719 | 29 | type: string | ||
2720 | 30 | default: {} | ||
2721 | 31 | description: > | ||
2722 | 32 | YAML map of units to device names, e.g: | ||
2723 | 33 | "{ rsyslog/0: /dev/vdb, rsyslog/1: /dev/vdb }" | ||
2724 | 34 | Service units will raise a configure-error if volume-ephemeral | ||
2725 | 35 | is 'true' and no volume-map value is set. Use 'juju set' to set a | ||
2726 | 36 | value and 'juju resolved' to complete configuration. | ||
2727 | 37 | |||
2728 | 38 | Usage:: | ||
2729 | 39 | |||
2730 | 40 | from charmsupport.volumes import configure_volume, VolumeConfigurationError | ||
2731 | 41 | from charmsupport.hookenv import log, ERROR | ||
2732 | 42 | def post_mount_hook(): | ||
2733 | 43 | stop_service('myservice') | ||
2734 | 44 | def post_mount_hook(): | ||
2735 | 45 | start_service('myservice') | ||
2736 | 46 | |||
2737 | 47 | if __name__ == '__main__': | ||
2738 | 48 | try: | ||
2739 | 49 | configure_volume(before_change=pre_mount_hook, | ||
2740 | 50 | after_change=post_mount_hook) | ||
2741 | 51 | except VolumeConfigurationError: | ||
2742 | 52 | log('Storage could not be configured', ERROR) | ||
2743 | 53 | |||
2744 | 54 | ''' | ||
2745 | 55 | |||
2746 | 56 | # XXX: Known limitations | ||
2747 | 57 | # - fstab is neither consulted nor updated | ||
2748 | 58 | |||
2749 | 59 | import os | ||
2750 | 60 | from charmhelpers.core import hookenv | ||
2751 | 61 | from charmhelpers.core import host | ||
2752 | 62 | import yaml | ||
2753 | 63 | |||
2754 | 64 | |||
2755 | 65 | MOUNT_BASE = '/srv/juju/volumes' | ||
2756 | 66 | |||
2757 | 67 | |||
2758 | 68 | class VolumeConfigurationError(Exception): | ||
2759 | 69 | '''Volume configuration data is missing or invalid''' | ||
2760 | 70 | pass | ||
2761 | 71 | |||
2762 | 72 | |||
2763 | 73 | def get_config(): | ||
2764 | 74 | '''Gather and sanity-check volume configuration data''' | ||
2765 | 75 | volume_config = {} | ||
2766 | 76 | config = hookenv.config() | ||
2767 | 77 | |||
2768 | 78 | errors = False | ||
2769 | 79 | |||
2770 | 80 | if config.get('volume-ephemeral') in (True, 'True', 'true', 'Yes', 'yes'): | ||
2771 | 81 | volume_config['ephemeral'] = True | ||
2772 | 82 | else: | ||
2773 | 83 | volume_config['ephemeral'] = False | ||
2774 | 84 | |||
2775 | 85 | try: | ||
2776 | 86 | volume_map = yaml.safe_load(config.get('volume-map', '{}')) | ||
2777 | 87 | except yaml.YAMLError as e: | ||
2778 | 88 | hookenv.log("Error parsing YAML volume-map: {}".format(e), | ||
2779 | 89 | hookenv.ERROR) | ||
2780 | 90 | errors = True | ||
2781 | 91 | if volume_map is None: | ||
2782 | 92 | # probably an empty string | ||
2783 | 93 | volume_map = {} | ||
2784 | 94 | elif not isinstance(volume_map, dict): | ||
2785 | 95 | hookenv.log("Volume-map should be a dictionary, not {}".format( | ||
2786 | 96 | type(volume_map))) | ||
2787 | 97 | errors = True | ||
2788 | 98 | |||
2789 | 99 | volume_config['device'] = volume_map.get(os.environ['JUJU_UNIT_NAME']) | ||
2790 | 100 | if volume_config['device'] and volume_config['ephemeral']: | ||
2791 | 101 | # asked for ephemeral storage but also defined a volume ID | ||
2792 | 102 | hookenv.log('A volume is defined for this unit, but ephemeral ' | ||
2793 | 103 | 'storage was requested', hookenv.ERROR) | ||
2794 | 104 | errors = True | ||
2795 | 105 | elif not volume_config['device'] and not volume_config['ephemeral']: | ||
2796 | 106 | # asked for permanent storage but did not define volume ID | ||
2797 | 107 | hookenv.log('Ephemeral storage was requested, but there is no volume ' | ||
2798 | 108 | 'defined for this unit.', hookenv.ERROR) | ||
2799 | 109 | errors = True | ||
2800 | 110 | |||
2801 | 111 | unit_mount_name = hookenv.local_unit().replace('/', '-') | ||
2802 | 112 | volume_config['mountpoint'] = os.path.join(MOUNT_BASE, unit_mount_name) | ||
2803 | 113 | |||
2804 | 114 | if errors: | ||
2805 | 115 | return None | ||
2806 | 116 | return volume_config | ||
2807 | 117 | |||
2808 | 118 | |||
2809 | 119 | def mount_volume(config): | ||
2810 | 120 | if os.path.exists(config['mountpoint']): | ||
2811 | 121 | if not os.path.isdir(config['mountpoint']): | ||
2812 | 122 | hookenv.log('Not a directory: {}'.format(config['mountpoint'])) | ||
2813 | 123 | raise VolumeConfigurationError() | ||
2814 | 124 | else: | ||
2815 | 125 | host.mkdir(config['mountpoint']) | ||
2816 | 126 | if os.path.ismount(config['mountpoint']): | ||
2817 | 127 | unmount_volume(config) | ||
2818 | 128 | if not host.mount(config['device'], config['mountpoint'], persist=True): | ||
2819 | 129 | raise VolumeConfigurationError() | ||
2820 | 130 | |||
2821 | 131 | |||
2822 | 132 | def unmount_volume(config): | ||
2823 | 133 | if os.path.ismount(config['mountpoint']): | ||
2824 | 134 | if not host.umount(config['mountpoint'], persist=True): | ||
2825 | 135 | raise VolumeConfigurationError() | ||
2826 | 136 | |||
2827 | 137 | |||
2828 | 138 | def managed_mounts(): | ||
2829 | 139 | '''List of all mounted managed volumes''' | ||
2830 | 140 | return filter(lambda mount: mount[0].startswith(MOUNT_BASE), host.mounts()) | ||
2831 | 141 | |||
2832 | 142 | |||
2833 | 143 | def configure_volume(before_change=lambda: None, after_change=lambda: None): | ||
2834 | 144 | '''Set up storage (or don't) according to the charm's volume configuration. | ||
2835 | 145 | Returns the mount point or "ephemeral". before_change and after_change | ||
2836 | 146 | are optional functions to be called if the volume configuration changes. | ||
2837 | 147 | ''' | ||
2838 | 148 | |||
2839 | 149 | config = get_config() | ||
2840 | 150 | if not config: | ||
2841 | 151 | hookenv.log('Failed to read volume configuration', hookenv.CRITICAL) | ||
2842 | 152 | raise VolumeConfigurationError() | ||
2843 | 153 | |||
2844 | 154 | if config['ephemeral']: | ||
2845 | 155 | if os.path.ismount(config['mountpoint']): | ||
2846 | 156 | before_change() | ||
2847 | 157 | unmount_volume(config) | ||
2848 | 158 | after_change() | ||
2849 | 159 | return 'ephemeral' | ||
2850 | 160 | else: | ||
2851 | 161 | # persistent storage | ||
2852 | 162 | if os.path.ismount(config['mountpoint']): | ||
2853 | 163 | mounts = dict(managed_mounts()) | ||
2854 | 164 | if mounts.get(config['mountpoint']) != config['device']: | ||
2855 | 165 | before_change() | ||
2856 | 166 | unmount_volume(config) | ||
2857 | 167 | mount_volume(config) | ||
2858 | 168 | after_change() | ||
2859 | 169 | else: | ||
2860 | 170 | before_change() | ||
2861 | 171 | mount_volume(config) | ||
2862 | 172 | after_change() | ||
2863 | 173 | return config['mountpoint'] | ||
2864 | 0 | 174 | ||
2865 | === added directory 'hooks/charmhelpers/contrib/database' | |||
2866 | === added file 'hooks/charmhelpers/contrib/database/__init__.py' | |||
2867 | --- hooks/charmhelpers/contrib/database/__init__.py 1970-01-01 00:00:00 +0000 | |||
2868 | +++ hooks/charmhelpers/contrib/database/__init__.py 2021-05-12 04:07:51 +0000 | |||
2869 | @@ -0,0 +1,11 @@ | |||
2870 | 1 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
2871 | 2 | # you may not use this file except in compliance with the License. | ||
2872 | 3 | # You may obtain a copy of the License at | ||
2873 | 4 | # | ||
2874 | 5 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
2875 | 6 | # | ||
2876 | 7 | # Unless required by applicable law or agreed to in writing, software | ||
2877 | 8 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
2878 | 9 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
2879 | 10 | # See the License for the specific language governing permissions and | ||
2880 | 11 | # limitations under the License. | ||
2881 | 0 | 12 | ||
2882 | === added file 'hooks/charmhelpers/contrib/database/mysql.py' | |||
2883 | --- hooks/charmhelpers/contrib/database/mysql.py 1970-01-01 00:00:00 +0000 | |||
2884 | +++ hooks/charmhelpers/contrib/database/mysql.py 2021-05-12 04:07:51 +0000 | |||
2885 | @@ -0,0 +1,840 @@ | |||
2886 | 1 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
2887 | 2 | # you may not use this file except in compliance with the License. | ||
2888 | 3 | # You may obtain a copy of the License at | ||
2889 | 4 | # | ||
2890 | 5 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
2891 | 6 | # | ||
2892 | 7 | # Unless required by applicable law or agreed to in writing, software | ||
2893 | 8 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
2894 | 9 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
2895 | 10 | # See the License for the specific language governing permissions and | ||
2896 | 11 | # limitations under the License. | ||
2897 | 12 | |||
2898 | 13 | """Helper for working with a MySQL database""" | ||
2899 | 14 | import collections | ||
2900 | 15 | import copy | ||
2901 | 16 | import json | ||
2902 | 17 | import re | ||
2903 | 18 | import sys | ||
2904 | 19 | import platform | ||
2905 | 20 | import os | ||
2906 | 21 | import glob | ||
2907 | 22 | import six | ||
2908 | 23 | |||
2909 | 24 | # from string import upper | ||
2910 | 25 | |||
2911 | 26 | from charmhelpers.core.host import ( | ||
2912 | 27 | CompareHostReleases, | ||
2913 | 28 | lsb_release, | ||
2914 | 29 | mkdir, | ||
2915 | 30 | pwgen, | ||
2916 | 31 | write_file | ||
2917 | 32 | ) | ||
2918 | 33 | from charmhelpers.core.hookenv import ( | ||
2919 | 34 | config as config_get, | ||
2920 | 35 | relation_get, | ||
2921 | 36 | related_units, | ||
2922 | 37 | unit_get, | ||
2923 | 38 | log, | ||
2924 | 39 | DEBUG, | ||
2925 | 40 | ERROR, | ||
2926 | 41 | INFO, | ||
2927 | 42 | WARNING, | ||
2928 | 43 | leader_get, | ||
2929 | 44 | leader_set, | ||
2930 | 45 | is_leader, | ||
2931 | 46 | ) | ||
2932 | 47 | from charmhelpers.fetch import ( | ||
2933 | 48 | apt_install, | ||
2934 | 49 | apt_update, | ||
2935 | 50 | filter_installed_packages, | ||
2936 | 51 | ) | ||
2937 | 52 | from charmhelpers.contrib.network.ip import get_host_ip | ||
2938 | 53 | |||
2939 | 54 | try: | ||
2940 | 55 | import MySQLdb | ||
2941 | 56 | except ImportError: | ||
2942 | 57 | apt_update(fatal=True) | ||
2943 | 58 | if six.PY2: | ||
2944 | 59 | apt_install(filter_installed_packages(['python-mysqldb']), fatal=True) | ||
2945 | 60 | else: | ||
2946 | 61 | apt_install(filter_installed_packages(['python3-mysqldb']), fatal=True) | ||
2947 | 62 | import MySQLdb | ||
2948 | 63 | |||
2949 | 64 | |||
class MySQLSetPasswordError(Exception):
    """Raised when updating a MySQL user's password fails."""
2952 | 67 | |||
2953 | 68 | |||
class MySQLHelper(object):
    """Helper for managing databases, users and grants on a MySQL server.

    Passwords are persisted either on disk (one file per user, derived from
    the supplied path templates) or in Juju leader storage; helpers are
    provided to migrate from the former to the latter.

    NOTE(review): the SQL statements below are built with ``str.format``
    rather than parameterized queries.  The values come from charm relation
    data, not arbitrary end users, but treat them as trusted input only.
    """

    def __init__(self, rpasswdf_template, upasswdf_template, host='localhost',
                 migrate_passwd_to_leader_storage=True,
                 delete_ondisk_passwd_file=True, user="root", password=None,
                 port=None, connect_timeout=None):
        """
        :param rpasswdf_template: Path of the on-disk root password file.
        :type rpasswdf_template: str
        :param upasswdf_template: Path template (``str.format`` style) for
            per-user on-disk password files.
        :type upasswdf_template: str
        :param host: Database host to connect to.
        :type host: str
        :param migrate_passwd_to_leader_storage: Whether to migrate on-disk
            password files into leader storage when passwords are fetched.
        :type migrate_passwd_to_leader_storage: bool
        :param delete_ondisk_passwd_file: Whether to delete the on-disk file
            after a successful migration to leader storage.
        :type delete_ondisk_passwd_file: bool
        :param user: Default connection user.
        :type user: str
        :param password: Default connection password.
        :type password: Union[None, str]
        :param port: Optional port; omitted from the connection if None.
        :type port: Union[None, int]
        :param connect_timeout: Connection timeout in seconds (default 30).
        :type connect_timeout: Union[None, int]
        """
        self.user = user
        self.host = host
        self.password = password
        self.port = port
        # default timeout of 30 seconds.
        self.connect_timeout = connect_timeout or 30

        # Password file path templates
        self.root_passwd_file_template = rpasswdf_template
        self.user_passwd_file_template = upasswdf_template

        self.migrate_passwd_to_leader_storage = migrate_passwd_to_leader_storage
        # If we migrate we have the option to delete local copy of root passwd
        self.delete_ondisk_passwd_file = delete_ondisk_passwd_file
        self.connection = None

    def connect(self, user='root', password=None, host=None, port=None,
                connect_timeout=None):
        """Open a connection and store it on ``self.connection``.

        Arguments default to the values captured at construction time.

        :raises: MySQLdb.Error (re-raised after logging).
        """
        _connection_info = {
            "user": user or self.user,
            "passwd": password or self.password,
            "host": host or self.host
        }
        # set the connection timeout; for mysql8 it can hang forever, so some
        # timeout is required.
        timeout = connect_timeout or self.connect_timeout
        if timeout:
            _connection_info["connect_timeout"] = timeout
        # port cannot be None but we also do not want to specify it unless it
        # has been explicit set.
        port = port or self.port
        if port is not None:
            _connection_info["port"] = port

        log("Opening db connection for %s@%s" % (user, host), level=DEBUG)
        try:
            self.connection = MySQLdb.connect(**_connection_info)
        except Exception as e:
            log("Failed to connect to database due to '{}'".format(str(e)),
                level=ERROR)
            raise

    def database_exists(self, db_name):
        """Return True if database `db_name` exists on the server."""
        cursor = self.connection.cursor()
        try:
            cursor.execute("SHOW DATABASES")
            databases = [i[0] for i in cursor.fetchall()]
        finally:
            cursor.close()

        return db_name in databases

    def create_database(self, db_name):
        """Create database `db_name` with UTF8 character set."""
        cursor = self.connection.cursor()
        try:
            cursor.execute("CREATE DATABASE `{}` CHARACTER SET UTF8"
                           .format(db_name))
        finally:
            cursor.close()

    def grant_exists(self, db_name, db_user, remote_ip):
        """Return True if an all-privileges grant exists for the user/host.

        Returns False if SHOW GRANTS fails (e.g. the user does not exist).
        """
        cursor = self.connection.cursor()
        priv_string = "GRANT ALL PRIVILEGES ON `{}`.* " \
                      "TO '{}'@'{}'".format(db_name, db_user, remote_ip)
        try:
            cursor.execute("SHOW GRANTS for '{}'@'{}'".format(db_user,
                                                              remote_ip))
            grants = [i[0] for i in cursor.fetchall()]
        except MySQLdb.OperationalError:
            return False
        finally:
            cursor.close()

        # TODO: review for different grants
        return priv_string in grants

    def create_grant(self, db_name, db_user, remote_ip, password):
        """Grant all privileges on `db_name` to `db_user`@`remote_ip`."""
        cursor = self.connection.cursor()
        try:
            # TODO: review for different grants
            cursor.execute("GRANT ALL PRIVILEGES ON `{}`.* TO '{}'@'{}' "
                           "IDENTIFIED BY '{}'".format(db_name,
                                                       db_user,
                                                       remote_ip,
                                                       password))
        finally:
            cursor.close()

    def create_admin_grant(self, db_user, remote_ip, password):
        """Grant all privileges on all databases to `db_user`@`remote_ip`."""
        cursor = self.connection.cursor()
        try:
            cursor.execute("GRANT ALL PRIVILEGES ON *.* TO '{}'@'{}' "
                           "IDENTIFIED BY '{}'".format(db_user,
                                                       remote_ip,
                                                       password))
        finally:
            cursor.close()

    def cleanup_grant(self, db_user, remote_ip):
        """Remove the mysql.user row for `db_user`@`remote_ip`.

        Callers should follow up with flush_priviledges() for the change to
        take effect.
        """
        cursor = self.connection.cursor()
        try:
            # BUGFIX: this previously issued "DROP FROM mysql.user ...",
            # which is not valid SQL and always raised a ProgrammingError.
            # Row deletion uses DELETE FROM.
            cursor.execute("DELETE FROM mysql.user WHERE user='{}' "
                           "AND HOST='{}'".format(db_user,
                                                  remote_ip))
        finally:
            cursor.close()

    def flush_priviledges(self):
        """Issue FLUSH PRIVILEGES.

        NOTE: method name (mis)spelling is part of the public API and is
        kept for backwards compatibility.
        """
        cursor = self.connection.cursor()
        try:
            cursor.execute("FLUSH PRIVILEGES")
        finally:
            cursor.close()

    def execute(self, sql):
        """Execute arbitary SQL against the database."""
        cursor = self.connection.cursor()
        try:
            cursor.execute(sql)
        finally:
            cursor.close()

    def select(self, sql):
        """
        Execute arbitrary SQL select query against the database
        and return the results.

        :param sql: SQL select query to execute
        :type sql: string
        :returns: SQL select query result
        :rtype: list of lists
        :raises: MySQLdb.Error
        """
        cursor = self.connection.cursor()
        try:
            cursor.execute(sql)
            results = [list(i) for i in cursor.fetchall()]
        finally:
            cursor.close()
        return results

    def migrate_passwords_to_leader_storage(self, excludes=None):
        """Migrate any passwords storage on disk to leader storage.

        No-op on non-leader units.  Each ``*.passwd`` file next to the root
        password file is pushed into leader settings keyed by its basename.

        :param excludes: File paths to skip during migration.
        :type excludes: Union[None, list]
        """
        if not is_leader():
            log("Skipping password migration as not the lead unit",
                level=DEBUG)
            return
        dirname = os.path.dirname(self.root_passwd_file_template)
        path = os.path.join(dirname, '*.passwd')
        for f in glob.glob(path):
            if excludes and f in excludes:
                log("Excluding %s from leader storage migration" % (f),
                    level=DEBUG)
                continue

            key = os.path.basename(f)
            with open(f, 'r') as passwd:
                _value = passwd.read().strip()

            try:
                leader_set(settings={key: _value})

                if self.delete_ondisk_passwd_file:
                    os.unlink(f)
            except ValueError:
                # NOTE cluster relation not yet ready - skip for now
                pass

    def get_mysql_password_on_disk(self, username=None, password=None):
        """Retrieve, generate or store a mysql password for the provided
        username on disk.

        :param username: User to look up; None means the root password file.
        :param password: Password to store if a new file must be created;
            if None a random 32-char password is generated.
        :returns: The existing or newly stored password.
        :rtype: str
        """
        if username:
            template = self.user_passwd_file_template
            passwd_file = template.format(username)
        else:
            passwd_file = self.root_passwd_file_template

        _password = None
        if os.path.exists(passwd_file):
            log("Using existing password file '%s'" % passwd_file, level=DEBUG)
            with open(passwd_file, 'r') as passwd:
                _password = passwd.read().strip()
        else:
            log("Generating new password file '%s'" % passwd_file, level=DEBUG)
            if not os.path.isdir(os.path.dirname(passwd_file)):
                # NOTE: need to ensure this is not mysql root dir (which needs
                # to be mysql readable)
                mkdir(os.path.dirname(passwd_file), owner='root', group='root',
                      perms=0o770)
                # Force permissions - for some reason the chmod in makedirs
                # fails
                os.chmod(os.path.dirname(passwd_file), 0o770)

            _password = password or pwgen(length=32)
            write_file(passwd_file, _password, owner='root', group='root',
                       perms=0o660)

        return _password

    def passwd_keys(self, username):
        """Generator to return keys used to store passwords in peer store.

        NOTE: we support both legacy and new format to support mysql
        charm prior to refactor. This is necessary to avoid LP 1451890.
        """
        keys = []
        if username == 'mysql':
            log("Bad username '%s'" % (username), level=WARNING)

        if username:
            # IMPORTANT: *newer* format must be returned first
            keys.append('mysql-%s.passwd' % (username))
            keys.append('%s.passwd' % (username))
        else:
            keys.append('mysql.passwd')

        for key in keys:
            yield key

    def get_mysql_password(self, username=None, password=None):
        """Retrieve, generate or store a mysql password for the provided
        username using peer relation cluster.

        Checks leader storage first, falling back to (and migrating from)
        the on-disk file.
        """
        excludes = []

        # First check peer relation.
        try:
            for key in self.passwd_keys(username):
                _password = leader_get(key)
                if _password:
                    break

            # If root password available don't update peer relation from local
            if _password and not username:
                excludes.append(self.root_passwd_file_template)

        except ValueError:
            # cluster relation is not yet started; use on-disk
            _password = None

        # If none available, generate new one
        if not _password:
            _password = self.get_mysql_password_on_disk(username, password)

        # Put on wire if required
        if self.migrate_passwd_to_leader_storage:
            self.migrate_passwords_to_leader_storage(excludes=excludes)

        return _password

    def get_mysql_root_password(self, password=None):
        """Retrieve or generate mysql root password for service units."""
        return self.get_mysql_password(username=None, password=password)

    def set_mysql_password(self, username, password, current_password=None):
        """Update a mysql password for the provided username changing the
        leader settings

        To update root's password pass `None` in the username

        :param username: Username to change password of
        :type username: str
        :param password: New password for user.
        :type password: str
        :param current_password: Existing password for user.
        :type current_password: str
        :raises MySQLSetPasswordError: If the old password cannot
            authenticate, the UPDATE fails, or the new password cannot
            authenticate afterwards.
        """

        if username is None:
            username = 'root'

        # get root password via leader-get, it may be that in the past (when
        # changes to root-password were not supported) the user changed the
        # password, so leader-get is more reliable source than
        # config.previous('root-password').
        rel_username = None if username == 'root' else username
        if not current_password:
            current_password = self.get_mysql_password(rel_username)

        # password that needs to be set
        new_passwd = password

        # update password for all users (e.g. root@localhost, root@::1, etc)
        try:
            self.connect(user=username, password=current_password)
            cursor = self.connection.cursor()
        except MySQLdb.OperationalError as ex:
            raise MySQLSetPasswordError(('Cannot connect using password in '
                                         'leader settings (%s)') % ex, ex)

        try:
            # NOTE(freyes): Due to skip-name-resolve root@$HOSTNAME account
            # fails when using SET PASSWORD so using UPDATE against the
            # mysql.user table is needed, but changes to this table are not
            # replicated across the cluster, so this update needs to run in
            # all the nodes. More info at
            # http://galeracluster.com/documentation-webpages/userchanges.html
            release = CompareHostReleases(lsb_release()['DISTRIB_CODENAME'])
            if release < 'bionic':
                SQL_UPDATE_PASSWD = ("UPDATE mysql.user SET password = "
                                     "PASSWORD( %s ) WHERE user = %s;")
            else:
                # PXC 5.7 (introduced in Bionic) uses authentication_string
                SQL_UPDATE_PASSWD = ("UPDATE mysql.user SET "
                                     "authentication_string = "
                                     "PASSWORD( %s ) WHERE user = %s;")
            cursor.execute(SQL_UPDATE_PASSWD, (new_passwd, username))
            cursor.execute('FLUSH PRIVILEGES;')
            self.connection.commit()
        except MySQLdb.OperationalError as ex:
            raise MySQLSetPasswordError('Cannot update password: %s' % str(ex),
                                        ex)
        finally:
            cursor.close()

        # check the password was changed
        try:
            self.connect(user=username, password=new_passwd)
            self.execute('select 1;')
        except MySQLdb.OperationalError as ex:
            raise MySQLSetPasswordError(('Cannot connect using new password: '
                                         '%s') % str(ex), ex)

        if not is_leader():
            log('Only the leader can set a new password in the relation',
                level=DEBUG)
            return

        for key in self.passwd_keys(rel_username):
            _password = leader_get(key)
            if _password:
                log('Updating password for %s (%s)' % (key, rel_username),
                    level=DEBUG)
                leader_set(settings={key: new_passwd})

    def set_mysql_root_password(self, password, current_password=None):
        """Update mysql root password changing the leader settings

        :param password: New password for user.
        :type password: str
        :param current_password: Existing password for user.
        :type current_password: str
        """
        self.set_mysql_password(
            'root',
            password,
            current_password=current_password)

    def normalize_address(self, hostname):
        """Ensure that address returned is an IP address (i.e. not fqdn)"""
        if config_get('prefer-ipv6'):
            # TODO: add support for ipv6 dns
            return hostname

        if hostname != unit_get('private-address'):
            return get_host_ip(hostname, fallback=hostname)

        # Otherwise assume localhost
        return '127.0.0.1'

    def get_allowed_units(self, database, username, relation_id=None, prefix=None):
        """Get list of units with access grants for database with username.

        This is typically used to provide shared-db relations with a list of
        which units have been granted access to the given database.
        """
        if not self.connection:
            self.connect(password=self.get_mysql_root_password())
        allowed_units = set()
        if not prefix:
            prefix = database
        for unit in related_units(relation_id):
            settings = relation_get(rid=relation_id, unit=unit)
            # First check for setting with prefix, then without
            for attr in ["%s_hostname" % (prefix), 'hostname']:
                hosts = settings.get(attr, None)
                if hosts:
                    break

            if hosts:
                # hostname can be json-encoded list of hostnames
                try:
                    hosts = json.loads(hosts)
                except ValueError:
                    hosts = [hosts]
            else:
                hosts = [settings['private-address']]

            if hosts:
                for host in hosts:
                    host = self.normalize_address(host)
                    if self.grant_exists(database, username, host):
                        log("Grant exists for host '%s' on db '%s'" %
                            (host, database), level=DEBUG)
                        if unit not in allowed_units:
                            allowed_units.add(unit)
                    else:
                        log("Grant does NOT exist for host '%s' on db '%s'" %
                            (host, database), level=DEBUG)
            else:
                log("No hosts found for grant check", level=INFO)

        return allowed_units

    def configure_db(self, hostname, database, username, admin=False):
        """Configure access to database for username from hostname.

        Creates the database if needed, then ensures a (possibly admin)
        grant exists for the normalized remote address.

        :returns: The password for `username`.
        :rtype: str
        """
        if not self.connection:
            self.connect(password=self.get_mysql_root_password())
        if not self.database_exists(database):
            self.create_database(database)

        remote_ip = self.normalize_address(hostname)
        password = self.get_mysql_password(username)
        if not self.grant_exists(database, username, remote_ip):
            if not admin:
                self.create_grant(database, username, remote_ip, password)
            else:
                self.create_admin_grant(username, remote_ip, password)
            self.flush_priviledges()

        return password
3381 | 496 | |||
# `_singleton_config_helper` stores the instance of the helper class that is
# being used during a hook invocation.  Access it through
# get_mysql_config_helper() below rather than reading it directly.
_singleton_config_helper = None
3385 | 500 | |||
3386 | 501 | |||
def get_mysql_config_helper():
    """Return the process-wide MySQLConfigHelper, creating it on first use."""
    global _singleton_config_helper
    if not _singleton_config_helper:
        _singleton_config_helper = MySQLConfigHelper()
    return _singleton_config_helper
3392 | 507 | |||
3393 | 508 | |||
class MySQLConfigHelper(object):
    """Base configuration helper for MySQL.

    Derives my.cnf values (InnoDB buffer pool size, flush behaviour,
    change buffering) from charm configuration and system memory.
    """

    # Going for the biggest page size to avoid wasted bytes.
    # InnoDB page size is 16MB

    DEFAULT_PAGE_SIZE = 16 * 1024 * 1024
    DEFAULT_INNODB_BUFFER_FACTOR = 0.50
    DEFAULT_INNODB_BUFFER_SIZE_MAX = 512 * 1024 * 1024

    # Validation and lookups for InnoDB configuration
    INNODB_VALID_BUFFERING_VALUES = [
        'none',
        'inserts',
        'deletes',
        'changes',
        'purges',
        'all'
    ]
    INNODB_FLUSH_CONFIG_VALUES = {
        'fast': 2,
        'safest': 1,
        'unsafe': 0,
    }

    def human_to_bytes(self, human):
        """Convert a human readable size expression to bytes.

        Accepts plain digits, a number with a K/M/G/T suffix, or a
        percentage of total system RAM (rounded down to a multiple of
        DEFAULT_PAGE_SIZE).

        :param human: Size expression, e.g. '512', '32M', '50%'.
        :type human: str
        :returns: Size in bytes.
        :rtype: int
        :raises ValueError: If the suffix is not K, M, G, T or %.
        """
        num_re = re.compile('^[0-9]+$')
        if num_re.match(human):
            # BUGFIX: previously the string was returned unconverted, which
            # broke numeric comparisons downstream (str vs int raises
            # TypeError on Python 3, e.g. in get_innodb_buffer_pool_size).
            return int(human)

        factors = {
            'K': 1024,
            'M': 1048576,
            'G': 1073741824,
            'T': 1099511627776
        }
        modifier = human[-1]
        if modifier in factors:
            return int(human[:-1]) * factors[modifier]

        if modifier == '%':
            total_ram = self.human_to_bytes(self.get_mem_total())
            if self.is_32bit_system() and total_ram > self.sys_mem_limit():
                total_ram = self.sys_mem_limit()
            factor = int(human[:-1]) * 0.01
            pctram = total_ram * factor
            return int(pctram - (pctram % self.DEFAULT_PAGE_SIZE))

        # BUGFIX: message previously omitted the supported '%' suffix.
        raise ValueError("Can only convert K, M, G, T or %")

    def is_32bit_system(self):
        """Determine whether system is 32 or 64 bit."""
        try:
            return sys.maxsize < 2 ** 32
        except OverflowError:
            return False

    def sys_mem_limit(self):
        """Determine the default memory limit for the current service unit.

        :returns: Addressable memory limit in bytes for 32bit platforms.
        :rtype: int
        """
        if platform.machine() in ['armv7l']:
            _mem_limit = self.human_to_bytes('2700M')  # experimentally determined
        else:
            # Limit for x86 based 32bit systems
            _mem_limit = self.human_to_bytes('4G')

        return _mem_limit

    def get_mem_total(self):
        """Calculate the total memory in the current service unit.

        Reads /proc/meminfo; returns e.g. '16384K' (value plus upper-cased
        unit letter), or None if MemTotal is not found.
        """
        with open('/proc/meminfo') as meminfo_file:
            for line in meminfo_file:
                key, mem = line.split(':', 2)
                if key == 'MemTotal':
                    mtot, modifier = mem.strip().split(' ')
                    return '%s%s' % (mtot, modifier[0].upper())

    def get_innodb_flush_log_at_trx_commit(self):
        """Get value for innodb_flush_log_at_trx_commit.

        Use the innodb-flush-log-at-trx-commit or the tunning-level setting
        translated by INNODB_FLUSH_CONFIG_VALUES to get the
        innodb_flush_log_at_trx_commit value.

        :returns: Numeric value for innodb_flush_log_at_trx_commit
        :rtype: Union[None, int]
        """
        _iflatc = config_get('innodb-flush-log-at-trx-commit')
        _tuning_level = config_get('tuning-level')
        if _iflatc:
            return _iflatc
        elif _tuning_level:
            return self.INNODB_FLUSH_CONFIG_VALUES.get(_tuning_level, 1)

    def get_innodb_change_buffering(self):
        """Get value for innodb_change_buffering.

        Use the innodb-change-buffering validated against
        INNODB_VALID_BUFFERING_VALUES to get the innodb_change_buffering value.

        :returns: String value for innodb_change_buffering.
        :rtype: Union[None, str]
        """
        _icb = config_get('innodb-change-buffering')
        if _icb and _icb in self.INNODB_VALID_BUFFERING_VALUES:
            return _icb

    def get_innodb_buffer_pool_size(self):
        """Get value for innodb_buffer_pool_size.

        Return the number value of innodb-buffer-pool-size or dataset-size. If
        neither is set, calculate a sane default based on total memory.

        :returns: Numeric value for innodb_buffer_pool_size.
        :rtype: int
        """
        total_memory = self.human_to_bytes(self.get_mem_total())

        dataset_bytes = config_get('dataset-size')
        innodb_buffer_pool_size = config_get('innodb-buffer-pool-size')

        if innodb_buffer_pool_size:
            innodb_buffer_pool_size = self.human_to_bytes(
                innodb_buffer_pool_size)
        elif dataset_bytes:
            log("Option 'dataset-size' has been deprecated, please use"
                "innodb_buffer_pool_size option instead", level="WARN")
            innodb_buffer_pool_size = self.human_to_bytes(
                dataset_bytes)
        else:
            # NOTE(jamespage): pick the smallest of 50% of RAM or 512MB
            #                  to ensure that deployments in containers
            #                  without constraints don't try to consume
            #                  silly amounts of memory.
            innodb_buffer_pool_size = min(
                int(total_memory * self.DEFAULT_INNODB_BUFFER_FACTOR),
                self.DEFAULT_INNODB_BUFFER_SIZE_MAX
            )

        if innodb_buffer_pool_size > total_memory:
            log("innodb_buffer_pool_size; {} is greater than system available memory:{}".format(
                innodb_buffer_pool_size,
                total_memory), level='WARN')

        return innodb_buffer_pool_size
3539 | 654 | |||
3540 | 655 | |||
class PerconaClusterHelper(MySQLConfigHelper):
    """Percona-cluster specific configuration helper."""

    def parse_config(self):
        """Parse charm configuration and calculate values for config files.

        :returns: Mapping of my.cnf option names to their values.
        :rtype: dict
        """
        charm_config = config_get()
        mysql_config = {}

        # Straight copies from charm config when present.
        for charm_key, mysql_key in (('max-connections', 'max_connections'),
                                     ('wait-timeout', 'wait_timeout')):
            if charm_key in charm_config:
                mysql_config[mysql_key] = charm_config[charm_key]

        flush_level = self.get_innodb_flush_log_at_trx_commit()
        if flush_level is not None:
            mysql_config['innodb_flush_log_at_trx_commit'] = flush_level

        # Only set when the configured value passes validation; a valid
        # value is returned unchanged from charm config.
        change_buffering = self.get_innodb_change_buffering()
        if change_buffering is not None:
            mysql_config['innodb_change_buffering'] = change_buffering

        if 'innodb-io-capacity' in charm_config:
            mysql_config['innodb_io_capacity'] = charm_config['innodb-io-capacity']

        # Set a sane default key_buffer size
        mysql_config['key_buffer'] = self.human_to_bytes('32M')
        mysql_config['innodb_buffer_pool_size'] = self.get_innodb_buffer_pool_size()
        return mysql_config
3568 | 683 | |||
3569 | 684 | |||
class MySQL8Helper(MySQLHelper):
    """MySQL 8 flavour of MySQLHelper.

    MySQL 8 removed the implicit user creation performed by GRANT
    ... IDENTIFIED BY, so users are created explicitly before grants.
    """

    def grant_exists(self, db_name, db_user, remote_ip):
        """Return True if an all-privileges grant exists for the user/host.

        Quote characters vary between MySQL versions, so both ' and ` are
        stripped before comparison.  Returns False if SHOW GRANTS fails
        (e.g. the user does not exist).
        """
        cursor = self.connection.cursor()
        priv_string = ("GRANT ALL PRIVILEGES ON {}.* "
                       "TO {}@{}".format(db_name, db_user, remote_ip))
        try:
            cursor.execute("SHOW GRANTS FOR '{}'@'{}'".format(db_user,
                                                              remote_ip))
            grants = [i[0] for i in cursor.fetchall()]
        except MySQLdb.OperationalError:
            return False
        finally:
            cursor.close()

        # Different versions of MySQL use ' or `. Ignore these in the check.
        return priv_string in [
            i.replace("'", "").replace("`", "") for i in grants]

    def create_grant(self, db_name, db_user, remote_ip, password):
        """Create the user if needed and grant it all privileges on db_name.

        No-op when an equivalent grant already exists.
        """
        if self.grant_exists(db_name, db_user, remote_ip):
            return

        # Make sure the user exists
        # MySQL8 must create the user before the grant
        self.create_user(db_user, remote_ip, password)

        cursor = self.connection.cursor()
        try:
            cursor.execute("GRANT ALL PRIVILEGES ON `{}`.* TO '{}'@'{}'"
                           .format(db_name, db_user, remote_ip))
        finally:
            cursor.close()

    def create_user(self, db_user, remote_ip, password):
        """Create `db_user`@`remote_ip` identified by `password`.

        A warning is logged (not raised) if the user already exists.
        """

        SQL_USER_CREATE = (
            "CREATE USER '{db_user}'@'{remote_ip}' "
            "IDENTIFIED BY '{password}'")

        cursor = self.connection.cursor()
        try:
            cursor.execute(SQL_USER_CREATE.format(
                db_user=db_user,
                remote_ip=remote_ip,
                password=password)
            )
        # CONSISTENCY: use the public MySQLdb.OperationalError alias (as the
        # rest of this module does) rather than the private
        # MySQLdb._exceptions path, and pass the imported WARNING level
        # constant instead of a bare string.
        except MySQLdb.OperationalError:
            log("DB user {} already exists.".format(db_user),
                level=WARNING)
        finally:
            cursor.close()

    def create_router_grant(self, db_user, remote_ip, password):
        """Create the user if needed and apply MySQL Router specific grants.

        Router needs metadata, user table and performance_schema read
        access plus CREATE USER with grant option.
        """

        # Make sure the user exists
        # MySQL8 must create the user before the grant
        self.create_user(db_user, remote_ip, password)

        # Mysql-Router specific grants
        cursor = self.connection.cursor()
        try:
            cursor.execute("GRANT CREATE USER ON *.* TO '{}'@'{}' WITH GRANT "
                           "OPTION".format(db_user, remote_ip))
            cursor.execute("GRANT SELECT, INSERT, UPDATE, DELETE, EXECUTE ON "
                           "mysql_innodb_cluster_metadata.* TO '{}'@'{}'"
                           .format(db_user, remote_ip))
            cursor.execute("GRANT SELECT ON mysql.user TO '{}'@'{}'"
                           .format(db_user, remote_ip))
            cursor.execute("GRANT SELECT ON "
                           "performance_schema.replication_group_members "
                           "TO '{}'@'{}'".format(db_user, remote_ip))
            cursor.execute("GRANT SELECT ON "
                           "performance_schema.replication_group_member_stats "
                           "TO '{}'@'{}'".format(db_user, remote_ip))
            cursor.execute("GRANT SELECT ON "
                           "performance_schema.global_variables "
                           "TO '{}'@'{}'".format(db_user, remote_ip))
        finally:
            cursor.close()

    def configure_router(self, hostname, username):
        """Configure a MySQL Router user for `hostname`.

        :returns: The password for `username`.
        :rtype: str
        """

        if self.connection is None:
            self.connect(password=self.get_mysql_root_password())

        remote_ip = self.normalize_address(hostname)
        password = self.get_mysql_password(username)
        # NOTE: create_router_grant() also ensures the user exists; the
        # explicit call here is retained as a harmless belt-and-braces step.
        self.create_user(username, remote_ip, password)
        self.create_router_grant(username, remote_ip, password)

        return password
3662 | 777 | |||
3663 | 778 | |||
def get_prefix(requested, keys=None):
    """Return the prefix of *requested* when it ends in a known key.

    :param requested: Request string, e.g. ``novacell0_username``.
    :type requested: str
    :param keys: Candidate suffixes used to split off the prefix.
                 Defaults to the shared-db keys.
    :type keys: List of str keys
    :returns: The prefix (e.g. ``novacell0``), or None if no key matches.
    :rtype: Union[None, str]
    """
    suffixes = keys if keys is not None else [
        "_database", "_username", "_hostname"]
    candidates = (requested[:-len(suffix)]
                  for suffix in suffixes
                  if requested.endswith(suffix))
    return next(candidates, None)
3680 | 795 | |||
3681 | 796 | |||
def get_db_data(relation_data, unprefixed):
    """Organize database requests into a collections.OrderedDict

    :param relation_data: shared-db relation data
    :type relation_data: dict
    :param unprefixed: Prefix to use for requests without a prefix. This should
                       be unique for each side of the relation to avoid
                       conflicts.
    :type unprefixed: str
    :returns: Ordered dict of databases and users
    :rtype: collections.OrderedDict
    """
    # Deep copy to avoid unintentionally changing relation data
    settings = copy.deepcopy(relation_data)
    databases = collections.OrderedDict()

    # Clear non-db related elements; pop(key, None) avoids a separate
    # membership test for keys that may be absent.
    for non_db_key in ("egress-subnets", "ingress-address", "private-address"):
        settings.pop(non_db_key, None)

    # Re-key an unprefixed (single) request with the caller-supplied prefix.
    # Pop-then-reinsert keeps the original ordering semantics: prefixed
    # requests iterate first, the unprefixed one last.
    singleset = {"database", "username", "hostname"}
    if singleset.issubset(settings):
        for key in ("hostname", "database", "username"):
            settings["{}_{}".format(unprefixed, key)] = settings.pop(key)

    # Group "<db>_<field>" keys into databases[db][field].
    for k, v in settings.items():
        db, _, field = k.partition("_")
        if db not in databases:
            databases[db] = collections.OrderedDict()
        databases[db][field] = v

    return databases
3726 | 0 | 841 | ||
3727 | === added directory 'hooks/charmhelpers/contrib/hahelpers' | |||
3728 | === added file 'hooks/charmhelpers/contrib/hahelpers/__init__.py' | |||
3729 | --- hooks/charmhelpers/contrib/hahelpers/__init__.py 1970-01-01 00:00:00 +0000 | |||
3730 | +++ hooks/charmhelpers/contrib/hahelpers/__init__.py 2021-05-12 04:07:51 +0000 | |||
3731 | @@ -0,0 +1,13 @@ | |||
3732 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
3733 | 2 | # | ||
3734 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
3735 | 4 | # you may not use this file except in compliance with the License. | ||
3736 | 5 | # You may obtain a copy of the License at | ||
3737 | 6 | # | ||
3738 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
3739 | 8 | # | ||
3740 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
3741 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
3742 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
3743 | 12 | # See the License for the specific language governing permissions and | ||
3744 | 13 | # limitations under the License. | ||
3745 | 0 | 14 | ||
3746 | === added file 'hooks/charmhelpers/contrib/hahelpers/apache.py' | |||
3747 | --- hooks/charmhelpers/contrib/hahelpers/apache.py 1970-01-01 00:00:00 +0000 | |||
3748 | +++ hooks/charmhelpers/contrib/hahelpers/apache.py 2021-05-12 04:07:51 +0000 | |||
3749 | @@ -0,0 +1,90 @@ | |||
3750 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
3751 | 2 | # | ||
3752 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
3753 | 4 | # you may not use this file except in compliance with the License. | ||
3754 | 5 | # You may obtain a copy of the License at | ||
3755 | 6 | # | ||
3756 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
3757 | 8 | # | ||
3758 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
3759 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
3760 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
3761 | 12 | # See the License for the specific language governing permissions and | ||
3762 | 13 | # limitations under the License. | ||
3763 | 14 | |||
3764 | 15 | # | ||
3765 | 16 | # Copyright 2012 Canonical Ltd. | ||
3766 | 17 | # | ||
3767 | 18 | # This file is sourced from lp:openstack-charm-helpers | ||
3768 | 19 | # | ||
3769 | 20 | # Authors: | ||
3770 | 21 | # James Page <james.page@ubuntu.com> | ||
3771 | 22 | # Adam Gandelman <adamg@ubuntu.com> | ||
3772 | 23 | # | ||
3773 | 24 | |||
3774 | 25 | import os | ||
3775 | 26 | |||
3776 | 27 | from charmhelpers.core import host | ||
3777 | 28 | from charmhelpers.core.hookenv import ( | ||
3778 | 29 | config as config_get, | ||
3779 | 30 | relation_get, | ||
3780 | 31 | relation_ids, | ||
3781 | 32 | related_units as relation_list, | ||
3782 | 33 | log, | ||
3783 | 34 | INFO, | ||
3784 | 35 | ) | ||
3785 | 36 | |||
3786 | 37 | # This file contains the CA cert from the charms ssl_ca configuration | ||
3787 | 38 | # option, in future the file name should be updated reflect that. | ||
3788 | 39 | CONFIG_CA_CERT_FILE = 'keystone_juju_ca_cert' | ||
3789 | 40 | |||
3790 | 41 | |||
def get_cert(cn=None):
    """Return an (ssl_cert, ssl_key) pair from config or relation data.

    The charm's own ssl_cert/ssl_key config takes precedence; otherwise
    identity-service relations are scanned (per-CN attributes when *cn*
    is given). Either element may be None if nothing is found.
    """
    # TODO: deal with multiple https endpoints via charm config
    cert = config_get('ssl_cert')
    key = config_get('ssl_key')
    if cert and key:
        return (cert, key)

    log("Inspecting identity-service relations for SSL certificate.",
        level=INFO)
    # A partial pair is discarded: both halves must come from one source.
    cert = key = None
    if cn:
        ssl_cert_attr = 'ssl_cert_{}'.format(cn)
        ssl_key_attr = 'ssl_key_{}'.format(cn)
    else:
        ssl_cert_attr = 'ssl_cert'
        ssl_key_attr = 'ssl_key'
    for r_id in relation_ids('identity-service'):
        for unit in relation_list(r_id):
            cert = cert or relation_get(ssl_cert_attr, rid=r_id, unit=unit)
            key = key or relation_get(ssl_key_attr, rid=r_id, unit=unit)
    return (cert, key)
3814 | 65 | |||
3815 | 66 | |||
def get_ca_cert():
    """Return the CA certificate from config or identity relations.

    Falls back to scanning identity-service and identity-credentials
    relations when the ssl_ca config option is unset; may return None.
    """
    ca_cert = config_get('ssl_ca')
    if ca_cert is not None:
        return ca_cert

    log("Inspecting identity-service relations for CA SSL certificate.",
        level=INFO)
    all_rids = (relation_ids('identity-service') +
                relation_ids('identity-credentials'))
    for r_id in all_rids:
        for unit in relation_list(r_id):
            if ca_cert is None:
                ca_cert = relation_get('ca_cert',
                                       rid=r_id, unit=unit)
    return ca_cert
3828 | 79 | |||
3829 | 80 | |||
def retrieve_ca_cert(cert_file):
    """Return the raw bytes of *cert_file*, or None if it does not exist."""
    if not os.path.isfile(cert_file):
        return None
    with open(cert_file, 'rb') as crt:
        return crt.read()
3836 | 87 | |||
3837 | 88 | |||
def install_ca_cert(ca_cert):
    """Install *ca_cert* system-wide under the charm's legacy CA cert name.

    Delegates to charmhelpers.core.host.install_ca_cert, using
    CONFIG_CA_CERT_FILE as the installed certificate's basename.
    """
    host.install_ca_cert(ca_cert, CONFIG_CA_CERT_FILE)
3840 | 0 | 91 | ||
3841 | === added file 'hooks/charmhelpers/contrib/hahelpers/cluster.py' | |||
3842 | --- hooks/charmhelpers/contrib/hahelpers/cluster.py 1970-01-01 00:00:00 +0000 | |||
3843 | +++ hooks/charmhelpers/contrib/hahelpers/cluster.py 2021-05-12 04:07:51 +0000 | |||
3844 | @@ -0,0 +1,451 @@ | |||
3845 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
3846 | 2 | # | ||
3847 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
3848 | 4 | # you may not use this file except in compliance with the License. | ||
3849 | 5 | # You may obtain a copy of the License at | ||
3850 | 6 | # | ||
3851 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
3852 | 8 | # | ||
3853 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
3854 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
3855 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
3856 | 12 | # See the License for the specific language governing permissions and | ||
3857 | 13 | # limitations under the License. | ||
3858 | 14 | |||
3859 | 15 | # | ||
3860 | 16 | # Copyright 2012 Canonical Ltd. | ||
3861 | 17 | # | ||
3862 | 18 | # Authors: | ||
3863 | 19 | # James Page <james.page@ubuntu.com> | ||
3864 | 20 | # Adam Gandelman <adamg@ubuntu.com> | ||
3865 | 21 | # | ||
3866 | 22 | |||
3867 | 23 | """ | ||
3868 | 24 | Helpers for clustering and determining "cluster leadership" and other | ||
3869 | 25 | clustering-related helpers. | ||
3870 | 26 | """ | ||
3871 | 27 | |||
3872 | 28 | import functools | ||
3873 | 29 | import subprocess | ||
3874 | 30 | import os | ||
3875 | 31 | import time | ||
3876 | 32 | |||
3877 | 33 | from socket import gethostname as get_unit_hostname | ||
3878 | 34 | |||
3879 | 35 | import six | ||
3880 | 36 | |||
3881 | 37 | from charmhelpers.core.hookenv import ( | ||
3882 | 38 | log, | ||
3883 | 39 | relation_ids, | ||
3884 | 40 | related_units as relation_list, | ||
3885 | 41 | relation_get, | ||
3886 | 42 | config as config_get, | ||
3887 | 43 | INFO, | ||
3888 | 44 | DEBUG, | ||
3889 | 45 | WARNING, | ||
3890 | 46 | unit_get, | ||
3891 | 47 | is_leader as juju_is_leader, | ||
3892 | 48 | status_set, | ||
3893 | 49 | ) | ||
3894 | 50 | from charmhelpers.core.host import ( | ||
3895 | 51 | modulo_distribution, | ||
3896 | 52 | ) | ||
3897 | 53 | from charmhelpers.core.decorators import ( | ||
3898 | 54 | retry_on_exception, | ||
3899 | 55 | ) | ||
3900 | 56 | from charmhelpers.core.strutils import ( | ||
3901 | 57 | bool_from_string, | ||
3902 | 58 | ) | ||
3903 | 59 | |||
3904 | 60 | DC_RESOURCE_NAME = 'DC' | ||
3905 | 61 | |||
3906 | 62 | |||
class HAIncompleteConfig(Exception):
    """Raised when required hacluster configuration settings are missing."""
    pass
3909 | 65 | |||
3910 | 66 | |||
class HAIncorrectConfig(Exception):
    """Raised when hacluster configuration settings conflict or are invalid."""
    pass
3913 | 69 | |||
3914 | 70 | |||
class CRMResourceNotFound(Exception):
    """Raised when a queried CRM resource is reported as not running."""
    pass
3917 | 73 | |||
3918 | 74 | |||
class CRMDCNotFound(Exception):
    """Raised when the pacemaker Designated Controller cannot be determined."""
    pass
3921 | 77 | |||
3922 | 78 | |||
def is_elected_leader(resource):
    """
    Return True if the unit executing this is the elected cluster leader.

    Leadership is determined by, in order of preference:
    1. Juju leadership election (is_leader), when the juju version
       supports it.
    2. Corosync/CRM ownership of *resource*, when the unit is clustered.
    3. Otherwise, being the oldest (lowest-numbered) surviving peer unit.
    """
    try:
        return juju_is_leader()
    except NotImplementedError:
        log('Juju leadership election feature not enabled'
            ', using fallback support',
            level=WARNING)

    if is_clustered():
        if is_crm_leader(resource):
            return True
        log('Deferring action to CRM leader.', level=INFO)
        return False

    peers = peer_units()
    if peers and not oldest_peer(peers):
        log('Deferring action to oldest service unit.', level=INFO)
        return False
    return True
3953 | 109 | |||
3954 | 110 | |||
def is_clustered():
    """Return True if any unit on an 'ha' relation reports 'clustered'."""
    return any(
        relation_get('clustered', rid=r_id, unit=unit)
        for r_id in (relation_ids('ha') or [])
        for unit in (relation_list(r_id) or [])
    )
3964 | 120 | |||
3965 | 121 | |||
def is_crm_dc():
    """
    Determine leadership by querying the pacemaker Designated Controller

    :returns: True if this unit's hostname is the current DC.
    :raises CRMDCNotFound: if 'crm status' fails, or it reports 'NONE'
                           as the current DC.
    """
    cmd = ['crm', 'status']
    try:
        status = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
        # py2/py3 compat: decode bytes output to text before parsing.
        if not isinstance(status, six.text_type):
            status = six.text_type(status, "utf-8")
    except subprocess.CalledProcessError as ex:
        raise CRMDCNotFound(str(ex))

    current_dc = ''
    for line in status.split('\n'):
        if line.startswith('Current DC'):
            # Current DC: juju-lytrusty-machine-2 (168108163) - partition with quorum
            current_dc = line.split(':')[1].split()[0]
        # NOTE: these comparisons run for every line, using the most recent
        # 'Current DC' value seen so far ('' until one has been parsed).
        if current_dc == get_unit_hostname():
            return True
        elif current_dc == 'NONE':
            raise CRMDCNotFound('Current DC: NONE')

    return False
3989 | 145 | |||
3990 | 146 | |||
@retry_on_exception(5, base_delay=2,
                    exc_type=(CRMResourceNotFound, CRMDCNotFound))
def is_crm_leader(resource, retry=False):
    """
    Return True if this unit is the elected corosync leader for *resource*,
    as reported by the external "crm" command.

    The decorator retries the check to avoid the possibility of getting a
    false negative. See LP #1396246 for more info.
    """
    if resource == DC_RESOURCE_NAME:
        return is_crm_dc()

    try:
        output = subprocess.check_output(
            ['crm', 'resource', 'show', resource],
            stderr=subprocess.STDOUT)
        # py2/py3 compat: decode bytes output to text before searching.
        if not isinstance(output, six.text_type):
            output = six.text_type(output, "utf-8")
    except subprocess.CalledProcessError:
        output = None

    if output:
        if get_unit_hostname() in output:
            return True
        if "resource %s is NOT running" % (resource) in output:
            raise CRMResourceNotFound("CRM resource %s not found" % (resource))
    return False
4018 | 174 | |||
4019 | 175 | |||
def is_leader(resource):
    """Deprecated alias for is_crm_leader().

    :param resource: CRM resource name to check leadership for.
    :returns: True if this unit is the CRM leader for *resource*.
    """
    log("is_leader is deprecated. Please consider using is_crm_leader "
        "instead.", level=WARNING)
    return is_crm_leader(resource)
4024 | 180 | |||
4025 | 181 | |||
def peer_units(peer_relation="cluster"):
    """Return a list of all units on *peer_relation*."""
    return [unit
            for r_id in (relation_ids(peer_relation) or [])
            for unit in (relation_list(r_id) or [])]
4032 | 188 | |||
4033 | 189 | |||
def peer_ips(peer_relation='cluster', addr_key='private-address'):
    '''Return a dict of peers and their private-address'''
    return {unit: relation_get(addr_key, rid=r_id, unit=unit)
            for r_id in relation_ids(peer_relation)
            for unit in relation_list(r_id)}
4041 | 197 | |||
4042 | 198 | |||
def oldest_peer(peers):
    """Return True if this unit has the lowest unit number among *peers*.

    Relies on the JUJU_UNIT_NAME environment variable for the local
    unit's name ("service/N").
    """
    local_unit_no = int(os.getenv('JUJU_UNIT_NAME').split('/')[1])
    return all(int(peer.split('/')[1]) >= local_unit_no for peer in peers)
4051 | 207 | |||
4052 | 208 | |||
def eligible_leader(resource):
    """Deprecated alias for is_elected_leader().

    :param resource: CRM resource name used by the fallback leader check.
    :returns: True if this unit is the elected leader.
    """
    log("eligible_leader is deprecated. Please consider using "
        "is_elected_leader instead.", level=WARNING)
    return is_elected_leader(resource)
4057 | 213 | |||
4058 | 214 | |||
def https():
    '''
    Determines whether enough data has been provided in configuration
    or relation data to configure HTTPS
    .
    returns: boolean
    '''
    use_https = config_get('use-https')
    if use_https and bool_from_string(use_https):
        return True
    if config_get('ssl_cert') and config_get('ssl_key'):
        return True
    # Any certificates relation unit advertising a CA implies HTTPS.
    for r_id in relation_ids('certificates'):
        for unit in relation_list(r_id):
            if relation_get('ca', rid=r_id, unit=unit):
                return True
    for r_id in relation_ids('identity-service'):
        for unit in relation_list(r_id):
            # TODO - needs fixing for new helper as ssl_cert/key suffixes with CN
            rel_state = [
                relation_get('https_keystone', rid=r_id, unit=unit),
                relation_get('ca_cert', rid=r_id, unit=unit),
            ]
            # NOTE: works around (LP: #1203241)
            if all(v not in (None, '') for v in rel_state):
                return True
    return False
4087 | 243 | |||
4088 | 244 | |||
def determine_api_port(public_port, singlenode_mode=False):
    '''
    Determine correct API server listening port based on
    existence of HTTPS reverse proxy and/or haproxy.

    public_port: int: standard public port for given service

    singlenode_mode: boolean: Shuffle ports when only a single unit is present

    returns: int: the correct listening port for the API service
    '''
    offset = 0
    # One slot down when haproxy fronts the service (single node, peers
    # present, or corosync-clustered).
    if singlenode_mode or len(peer_units()) > 0 or is_clustered():
        offset += 1
    # Another slot down when an HTTPS reverse proxy is in front.
    if https():
        offset += 1
    return public_port - (offset * 10)
4108 | 264 | |||
4109 | 265 | |||
def determine_apache_port(public_port, singlenode_mode=False):
    '''
    Description: Determine correct apache listening port based on public IP +
    state of the cluster.

    public_port: int: standard public port for given service

    singlenode_mode: boolean: Shuffle ports when only a single unit is present

    returns: int: the correct listening port for the HAProxy service
    '''
    # Shift down one slot when haproxy fronts the service.
    if singlenode_mode or len(peer_units()) > 0 or is_clustered():
        return public_port - 10
    return public_port
4127 | 283 | |||
4128 | 284 | |||
# Convenience partial: apache port calculation with singlenode_mode forced
# on, i.e. always shuffle the port down for the local listener.
determine_apache_port_single = functools.partial(
    determine_apache_port, singlenode_mode=True)
4131 | 287 | |||
4132 | 288 | |||
def get_hacluster_config(exclude_keys=None):
    '''
    Obtains all relevant configuration from charm configuration required
    for initiating a relation to hacluster:

        ha-bindiface, ha-mcastport, vip, os-internal-hostname,
        os-admin-hostname, os-public-hostname, os-access-hostname

    param: exclude_keys: list of setting key(s) to be excluded.
    returns: dict: A dict containing settings keyed by setting name.
    raises: HAIncorrectConfig if settings are missing or incorrect.
    '''
    settings = ['ha-bindiface', 'ha-mcastport', 'vip', 'os-internal-hostname',
                'os-admin-hostname', 'os-public-hostname', 'os-access-hostname']
    excluded = exclude_keys or []
    conf = {setting: config_get(setting)
            for setting in settings
            if setting not in excluded}

    if not valid_hacluster_config():
        raise HAIncorrectConfig('Insufficient or incorrect config data to '
                                'configure hacluster.')
    return conf
4158 | 314 | |||
4159 | 315 | |||
def valid_hacluster_config():
    '''
    Check that exactly one of vip or dns-ha is set; when dns-ha is used,
    at least one os-*-hostname option must also be set.

    Note: ha-bindiface and ha-macastport both have defaults and will always
    be set. We only care that either vip or dns-ha is set.

    :returns: boolean: valid config returns true.
    raises: HAIncompatibileConfig if settings conflict.
    raises: HAIncompleteConfig if settings are missing.
    '''
    vip = config_get('vip')
    dns = config_get('dns-ha')
    # Exactly one of the two mechanisms must be configured.
    if bool(vip) == bool(dns):
        msg = ('HA: Either vip or dns-ha must be set but not both in order to '
               'use high availability')
        status_set('blocked', msg)
        raise HAIncorrectConfig(msg)

    if not dns:
        log('VIP HA: VIP is set {}'.format(vip), level=DEBUG)
        return True

    # DNS HA: it is unknown at this point which of the possible network
    # spaces are in HA, so one configured hostname is the minimum required.
    dns_settings = ['os-internal-hostname', 'os-admin-hostname',
                    'os-public-hostname', 'os-access-hostname']
    for setting in dns_settings:
        if config_get(setting):
            log('DNS HA: At least one hostname is set {}: {}'
                ''.format(setting, config_get(setting)),
                level=DEBUG)
            return True

    msg = ('DNS HA: At least one os-*-hostname(s) must be set to use '
           'DNS HA')
    status_set('blocked', msg)
    raise HAIncompleteConfig(msg)
4201 | 357 | |||
4202 | 358 | |||
def canonical_url(configs, vip_setting='vip'):
    '''
    Return the correct HTTP(S) URL to this host given the state of HTTPS
    configuration and hacluster.

    :configs : OSTemplateRenderer: A config templating object to inspect
               for a complete https context.

    :vip_setting: str: Setting in charm config that specifies
                  VIP address.
    '''
    scheme = 'https' if 'https' in configs.complete_contexts() else 'http'
    if is_clustered():
        addr = config_get(vip_setting)
    else:
        addr = unit_get('private-address')
    return '%s://%s' % (scheme, addr)
4222 | 378 | |||
4223 | 379 | |||
def distributed_wait(modulo=None, wait=None, operation_name='operation'):
    ''' Distribute operations by waiting based on modulo_distribution

    If modulo and or wait are not set, check config_get for those values.
    If config values are not set, default to modulo=3 and wait=30.

    :param modulo: int The modulo number creates the group distribution
    :param wait: int The constant time wait value
    :param operation_name: string Operation name for status message
                           i.e.  'restart'
    :side effect: Calls config_get()
    :side effect: Calls log()
    :side effect: Calls status_set()
    :side effect: Calls time.sleep()
    '''
    if modulo is None:
        modulo = config_get('modulo-nodes') or 3
    if wait is None:
        wait = config_get('known-wait') or 30

    if juju_is_leader():
        calculated_wait = 0  # the leader should never wait
    else:
        # non_zero_wait=True guarantees the non-leader who gets modulo 0
        # will still wait
        calculated_wait = modulo_distribution(modulo=modulo, wait=wait,
                                              non_zero_wait=True)

    msg = "Waiting {} seconds for {} ...".format(calculated_wait,
                                                 operation_name)
    log(msg, DEBUG)
    status_set('maintenance', msg)
    time.sleep(calculated_wait)
4256 | 412 | |||
4257 | 413 | |||
def get_managed_services_and_ports(services, external_ports,
                                   external_services=None,
                                   port_conv_f=determine_apache_port_single):
    """Get the services and ports managed by this charm.

    Return only the services and corresponding ports that are managed by
    this charm. This excludes haproxy when there is a relation with
    hacluster, because this charm then passes responsibility for stopping
    and starting haproxy to hacluster.

    Similarly, if a relation with hacluster exists then the ports returned
    by this method correspond to those managed by the apache server rather
    than haproxy.

    :param services: List of services.
    :type services: List[str]
    :param external_ports: List of ports managed by external services.
    :type external_ports: List[int]
    :param external_services: List of services to be removed if ha relation
                              is present.
    :type external_services: List[str]
    :param port_conv_f: Function to apply to ports to calculate the ports
                        managed by services controlled by this charm.
    :type port_convert_func: f()
    :returns: A tuple containing a list of services first followed by a
              list of ports.
    :rtype: Tuple[List[str], List[int]]
    """
    if external_services is None:
        external_services = ['haproxy']
    if relation_ids('ha'):
        # hacluster manages the external services: drop them (mutating the
        # caller's list in place, as before) and convert the port list.
        for svc in external_services:
            if svc in services:
                services.remove(svc)
        external_ports = [port_conv_f(p) for p in external_ports]
    return services, external_ports
4296 | 0 | 452 | ||
4297 | === added directory 'hooks/charmhelpers/contrib/hardening' | |||
4298 | === added file 'hooks/charmhelpers/contrib/hardening/README.hardening.md' | |||
4299 | --- hooks/charmhelpers/contrib/hardening/README.hardening.md 1970-01-01 00:00:00 +0000 | |||
4300 | +++ hooks/charmhelpers/contrib/hardening/README.hardening.md 2021-05-12 04:07:51 +0000 | |||
4301 | @@ -0,0 +1,38 @@ | |||
4302 | 1 | # Juju charm-helpers hardening library | ||
4303 | 2 | |||
4304 | 3 | ## Description | ||
4305 | 4 | |||
4306 | 5 | This library provides multiple implementations of system and application | ||
4307 | 6 | hardening that conform to the standards of http://hardening.io/. | ||
4308 | 7 | |||
4309 | 8 | Current implementations include: | ||
4310 | 9 | |||
4311 | 10 | * OS | ||
4312 | 11 | * SSH | ||
4313 | 12 | * MySQL | ||
4314 | 13 | * Apache | ||
4315 | 14 | |||
4316 | 15 | ## Requirements | ||
4317 | 16 | |||
4318 | 17 | * Juju Charms | ||
4319 | 18 | |||
4320 | 19 | ## Usage | ||
4321 | 20 | |||
4322 | 21 | 1. Synchronise this library into your charm and add the harden() decorator | ||
4323 | 22 | (from contrib.hardening.harden) to any functions or methods you want to use | ||
4324 | 23 | to trigger hardening of your application/system. | ||
4325 | 24 | |||
4326 | 25 | 2. Add a config option called 'harden' to your charm config.yaml and set it to | ||
4327 | 26 | a space-delimited list of hardening modules you want to run e.g. "os ssh" | ||
4328 | 27 | |||
4329 | 28 | 3. Override any config defaults (contrib.hardening.defaults) by adding a file | ||
4330 | 29 | called hardening.yaml to your charm root containing the name(s) of the | ||
4331 | 30 | modules whose settings you want override at root level and then any settings | ||
4332 | 31 | with overrides e.g. | ||
4333 | 32 | |||
4334 | 33 | os: | ||
4335 | 34 | general: | ||
4336 | 35 | desktop_enable: True | ||
4337 | 36 | |||
4338 | 37 | 4. Now just run your charm as usual and hardening will be applied each time the | ||
4339 | 38 | hook runs. | ||
4340 | 0 | 39 | ||
4341 | === added file 'hooks/charmhelpers/contrib/hardening/__init__.py' | |||
4342 | --- hooks/charmhelpers/contrib/hardening/__init__.py 1970-01-01 00:00:00 +0000 | |||
4343 | +++ hooks/charmhelpers/contrib/hardening/__init__.py 2021-05-12 04:07:51 +0000 | |||
4344 | @@ -0,0 +1,13 @@ | |||
4345 | 1 | # Copyright 2016 Canonical Limited. | ||
4346 | 2 | # | ||
4347 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
4348 | 4 | # you may not use this file except in compliance with the License. | ||
4349 | 5 | # You may obtain a copy of the License at | ||
4350 | 6 | # | ||
4351 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
4352 | 8 | # | ||
4353 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
4354 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
4355 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
4356 | 12 | # See the License for the specific language governing permissions and | ||
4357 | 13 | # limitations under the License. | ||
4358 | 0 | 14 | ||
4359 | === added directory 'hooks/charmhelpers/contrib/hardening/apache' | |||
4360 | === added file 'hooks/charmhelpers/contrib/hardening/apache/__init__.py' | |||
4361 | --- hooks/charmhelpers/contrib/hardening/apache/__init__.py 1970-01-01 00:00:00 +0000 | |||
4362 | +++ hooks/charmhelpers/contrib/hardening/apache/__init__.py 2021-05-12 04:07:51 +0000 | |||
4363 | @@ -0,0 +1,17 @@ | |||
4364 | 1 | # Copyright 2016 Canonical Limited. | ||
4365 | 2 | # | ||
4366 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
4367 | 4 | # you may not use this file except in compliance with the License. | ||
4368 | 5 | # You may obtain a copy of the License at | ||
4369 | 6 | # | ||
4370 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
4371 | 8 | # | ||
4372 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
4373 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
4374 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
4375 | 12 | # See the License for the specific language governing permissions and | ||
4376 | 13 | # limitations under the License. | ||
4377 | 14 | |||
4378 | 15 | from os import path | ||
4379 | 16 | |||
4380 | 17 | TEMPLATES_DIR = path.join(path.dirname(__file__), 'templates') | ||
4381 | 0 | 18 | ||
4382 | === added directory 'hooks/charmhelpers/contrib/hardening/apache/checks' | |||
4383 | === added file 'hooks/charmhelpers/contrib/hardening/apache/checks/__init__.py' | |||
4384 | --- hooks/charmhelpers/contrib/hardening/apache/checks/__init__.py 1970-01-01 00:00:00 +0000 | |||
4385 | +++ hooks/charmhelpers/contrib/hardening/apache/checks/__init__.py 2021-05-12 04:07:51 +0000 | |||
4386 | @@ -0,0 +1,29 @@ | |||
4387 | 1 | # Copyright 2016 Canonical Limited. | ||
4388 | 2 | # | ||
4389 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
4390 | 4 | # you may not use this file except in compliance with the License. | ||
4391 | 5 | # You may obtain a copy of the License at | ||
4392 | 6 | # | ||
4393 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
4394 | 8 | # | ||
4395 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
4396 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
4397 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
4398 | 12 | # See the License for the specific language governing permissions and | ||
4399 | 13 | # limitations under the License. | ||
4400 | 14 | |||
4401 | 15 | from charmhelpers.core.hookenv import ( | ||
4402 | 16 | log, | ||
4403 | 17 | DEBUG, | ||
4404 | 18 | ) | ||
4405 | 19 | from charmhelpers.contrib.hardening.apache.checks import config | ||
4406 | 20 | |||
4407 | 21 | |||
4408 | 22 | def run_apache_checks(): | ||
4409 | 23 | log("Starting Apache hardening checks.", level=DEBUG) | ||
4410 | 24 | checks = config.get_audits() | ||
4411 | 25 | for check in checks: | ||
4412 | 26 | log("Running '%s' check" % (check.__class__.__name__), level=DEBUG) | ||
4413 | 27 | check.ensure_compliance() | ||
4414 | 28 | |||
4415 | 29 | log("Apache hardening checks complete.", level=DEBUG) | ||
4416 | 0 | 30 | ||
4417 | === added file 'hooks/charmhelpers/contrib/hardening/apache/checks/config.py' | |||
4418 | --- hooks/charmhelpers/contrib/hardening/apache/checks/config.py 1970-01-01 00:00:00 +0000 | |||
4419 | +++ hooks/charmhelpers/contrib/hardening/apache/checks/config.py 2021-05-12 04:07:51 +0000 | |||
4420 | @@ -0,0 +1,104 @@ | |||
4421 | 1 | # Copyright 2016 Canonical Limited. | ||
4422 | 2 | # | ||
4423 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
4424 | 4 | # you may not use this file except in compliance with the License. | ||
4425 | 5 | # You may obtain a copy of the License at | ||
4426 | 6 | # | ||
4427 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
4428 | 8 | # | ||
4429 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
4430 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
4431 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
4432 | 12 | # See the License for the specific language governing permissions and | ||
4433 | 13 | # limitations under the License. | ||
4434 | 14 | |||
4435 | 15 | import os | ||
4436 | 16 | import re | ||
4437 | 17 | import six | ||
4438 | 18 | import subprocess | ||
4439 | 19 | |||
4440 | 20 | |||
4441 | 21 | from charmhelpers.core.hookenv import ( | ||
4442 | 22 | log, | ||
4443 | 23 | INFO, | ||
4444 | 24 | ) | ||
4445 | 25 | from charmhelpers.contrib.hardening.audits.file import ( | ||
4446 | 26 | FilePermissionAudit, | ||
4447 | 27 | DirectoryPermissionAudit, | ||
4448 | 28 | NoReadWriteForOther, | ||
4449 | 29 | TemplatedFile, | ||
4450 | 30 | DeletedFile | ||
4451 | 31 | ) | ||
4452 | 32 | from charmhelpers.contrib.hardening.audits.apache import DisabledModuleAudit | ||
4453 | 33 | from charmhelpers.contrib.hardening.apache import TEMPLATES_DIR | ||
4454 | 34 | from charmhelpers.contrib.hardening import utils | ||
4455 | 35 | |||
4456 | 36 | |||
4457 | 37 | def get_audits(): | ||
4458 | 38 | """Get Apache hardening config audits. | ||
4459 | 39 | |||
4460 | 40 | :returns: dictionary of audits | ||
4461 | 41 | """ | ||
4462 | 42 | if subprocess.call(['which', 'apache2'], stdout=subprocess.PIPE) != 0: | ||
4463 | 43 | log("Apache server does not appear to be installed on this node - " | ||
4464 | 44 | "skipping apache hardening", level=INFO) | ||
4465 | 45 | return [] | ||
4466 | 46 | |||
4467 | 47 | context = ApacheConfContext() | ||
4468 | 48 | settings = utils.get_settings('apache') | ||
4469 | 49 | audits = [ | ||
4470 | 50 | FilePermissionAudit(paths=os.path.join( | ||
4471 | 51 | settings['common']['apache_dir'], 'apache2.conf'), | ||
4472 | 52 | user='root', group='root', mode=0o0640), | ||
4473 | 53 | |||
4474 | 54 | TemplatedFile(os.path.join(settings['common']['apache_dir'], | ||
4475 | 55 | 'mods-available/alias.conf'), | ||
4476 | 56 | context, | ||
4477 | 57 | TEMPLATES_DIR, | ||
4478 | 58 | mode=0o0640, | ||
4479 | 59 | user='root', | ||
4480 | 60 | service_actions=[{'service': 'apache2', | ||
4481 | 61 | 'actions': ['restart']}]), | ||
4482 | 62 | |||
4483 | 63 | TemplatedFile(os.path.join(settings['common']['apache_dir'], | ||
4484 | 64 | 'conf-enabled/99-hardening.conf'), | ||
4485 | 65 | context, | ||
4486 | 66 | TEMPLATES_DIR, | ||
4487 | 67 | mode=0o0640, | ||
4488 | 68 | user='root', | ||
4489 | 69 | service_actions=[{'service': 'apache2', | ||
4490 | 70 | 'actions': ['restart']}]), | ||
4491 | 71 | |||
4492 | 72 | DirectoryPermissionAudit(settings['common']['apache_dir'], | ||
4493 | 73 | user='root', | ||
4494 | 74 | group='root', | ||
4495 | 75 | mode=0o0750), | ||
4496 | 76 | |||
4497 | 77 | DisabledModuleAudit(settings['hardening']['modules_to_disable']), | ||
4498 | 78 | |||
4499 | 79 | NoReadWriteForOther(settings['common']['apache_dir']), | ||
4500 | 80 | |||
4501 | 81 | DeletedFile(['/var/www/html/index.html']) | ||
4502 | 82 | ] | ||
4503 | 83 | |||
4504 | 84 | return audits | ||
4505 | 85 | |||
4506 | 86 | |||
4507 | 87 | class ApacheConfContext(object): | ||
4508 | 88 | """Defines the set of key/value pairs to set in an apache config file. | ||
4509 | 89 | |||
4510 | 90 | This context, when called, will return a dictionary containing the | ||
4511 | 91 | key/value pairs of setting to specify in the | ||
4512 | 92 | /etc/apache/conf-enabled/hardening.conf file. | ||
4513 | 93 | """ | ||
4514 | 94 | def __call__(self): | ||
4515 | 95 | settings = utils.get_settings('apache') | ||
4516 | 96 | ctxt = settings['hardening'] | ||
4517 | 97 | |||
4518 | 98 | out = subprocess.check_output(['apache2', '-v']) | ||
4519 | 99 | if six.PY3: | ||
4520 | 100 | out = out.decode('utf-8') | ||
4521 | 101 | ctxt['apache_version'] = re.search(r'.+version: Apache/(.+?)\s.+', | ||
4522 | 102 | out).group(1) | ||
4523 | 103 | ctxt['apache_icondir'] = '/usr/share/apache2/icons/' | ||
4524 | 104 | return ctxt | ||
4525 | 0 | 105 | ||
4526 | === added directory 'hooks/charmhelpers/contrib/hardening/apache/templates' | |||
4527 | === added file 'hooks/charmhelpers/contrib/hardening/apache/templates/99-hardening.conf' | |||
4528 | --- hooks/charmhelpers/contrib/hardening/apache/templates/99-hardening.conf 1970-01-01 00:00:00 +0000 | |||
4529 | +++ hooks/charmhelpers/contrib/hardening/apache/templates/99-hardening.conf 2021-05-12 04:07:51 +0000 | |||
4530 | @@ -0,0 +1,32 @@ | |||
4531 | 1 | ############################################################################### | ||
4532 | 2 | # WARNING: This configuration file is maintained by Juju. Local changes may | ||
4533 | 3 | # be overwritten. | ||
4534 | 4 | ############################################################################### | ||
4535 | 5 | |||
4536 | 6 | <Location / > | ||
4537 | 7 | <LimitExcept {{ allowed_http_methods }} > | ||
4538 | 8 | # http://httpd.apache.org/docs/2.4/upgrading.html | ||
4539 | 9 | {% if apache_version > '2.2' -%} | ||
4540 | 10 | Require all granted | ||
4541 | 11 | {% else -%} | ||
4542 | 12 | Order Allow,Deny | ||
4543 | 13 | Deny from all | ||
4544 | 14 | {% endif %} | ||
4545 | 15 | </LimitExcept> | ||
4546 | 16 | </Location> | ||
4547 | 17 | |||
4548 | 18 | <Directory /> | ||
4549 | 19 | Options -Indexes -FollowSymLinks | ||
4550 | 20 | AllowOverride None | ||
4551 | 21 | </Directory> | ||
4552 | 22 | |||
4553 | 23 | <Directory /var/www/> | ||
4554 | 24 | Options -Indexes -FollowSymLinks | ||
4555 | 25 | AllowOverride None | ||
4556 | 26 | </Directory> | ||
4557 | 27 | |||
4558 | 28 | TraceEnable {{ traceenable }} | ||
4559 | 29 | ServerTokens {{ servertokens }} | ||
4560 | 30 | |||
4561 | 31 | SSLHonorCipherOrder {{ honor_cipher_order }} | ||
4562 | 32 | SSLCipherSuite {{ cipher_suite }} | ||
4563 | 0 | 33 | ||
4564 | === added file 'hooks/charmhelpers/contrib/hardening/apache/templates/alias.conf' | |||
4565 | --- hooks/charmhelpers/contrib/hardening/apache/templates/alias.conf 1970-01-01 00:00:00 +0000 | |||
4566 | +++ hooks/charmhelpers/contrib/hardening/apache/templates/alias.conf 2021-05-12 04:07:51 +0000 | |||
4567 | @@ -0,0 +1,31 @@ | |||
4568 | 1 | ############################################################################### | ||
4569 | 2 | # WARNING: This configuration file is maintained by Juju. Local changes may | ||
4570 | 3 | # be overwritten. | ||
4571 | 4 | ############################################################################### | ||
4572 | 5 | <IfModule alias_module> | ||
4573 | 6 | # | ||
4574 | 7 | # Aliases: Add here as many aliases as you need (with no limit). The format is | ||
4575 | 8 | # Alias fakename realname | ||
4576 | 9 | # | ||
4577 | 10 | # Note that if you include a trailing / on fakename then the server will | ||
4578 | 11 | # require it to be present in the URL. So "/icons" isn't aliased in this | ||
4579 | 12 | # example, only "/icons/". If the fakename is slash-terminated, then the | ||
4580 | 13 | # realname must also be slash terminated, and if the fakename omits the | ||
4581 | 14 | # trailing slash, the realname must also omit it. | ||
4582 | 15 | # | ||
4583 | 16 | # We include the /icons/ alias for FancyIndexed directory listings. If | ||
4584 | 17 | # you do not use FancyIndexing, you may comment this out. | ||
4585 | 18 | # | ||
4586 | 19 | Alias /icons/ "{{ apache_icondir }}/" | ||
4587 | 20 | |||
4588 | 21 | <Directory "{{ apache_icondir }}"> | ||
4589 | 22 | Options -Indexes -MultiViews -FollowSymLinks | ||
4590 | 23 | AllowOverride None | ||
4591 | 24 | {% if apache_version == '2.4' -%} | ||
4592 | 25 | Require all granted | ||
4593 | 26 | {% else -%} | ||
4594 | 27 | Order allow,deny | ||
4595 | 28 | Allow from all | ||
4596 | 29 | {% endif %} | ||
4597 | 30 | </Directory> | ||
4598 | 31 | </IfModule> | ||
4599 | 0 | 32 | ||
4600 | === added directory 'hooks/charmhelpers/contrib/hardening/audits' | |||
4601 | === added file 'hooks/charmhelpers/contrib/hardening/audits/__init__.py' | |||
4602 | --- hooks/charmhelpers/contrib/hardening/audits/__init__.py 1970-01-01 00:00:00 +0000 | |||
4603 | +++ hooks/charmhelpers/contrib/hardening/audits/__init__.py 2021-05-12 04:07:51 +0000 | |||
4604 | @@ -0,0 +1,54 @@ | |||
4605 | 1 | # Copyright 2016 Canonical Limited. | ||
4606 | 2 | # | ||
4607 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
4608 | 4 | # you may not use this file except in compliance with the License. | ||
4609 | 5 | # You may obtain a copy of the License at | ||
4610 | 6 | # | ||
4611 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
4612 | 8 | # | ||
4613 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
4614 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
4615 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
4616 | 12 | # See the License for the specific language governing permissions and | ||
4617 | 13 | # limitations under the License. | ||
4618 | 14 | |||
4619 | 15 | |||
4620 | 16 | class BaseAudit(object): # NO-QA | ||
4621 | 17 | """Base class for hardening checks. | ||
4622 | 18 | |||
4623 | 19 | The lifecycle of a hardening check is to first check to see if the system | ||
4624 | 20 | is in compliance for the specified check. If it is not in compliance, the | ||
4625 | 21 | check method will return a value which will be supplied to the comply method. | ||
4626 | 22 | """ | ||
4627 | 23 | def __init__(self, *args, **kwargs): | ||
4628 | 24 | self.unless = kwargs.get('unless', None) | ||
4629 | 25 | super(BaseAudit, self).__init__() | ||
4630 | 26 | |||
4631 | 27 | def ensure_compliance(self): | ||
4632 | 28 | """Checks to see if the current hardening check is in compliance or | ||
4633 | 29 | not. | ||
4634 | 30 | |||
4635 | 31 | If the check that is performed is not in compliance, then an exception | ||
4636 | 32 | should be raised. | ||
4637 | 33 | """ | ||
4638 | 34 | pass | ||
4639 | 35 | |||
4640 | 36 | def _take_action(self): | ||
4641 | 37 | """Determines whether to perform the action or not. | ||
4642 | 38 | |||
4643 | 39 | Checks whether or not an action should be taken. This is determined by | ||
4644 | 40 | the truthy value for the unless parameter. If unless is a callback | ||
4645 | 41 | method, it will be invoked with no parameters in order to determine | ||
4646 | 42 | whether or not the action should be taken. Otherwise, the truthy value | ||
4647 | 43 | of the unless attribute will determine if the action should be | ||
4648 | 44 | performed. | ||
4649 | 45 | """ | ||
4650 | 46 | # Do the action if there isn't an unless override. | ||
4651 | 47 | if self.unless is None: | ||
4652 | 48 | return True | ||
4653 | 49 | |||
4654 | 50 | # Invoke the callback if there is one. | ||
4655 | 51 | if hasattr(self.unless, '__call__'): | ||
4656 | 52 | return not self.unless() | ||
4657 | 53 | |||
4658 | 54 | return not self.unless | ||
4659 | 0 | 55 | ||
4660 | === added file 'hooks/charmhelpers/contrib/hardening/audits/apache.py' | |||
4661 | --- hooks/charmhelpers/contrib/hardening/audits/apache.py 1970-01-01 00:00:00 +0000 | |||
4662 | +++ hooks/charmhelpers/contrib/hardening/audits/apache.py 2021-05-12 04:07:51 +0000 | |||
4663 | @@ -0,0 +1,105 @@ | |||
4664 | 1 | # Copyright 2016 Canonical Limited. | ||
4665 | 2 | # | ||
4666 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
4667 | 4 | # you may not use this file except in compliance with the License. | ||
4668 | 5 | # You may obtain a copy of the License at | ||
4669 | 6 | # | ||
4670 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
4671 | 8 | # | ||
4672 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
4673 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
4674 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
4675 | 12 | # See the License for the specific language governing permissions and | ||
4676 | 13 | # limitations under the License. | ||
4677 | 14 | |||
4678 | 15 | import re | ||
4679 | 16 | import subprocess | ||
4680 | 17 | |||
4681 | 18 | import six | ||
4682 | 19 | |||
4683 | 20 | from charmhelpers.core.hookenv import ( | ||
4684 | 21 | log, | ||
4685 | 22 | INFO, | ||
4686 | 23 | ERROR, | ||
4687 | 24 | ) | ||
4688 | 25 | |||
4689 | 26 | from charmhelpers.contrib.hardening.audits import BaseAudit | ||
4690 | 27 | |||
4691 | 28 | |||
4692 | 29 | class DisabledModuleAudit(BaseAudit): | ||
4693 | 30 | """Audits Apache2 modules. | ||
4694 | 31 | |||
4695 | 32 | Determines if the apache2 modules are enabled. If the modules are enabled | ||
4696 | 33 | then they are removed in the ensure_compliance. | ||
4697 | 34 | """ | ||
4698 | 35 | def __init__(self, modules): | ||
4699 | 36 | if modules is None: | ||
4700 | 37 | self.modules = [] | ||
4701 | 38 | elif isinstance(modules, six.string_types): | ||
4702 | 39 | self.modules = [modules] | ||
4703 | 40 | else: | ||
4704 | 41 | self.modules = modules | ||
4705 | 42 | |||
4706 | 43 | def ensure_compliance(self): | ||
4707 | 44 | """Ensures that the modules are not loaded.""" | ||
4708 | 45 | if not self.modules: | ||
4709 | 46 | return | ||
4710 | 47 | |||
4711 | 48 | try: | ||
4712 | 49 | loaded_modules = self._get_loaded_modules() | ||
4713 | 50 | non_compliant_modules = [] | ||
4714 | 51 | for module in self.modules: | ||
4715 | 52 | if module in loaded_modules: | ||
4716 | 53 | log("Module '%s' is enabled but should not be." % | ||
4717 | 54 | (module), level=INFO) | ||
4718 | 55 | non_compliant_modules.append(module) | ||
4719 | 56 | |||
4720 | 57 | if len(non_compliant_modules) == 0: | ||
4721 | 58 | return | ||
4722 | 59 | |||
4723 | 60 | for module in non_compliant_modules: | ||
4724 | 61 | self._disable_module(module) | ||
4725 | 62 | self._restart_apache() | ||
4726 | 63 | except subprocess.CalledProcessError as e: | ||
4727 | 64 | log('Error occurred auditing apache module compliance. ' | ||
4728 | 65 | 'This may have been already reported. ' | ||
4729 | 66 | 'Output is: %s' % e.output, level=ERROR) | ||
4730 | 67 | |||
4731 | 68 | @staticmethod | ||
4732 | 69 | def _get_loaded_modules(): | ||
4733 | 70 | """Returns the modules which are enabled in Apache.""" | ||
4734 | 71 | output = subprocess.check_output(['apache2ctl', '-M']) | ||
4735 | 72 | if six.PY3: | ||
4736 | 73 | output = output.decode('utf-8') | ||
4737 | 74 | modules = [] | ||
4738 | 75 | for line in output.splitlines(): | ||
4739 | 76 | # Each line of the enabled module output looks like: | ||
4740 | 77 | # module_name (static|shared) | ||
4741 | 78 | # Plus a header line at the top of the output which is stripped | ||
4742 | 79 | # out by the regex. | ||
4743 | 80 | matcher = re.search(r'^ (\S*)_module (\S*)', line) | ||
4744 | 81 | if matcher: | ||
4745 | 82 | modules.append(matcher.group(1)) | ||
4746 | 83 | return modules | ||
4747 | 84 | |||
4748 | 85 | @staticmethod | ||
4749 | 86 | def _disable_module(module): | ||
4750 | 87 | """Disables the specified module in Apache.""" | ||
4751 | 88 | try: | ||
4752 | 89 | subprocess.check_call(['a2dismod', module]) | ||
4753 | 90 | except subprocess.CalledProcessError as e: | ||
4754 | 91 | # Note: catch error here to allow the attempt of disabling | ||
4755 | 92 | # multiple modules in one go rather than failing after the | ||
4756 | 93 | # first module fails. | ||
4757 | 94 | log('Error occurred disabling module %s. ' | ||
4758 | 95 | 'Output is: %s' % (module, e.output), level=ERROR) | ||
4759 | 96 | |||
4760 | 97 | @staticmethod | ||
4761 | 98 | def _restart_apache(): | ||
4762 | 99 | """Restarts the apache process""" | ||
4763 | 100 | subprocess.check_output(['service', 'apache2', 'restart']) | ||
4764 | 101 | |||
4765 | 102 | @staticmethod | ||
4766 | 103 | def is_ssl_enabled(): | ||
4767 | 104 | """Check if SSL module is enabled or not""" | ||
4768 | 105 | return 'ssl' in DisabledModuleAudit._get_loaded_modules() | ||
4769 | 0 | 106 | ||
4770 | === added file 'hooks/charmhelpers/contrib/hardening/audits/apt.py' | |||
4771 | --- hooks/charmhelpers/contrib/hardening/audits/apt.py 1970-01-01 00:00:00 +0000 | |||
4772 | +++ hooks/charmhelpers/contrib/hardening/audits/apt.py 2021-05-12 04:07:51 +0000 | |||
4773 | @@ -0,0 +1,104 @@ | |||
4774 | 1 | # Copyright 2016 Canonical Limited. | ||
4775 | 2 | # | ||
4776 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
4777 | 4 | # you may not use this file except in compliance with the License. | ||
4778 | 5 | # You may obtain a copy of the License at | ||
4779 | 6 | # | ||
4780 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
4781 | 8 | # | ||
4782 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
4783 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
4784 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
4785 | 12 | # See the License for the specific language governing permissions and | ||
4786 | 13 | # limitations under the License. | ||
4787 | 14 | |||
4788 | 15 | from __future__ import absolute_import # required for external apt import | ||
4789 | 16 | from six import string_types | ||
4790 | 17 | |||
4791 | 18 | from charmhelpers.fetch import ( | ||
4792 | 19 | apt_cache, | ||
4793 | 20 | apt_purge | ||
4794 | 21 | ) | ||
4795 | 22 | from charmhelpers.core.hookenv import ( | ||
4796 | 23 | log, | ||
4797 | 24 | DEBUG, | ||
4798 | 25 | WARNING, | ||
4799 | 26 | ) | ||
4800 | 27 | from charmhelpers.contrib.hardening.audits import BaseAudit | ||
4801 | 28 | from charmhelpers.fetch import ubuntu_apt_pkg as apt_pkg | ||
4802 | 29 | |||
4803 | 30 | |||
4804 | 31 | class AptConfig(BaseAudit): | ||
4805 | 32 | |||
4806 | 33 | def __init__(self, config, **kwargs): | ||
4807 | 34 | self.config = config | ||
4808 | 35 | |||
4809 | 36 | def verify_config(self): | ||
4810 | 37 | apt_pkg.init() | ||
4811 | 38 | for cfg in self.config: | ||
4812 | 39 | value = apt_pkg.config.get(cfg['key'], cfg.get('default', '')) | ||
4813 | 40 | if value and value != cfg['expected']: | ||
4814 | 41 | log("APT config '%s' has unexpected value '%s' " | ||
4815 | 42 | "(expected='%s')" % | ||
4816 | 43 | (cfg['key'], value, cfg['expected']), level=WARNING) | ||
4817 | 44 | |||
4818 | 45 | def ensure_compliance(self): | ||
4819 | 46 | self.verify_config() | ||
4820 | 47 | |||
4821 | 48 | |||
4822 | 49 | class RestrictedPackages(BaseAudit): | ||
4823 | 50 | """Class used to audit restricted packages on the system.""" | ||
4824 | 51 | |||
4825 | 52 | def __init__(self, pkgs, **kwargs): | ||
4826 | 53 | super(RestrictedPackages, self).__init__(**kwargs) | ||
4827 | 54 | if isinstance(pkgs, string_types) or not hasattr(pkgs, '__iter__'): | ||
4828 | 55 | self.pkgs = pkgs.split() | ||
4829 | 56 | else: | ||
4830 | 57 | self.pkgs = pkgs | ||
4831 | 58 | |||
4832 | 59 | def ensure_compliance(self): | ||
4833 | 60 | cache = apt_cache() | ||
4834 | 61 | |||
4835 | 62 | for p in self.pkgs: | ||
4836 | 63 | if p not in cache: | ||
4837 | 64 | continue | ||
4838 | 65 | |||
4839 | 66 | pkg = cache[p] | ||
4840 | 67 | if not self.is_virtual_package(pkg): | ||
4841 | 68 | if not pkg.current_ver: | ||
4842 | 69 | log("Package '%s' is not installed." % pkg.name, | ||
4843 | 70 | level=DEBUG) | ||
4844 | 71 | continue | ||
4845 | 72 | else: | ||
4846 | 73 | log("Restricted package '%s' is installed" % pkg.name, | ||
4847 | 74 | level=WARNING) | ||
4848 | 75 | self.delete_package(cache, pkg) | ||
4849 | 76 | else: | ||
4850 | 77 | log("Checking restricted virtual package '%s' provides" % | ||
4851 | 78 | pkg.name, level=DEBUG) | ||
4852 | 79 | self.delete_package(cache, pkg) | ||
4853 | 80 | |||
4854 | 81 | def delete_package(self, cache, pkg): | ||
4855 | 82 | """Deletes the package from the system. | ||
4856 | 83 | |||
4857 | 84 | Deletes the package from the system, properly handling virtual | ||
4858 | 85 | packages. | ||
4859 | 86 | |||
4860 | 87 | :param cache: the apt cache | ||
4861 | 88 | :param pkg: the package to remove | ||
4862 | 89 | """ | ||
4863 | 90 | if self.is_virtual_package(pkg): | ||
4864 | 91 | log("Package '%s' appears to be virtual - purging provides" % | ||
4865 | 92 | pkg.name, level=DEBUG) | ||
4866 | 93 | for _p in pkg.provides_list: | ||
4867 | 94 | self.delete_package(cache, _p[2].parent_pkg) | ||
4868 | 95 | elif not pkg.current_ver: | ||
4869 | 96 | log("Package '%s' not installed" % pkg.name, level=DEBUG) | ||
4870 | 97 | return | ||
4871 | 98 | else: | ||
4872 | 99 | log("Purging package '%s'" % pkg.name, level=DEBUG) | ||
4873 | 100 | apt_purge(pkg.name) | ||
4874 | 101 | |||
4875 | 102 | def is_virtual_package(self, pkg): | ||
4876 | 103 | return (pkg.get('has_provides', False) and | ||
4877 | 104 | not pkg.get('has_versions', False)) | ||
4878 | 0 | 105 | ||
4879 | === added file 'hooks/charmhelpers/contrib/hardening/audits/file.py' | |||
4880 | --- hooks/charmhelpers/contrib/hardening/audits/file.py 1970-01-01 00:00:00 +0000 | |||
4881 | +++ hooks/charmhelpers/contrib/hardening/audits/file.py 2021-05-12 04:07:51 +0000 | |||
4882 | @@ -0,0 +1,550 @@ | |||
4883 | 1 | # Copyright 2016 Canonical Limited. | ||
4884 | 2 | # | ||
4885 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
4886 | 4 | # you may not use this file except in compliance with the License. | ||
4887 | 5 | # You may obtain a copy of the License at | ||
4888 | 6 | # | ||
4889 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
4890 | 8 | # | ||
4891 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
4892 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
4893 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
4894 | 12 | # See the License for the specific language governing permissions and | ||
4895 | 13 | # limitations under the License. | ||
4896 | 14 | |||
4897 | 15 | import grp | ||
4898 | 16 | import os | ||
4899 | 17 | import pwd | ||
4900 | 18 | import re | ||
4901 | 19 | |||
4902 | 20 | from subprocess import ( | ||
4903 | 21 | CalledProcessError, | ||
4904 | 22 | check_output, | ||
4905 | 23 | check_call, | ||
4906 | 24 | ) | ||
4907 | 25 | from traceback import format_exc | ||
4908 | 26 | from six import string_types | ||
4909 | 27 | from stat import ( | ||
4910 | 28 | S_ISGID, | ||
4911 | 29 | S_ISUID | ||
4912 | 30 | ) | ||
4913 | 31 | |||
4914 | 32 | from charmhelpers.core.hookenv import ( | ||
4915 | 33 | log, | ||
4916 | 34 | DEBUG, | ||
4917 | 35 | INFO, | ||
4918 | 36 | WARNING, | ||
4919 | 37 | ERROR, | ||
4920 | 38 | ) | ||
4921 | 39 | from charmhelpers.core import unitdata | ||
4922 | 40 | from charmhelpers.core.host import file_hash | ||
4923 | 41 | from charmhelpers.contrib.hardening.audits import BaseAudit | ||
4924 | 42 | from charmhelpers.contrib.hardening.templating import ( | ||
4925 | 43 | get_template_path, | ||
4926 | 44 | render_and_write, | ||
4927 | 45 | ) | ||
4928 | 46 | from charmhelpers.contrib.hardening import utils | ||
4929 | 47 | |||
4930 | 48 | |||
4931 | 49 | class BaseFileAudit(BaseAudit): | ||
4932 | 50 | """Base class for file audits. | ||
4933 | 51 | |||
4934 | 52 | Provides api stubs for compliance check flow that must be used by any class | ||
4935 | 53 | that implemented this one. | ||
4936 | 54 | """ | ||
4937 | 55 | |||
4938 | 56 | def __init__(self, paths, always_comply=False, *args, **kwargs): | ||
4939 | 57 | """ | ||
4940 | 58 | :param paths: string path or list of paths of files we want to apply | ||
4941 | 59 | compliance checks and criteria to. | ||
4942 | 60 | :param always_comply: if true compliance criteria is always applied | ||
4943 | 61 | else compliance is skipped for non-existent | ||
4944 | 62 | paths. | ||
4945 | 63 | """ | ||
4946 | 64 | super(BaseFileAudit, self).__init__(*args, **kwargs) | ||
4947 | 65 | self.always_comply = always_comply | ||
4948 | 66 | if isinstance(paths, string_types) or not hasattr(paths, '__iter__'): | ||
4949 | 67 | self.paths = [paths] | ||
4950 | 68 | else: | ||
4951 | 69 | self.paths = paths | ||
4952 | 70 | |||
4953 | 71 | def ensure_compliance(self): | ||
4954 | 72 | """Ensure that the all registered files comply to registered criteria. | ||
4955 | 73 | """ | ||
4956 | 74 | for p in self.paths: | ||
4957 | 75 | if os.path.exists(p): | ||
4958 | 76 | if self.is_compliant(p): | ||
4959 | 77 | continue | ||
4960 | 78 | |||
4961 | 79 | log('File %s is not in compliance.' % p, level=INFO) | ||
4962 | 80 | else: | ||
4963 | 81 | if not self.always_comply: | ||
4964 | 82 | log("Non-existent path '%s' - skipping compliance check" | ||
4965 | 83 | % (p), level=INFO) | ||
4966 | 84 | continue | ||
4967 | 85 | |||
4968 | 86 | if self._take_action(): | ||
4969 | 87 | log("Applying compliance criteria to '%s'" % (p), level=INFO) | ||
4970 | 88 | self.comply(p) | ||
4971 | 89 | |||
4972 | 90 | def is_compliant(self, path): | ||
4973 | 91 | """Audits the path to see if it is in compliance. | ||
4974 | 92 | |||
4975 | 93 | :param path: the path to the file that should be checked. | ||
4976 | 94 | """ | ||
4977 | 95 | raise NotImplementedError | ||
4978 | 96 | |||
4979 | 97 | def comply(self, path): | ||
4980 | 98 | """Enforces the compliance of a path. | ||
4981 | 99 | |||
4982 | 100 | :param path: the path to the file that should be enforced. | ||
4983 | 101 | """ | ||
4984 | 102 | raise NotImplementedError | ||
4985 | 103 | |||
4986 | 104 | @classmethod | ||
4987 | 105 | def _get_stat(cls, path): | ||
4988 | 106 | """Returns the Posix st_stat information for the specified file path. | ||
4989 | 107 | |||
4990 | 108 | :param path: the path to get the st_stat information for. | ||
4991 | 109 | :returns: an st_stat object for the path or None if the path doesn't | ||
4992 | 110 | exist. | ||
4993 | 111 | """ | ||
4994 | 112 | return os.stat(path) | ||
4995 | 113 | |||
4996 | 114 | |||
4997 | 115 | class FilePermissionAudit(BaseFileAudit): | ||
4998 | 116 | """Implements an audit for file permissions and ownership for a user. | ||
4999 | 117 | |||
5000 | 118 | This class implements functionality that ensures that a specific user/group |
The diff has been truncated for viewing.