Merge ~aieri/charm-nagios:bug/1864192 into ~nagios-charmers/charm-nagios:master
- Git
- lp:~aieri/charm-nagios
- bug/1864192
- Merge into master
Status: | Superseded |
---|---|
Proposed branch: | ~aieri/charm-nagios:bug/1864192 |
Merge into: | ~nagios-charmers/charm-nagios:master |
Diff against target: |
3760 lines (+2529/-266) 32 files modified
Makefile (+1/-2) bin/charm_helpers_sync.py (+20/-11) charm-helpers.yaml (+2/-1) hooks/charmhelpers/__init__.py (+4/-4) hooks/charmhelpers/contrib/charmsupport/__init__.py (+13/-0) hooks/charmhelpers/contrib/charmsupport/nrpe.py (+500/-0) hooks/charmhelpers/contrib/charmsupport/volumes.py (+173/-0) hooks/charmhelpers/core/hookenv.py (+525/-56) hooks/charmhelpers/core/host.py (+166/-10) hooks/charmhelpers/core/host_factory/ubuntu.py (+28/-1) hooks/charmhelpers/core/kernel.py (+2/-2) hooks/charmhelpers/core/services/base.py (+18/-7) hooks/charmhelpers/core/strutils.py (+11/-5) hooks/charmhelpers/core/sysctl.py (+32/-11) hooks/charmhelpers/core/templating.py (+18/-9) hooks/charmhelpers/core/unitdata.py (+8/-1) hooks/charmhelpers/fetch/__init__.py (+4/-0) hooks/charmhelpers/fetch/archiveurl.py (+1/-1) hooks/charmhelpers/fetch/bzrurl.py (+2/-2) hooks/charmhelpers/fetch/giturl.py (+2/-2) hooks/charmhelpers/fetch/python/__init__.py (+13/-0) hooks/charmhelpers/fetch/python/debug.py (+54/-0) hooks/charmhelpers/fetch/python/packages.py (+154/-0) hooks/charmhelpers/fetch/python/rpdb.py (+56/-0) hooks/charmhelpers/fetch/python/version.py (+32/-0) hooks/charmhelpers/fetch/snap.py (+17/-1) hooks/charmhelpers/fetch/ubuntu.py (+305/-83) hooks/charmhelpers/fetch/ubuntu_apt_pkg.py (+267/-0) hooks/charmhelpers/osplatform.py (+24/-3) hooks/common.py (+6/-15) hooks/install (+1/-1) hooks/monitors-relation-changed (+70/-38) |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Peter Sabaini | Pending | ||
Adam Dyess | Pending | ||
Chris Sanders | Pending | ||
Review via email: mp+387302@code.launchpad.net |
This proposal supersedes a proposal from 2020-06-29.
This proposal has been superseded by a proposal from 2020-07-13.
Commit message
Description of the change
Peter Sabaini (peter-sabaini) wrote : Posted in a previous version of this proposal | # |
Andrea Ieri (aieri) wrote : Posted in a previous version of this proposal | # |
Thanks, the charm helpers sync script actually comes from https://github.com/juju/charm-helpers
Adam Dyess (addyess) wrote : Posted in a previous version of this proposal | # |
Great. No issues
Chris Sanders (chris.sanders) wrote : Posted in a previous version of this proposal | # |
A few comments inline, and while I hate to do this I think the charmhelpers and this change need to be split. While reviewing it I'm not convinced that this merge isn't confusing local vs charmhelpers functions. For example 'ingress_address' is defined in this change and I *believe* is only actually used from charmhelpers.
If I'm just having difficulty understanding and you *do* think the change is dependent on charmhelpers you can make this MR dependent on the charmhelpers MR so the changes specific to this bug can be reviewed separately.
Unmerged commits
- e4bb62e... by Andrea Ieri
-
Gracefully handle incorrect relation data sent over the nagios relation
Closes-Bug: 1864192
- cc67cb4... by Andrea Ieri
-
Charmhelpers sync
Install enum for python2 as this is needed by hookenv
- 3a6dc7c... by Andrea Ieri
-
Revert "Fully switch to the network-get primitives"
This reverts commit 66b8e0577d7f7f5761da4ff7dd50a0d01e04029c.
The fix was completely wrong, because network-get can only retrieve
local data; learning the ingress-address of a remote unit must be done
via relation-get.
Preview Diff
1 | diff --git a/Makefile b/Makefile | |||
2 | index dbbeab3..5ed72eb 100644 | |||
3 | --- a/Makefile | |||
4 | +++ b/Makefile | |||
5 | @@ -35,8 +35,7 @@ test: | |||
6 | 35 | 35 | ||
7 | 36 | bin/charm_helpers_sync.py: | 36 | bin/charm_helpers_sync.py: |
8 | 37 | @mkdir -p bin | 37 | @mkdir -p bin |
11 | 38 | @bzr cat lp:charm-helpers/tools/charm_helpers_sync/charm_helpers_sync.py \ | 38 | @curl -o bin/charm_helpers_sync.py https://raw.githubusercontent.com/juju/charm-helpers/master/tools/charm_helpers_sync/charm_helpers_sync.py |
10 | 39 | > bin/charm_helpers_sync.py | ||
12 | 40 | 39 | ||
13 | 41 | sync: bin/charm_helpers_sync.py | 40 | sync: bin/charm_helpers_sync.py |
14 | 42 | @$(PYTHON) bin/charm_helpers_sync.py -c charm-helpers.yaml | 41 | @$(PYTHON) bin/charm_helpers_sync.py -c charm-helpers.yaml |
15 | diff --git a/bin/charm_helpers_sync.py b/bin/charm_helpers_sync.py | |||
16 | index bd79460..7c0c194 100644 | |||
17 | --- a/bin/charm_helpers_sync.py | |||
18 | +++ b/bin/charm_helpers_sync.py | |||
19 | @@ -29,7 +29,7 @@ from fnmatch import fnmatch | |||
20 | 29 | 29 | ||
21 | 30 | import six | 30 | import six |
22 | 31 | 31 | ||
24 | 32 | CHARM_HELPERS_BRANCH = 'lp:charm-helpers' | 32 | CHARM_HELPERS_REPO = 'https://github.com/juju/charm-helpers' |
25 | 33 | 33 | ||
26 | 34 | 34 | ||
27 | 35 | def parse_config(conf_file): | 35 | def parse_config(conf_file): |
28 | @@ -39,10 +39,16 @@ def parse_config(conf_file): | |||
29 | 39 | return yaml.load(open(conf_file).read()) | 39 | return yaml.load(open(conf_file).read()) |
30 | 40 | 40 | ||
31 | 41 | 41 | ||
33 | 42 | def clone_helpers(work_dir, branch): | 42 | def clone_helpers(work_dir, repo): |
34 | 43 | dest = os.path.join(work_dir, 'charm-helpers') | 43 | dest = os.path.join(work_dir, 'charm-helpers') |
37 | 44 | logging.info('Checking out %s to %s.' % (branch, dest)) | 44 | logging.info('Cloning out %s to %s.' % (repo, dest)) |
38 | 45 | cmd = ['bzr', 'checkout', '--lightweight', branch, dest] | 45 | branch = None |
39 | 46 | if '@' in repo: | ||
40 | 47 | repo, branch = repo.split('@', 1) | ||
41 | 48 | cmd = ['git', 'clone', '--depth=1'] | ||
42 | 49 | if branch is not None: | ||
43 | 50 | cmd += ['--branch', branch] | ||
44 | 51 | cmd += [repo, dest] | ||
45 | 46 | subprocess.check_call(cmd) | 52 | subprocess.check_call(cmd) |
46 | 47 | return dest | 53 | return dest |
47 | 48 | 54 | ||
48 | @@ -174,6 +180,9 @@ def extract_options(inc, global_options=None): | |||
49 | 174 | 180 | ||
50 | 175 | 181 | ||
51 | 176 | def sync_helpers(include, src, dest, options=None): | 182 | def sync_helpers(include, src, dest, options=None): |
52 | 183 | if os.path.exists(dest): | ||
53 | 184 | logging.debug('Removing existing directory: %s' % dest) | ||
54 | 185 | shutil.rmtree(dest) | ||
55 | 177 | if not os.path.isdir(dest): | 186 | if not os.path.isdir(dest): |
56 | 178 | os.makedirs(dest) | 187 | os.makedirs(dest) |
57 | 179 | 188 | ||
58 | @@ -198,8 +207,8 @@ if __name__ == '__main__': | |||
59 | 198 | default=None, help='helper config file') | 207 | default=None, help='helper config file') |
60 | 199 | parser.add_option('-D', '--debug', action='store_true', dest='debug', | 208 | parser.add_option('-D', '--debug', action='store_true', dest='debug', |
61 | 200 | default=False, help='debug') | 209 | default=False, help='debug') |
64 | 201 | parser.add_option('-b', '--branch', action='store', dest='branch', | 210 | parser.add_option('-r', '--repository', action='store', dest='repo', |
65 | 202 | help='charm-helpers bzr branch (overrides config)') | 211 | help='charm-helpers git repository (overrides config)') |
66 | 203 | parser.add_option('-d', '--destination', action='store', dest='dest_dir', | 212 | parser.add_option('-d', '--destination', action='store', dest='dest_dir', |
67 | 204 | help='sync destination dir (overrides config)') | 213 | help='sync destination dir (overrides config)') |
68 | 205 | (opts, args) = parser.parse_args() | 214 | (opts, args) = parser.parse_args() |
69 | @@ -218,10 +227,10 @@ if __name__ == '__main__': | |||
70 | 218 | else: | 227 | else: |
71 | 219 | config = {} | 228 | config = {} |
72 | 220 | 229 | ||
77 | 221 | if 'branch' not in config: | 230 | if 'repo' not in config: |
78 | 222 | config['branch'] = CHARM_HELPERS_BRANCH | 231 | config['repo'] = CHARM_HELPERS_REPO |
79 | 223 | if opts.branch: | 232 | if opts.repo: |
80 | 224 | config['branch'] = opts.branch | 233 | config['repo'] = opts.repo |
81 | 225 | if opts.dest_dir: | 234 | if opts.dest_dir: |
82 | 226 | config['destination'] = opts.dest_dir | 235 | config['destination'] = opts.dest_dir |
83 | 227 | 236 | ||
84 | @@ -241,7 +250,7 @@ if __name__ == '__main__': | |||
85 | 241 | sync_options = config['options'] | 250 | sync_options = config['options'] |
86 | 242 | tmpd = tempfile.mkdtemp() | 251 | tmpd = tempfile.mkdtemp() |
87 | 243 | try: | 252 | try: |
89 | 244 | checkout = clone_helpers(tmpd, config['branch']) | 253 | checkout = clone_helpers(tmpd, config['repo']) |
90 | 245 | sync_helpers(config['include'], checkout, config['destination'], | 254 | sync_helpers(config['include'], checkout, config['destination'], |
91 | 246 | options=sync_options) | 255 | options=sync_options) |
92 | 247 | except Exception as e: | 256 | except Exception as e: |
93 | diff --git a/charm-helpers.yaml b/charm-helpers.yaml | |||
94 | index e5f7760..640679e 100644 | |||
95 | --- a/charm-helpers.yaml | |||
96 | +++ b/charm-helpers.yaml | |||
97 | @@ -1,7 +1,8 @@ | |||
98 | 1 | repo: https://github.com/juju/charm-helpers | ||
99 | 1 | destination: hooks/charmhelpers | 2 | destination: hooks/charmhelpers |
100 | 2 | branch: lp:charm-helpers | ||
101 | 3 | include: | 3 | include: |
102 | 4 | - core | 4 | - core |
103 | 5 | - fetch | 5 | - fetch |
104 | 6 | - osplatform | 6 | - osplatform |
105 | 7 | - contrib.ssl | 7 | - contrib.ssl |
106 | 8 | - contrib.charmsupport | ||
107 | diff --git a/hooks/charmhelpers/__init__.py b/hooks/charmhelpers/__init__.py | |||
108 | index e7aa471..61ef907 100644 | |||
109 | --- a/hooks/charmhelpers/__init__.py | |||
110 | +++ b/hooks/charmhelpers/__init__.py | |||
111 | @@ -23,22 +23,22 @@ import subprocess | |||
112 | 23 | import sys | 23 | import sys |
113 | 24 | 24 | ||
114 | 25 | try: | 25 | try: |
116 | 26 | import six # flake8: noqa | 26 | import six # NOQA:F401 |
117 | 27 | except ImportError: | 27 | except ImportError: |
118 | 28 | if sys.version_info.major == 2: | 28 | if sys.version_info.major == 2: |
119 | 29 | subprocess.check_call(['apt-get', 'install', '-y', 'python-six']) | 29 | subprocess.check_call(['apt-get', 'install', '-y', 'python-six']) |
120 | 30 | else: | 30 | else: |
121 | 31 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-six']) | 31 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-six']) |
123 | 32 | import six # flake8: noqa | 32 | import six # NOQA:F401 |
124 | 33 | 33 | ||
125 | 34 | try: | 34 | try: |
127 | 35 | import yaml # flake8: noqa | 35 | import yaml # NOQA:F401 |
128 | 36 | except ImportError: | 36 | except ImportError: |
129 | 37 | if sys.version_info.major == 2: | 37 | if sys.version_info.major == 2: |
130 | 38 | subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml']) | 38 | subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml']) |
131 | 39 | else: | 39 | else: |
132 | 40 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml']) | 40 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml']) |
134 | 41 | import yaml # flake8: noqa | 41 | import yaml # NOQA:F401 |
135 | 42 | 42 | ||
136 | 43 | 43 | ||
137 | 44 | # Holds a list of mapping of mangled function names that have been deprecated | 44 | # Holds a list of mapping of mangled function names that have been deprecated |
138 | diff --git a/hooks/charmhelpers/contrib/charmsupport/__init__.py b/hooks/charmhelpers/contrib/charmsupport/__init__.py | |||
139 | 45 | new file mode 100644 | 45 | new file mode 100644 |
140 | index 0000000..d7567b8 | |||
141 | --- /dev/null | |||
142 | +++ b/hooks/charmhelpers/contrib/charmsupport/__init__.py | |||
143 | @@ -0,0 +1,13 @@ | |||
144 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
145 | 2 | # | ||
146 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
147 | 4 | # you may not use this file except in compliance with the License. | ||
148 | 5 | # You may obtain a copy of the License at | ||
149 | 6 | # | ||
150 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
151 | 8 | # | ||
152 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
153 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
154 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
155 | 12 | # See the License for the specific language governing permissions and | ||
156 | 13 | # limitations under the License. | ||
157 | diff --git a/hooks/charmhelpers/contrib/charmsupport/nrpe.py b/hooks/charmhelpers/contrib/charmsupport/nrpe.py | |||
158 | 0 | new file mode 100644 | 14 | new file mode 100644 |
159 | index 0000000..d775861 | |||
160 | --- /dev/null | |||
161 | +++ b/hooks/charmhelpers/contrib/charmsupport/nrpe.py | |||
162 | @@ -0,0 +1,500 @@ | |||
163 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
164 | 2 | # | ||
165 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
166 | 4 | # you may not use this file except in compliance with the License. | ||
167 | 5 | # You may obtain a copy of the License at | ||
168 | 6 | # | ||
169 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
170 | 8 | # | ||
171 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
172 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
173 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
174 | 12 | # See the License for the specific language governing permissions and | ||
175 | 13 | # limitations under the License. | ||
176 | 14 | |||
177 | 15 | """Compatibility with the nrpe-external-master charm""" | ||
178 | 16 | # Copyright 2012 Canonical Ltd. | ||
179 | 17 | # | ||
180 | 18 | # Authors: | ||
181 | 19 | # Matthew Wedgwood <matthew.wedgwood@canonical.com> | ||
182 | 20 | |||
183 | 21 | import subprocess | ||
184 | 22 | import pwd | ||
185 | 23 | import grp | ||
186 | 24 | import os | ||
187 | 25 | import glob | ||
188 | 26 | import shutil | ||
189 | 27 | import re | ||
190 | 28 | import shlex | ||
191 | 29 | import yaml | ||
192 | 30 | |||
193 | 31 | from charmhelpers.core.hookenv import ( | ||
194 | 32 | config, | ||
195 | 33 | hook_name, | ||
196 | 34 | local_unit, | ||
197 | 35 | log, | ||
198 | 36 | relation_get, | ||
199 | 37 | relation_ids, | ||
200 | 38 | relation_set, | ||
201 | 39 | relations_of_type, | ||
202 | 40 | ) | ||
203 | 41 | |||
204 | 42 | from charmhelpers.core.host import service | ||
205 | 43 | from charmhelpers.core import host | ||
206 | 44 | |||
207 | 45 | # This module adds compatibility with the nrpe-external-master and plain nrpe | ||
208 | 46 | # subordinate charms. To use it in your charm: | ||
209 | 47 | # | ||
210 | 48 | # 1. Update metadata.yaml | ||
211 | 49 | # | ||
212 | 50 | # provides: | ||
213 | 51 | # (...) | ||
214 | 52 | # nrpe-external-master: | ||
215 | 53 | # interface: nrpe-external-master | ||
216 | 54 | # scope: container | ||
217 | 55 | # | ||
218 | 56 | # and/or | ||
219 | 57 | # | ||
220 | 58 | # provides: | ||
221 | 59 | # (...) | ||
222 | 60 | # local-monitors: | ||
223 | 61 | # interface: local-monitors | ||
224 | 62 | # scope: container | ||
225 | 63 | |||
226 | 64 | # | ||
227 | 65 | # 2. Add the following to config.yaml | ||
228 | 66 | # | ||
229 | 67 | # nagios_context: | ||
230 | 68 | # default: "juju" | ||
231 | 69 | # type: string | ||
232 | 70 | # description: | | ||
233 | 71 | # Used by the nrpe subordinate charms. | ||
234 | 72 | # A string that will be prepended to instance name to set the host name | ||
235 | 73 | # in nagios. So for instance the hostname would be something like: | ||
236 | 74 | # juju-myservice-0 | ||
237 | 75 | # If you're running multiple environments with the same services in them | ||
238 | 76 | # this allows you to differentiate between them. | ||
239 | 77 | # nagios_servicegroups: | ||
240 | 78 | # default: "" | ||
241 | 79 | # type: string | ||
242 | 80 | # description: | | ||
243 | 81 | # A comma-separated list of nagios servicegroups. | ||
244 | 82 | # If left empty, the nagios_context will be used as the servicegroup | ||
245 | 83 | # | ||
246 | 84 | # 3. Add custom checks (Nagios plugins) to files/nrpe-external-master | ||
247 | 85 | # | ||
248 | 86 | # 4. Update your hooks.py with something like this: | ||
249 | 87 | # | ||
250 | 88 | # from charmsupport.nrpe import NRPE | ||
251 | 89 | # (...) | ||
252 | 90 | # def update_nrpe_config(): | ||
253 | 91 | # nrpe_compat = NRPE() | ||
254 | 92 | # nrpe_compat.add_check( | ||
255 | 93 | # shortname = "myservice", | ||
256 | 94 | # description = "Check MyService", | ||
257 | 95 | # check_cmd = "check_http -w 2 -c 10 http://localhost" | ||
258 | 96 | # ) | ||
259 | 97 | # nrpe_compat.add_check( | ||
260 | 98 | # "myservice_other", | ||
261 | 99 | # "Check for widget failures", | ||
262 | 100 | # check_cmd = "/srv/myapp/scripts/widget_check" | ||
263 | 101 | # ) | ||
264 | 102 | # nrpe_compat.write() | ||
265 | 103 | # | ||
266 | 104 | # def config_changed(): | ||
267 | 105 | # (...) | ||
268 | 106 | # update_nrpe_config() | ||
269 | 107 | # | ||
270 | 108 | # def nrpe_external_master_relation_changed(): | ||
271 | 109 | # update_nrpe_config() | ||
272 | 110 | # | ||
273 | 111 | # def local_monitors_relation_changed(): | ||
274 | 112 | # update_nrpe_config() | ||
275 | 113 | # | ||
276 | 114 | # 4.a If your charm is a subordinate charm set primary=False | ||
277 | 115 | # | ||
278 | 116 | # from charmsupport.nrpe import NRPE | ||
279 | 117 | # (...) | ||
280 | 118 | # def update_nrpe_config(): | ||
281 | 119 | # nrpe_compat = NRPE(primary=False) | ||
282 | 120 | # | ||
283 | 121 | # 5. ln -s hooks.py nrpe-external-master-relation-changed | ||
284 | 122 | # ln -s hooks.py local-monitors-relation-changed | ||
285 | 123 | |||
286 | 124 | |||
287 | 125 | class CheckException(Exception): | ||
288 | 126 | pass | ||
289 | 127 | |||
290 | 128 | |||
291 | 129 | class Check(object): | ||
292 | 130 | shortname_re = '[A-Za-z0-9-_.@]+$' | ||
293 | 131 | service_template = (""" | ||
294 | 132 | #--------------------------------------------------- | ||
295 | 133 | # This file is Juju managed | ||
296 | 134 | #--------------------------------------------------- | ||
297 | 135 | define service {{ | ||
298 | 136 | use active-service | ||
299 | 137 | host_name {nagios_hostname} | ||
300 | 138 | service_description {nagios_hostname}[{shortname}] """ | ||
301 | 139 | """{description} | ||
302 | 140 | check_command check_nrpe!{command} | ||
303 | 141 | servicegroups {nagios_servicegroup} | ||
304 | 142 | }} | ||
305 | 143 | """) | ||
306 | 144 | |||
307 | 145 | def __init__(self, shortname, description, check_cmd): | ||
308 | 146 | super(Check, self).__init__() | ||
309 | 147 | # XXX: could be better to calculate this from the service name | ||
310 | 148 | if not re.match(self.shortname_re, shortname): | ||
311 | 149 | raise CheckException("shortname must match {}".format( | ||
312 | 150 | Check.shortname_re)) | ||
313 | 151 | self.shortname = shortname | ||
314 | 152 | self.command = "check_{}".format(shortname) | ||
315 | 153 | # Note: a set of invalid characters is defined by the | ||
316 | 154 | # Nagios server config | ||
317 | 155 | # The default is: illegal_object_name_chars=`~!$%^&*"|'<>?,()= | ||
318 | 156 | self.description = description | ||
319 | 157 | self.check_cmd = self._locate_cmd(check_cmd) | ||
320 | 158 | |||
321 | 159 | def _get_check_filename(self): | ||
322 | 160 | return os.path.join(NRPE.nrpe_confdir, '{}.cfg'.format(self.command)) | ||
323 | 161 | |||
324 | 162 | def _get_service_filename(self, hostname): | ||
325 | 163 | return os.path.join(NRPE.nagios_exportdir, | ||
326 | 164 | 'service__{}_{}.cfg'.format(hostname, self.command)) | ||
327 | 165 | |||
328 | 166 | def _locate_cmd(self, check_cmd): | ||
329 | 167 | search_path = ( | ||
330 | 168 | '/usr/lib/nagios/plugins', | ||
331 | 169 | '/usr/local/lib/nagios/plugins', | ||
332 | 170 | ) | ||
333 | 171 | parts = shlex.split(check_cmd) | ||
334 | 172 | for path in search_path: | ||
335 | 173 | if os.path.exists(os.path.join(path, parts[0])): | ||
336 | 174 | command = os.path.join(path, parts[0]) | ||
337 | 175 | if len(parts) > 1: | ||
338 | 176 | command += " " + " ".join(parts[1:]) | ||
339 | 177 | return command | ||
340 | 178 | log('Check command not found: {}'.format(parts[0])) | ||
341 | 179 | return '' | ||
342 | 180 | |||
343 | 181 | def _remove_service_files(self): | ||
344 | 182 | if not os.path.exists(NRPE.nagios_exportdir): | ||
345 | 183 | return | ||
346 | 184 | for f in os.listdir(NRPE.nagios_exportdir): | ||
347 | 185 | if f.endswith('_{}.cfg'.format(self.command)): | ||
348 | 186 | os.remove(os.path.join(NRPE.nagios_exportdir, f)) | ||
349 | 187 | |||
350 | 188 | def remove(self, hostname): | ||
351 | 189 | nrpe_check_file = self._get_check_filename() | ||
352 | 190 | if os.path.exists(nrpe_check_file): | ||
353 | 191 | os.remove(nrpe_check_file) | ||
354 | 192 | self._remove_service_files() | ||
355 | 193 | |||
356 | 194 | def write(self, nagios_context, hostname, nagios_servicegroups): | ||
357 | 195 | nrpe_check_file = self._get_check_filename() | ||
358 | 196 | with open(nrpe_check_file, 'w') as nrpe_check_config: | ||
359 | 197 | nrpe_check_config.write("# check {}\n".format(self.shortname)) | ||
360 | 198 | if nagios_servicegroups: | ||
361 | 199 | nrpe_check_config.write( | ||
362 | 200 | "# The following header was added automatically by juju\n") | ||
363 | 201 | nrpe_check_config.write( | ||
364 | 202 | "# Modifying it will affect nagios monitoring and alerting\n") | ||
365 | 203 | nrpe_check_config.write( | ||
366 | 204 | "# servicegroups: {}\n".format(nagios_servicegroups)) | ||
367 | 205 | nrpe_check_config.write("command[{}]={}\n".format( | ||
368 | 206 | self.command, self.check_cmd)) | ||
369 | 207 | |||
370 | 208 | if not os.path.exists(NRPE.nagios_exportdir): | ||
371 | 209 | log('Not writing service config as {} is not accessible'.format( | ||
372 | 210 | NRPE.nagios_exportdir)) | ||
373 | 211 | else: | ||
374 | 212 | self.write_service_config(nagios_context, hostname, | ||
375 | 213 | nagios_servicegroups) | ||
376 | 214 | |||
377 | 215 | def write_service_config(self, nagios_context, hostname, | ||
378 | 216 | nagios_servicegroups): | ||
379 | 217 | self._remove_service_files() | ||
380 | 218 | |||
381 | 219 | templ_vars = { | ||
382 | 220 | 'nagios_hostname': hostname, | ||
383 | 221 | 'nagios_servicegroup': nagios_servicegroups, | ||
384 | 222 | 'description': self.description, | ||
385 | 223 | 'shortname': self.shortname, | ||
386 | 224 | 'command': self.command, | ||
387 | 225 | } | ||
388 | 226 | nrpe_service_text = Check.service_template.format(**templ_vars) | ||
389 | 227 | nrpe_service_file = self._get_service_filename(hostname) | ||
390 | 228 | with open(nrpe_service_file, 'w') as nrpe_service_config: | ||
391 | 229 | nrpe_service_config.write(str(nrpe_service_text)) | ||
392 | 230 | |||
393 | 231 | def run(self): | ||
394 | 232 | subprocess.call(self.check_cmd) | ||
395 | 233 | |||
396 | 234 | |||
397 | 235 | class NRPE(object): | ||
398 | 236 | nagios_logdir = '/var/log/nagios' | ||
399 | 237 | nagios_exportdir = '/var/lib/nagios/export' | ||
400 | 238 | nrpe_confdir = '/etc/nagios/nrpe.d' | ||
401 | 239 | homedir = '/var/lib/nagios' # home dir provided by nagios-nrpe-server | ||
402 | 240 | |||
403 | 241 | def __init__(self, hostname=None, primary=True): | ||
404 | 242 | super(NRPE, self).__init__() | ||
405 | 243 | self.config = config() | ||
406 | 244 | self.primary = primary | ||
407 | 245 | self.nagios_context = self.config['nagios_context'] | ||
408 | 246 | if 'nagios_servicegroups' in self.config and self.config['nagios_servicegroups']: | ||
409 | 247 | self.nagios_servicegroups = self.config['nagios_servicegroups'] | ||
410 | 248 | else: | ||
411 | 249 | self.nagios_servicegroups = self.nagios_context | ||
412 | 250 | self.unit_name = local_unit().replace('/', '-') | ||
413 | 251 | if hostname: | ||
414 | 252 | self.hostname = hostname | ||
415 | 253 | else: | ||
416 | 254 | nagios_hostname = get_nagios_hostname() | ||
417 | 255 | if nagios_hostname: | ||
418 | 256 | self.hostname = nagios_hostname | ||
419 | 257 | else: | ||
420 | 258 | self.hostname = "{}-{}".format(self.nagios_context, self.unit_name) | ||
421 | 259 | self.checks = [] | ||
422 | 260 | # Iff in an nrpe-external-master relation hook, set primary status | ||
423 | 261 | relation = relation_ids('nrpe-external-master') | ||
424 | 262 | if relation: | ||
425 | 263 | log("Setting charm primary status {}".format(primary)) | ||
426 | 264 | for rid in relation: | ||
427 | 265 | relation_set(relation_id=rid, relation_settings={'primary': self.primary}) | ||
428 | 266 | self.remove_check_queue = set() | ||
429 | 267 | |||
430 | 268 | def add_check(self, *args, **kwargs): | ||
431 | 269 | shortname = None | ||
432 | 270 | if kwargs.get('shortname') is None: | ||
433 | 271 | if len(args) > 0: | ||
434 | 272 | shortname = args[0] | ||
435 | 273 | else: | ||
436 | 274 | shortname = kwargs['shortname'] | ||
437 | 275 | |||
438 | 276 | self.checks.append(Check(*args, **kwargs)) | ||
439 | 277 | try: | ||
440 | 278 | self.remove_check_queue.remove(shortname) | ||
441 | 279 | except KeyError: | ||
442 | 280 | pass | ||
443 | 281 | |||
444 | 282 | def remove_check(self, *args, **kwargs): | ||
445 | 283 | if kwargs.get('shortname') is None: | ||
446 | 284 | raise ValueError('shortname of check must be specified') | ||
447 | 285 | |||
448 | 286 | # Use sensible defaults if they're not specified - these are not | ||
449 | 287 | # actually used during removal, but they're required for constructing | ||
450 | 288 | # the Check object; check_disk is chosen because it's part of the | ||
451 | 289 | # nagios-plugins-basic package. | ||
452 | 290 | if kwargs.get('check_cmd') is None: | ||
453 | 291 | kwargs['check_cmd'] = 'check_disk' | ||
454 | 292 | if kwargs.get('description') is None: | ||
455 | 293 | kwargs['description'] = '' | ||
456 | 294 | |||
457 | 295 | check = Check(*args, **kwargs) | ||
458 | 296 | check.remove(self.hostname) | ||
459 | 297 | self.remove_check_queue.add(kwargs['shortname']) | ||
460 | 298 | |||
461 | 299 | def write(self): | ||
462 | 300 | try: | ||
463 | 301 | nagios_uid = pwd.getpwnam('nagios').pw_uid | ||
464 | 302 | nagios_gid = grp.getgrnam('nagios').gr_gid | ||
465 | 303 | except Exception: | ||
466 | 304 | log("Nagios user not set up, nrpe checks not updated") | ||
467 | 305 | return | ||
468 | 306 | |||
469 | 307 | if not os.path.exists(NRPE.nagios_logdir): | ||
470 | 308 | os.mkdir(NRPE.nagios_logdir) | ||
471 | 309 | os.chown(NRPE.nagios_logdir, nagios_uid, nagios_gid) | ||
472 | 310 | |||
473 | 311 | nrpe_monitors = {} | ||
474 | 312 | monitors = {"monitors": {"remote": {"nrpe": nrpe_monitors}}} | ||
475 | 313 | for nrpecheck in self.checks: | ||
476 | 314 | nrpecheck.write(self.nagios_context, self.hostname, | ||
477 | 315 | self.nagios_servicegroups) | ||
478 | 316 | nrpe_monitors[nrpecheck.shortname] = { | ||
479 | 317 | "command": nrpecheck.command, | ||
480 | 318 | } | ||
481 | 319 | |||
482 | 320 | # update-status hooks are configured to firing every 5 minutes by | ||
483 | 321 | # default. When nagios-nrpe-server is restarted, the nagios server | ||
484 | 322 | # reports checks failing causing unnecessary alerts. Let's not restart | ||
485 | 323 | # on update-status hooks. | ||
486 | 324 | if not hook_name() == 'update-status': | ||
487 | 325 | service('restart', 'nagios-nrpe-server') | ||
488 | 326 | |||
489 | 327 | monitor_ids = relation_ids("local-monitors") + \ | ||
490 | 328 | relation_ids("nrpe-external-master") | ||
491 | 329 | for rid in monitor_ids: | ||
492 | 330 | reldata = relation_get(unit=local_unit(), rid=rid) | ||
493 | 331 | if 'monitors' in reldata: | ||
494 | 332 | # update the existing set of monitors with the new data | ||
495 | 333 | old_monitors = yaml.safe_load(reldata['monitors']) | ||
496 | 334 | old_nrpe_monitors = old_monitors['monitors']['remote']['nrpe'] | ||
497 | 335 | # remove keys that are in the remove_check_queue | ||
498 | 336 | old_nrpe_monitors = {k: v for k, v in old_nrpe_monitors.items() | ||
499 | 337 | if k not in self.remove_check_queue} | ||
500 | 338 | # update/add nrpe_monitors | ||
501 | 339 | old_nrpe_monitors.update(nrpe_monitors) | ||
502 | 340 | old_monitors['monitors']['remote']['nrpe'] = old_nrpe_monitors | ||
503 | 341 | # write back to the relation | ||
504 | 342 | relation_set(relation_id=rid, monitors=yaml.dump(old_monitors)) | ||
505 | 343 | else: | ||
506 | 344 | # write a brand new set of monitors, as no existing ones. | ||
507 | 345 | relation_set(relation_id=rid, monitors=yaml.dump(monitors)) | ||
508 | 346 | |||
509 | 347 | self.remove_check_queue.clear() | ||
510 | 348 | |||
511 | 349 | |||
512 | 350 | def get_nagios_hostcontext(relation_name='nrpe-external-master'): | ||
513 | 351 | """ | ||
514 | 352 | Query relation with nrpe subordinate, return the nagios_host_context | ||
515 | 353 | |||
516 | 354 | :param str relation_name: Name of relation nrpe sub joined to | ||
517 | 355 | """ | ||
518 | 356 | for rel in relations_of_type(relation_name): | ||
519 | 357 | if 'nagios_host_context' in rel: | ||
520 | 358 | return rel['nagios_host_context'] | ||
521 | 359 | |||
522 | 360 | |||
523 | 361 | def get_nagios_hostname(relation_name='nrpe-external-master'): | ||
524 | 362 | """ | ||
525 | 363 | Query relation with nrpe subordinate, return the nagios_hostname | ||
526 | 364 | |||
527 | 365 | :param str relation_name: Name of relation nrpe sub joined to | ||
528 | 366 | """ | ||
529 | 367 | for rel in relations_of_type(relation_name): | ||
530 | 368 | if 'nagios_hostname' in rel: | ||
531 | 369 | return rel['nagios_hostname'] | ||
532 | 370 | |||
533 | 371 | |||
534 | 372 | def get_nagios_unit_name(relation_name='nrpe-external-master'): | ||
535 | 373 | """ | ||
536 | 374 | Return the nagios unit name prepended with host_context if needed | ||
537 | 375 | |||
538 | 376 | :param str relation_name: Name of relation nrpe sub joined to | ||
539 | 377 | """ | ||
540 | 378 | host_context = get_nagios_hostcontext(relation_name) | ||
541 | 379 | if host_context: | ||
542 | 380 | unit = "%s:%s" % (host_context, local_unit()) | ||
543 | 381 | else: | ||
544 | 382 | unit = local_unit() | ||
545 | 383 | return unit | ||
546 | 384 | |||
547 | 385 | |||
def add_init_service_checks(nrpe, services, unit_name, immediate_check=True):
    """
    Add checks for each service in list.

    :param NRPE nrpe: NRPE object to add check to
    :param list services: List of services to check
    :param str unit_name: Unit name to use in check description
    :param bool immediate_check: For sysv init, run the service check immediately
    """
    for svc in services:
        # Don't add a check for these services from neutron-gateway.
        if svc in ('ext-port', 'os-charm-phy-nic-mtu'):
            # BUG FIX: the original had a bare ``next`` statement here, which
            # merely evaluates the builtin and does nothing -- these services
            # were never actually skipped. ``continue`` is what was intended.
            continue

        upstart_init = '/etc/init/%s.conf' % svc
        sysv_init = '/etc/init.d/%s' % svc

        if host.init_is_systemd():
            nrpe.add_check(
                shortname=svc,
                description='process check {%s}' % unit_name,
                check_cmd='check_systemd.py %s' % svc
            )
        elif os.path.exists(upstart_init):
            nrpe.add_check(
                shortname=svc,
                description='process check {%s}' % unit_name,
                check_cmd='check_upstart_job %s' % svc
            )
        elif os.path.exists(sysv_init):
            cronpath = '/etc/cron.d/nagios-service-check-%s' % svc
            checkpath = '%s/service-check-%s.txt' % (nrpe.homedir, svc)
            croncmd = (
                '/usr/local/lib/nagios/plugins/check_exit_status.pl '
                '-e -s /etc/init.d/%s status' % svc
            )
            cron_file = '*/5 * * * * root %s > %s\n' % (croncmd, checkpath)
            # Context manager guarantees the cron file is closed on error.
            with open(cronpath, 'w') as f:
                f.write(cron_file)
            nrpe.add_check(
                shortname=svc,
                description='service check {%s}' % unit_name,
                check_cmd='check_status_file.py -f %s' % checkpath,
            )
            # if /var/lib/nagios doesn't exist open(checkpath, 'w') will fail
            # (LP: #1670223).
            if immediate_check and os.path.isdir(nrpe.homedir):
                with open(checkpath, 'w') as f:
                    subprocess.call(
                        croncmd.split(),
                        stdout=f,
                        stderr=subprocess.STDOUT
                    )
                os.chmod(checkpath, 0o644)
604 | 442 | |||
605 | 443 | |||
def copy_nrpe_checks(nrpe_files_dir=None):
    """Install the bundled check_* NRPE plugin scripts into the Nagios
    plugins directory, creating it if necessary.

    :param nrpe_files_dir: directory holding the check scripts; when None,
        it is located under $CHARM_DIR (or $CHARM_DIR/hooks).
    :raises RuntimeError: if no charmhelpers files directory can be found.
    """
    plugins_dir = '/usr/local/lib/nagios/plugins'
    if nrpe_files_dir is None:
        # "charmhelpers" may live either in CHARM_DIR or in CHARM_DIR/hooks.
        for segment in ('.', 'hooks'):
            candidate = os.path.abspath(os.path.join(
                os.getenv('CHARM_DIR'), segment,
                'charmhelpers', 'contrib', 'openstack', 'files'))
            if os.path.isdir(candidate):
                nrpe_files_dir = candidate
                break
        else:
            raise RuntimeError("Couldn't find charmhelpers directory")
    if not os.path.exists(plugins_dir):
        os.makedirs(plugins_dir)
    for fname in glob.glob(os.path.join(nrpe_files_dir, "check_*")):
        if os.path.isfile(fname):
            dest = os.path.join(plugins_dir, os.path.basename(fname))
            shutil.copy2(fname, dest)
632 | 470 | |||
633 | 471 | |||
def add_haproxy_checks(nrpe, unit_name):
    """Register the two standard HAProxy NRPE checks.

    :param NRPE nrpe: NRPE object to add check to
    :param str unit_name: Unit name to use in check description
    """
    haproxy_checks = (
        ('haproxy_servers', 'Check HAProxy {%s}', 'check_haproxy.sh'),
        ('haproxy_queue', 'Check HAProxy queue depth {%s}',
         'check_haproxy_queue_depth.sh'),
    )
    for shortname, desc_fmt, cmd in haproxy_checks:
        nrpe.add_check(
            shortname=shortname,
            description=desc_fmt % unit_name,
            check_cmd=cmd)
649 | 487 | |||
650 | 488 | |||
def remove_deprecated_check(nrpe, deprecated_services):
    """
    Remove checks for deprecated services in list.

    :param nrpe: NRPE object to remove check from
    :type nrpe: NRPE
    :param deprecated_services: List of deprecated services that are removed
    :type deprecated_services: list
    """
    for service in deprecated_services:
        log('Deprecated service: {}'.format(service))
        nrpe.remove_check(shortname=service)
663 | diff --git a/hooks/charmhelpers/contrib/charmsupport/volumes.py b/hooks/charmhelpers/contrib/charmsupport/volumes.py | |||
664 | 0 | new file mode 100644 | 501 | new file mode 100644 |
665 | index 0000000..7ea43f0 | |||
666 | --- /dev/null | |||
667 | +++ b/hooks/charmhelpers/contrib/charmsupport/volumes.py | |||
668 | @@ -0,0 +1,173 @@ | |||
669 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
670 | 2 | # | ||
671 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
672 | 4 | # you may not use this file except in compliance with the License. | ||
673 | 5 | # You may obtain a copy of the License at | ||
674 | 6 | # | ||
675 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
676 | 8 | # | ||
677 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
678 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
679 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
680 | 12 | # See the License for the specific language governing permissions and | ||
681 | 13 | # limitations under the License. | ||
682 | 14 | |||
683 | 15 | ''' | ||
684 | 16 | Functions for managing volumes in juju units. One volume is supported per unit. | ||
685 | 17 | Subordinates may have their own storage, provided it is on its own partition. | ||
686 | 18 | |||
687 | 19 | Configuration stanzas:: | ||
688 | 20 | |||
689 | 21 | volume-ephemeral: | ||
690 | 22 | type: boolean | ||
691 | 23 | default: true | ||
692 | 24 | description: > | ||
693 | 25 | If false, a volume is mounted as specified in "volume-map" | ||
694 | 26 | If true, ephemeral storage will be used, meaning that log data | ||
695 | 27 | will only exist as long as the machine. YOU HAVE BEEN WARNED. | ||
696 | 28 | volume-map: | ||
697 | 29 | type: string | ||
698 | 30 | default: {} | ||
699 | 31 | description: > | ||
700 | 32 | YAML map of units to device names, e.g: | ||
701 | 33 | "{ rsyslog/0: /dev/vdb, rsyslog/1: /dev/vdb }" | ||
702 | 34 | Service units will raise a configure-error if volume-ephemeral | ||
703 | 35 | is 'true' and no volume-map value is set. Use 'juju set' to set a | ||
704 | 36 | value and 'juju resolved' to complete configuration. | ||
705 | 37 | |||
706 | 38 | Usage:: | ||
707 | 39 | |||
708 | 40 | from charmsupport.volumes import configure_volume, VolumeConfigurationError | ||
709 | 41 | from charmsupport.hookenv import log, ERROR | ||
710 | 42 | def pre_mount_hook(): | ||
711 | 43 | stop_service('myservice') | ||
712 | 44 | def post_mount_hook(): | ||
713 | 45 | start_service('myservice') | ||
714 | 46 | |||
715 | 47 | if __name__ == '__main__': | ||
716 | 48 | try: | ||
717 | 49 | configure_volume(before_change=pre_mount_hook, | ||
718 | 50 | after_change=post_mount_hook) | ||
719 | 51 | except VolumeConfigurationError: | ||
720 | 52 | log('Storage could not be configured', ERROR) | ||
721 | 53 | |||
722 | 54 | ''' | ||
723 | 55 | |||
724 | 56 | # XXX: Known limitations | ||
725 | 57 | # - fstab is neither consulted nor updated | ||
726 | 58 | |||
727 | 59 | import os | ||
728 | 60 | from charmhelpers.core import hookenv | ||
729 | 61 | from charmhelpers.core import host | ||
730 | 62 | import yaml | ||
731 | 63 | |||
732 | 64 | |||
733 | 65 | MOUNT_BASE = '/srv/juju/volumes' | ||
734 | 66 | |||
735 | 67 | |||
class VolumeConfigurationError(Exception):
    '''Raised when volume configuration data is missing or invalid.'''
    pass
739 | 71 | |||
740 | 72 | |||
def get_config():
    '''Gather and sanity-check volume configuration data.

    Reads ``volume-ephemeral`` and ``volume-map`` from the charm config and
    returns a dict with ``ephemeral``, ``device`` and ``mountpoint`` keys,
    or ``None`` if the configuration is invalid.
    '''
    volume_config = {}
    config = hookenv.config()

    errors = False

    if config.get('volume-ephemeral') in (True, 'True', 'true', 'Yes', 'yes'):
        volume_config['ephemeral'] = True
    else:
        volume_config['ephemeral'] = False

    # BUG FIX: pre-initialise volume_map; previously a yaml.YAMLError left it
    # unbound and the "volume_map is None" test below raised NameError.
    volume_map = {}
    try:
        volume_map = yaml.safe_load(config.get('volume-map', '{}'))
    except yaml.YAMLError as e:
        hookenv.log("Error parsing YAML volume-map: {}".format(e),
                    hookenv.ERROR)
        errors = True
    if volume_map is None:
        # probably an empty string
        volume_map = {}
    elif not isinstance(volume_map, dict):
        hookenv.log("Volume-map should be a dictionary, not {}".format(
            type(volume_map)))
        errors = True
        # BUG FIX: fall back to an empty dict so the .get() below does not
        # raise AttributeError on e.g. a list or string.
        volume_map = {}

    volume_config['device'] = volume_map.get(os.environ['JUJU_UNIT_NAME'])
    if volume_config['device'] and volume_config['ephemeral']:
        # asked for ephemeral storage but also defined a volume ID
        hookenv.log('A volume is defined for this unit, but ephemeral '
                    'storage was requested', hookenv.ERROR)
        errors = True
    elif not volume_config['device'] and not volume_config['ephemeral']:
        # asked for permanent storage but did not define volume ID
        # BUG FIX: the old message claimed ephemeral storage was requested,
        # contradicting the condition (and the comment above).
        hookenv.log('Permanent storage was requested, but there is no volume '
                    'defined for this unit.', hookenv.ERROR)
        errors = True

    unit_mount_name = hookenv.local_unit().replace('/', '-')
    volume_config['mountpoint'] = os.path.join(MOUNT_BASE, unit_mount_name)

    if errors:
        return None
    return volume_config
785 | 117 | |||
786 | 118 | |||
def mount_volume(config):
    '''Mount the configured device at its mountpoint, remounting if needed.

    :raises VolumeConfigurationError: if the mountpoint exists but is not a
        directory, or if mounting the device fails.
    '''
    mountpoint = config['mountpoint']
    if not os.path.exists(mountpoint):
        host.mkdir(mountpoint)
    elif not os.path.isdir(mountpoint):
        hookenv.log('Not a directory: {}'.format(mountpoint))
        raise VolumeConfigurationError()
    # Re-mount if something is already mounted there.
    if os.path.ismount(mountpoint):
        unmount_volume(config)
    if not host.mount(config['device'], mountpoint, persist=True):
        raise VolumeConfigurationError()
798 | 130 | |||
799 | 131 | |||
def unmount_volume(config):
    '''Unmount the volume at its mountpoint, if currently mounted.

    :raises VolumeConfigurationError: if the unmount fails.
    '''
    mountpoint = config['mountpoint']
    if not os.path.ismount(mountpoint):
        return
    if not host.umount(mountpoint, persist=True):
        raise VolumeConfigurationError()
804 | 136 | |||
805 | 137 | |||
def managed_mounts():
    '''List of all mounted managed volumes (those under MOUNT_BASE).'''
    def _is_managed(mount):
        return mount[0].startswith(MOUNT_BASE)
    return filter(_is_managed, host.mounts())
809 | 141 | |||
810 | 142 | |||
def configure_volume(before_change=lambda: None, after_change=lambda: None):
    '''Set up storage (or don't) according to the charm's volume configuration.
    Returns the mount point or "ephemeral". before_change and after_change
    are optional functions to be called if the volume configuration changes.
    '''
    config = get_config()
    if not config:
        hookenv.log('Failed to read volume configuration', hookenv.CRITICAL)
        raise VolumeConfigurationError()

    mounted = os.path.ismount(config['mountpoint'])

    if config['ephemeral']:
        # Ephemeral storage: make sure nothing stays mounted here.
        if mounted:
            before_change()
            unmount_volume(config)
            after_change()
        return 'ephemeral'

    # Persistent storage requested.
    if mounted:
        current = dict(managed_mounts())
        if current.get(config['mountpoint']) != config['device']:
            # The wrong device is mounted here; swap it for the right one.
            before_change()
            unmount_volume(config)
            mount_volume(config)
            after_change()
    else:
        before_change()
        mount_volume(config)
        after_change()
    return config['mountpoint']
842 | diff --git a/hooks/charmhelpers/core/hookenv.py b/hooks/charmhelpers/core/hookenv.py | |||
843 | index 67ad691..d7c37c1 100644 | |||
844 | --- a/hooks/charmhelpers/core/hookenv.py | |||
845 | +++ b/hooks/charmhelpers/core/hookenv.py | |||
846 | @@ -21,23 +21,29 @@ | |||
847 | 21 | from __future__ import print_function | 21 | from __future__ import print_function |
848 | 22 | import copy | 22 | import copy |
849 | 23 | from distutils.version import LooseVersion | 23 | from distutils.version import LooseVersion |
850 | 24 | from enum import Enum | ||
851 | 24 | from functools import wraps | 25 | from functools import wraps |
852 | 26 | from collections import namedtuple | ||
853 | 25 | import glob | 27 | import glob |
854 | 26 | import os | 28 | import os |
855 | 27 | import json | 29 | import json |
856 | 28 | import yaml | 30 | import yaml |
857 | 31 | import re | ||
858 | 29 | import subprocess | 32 | import subprocess |
859 | 30 | import sys | 33 | import sys |
860 | 31 | import errno | 34 | import errno |
861 | 32 | import tempfile | 35 | import tempfile |
862 | 33 | from subprocess import CalledProcessError | 36 | from subprocess import CalledProcessError |
863 | 34 | 37 | ||
864 | 38 | from charmhelpers import deprecate | ||
865 | 39 | |||
866 | 35 | import six | 40 | import six |
867 | 36 | if not six.PY3: | 41 | if not six.PY3: |
868 | 37 | from UserDict import UserDict | 42 | from UserDict import UserDict |
869 | 38 | else: | 43 | else: |
870 | 39 | from collections import UserDict | 44 | from collections import UserDict |
871 | 40 | 45 | ||
872 | 46 | |||
873 | 41 | CRITICAL = "CRITICAL" | 47 | CRITICAL = "CRITICAL" |
874 | 42 | ERROR = "ERROR" | 48 | ERROR = "ERROR" |
875 | 43 | WARNING = "WARNING" | 49 | WARNING = "WARNING" |
876 | @@ -45,6 +51,20 @@ INFO = "INFO" | |||
877 | 45 | DEBUG = "DEBUG" | 51 | DEBUG = "DEBUG" |
878 | 46 | TRACE = "TRACE" | 52 | TRACE = "TRACE" |
879 | 47 | MARKER = object() | 53 | MARKER = object() |
880 | 54 | SH_MAX_ARG = 131071 | ||
881 | 55 | |||
882 | 56 | |||
883 | 57 | RANGE_WARNING = ('Passing NO_PROXY string that includes a cidr. ' | ||
884 | 58 | 'This may not be compatible with software you are ' | ||
885 | 59 | 'running in your shell.') | ||
886 | 60 | |||
887 | 61 | |||
class WORKLOAD_STATES(Enum):
    # Enumeration of Juju workload state names; values are the literal
    # strings the hook tools expect.
    ACTIVE = 'active'
    BLOCKED = 'blocked'
    MAINTENANCE = 'maintenance'
    WAITING = 'waiting'
893 | 67 | |||
894 | 48 | 68 | ||
895 | 49 | cache = {} | 69 | cache = {} |
896 | 50 | 70 | ||
897 | @@ -65,7 +85,7 @@ def cached(func): | |||
898 | 65 | @wraps(func) | 85 | @wraps(func) |
899 | 66 | def wrapper(*args, **kwargs): | 86 | def wrapper(*args, **kwargs): |
900 | 67 | global cache | 87 | global cache |
902 | 68 | key = str((func, args, kwargs)) | 88 | key = json.dumps((func, args, kwargs), sort_keys=True, default=str) |
903 | 69 | try: | 89 | try: |
904 | 70 | return cache[key] | 90 | return cache[key] |
905 | 71 | except KeyError: | 91 | except KeyError: |
906 | @@ -95,7 +115,7 @@ def log(message, level=None): | |||
907 | 95 | command += ['-l', level] | 115 | command += ['-l', level] |
908 | 96 | if not isinstance(message, six.string_types): | 116 | if not isinstance(message, six.string_types): |
909 | 97 | message = repr(message) | 117 | message = repr(message) |
911 | 98 | command += [message] | 118 | command += [message[:SH_MAX_ARG]] |
912 | 99 | # Missing juju-log should not cause failures in unit tests | 119 | # Missing juju-log should not cause failures in unit tests |
913 | 100 | # Send log output to stderr | 120 | # Send log output to stderr |
914 | 101 | try: | 121 | try: |
915 | @@ -110,6 +130,24 @@ def log(message, level=None): | |||
916 | 110 | raise | 130 | raise |
917 | 111 | 131 | ||
918 | 112 | 132 | ||
def function_log(message):
    """Write a function progress message."""
    if not isinstance(message, six.string_types):
        message = repr(message)
    # Hook-tool arguments are capped at SH_MAX_ARG characters.
    command = ['function-log', message[:SH_MAX_ARG]]
    # Missing function-log should not cause failures in unit tests;
    # send the output to stderr instead.
    try:
        subprocess.call(command)
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise
        print("function-log: {}".format(message), file=sys.stderr)
935 | 149 | |||
936 | 150 | |||
937 | 113 | class Serializable(UserDict): | 151 | class Serializable(UserDict): |
938 | 114 | """Wrapper, an object that can be serialized to yaml or json""" | 152 | """Wrapper, an object that can be serialized to yaml or json""" |
939 | 115 | 153 | ||
940 | @@ -198,11 +236,35 @@ def remote_unit(): | |||
941 | 198 | return os.environ.get('JUJU_REMOTE_UNIT', None) | 236 | return os.environ.get('JUJU_REMOTE_UNIT', None) |
942 | 199 | 237 | ||
943 | 200 | 238 | ||
946 | 201 | def service_name(): | 239 | def application_name(): |
947 | 202 | """The name service group this unit belongs to""" | 240 | """ |
948 | 241 | The name of the deployed application this unit belongs to. | ||
949 | 242 | """ | ||
950 | 203 | return local_unit().split('/')[0] | 243 | return local_unit().split('/')[0] |
951 | 204 | 244 | ||
952 | 205 | 245 | ||
def service_name():
    """
    .. deprecated:: 0.19.1
        Alias for :func:`application_name`; returns the name of the
        deployed application this unit belongs to.
    """
    return application_name()
959 | 252 | |||
960 | 253 | |||
def model_name():
    """Return the name of the Juju model this unit is deployed in.

    Reads the ``JUJU_MODEL_NAME`` environment variable; raises ``KeyError``
    when it is unset (i.e. outside a hook context).
    """
    return os.environ['JUJU_MODEL_NAME']
966 | 259 | |||
967 | 260 | |||
968 | 261 | def model_uuid(): | ||
969 | 262 | """ | ||
970 | 263 | UUID of the model that this unit is deployed in. | ||
971 | 264 | """ | ||
972 | 265 | return os.environ['JUJU_MODEL_UUID'] | ||
973 | 266 | |||
974 | 267 | |||
975 | 206 | def principal_unit(): | 268 | def principal_unit(): |
976 | 207 | """Returns the principal unit of this unit, otherwise None""" | 269 | """Returns the principal unit of this unit, otherwise None""" |
977 | 208 | # Juju 2.2 and above provides JUJU_PRINCIPAL_UNIT | 270 | # Juju 2.2 and above provides JUJU_PRINCIPAL_UNIT |
978 | @@ -287,7 +349,7 @@ class Config(dict): | |||
979 | 287 | self.implicit_save = True | 349 | self.implicit_save = True |
980 | 288 | self._prev_dict = None | 350 | self._prev_dict = None |
981 | 289 | self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME) | 351 | self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME) |
983 | 290 | if os.path.exists(self.path): | 352 | if os.path.exists(self.path) and os.stat(self.path).st_size: |
984 | 291 | self.load_previous() | 353 | self.load_previous() |
985 | 292 | atexit(self._implicit_save) | 354 | atexit(self._implicit_save) |
986 | 293 | 355 | ||
987 | @@ -307,7 +369,11 @@ class Config(dict): | |||
988 | 307 | """ | 369 | """ |
989 | 308 | self.path = path or self.path | 370 | self.path = path or self.path |
990 | 309 | with open(self.path) as f: | 371 | with open(self.path) as f: |
992 | 310 | self._prev_dict = json.load(f) | 372 | try: |
993 | 373 | self._prev_dict = json.load(f) | ||
994 | 374 | except ValueError as e: | ||
995 | 375 | log('Unable to parse previous config data - {}'.format(str(e)), | ||
996 | 376 | level=ERROR) | ||
997 | 311 | for k, v in copy.deepcopy(self._prev_dict).items(): | 377 | for k, v in copy.deepcopy(self._prev_dict).items(): |
998 | 312 | if k not in self: | 378 | if k not in self: |
999 | 313 | self[k] = v | 379 | self[k] = v |
1000 | @@ -343,6 +409,7 @@ class Config(dict): | |||
1001 | 343 | 409 | ||
1002 | 344 | """ | 410 | """ |
1003 | 345 | with open(self.path, 'w') as f: | 411 | with open(self.path, 'w') as f: |
1004 | 412 | os.fchmod(f.fileno(), 0o600) | ||
1005 | 346 | json.dump(self, f) | 413 | json.dump(self, f) |
1006 | 347 | 414 | ||
1007 | 348 | def _implicit_save(self): | 415 | def _implicit_save(self): |
1008 | @@ -350,22 +417,40 @@ class Config(dict): | |||
1009 | 350 | self.save() | 417 | self.save() |
1010 | 351 | 418 | ||
1011 | 352 | 419 | ||
1013 | 353 | @cached | 420 | _cache_config = None |
1014 | 421 | |||
1015 | 422 | |||
1016 | 354 | def config(scope=None): | 423 | def config(scope=None): |
1024 | 355 | """Juju charm configuration""" | 424 | """ |
1025 | 356 | config_cmd_line = ['config-get'] | 425 | Get the juju charm configuration (scope==None) or individual key, |
1026 | 357 | if scope is not None: | 426 | (scope=str). The returned value is a Python data structure loaded as |
1027 | 358 | config_cmd_line.append(scope) | 427 | JSON from the Juju config command. |
1028 | 359 | else: | 428 | |
1029 | 360 | config_cmd_line.append('--all') | 429 | :param scope: If set, return the value for the specified key. |
1030 | 361 | config_cmd_line.append('--format=json') | 430 | :type scope: Optional[str] |
1031 | 431 | :returns: Either the whole config as a Config, or a key from it. | ||
1032 | 432 | :rtype: Any | ||
1033 | 433 | """ | ||
1034 | 434 | global _cache_config | ||
1035 | 435 | config_cmd_line = ['config-get', '--all', '--format=json'] | ||
1036 | 436 | try: | ||
1037 | 437 | # JSON Decode Exception for Python3.5+ | ||
1038 | 438 | exc_json = json.decoder.JSONDecodeError | ||
1039 | 439 | except AttributeError: | ||
1040 | 440 | # JSON Decode Exception for Python2.7 through Python3.4 | ||
1041 | 441 | exc_json = ValueError | ||
1042 | 362 | try: | 442 | try: |
1045 | 363 | config_data = json.loads( | 443 | if _cache_config is None: |
1046 | 364 | subprocess.check_output(config_cmd_line).decode('UTF-8')) | 444 | config_data = json.loads( |
1047 | 445 | subprocess.check_output(config_cmd_line).decode('UTF-8')) | ||
1048 | 446 | _cache_config = Config(config_data) | ||
1049 | 365 | if scope is not None: | 447 | if scope is not None: |
1053 | 366 | return config_data | 448 | return _cache_config.get(scope) |
1054 | 367 | return Config(config_data) | 449 | return _cache_config |
1055 | 368 | except ValueError: | 450 | except (exc_json, UnicodeDecodeError) as e: |
1056 | 451 | log('Unable to parse output from config-get: config_cmd_line="{}" ' | ||
1057 | 452 | 'message="{}"' | ||
1058 | 453 | .format(config_cmd_line, str(e)), level=ERROR) | ||
1059 | 369 | return None | 454 | return None |
1060 | 370 | 455 | ||
1061 | 371 | 456 | ||
1062 | @@ -459,6 +544,67 @@ def related_units(relid=None): | |||
1063 | 459 | subprocess.check_output(units_cmd_line).decode('UTF-8')) or [] | 544 | subprocess.check_output(units_cmd_line).decode('UTF-8')) or [] |
1064 | 460 | 545 | ||
1065 | 461 | 546 | ||
def expected_peer_units():
    """Generator of units expected to join the peer relation, per goal-state.

    The local unit is excluded from the result to make it easy to gauge
    completion of all peers joining the relation with existing hook tools.

    Example usage:
        log('peer {} of {} joined peer relation'
            .format(len(related_units()),
                    len(list(expected_peer_units()))))

    :returns: iterator over peer unit names
    :rtype: types.GeneratorType
    :raises: NotImplementedError on Juju versions without goal-state support
    """
    if not has_juju_version("2.4.0"):
        # goal-state first appeared in 2.4.0.
        raise NotImplementedError("goal-state")
    units = goal_state()['units']
    me = local_unit()
    # Entries without a '/' are application names, not units.
    return (name for name in units if '/' in name and name != me)
1091 | 572 | |||
1092 | 573 | |||
def expected_related_units(reltype=None):
    """Generator of units expected to join a relation, per goal-state.

    Note that you can not use this function for the peer relation; use
    expected_peer_units() for that.

    Example usage:
        log('participant {} of {} joined relation {}'
            .format(len(related_units()),
                    len(list(expected_related_units())),
                    relation_type()))

    :param reltype: Relation type to list data for; defaults to the relation
        type we are currently executing a hook for.
    :type reltype: str
    :returns: iterator over unit names
    :rtype: types.GeneratorType
    :raises KeyError: if goal-state has no data for the relation type
    :raises NotImplementedError: on Juju versions without goal-state support
    """
    if not has_juju_version("2.4.4"):
        # goal-state existed in 2.4.0, but did not list individual units to
        # join a relation in 2.4.1 through 2.4.3. (LP: #1794739)
        raise NotImplementedError("goal-state relation unit count")
    reltype = reltype or relation_type()
    members = goal_state()['relations'][reltype]
    # Entries without a '/' are application names, not units.
    return (name for name in members if '/' in name)
1125 | 606 | |||
1126 | 607 | |||
1127 | 462 | @cached | 608 | @cached |
1128 | 463 | def relation_for_unit(unit=None, rid=None): | 609 | def relation_for_unit(unit=None, rid=None): |
1129 | 464 | """Get the json represenation of a unit's relation""" | 610 | """Get the json represenation of a unit's relation""" |
1130 | @@ -644,18 +790,31 @@ def is_relation_made(relation, keys='private-address'): | |||
1131 | 644 | return False | 790 | return False |
1132 | 645 | 791 | ||
1133 | 646 | 792 | ||
1134 | 793 | def _port_op(op_name, port, protocol="TCP"): | ||
1135 | 794 | """Open or close a service network port""" | ||
1136 | 795 | _args = [op_name] | ||
1137 | 796 | icmp = protocol.upper() == "ICMP" | ||
1138 | 797 | if icmp: | ||
1139 | 798 | _args.append(protocol) | ||
1140 | 799 | else: | ||
1141 | 800 | _args.append('{}/{}'.format(port, protocol)) | ||
1142 | 801 | try: | ||
1143 | 802 | subprocess.check_call(_args) | ||
1144 | 803 | except subprocess.CalledProcessError: | ||
1145 | 804 | # Older Juju pre 2.3 doesn't support ICMP | ||
1146 | 805 | # so treat it as a no-op if it fails. | ||
1147 | 806 | if not icmp: | ||
1148 | 807 | raise | ||
1149 | 808 | |||
1150 | 809 | |||
1151 | 647 | def open_port(port, protocol="TCP"): | 810 | def open_port(port, protocol="TCP"): |
1152 | 648 | """Open a service network port""" | 811 | """Open a service network port""" |
1156 | 649 | _args = ['open-port'] | 812 | _port_op('open-port', port, protocol) |
1154 | 650 | _args.append('{}/{}'.format(port, protocol)) | ||
1155 | 651 | subprocess.check_call(_args) | ||
1157 | 652 | 813 | ||
1158 | 653 | 814 | ||
1159 | 654 | def close_port(port, protocol="TCP"): | 815 | def close_port(port, protocol="TCP"): |
1160 | 655 | """Close a service network port""" | 816 | """Close a service network port""" |
1164 | 656 | _args = ['close-port'] | 817 | _port_op('close-port', port, protocol) |
1162 | 657 | _args.append('{}/{}'.format(port, protocol)) | ||
1163 | 658 | subprocess.check_call(_args) | ||
1165 | 659 | 818 | ||
1166 | 660 | 819 | ||
1167 | 661 | def open_ports(start, end, protocol="TCP"): | 820 | def open_ports(start, end, protocol="TCP"): |
1168 | @@ -672,6 +831,17 @@ def close_ports(start, end, protocol="TCP"): | |||
1169 | 672 | subprocess.check_call(_args) | 831 | subprocess.check_call(_args) |
1170 | 673 | 832 | ||
1171 | 674 | 833 | ||
def opened_ports():
    """Get the opened ports.

    *Note that this will only show ports opened in a previous hook*

    :returns: Opened ports as a list of strings: ``['8080/tcp', '8081-8083/tcp']``
    """
    raw = subprocess.check_output(['opened-ports', '--format=json'])
    return json.loads(raw.decode('UTF-8'))
1181 | 843 | |||
1182 | 844 | |||
1183 | 675 | @cached | 845 | @cached |
1184 | 676 | def unit_get(attribute): | 846 | def unit_get(attribute): |
1185 | 677 | """Get the unit ID for the remote unit""" | 847 | """Get the unit ID for the remote unit""" |
1186 | @@ -793,6 +963,10 @@ class Hooks(object): | |||
1187 | 793 | return wrapper | 963 | return wrapper |
1188 | 794 | 964 | ||
1189 | 795 | 965 | ||
class NoNetworkBinding(Exception):
    """Signals a failed network-binding lookup.

    NOTE(review): raising sites are outside this chunk -- confirm usage.
    """
1192 | 968 | |||
1193 | 969 | |||
1194 | 796 | def charm_dir(): | 970 | def charm_dir(): |
1195 | 797 | """Return the root directory of the current charm""" | 971 | """Return the root directory of the current charm""" |
1196 | 798 | d = os.environ.get('JUJU_CHARM_DIR') | 972 | d = os.environ.get('JUJU_CHARM_DIR') |
1197 | @@ -801,9 +975,23 @@ def charm_dir(): | |||
1198 | 801 | return os.environ.get('CHARM_DIR') | 975 | return os.environ.get('CHARM_DIR') |
1199 | 802 | 976 | ||
1200 | 803 | 977 | ||
def cmd_exists(cmd):
    """Return True if the specified cmd exists in the path.

    :param str cmd: bare command name to look up on ``$PATH``
    :returns: True when an executable regular file named ``cmd`` is found.
    """
    for path in os.environ["PATH"].split(os.pathsep):
        candidate = os.path.join(path, cmd)
        # BUG FIX: os.access(..., X_OK) alone also matches directories
        # (which typically carry the execute bit); require a regular file.
        if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
            return True
    return False
1207 | 984 | |||
1208 | 985 | |||
1209 | 804 | @cached | 986 | @cached |
1210 | 987 | @deprecate("moved to function_get()", log=log) | ||
1211 | 805 | def action_get(key=None): | 988 | def action_get(key=None): |
1213 | 806 | """Gets the value of an action parameter, or all key/value param pairs""" | 989 | """ |
1214 | 990 | .. deprecated:: 0.20.7 | ||
1215 | 991 | Alias for :func:`function_get`. | ||
1216 | 992 | |||
1217 | 993 | Gets the value of an action parameter, or all key/value param pairs. | ||
1218 | 994 | """ | ||
1219 | 807 | cmd = ['action-get'] | 995 | cmd = ['action-get'] |
1220 | 808 | if key is not None: | 996 | if key is not None: |
1221 | 809 | cmd.append(key) | 997 | cmd.append(key) |
1222 | @@ -812,52 +1000,130 @@ def action_get(key=None): | |||
1223 | 812 | return action_data | 1000 | return action_data |
1224 | 813 | 1001 | ||
1225 | 814 | 1002 | ||
@cached
def function_get(key=None):
    """Gets the value of an action parameter, or all key/value param pairs."""
    if cmd_exists('function-get'):
        cmd = ['function-get']
    else:
        # Fallback for older charms.
        cmd = ['action-get']
    if key is not None:
        cmd.append(key)
    cmd.append('--format=json')
    raw = subprocess.check_output(cmd).decode('UTF-8')
    return json.loads(raw)
1239 | 1016 | |||
1240 | 1017 | |||
1241 | 1018 | @deprecate("moved to function_set()", log=log) | ||
1242 | 815 | def action_set(values): | 1019 | def action_set(values): |
1244 | 816 | """Sets the values to be returned after the action finishes""" | 1020 | """ |
1245 | 1021 | .. deprecated:: 0.20.7 | ||
1246 | 1022 | Alias for :func:`function_set`. | ||
1247 | 1023 | |||
1248 | 1024 | Sets the values to be returned after the action finishes. | ||
1249 | 1025 | """ | ||
1250 | 817 | cmd = ['action-set'] | 1026 | cmd = ['action-set'] |
1251 | 818 | for k, v in list(values.items()): | 1027 | for k, v in list(values.items()): |
1252 | 819 | cmd.append('{}={}'.format(k, v)) | 1028 | cmd.append('{}={}'.format(k, v)) |
1253 | 820 | subprocess.check_call(cmd) | 1029 | subprocess.check_call(cmd) |
1254 | 821 | 1030 | ||
1255 | 822 | 1031 | ||
1256 | 1032 | def function_set(values): | ||
1257 | 1033 | """Sets the values to be returned after the function finishes""" | ||
1258 | 1034 | cmd = ['function-set'] | ||
1259 | 1035 | # Fallback for older charms. | ||
1260 | 1036 | if not cmd_exists('function-get'): | ||
1261 | 1037 | cmd = ['action-set'] | ||
1262 | 1038 | |||
1263 | 1039 | for k, v in list(values.items()): | ||
1264 | 1040 | cmd.append('{}={}'.format(k, v)) | ||
1265 | 1041 | subprocess.check_call(cmd) | ||
1266 | 1042 | |||
1267 | 1043 | |||
1268 | 1044 | @deprecate("moved to function_fail()", log=log) | ||
1269 | 823 | def action_fail(message): | 1045 | def action_fail(message): |
1271 | 824 | """Sets the action status to failed and sets the error message. | 1046 | """ |
1272 | 1047 | .. deprecated:: 0.20.7 | ||
1273 | 1048 | Alias for :func:`function_fail`. | ||
1274 | 1049 | |||
1275 | 1050 | Sets the action status to failed and sets the error message. | ||
1276 | 825 | 1051 | ||
1278 | 826 | The results set by action_set are preserved.""" | 1052 | The results set by action_set are preserved. |
1279 | 1053 | """ | ||
1280 | 827 | subprocess.check_call(['action-fail', message]) | 1054 | subprocess.check_call(['action-fail', message]) |
1281 | 828 | 1055 | ||
1282 | 829 | 1056 | ||
1283 | 1057 | def function_fail(message): | ||
1284 | 1058 | """Sets the function status to failed and sets the error message. | ||
1285 | 1059 | |||
1286 | 1060 | The results set by function_set are preserved.""" | ||
1287 | 1061 | cmd = ['function-fail'] | ||
1288 | 1062 | # Fallback for older charms. | ||
1289 | 1063 | if not cmd_exists('function-fail'): | ||
1290 | 1064 | cmd = ['action-fail'] | ||
1291 | 1065 | cmd.append(message) | ||
1292 | 1066 | |||
1293 | 1067 | subprocess.check_call(cmd) | ||
1294 | 1068 | |||
1295 | 1069 | |||
1296 | 830 | def action_name(): | 1070 | def action_name(): |
1297 | 831 | """Get the name of the currently executing action.""" | 1071 | """Get the name of the currently executing action.""" |
1298 | 832 | return os.environ.get('JUJU_ACTION_NAME') | 1072 | return os.environ.get('JUJU_ACTION_NAME') |
1299 | 833 | 1073 | ||
1300 | 834 | 1074 | ||
1301 | 1075 | def function_name(): | ||
1302 | 1076 | """Get the name of the currently executing function.""" | ||
1303 | 1077 | return os.environ.get('JUJU_FUNCTION_NAME') or action_name() | ||
1304 | 1078 | |||
1305 | 1079 | |||
1306 | 835 | def action_uuid(): | 1080 | def action_uuid(): |
1307 | 836 | """Get the UUID of the currently executing action.""" | 1081 | """Get the UUID of the currently executing action.""" |
1308 | 837 | return os.environ.get('JUJU_ACTION_UUID') | 1082 | return os.environ.get('JUJU_ACTION_UUID') |
1309 | 838 | 1083 | ||
1310 | 839 | 1084 | ||
1311 | 1085 | def function_id(): | ||
1312 | 1086 | """Get the ID of the currently executing function.""" | ||
1313 | 1087 | return os.environ.get('JUJU_FUNCTION_ID') or action_uuid() | ||
1314 | 1088 | |||
1315 | 1089 | |||
1316 | 840 | def action_tag(): | 1090 | def action_tag(): |
1317 | 841 | """Get the tag for the currently executing action.""" | 1091 | """Get the tag for the currently executing action.""" |
1318 | 842 | return os.environ.get('JUJU_ACTION_TAG') | 1092 | return os.environ.get('JUJU_ACTION_TAG') |
1319 | 843 | 1093 | ||
1320 | 844 | 1094 | ||
1322 | 845 | def status_set(workload_state, message): | 1095 | def function_tag(): |
1323 | 1096 | """Get the tag for the currently executing function.""" | ||
1324 | 1097 | return os.environ.get('JUJU_FUNCTION_TAG') or action_tag() | ||
1325 | 1098 | |||
1326 | 1099 | |||
1327 | 1100 | def status_set(workload_state, message, application=False): | ||
1328 | 846 | """Set the workload state with a message | 1101 | """Set the workload state with a message |
1329 | 847 | 1102 | ||
1330 | 848 | Use status-set to set the workload state with a message which is visible | 1103 | Use status-set to set the workload state with a message which is visible |
1331 | 849 | to the user via juju status. If the status-set command is not found then | 1104 | to the user via juju status. If the status-set command is not found then |
1333 | 850 | assume this is juju < 1.23 and juju-log the message unstead. | 1105 | assume this is juju < 1.23 and juju-log the message instead. |
1334 | 851 | 1106 | ||
1337 | 852 | workload_state -- valid juju workload state. | 1107 | workload_state -- valid juju workload state. str or WORKLOAD_STATES |
1338 | 853 | message -- status update message | 1108 | message -- status update message |
1339 | 1109 | application -- Whether this is an application state set | ||
1340 | 854 | """ | 1110 | """ |
1347 | 855 | valid_states = ['maintenance', 'blocked', 'waiting', 'active'] | 1111 | bad_state_msg = '{!r} is not a valid workload state' |
1348 | 856 | if workload_state not in valid_states: | 1112 | |
1349 | 857 | raise ValueError( | 1113 | if isinstance(workload_state, str): |
1350 | 858 | '{!r} is not a valid workload state'.format(workload_state) | 1114 | try: |
1351 | 859 | ) | 1115 | # Convert string to enum. |
1352 | 860 | cmd = ['status-set', workload_state, message] | 1116 | workload_state = WORKLOAD_STATES[workload_state.upper()] |
1353 | 1117 | except KeyError: | ||
1354 | 1118 | raise ValueError(bad_state_msg.format(workload_state)) | ||
1355 | 1119 | |||
1356 | 1120 | if workload_state not in WORKLOAD_STATES: | ||
1357 | 1121 | raise ValueError(bad_state_msg.format(workload_state)) | ||
1358 | 1122 | |||
1359 | 1123 | cmd = ['status-set'] | ||
1360 | 1124 | if application: | ||
1361 | 1125 | cmd.append('--application') | ||
1362 | 1126 | cmd.extend([workload_state.value, message]) | ||
1363 | 861 | try: | 1127 | try: |
1364 | 862 | ret = subprocess.call(cmd) | 1128 | ret = subprocess.call(cmd) |
1365 | 863 | if ret == 0: | 1129 | if ret == 0: |
1366 | @@ -865,7 +1131,7 @@ def status_set(workload_state, message): | |||
1367 | 865 | except OSError as e: | 1131 | except OSError as e: |
1368 | 866 | if e.errno != errno.ENOENT: | 1132 | if e.errno != errno.ENOENT: |
1369 | 867 | raise | 1133 | raise |
1371 | 868 | log_message = 'status-set failed: {} {}'.format(workload_state, | 1134 | log_message = 'status-set failed: {} {}'.format(workload_state.value, |
1372 | 869 | message) | 1135 | message) |
1373 | 870 | log(log_message, level='INFO') | 1136 | log(log_message, level='INFO') |
1374 | 871 | 1137 | ||
1375 | @@ -919,6 +1185,14 @@ def application_version_set(version): | |||
1376 | 919 | 1185 | ||
1377 | 920 | 1186 | ||
1378 | 921 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | 1187 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) |
1379 | 1188 | @cached | ||
1380 | 1189 | def goal_state(): | ||
1381 | 1190 | """Juju goal state values""" | ||
1382 | 1191 | cmd = ['goal-state', '--format=json'] | ||
1383 | 1192 | return json.loads(subprocess.check_output(cmd).decode('UTF-8')) | ||
1384 | 1193 | |||
1385 | 1194 | |||
1386 | 1195 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | ||
1387 | 922 | def is_leader(): | 1196 | def is_leader(): |
1388 | 923 | """Does the current unit hold the juju leadership | 1197 | """Does the current unit hold the juju leadership |
1389 | 924 | 1198 | ||
1390 | @@ -1012,7 +1286,6 @@ def juju_version(): | |||
1391 | 1012 | universal_newlines=True).strip() | 1286 | universal_newlines=True).strip() |
1392 | 1013 | 1287 | ||
1393 | 1014 | 1288 | ||
1394 | 1015 | @cached | ||
1395 | 1016 | def has_juju_version(minimum_version): | 1289 | def has_juju_version(minimum_version): |
1396 | 1017 | """Return True if the Juju version is at least the provided version""" | 1290 | """Return True if the Juju version is at least the provided version""" |
1397 | 1018 | return LooseVersion(juju_version()) >= LooseVersion(minimum_version) | 1291 | return LooseVersion(juju_version()) >= LooseVersion(minimum_version) |
1398 | @@ -1072,6 +1345,8 @@ def _run_atexit(): | |||
1399 | 1072 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | 1345 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) |
1400 | 1073 | def network_get_primary_address(binding): | 1346 | def network_get_primary_address(binding): |
1401 | 1074 | ''' | 1347 | ''' |
1402 | 1348 | Deprecated since Juju 2.3; use network_get() | ||
1403 | 1349 | |||
1404 | 1075 | Retrieve the primary network address for a named binding | 1350 | Retrieve the primary network address for a named binding |
1405 | 1076 | 1351 | ||
1406 | 1077 | :param binding: string. The name of a relation of extra-binding | 1352 | :param binding: string. The name of a relation of extra-binding |
1407 | @@ -1079,10 +1354,19 @@ def network_get_primary_address(binding): | |||
1408 | 1079 | :raise: NotImplementedError if run on Juju < 2.0 | 1354 | :raise: NotImplementedError if run on Juju < 2.0 |
1409 | 1080 | ''' | 1355 | ''' |
1410 | 1081 | cmd = ['network-get', '--primary-address', binding] | 1356 | cmd = ['network-get', '--primary-address', binding] |
1412 | 1082 | return subprocess.check_output(cmd).decode('UTF-8').strip() | 1357 | try: |
1413 | 1358 | response = subprocess.check_output( | ||
1414 | 1359 | cmd, | ||
1415 | 1360 | stderr=subprocess.STDOUT).decode('UTF-8').strip() | ||
1416 | 1361 | except CalledProcessError as e: | ||
1417 | 1362 | if 'no network config found for binding' in e.output.decode('UTF-8'): | ||
1418 | 1363 | raise NoNetworkBinding("No network binding for {}" | ||
1419 | 1364 | .format(binding)) | ||
1420 | 1365 | else: | ||
1421 | 1366 | raise | ||
1422 | 1367 | return response | ||
1423 | 1083 | 1368 | ||
1424 | 1084 | 1369 | ||
1425 | 1085 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | ||
1426 | 1086 | def network_get(endpoint, relation_id=None): | 1370 | def network_get(endpoint, relation_id=None): |
1427 | 1087 | """ | 1371 | """ |
1428 | 1088 | Retrieve the network details for a relation endpoint | 1372 | Retrieve the network details for a relation endpoint |
1429 | @@ -1090,24 +1374,20 @@ def network_get(endpoint, relation_id=None): | |||
1430 | 1090 | :param endpoint: string. The name of a relation endpoint | 1374 | :param endpoint: string. The name of a relation endpoint |
1431 | 1091 | :param relation_id: int. The ID of the relation for the current context. | 1375 | :param relation_id: int. The ID of the relation for the current context. |
1432 | 1092 | :return: dict. The loaded YAML output of the network-get query. | 1376 | :return: dict. The loaded YAML output of the network-get query. |
1434 | 1093 | :raise: NotImplementedError if run on Juju < 2.1 | 1377 | :raise: NotImplementedError if request not supported by the Juju version. |
1435 | 1094 | """ | 1378 | """ |
1436 | 1379 | if not has_juju_version('2.2'): | ||
1437 | 1380 | raise NotImplementedError(juju_version()) # earlier versions require --primary-address | ||
1438 | 1381 | if relation_id and not has_juju_version('2.3'): | ||
1439 | 1382 | raise NotImplementedError # 2.3 added the -r option | ||
1440 | 1383 | |||
1441 | 1095 | cmd = ['network-get', endpoint, '--format', 'yaml'] | 1384 | cmd = ['network-get', endpoint, '--format', 'yaml'] |
1442 | 1096 | if relation_id: | 1385 | if relation_id: |
1443 | 1097 | cmd.append('-r') | 1386 | cmd.append('-r') |
1444 | 1098 | cmd.append(relation_id) | 1387 | cmd.append(relation_id) |
1457 | 1099 | try: | 1388 | response = subprocess.check_output( |
1458 | 1100 | response = subprocess.check_output( | 1389 | cmd, |
1459 | 1101 | cmd, | 1390 | stderr=subprocess.STDOUT).decode('UTF-8').strip() |
1448 | 1102 | stderr=subprocess.STDOUT).decode('UTF-8').strip() | ||
1449 | 1103 | except CalledProcessError as e: | ||
1450 | 1104 | # Early versions of Juju 2.0.x required the --primary-address argument. | ||
1451 | 1105 | # We catch that condition here and raise NotImplementedError since | ||
1452 | 1106 | # the requested semantics are not available - the caller can then | ||
1453 | 1107 | # use the network_get_primary_address() method instead. | ||
1454 | 1108 | if '--primary-address is currently required' in e.output.decode('UTF-8'): | ||
1455 | 1109 | raise NotImplementedError | ||
1456 | 1110 | raise | ||
1460 | 1111 | return yaml.safe_load(response) | 1391 | return yaml.safe_load(response) |
1461 | 1112 | 1392 | ||
1462 | 1113 | 1393 | ||
1463 | @@ -1140,3 +1420,192 @@ def meter_info(): | |||
1464 | 1140 | """Get the meter status information, if running in the meter-status-changed | 1420 | """Get the meter status information, if running in the meter-status-changed |
1465 | 1141 | hook.""" | 1421 | hook.""" |
1466 | 1142 | return os.environ.get('JUJU_METER_INFO') | 1422 | return os.environ.get('JUJU_METER_INFO') |
1467 | 1423 | |||
1468 | 1424 | |||
1469 | 1425 | def iter_units_for_relation_name(relation_name): | ||
1470 | 1426 | """Iterate through all units in a relation | ||
1471 | 1427 | |||
1472 | 1428 | Generator that iterates through all the units in a relation and yields | ||
1473 | 1429 | a named tuple with rid and unit field names. | ||
1474 | 1430 | |||
1475 | 1431 | Usage: | ||
1476 | 1432 | data = [(u.rid, u.unit) | ||
1477 | 1433 | for u in iter_units_for_relation_name(relation_name)] | ||
1478 | 1434 | |||
1479 | 1435 | :param relation_name: string relation name | ||
1480 | 1436 | :yield: Named Tuple with rid and unit field names | ||
1481 | 1437 | """ | ||
1482 | 1438 | RelatedUnit = namedtuple('RelatedUnit', 'rid, unit') | ||
1483 | 1439 | for rid in relation_ids(relation_name): | ||
1484 | 1440 | for unit in related_units(rid): | ||
1485 | 1441 | yield RelatedUnit(rid, unit) | ||
1486 | 1442 | |||
1487 | 1443 | |||
1488 | 1444 | def ingress_address(rid=None, unit=None): | ||
1489 | 1445 | """ | ||
1490 | 1446 | Retrieve the ingress-address from a relation when available. | ||
1491 | 1447 | Otherwise, return the private-address. | ||
1492 | 1448 | |||
1493 | 1449 | When used on the consuming side of the relation (unit is a remote | ||
1494 | 1450 | unit), the ingress-address is the IP address that this unit needs | ||
1495 | 1451 | to use to reach the provided service on the remote unit. | ||
1496 | 1452 | |||
1497 | 1453 | When used on the providing side of the relation (unit == local_unit()), | ||
1498 | 1454 | the ingress-address is the IP address that is advertised to remote | ||
1499 | 1455 | units on this relation. Remote units need to use this address to | ||
1500 | 1456 | reach the local provided service on this unit. | ||
1501 | 1457 | |||
1502 | 1458 | Note that charms may document some other method to use in | ||
1503 | 1459 | preference to the ingress_address(), such as an address provided | ||
1504 | 1460 | on a different relation attribute or a service discovery mechanism. | ||
1505 | 1461 | This allows charms to redirect inbound connections to their peers | ||
1506 | 1462 | or different applications such as load balancers. | ||
1507 | 1463 | |||
1508 | 1464 | Usage: | ||
1509 | 1465 | addresses = [ingress_address(rid=u.rid, unit=u.unit) | ||
1510 | 1466 | for u in iter_units_for_relation_name(relation_name)] | ||
1511 | 1467 | |||
1512 | 1468 | :param rid: string relation id | ||
1513 | 1469 | :param unit: string unit name | ||
1514 | 1470 | :side effect: calls relation_get | ||
1515 | 1471 | :return: string IP address | ||
1516 | 1472 | """ | ||
1517 | 1473 | settings = relation_get(rid=rid, unit=unit) | ||
1518 | 1474 | return (settings.get('ingress-address') or | ||
1519 | 1475 | settings.get('private-address')) | ||
1520 | 1476 | |||
1521 | 1477 | |||
1522 | 1478 | def egress_subnets(rid=None, unit=None): | ||
1523 | 1479 | """ | ||
1524 | 1480 | Retrieve the egress-subnets from a relation. | ||
1525 | 1481 | |||
1526 | 1482 | This function is to be used on the providing side of the | ||
1527 | 1483 | relation, and provides the ranges of addresses that client | ||
1528 | 1484 | connections may come from. The result is uninteresting on | ||
1529 | 1485 | the consuming side of a relation (unit == local_unit()). | ||
1530 | 1486 | |||
1531 | 1487 | Returns a stable list of subnets in CIDR format. | ||
1532 | 1488 | eg. ['192.168.1.0/24', '2001::F00F/128'] | ||
1533 | 1489 | |||
1534 | 1490 | If egress-subnets is not available, falls back to using the published | ||
1535 | 1491 | ingress-address, or finally private-address. | ||
1536 | 1492 | |||
1537 | 1493 | :param rid: string relation id | ||
1538 | 1494 | :param unit: string unit name | ||
1539 | 1495 | :side effect: calls relation_get | ||
1540 | 1496 | :return: list of subnets in CIDR format. eg. ['192.168.1.0/24', '2001::F00F/128'] | ||
1541 | 1497 | """ | ||
1542 | 1498 | def _to_range(addr): | ||
1543 | 1499 | if re.search(r'^(?:\d{1,3}\.){3}\d{1,3}$', addr) is not None: | ||
1544 | 1500 | addr += '/32' | ||
1545 | 1501 | elif ':' in addr and '/' not in addr: # IPv6 | ||
1546 | 1502 | addr += '/128' | ||
1547 | 1503 | return addr | ||
1548 | 1504 | |||
1549 | 1505 | settings = relation_get(rid=rid, unit=unit) | ||
1550 | 1506 | if 'egress-subnets' in settings: | ||
1551 | 1507 | return [n.strip() for n in settings['egress-subnets'].split(',') if n.strip()] | ||
1552 | 1508 | if 'ingress-address' in settings: | ||
1553 | 1509 | return [_to_range(settings['ingress-address'])] | ||
1554 | 1510 | if 'private-address' in settings: | ||
1555 | 1511 | return [_to_range(settings['private-address'])] | ||
1556 | 1512 | return [] # Should never happen | ||
1557 | 1513 | |||
1558 | 1514 | |||
1559 | 1515 | def unit_doomed(unit=None): | ||
1560 | 1516 | """Determines if the unit is being removed from the model | ||
1561 | 1517 | |||
1562 | 1518 | Requires Juju 2.4.1. | ||
1563 | 1519 | |||
1564 | 1520 | :param unit: string unit name, defaults to local_unit | ||
1565 | 1521 | :side effect: calls goal_state | ||
1566 | 1522 | :side effect: calls local_unit | ||
1567 | 1523 | :side effect: calls has_juju_version | ||
1568 | 1524 | :return: True if the unit is being removed, already gone, or never existed | ||
1569 | 1525 | """ | ||
1570 | 1526 | if not has_juju_version("2.4.1"): | ||
1571 | 1527 | # We cannot risk blindly returning False for 'we don't know', | ||
1572 | 1528 | # because that could cause data loss; if call sites don't | ||
1573 | 1529 | # need an accurate answer, they likely don't need this helper | ||
1574 | 1530 | # at all. | ||
1575 | 1531 | # goal-state existed in 2.4.0, but did not handle removals | ||
1576 | 1532 | # correctly until 2.4.1. | ||
1577 | 1533 | raise NotImplementedError("is_doomed") | ||
1578 | 1534 | if unit is None: | ||
1579 | 1535 | unit = local_unit() | ||
1580 | 1536 | gs = goal_state() | ||
1581 | 1537 | units = gs.get('units', {}) | ||
1582 | 1538 | if unit not in units: | ||
1583 | 1539 | return True | ||
1584 | 1540 | # I don't think 'dead' units ever show up in the goal-state, but | ||
1585 | 1541 | # check anyway in addition to 'dying'. | ||
1586 | 1542 | return units[unit]['status'] in ('dying', 'dead') | ||
1587 | 1543 | |||
1588 | 1544 | |||
1589 | 1545 | def env_proxy_settings(selected_settings=None): | ||
1590 | 1546 | """Get proxy settings from process environment variables. | ||
1591 | 1547 | |||
1592 | 1548 | Get charm proxy settings from environment variables that correspond to | ||
1593 | 1549 | juju-http-proxy, juju-https-proxy juju-no-proxy (available as of 2.4.2, see | ||
1594 | 1550 | lp:1782236) and juju-ftp-proxy in a format suitable for passing to an | ||
1595 | 1551 | application that reacts to proxy settings passed as environment variables. | ||
1596 | 1552 | Some applications support lowercase or uppercase notation (e.g. curl), some | ||
1597 | 1553 | support only lowercase (e.g. wget), there are also subjectively rare cases | ||
1598 | 1554 | of only uppercase notation support. no_proxy CIDR and wildcard support also | ||
1599 | 1555 | varies between runtimes and applications as there is no enforced standard. | ||
1600 | 1556 | |||
1601 | 1557 | Some applications may connect to multiple destinations and expose config | ||
1602 | 1558 | options that would affect only proxy settings for a specific destination | ||
1603 | 1559 | these should be handled in charms in an application-specific manner. | ||
1604 | 1560 | |||
1605 | 1561 | :param selected_settings: format only a subset of possible settings | ||
1606 | 1562 | :type selected_settings: list | ||
1607 | 1563 | :rtype: Option(None, dict[str, str]) | ||
1608 | 1564 | """ | ||
1609 | 1565 | SUPPORTED_SETTINGS = { | ||
1610 | 1566 | 'http': 'HTTP_PROXY', | ||
1611 | 1567 | 'https': 'HTTPS_PROXY', | ||
1612 | 1568 | 'no_proxy': 'NO_PROXY', | ||
1613 | 1569 | 'ftp': 'FTP_PROXY' | ||
1614 | 1570 | } | ||
1615 | 1571 | if selected_settings is None: | ||
1616 | 1572 | selected_settings = SUPPORTED_SETTINGS | ||
1617 | 1573 | |||
1618 | 1574 | selected_vars = [v for k, v in SUPPORTED_SETTINGS.items() | ||
1619 | 1575 | if k in selected_settings] | ||
1620 | 1576 | proxy_settings = {} | ||
1621 | 1577 | for var in selected_vars: | ||
1622 | 1578 | var_val = os.getenv(var) | ||
1623 | 1579 | if var_val: | ||
1624 | 1580 | proxy_settings[var] = var_val | ||
1625 | 1581 | proxy_settings[var.lower()] = var_val | ||
1626 | 1582 | # Now handle juju-prefixed environment variables. The legacy vs new | ||
1627 | 1583 | # environment variable usage is mutually exclusive | ||
1628 | 1584 | charm_var_val = os.getenv('JUJU_CHARM_{}'.format(var)) | ||
1629 | 1585 | if charm_var_val: | ||
1630 | 1586 | proxy_settings[var] = charm_var_val | ||
1631 | 1587 | proxy_settings[var.lower()] = charm_var_val | ||
1632 | 1588 | if 'no_proxy' in proxy_settings: | ||
1633 | 1589 | if _contains_range(proxy_settings['no_proxy']): | ||
1634 | 1590 | log(RANGE_WARNING, level=WARNING) | ||
1635 | 1591 | return proxy_settings if proxy_settings else None | ||
1636 | 1592 | |||
1637 | 1593 | |||
1638 | 1594 | def _contains_range(addresses): | ||
1639 | 1595 | """Check for cidr or wildcard domain in a string. | ||
1640 | 1596 | |||
1641 | 1597 | Given a string comprising a comma separated list of ip addresses | ||
1642 | 1598 | and domain names, determine whether the string contains IP ranges | ||
1643 | 1599 | or wildcard domains. | ||
1644 | 1600 | |||
1645 | 1601 | :param addresses: comma separated list of domains and ip addresses. | ||
1646 | 1602 | :type addresses: str | ||
1647 | 1603 | """ | ||
1648 | 1604 | return ( | ||
1649 | 1605 | # Test for cidr (e.g. 10.20.20.0/24) | ||
1650 | 1606 | "/" in addresses or | ||
1651 | 1607 | # Test for wildcard domains (*.foo.com or .foo.com) | ||
1652 | 1608 | "*" in addresses or | ||
1653 | 1609 | addresses.startswith(".") or | ||
1654 | 1610 | ",." in addresses or | ||
1655 | 1611 | " ." in addresses) | ||
1656 | diff --git a/hooks/charmhelpers/core/host.py b/hooks/charmhelpers/core/host.py | |||
1657 | index 5656e2f..b33ac90 100644 | |||
1658 | --- a/hooks/charmhelpers/core/host.py | |||
1659 | +++ b/hooks/charmhelpers/core/host.py | |||
1660 | @@ -34,21 +34,23 @@ import six | |||
1661 | 34 | 34 | ||
1662 | 35 | from contextlib import contextmanager | 35 | from contextlib import contextmanager |
1663 | 36 | from collections import OrderedDict | 36 | from collections import OrderedDict |
1665 | 37 | from .hookenv import log, DEBUG | 37 | from .hookenv import log, INFO, DEBUG, local_unit, charm_name |
1666 | 38 | from .fstab import Fstab | 38 | from .fstab import Fstab |
1667 | 39 | from charmhelpers.osplatform import get_platform | 39 | from charmhelpers.osplatform import get_platform |
1668 | 40 | 40 | ||
1669 | 41 | __platform__ = get_platform() | 41 | __platform__ = get_platform() |
1670 | 42 | if __platform__ == "ubuntu": | 42 | if __platform__ == "ubuntu": |
1672 | 43 | from charmhelpers.core.host_factory.ubuntu import ( | 43 | from charmhelpers.core.host_factory.ubuntu import ( # NOQA:F401 |
1673 | 44 | service_available, | 44 | service_available, |
1674 | 45 | add_new_group, | 45 | add_new_group, |
1675 | 46 | lsb_release, | 46 | lsb_release, |
1676 | 47 | cmp_pkgrevno, | 47 | cmp_pkgrevno, |
1677 | 48 | CompareHostReleases, | 48 | CompareHostReleases, |
1678 | 49 | get_distrib_codename, | ||
1679 | 50 | arch | ||
1680 | 49 | ) # flake8: noqa -- ignore F401 for this import | 51 | ) # flake8: noqa -- ignore F401 for this import |
1681 | 50 | elif __platform__ == "centos": | 52 | elif __platform__ == "centos": |
1683 | 51 | from charmhelpers.core.host_factory.centos import ( | 53 | from charmhelpers.core.host_factory.centos import ( # NOQA:F401 |
1684 | 52 | service_available, | 54 | service_available, |
1685 | 53 | add_new_group, | 55 | add_new_group, |
1686 | 54 | lsb_release, | 56 | lsb_release, |
1687 | @@ -58,6 +60,7 @@ elif __platform__ == "centos": | |||
1688 | 58 | 60 | ||
1689 | 59 | UPDATEDB_PATH = '/etc/updatedb.conf' | 61 | UPDATEDB_PATH = '/etc/updatedb.conf' |
1690 | 60 | 62 | ||
1691 | 63 | |||
1692 | 61 | def service_start(service_name, **kwargs): | 64 | def service_start(service_name, **kwargs): |
1693 | 62 | """Start a system service. | 65 | """Start a system service. |
1694 | 63 | 66 | ||
1695 | @@ -287,8 +290,8 @@ def service_running(service_name, **kwargs): | |||
1696 | 287 | for key, value in six.iteritems(kwargs): | 290 | for key, value in six.iteritems(kwargs): |
1697 | 288 | parameter = '%s=%s' % (key, value) | 291 | parameter = '%s=%s' % (key, value) |
1698 | 289 | cmd.append(parameter) | 292 | cmd.append(parameter) |
1701 | 290 | output = subprocess.check_output(cmd, | 293 | output = subprocess.check_output( |
1702 | 291 | stderr=subprocess.STDOUT).decode('UTF-8') | 294 | cmd, stderr=subprocess.STDOUT).decode('UTF-8') |
1703 | 292 | except subprocess.CalledProcessError: | 295 | except subprocess.CalledProcessError: |
1704 | 293 | return False | 296 | return False |
1705 | 294 | else: | 297 | else: |
1706 | @@ -441,6 +444,51 @@ def add_user_to_group(username, group): | |||
1707 | 441 | subprocess.check_call(cmd) | 444 | subprocess.check_call(cmd) |
1708 | 442 | 445 | ||
1709 | 443 | 446 | ||
1710 | 447 | def chage(username, lastday=None, expiredate=None, inactive=None, | ||
1711 | 448 | mindays=None, maxdays=None, root=None, warndays=None): | ||
1712 | 449 | """Change user password expiry information | ||
1713 | 450 | |||
1714 | 451 | :param str username: User to update | ||
1715 | 452 | :param str lastday: Set when password was changed in YYYY-MM-DD format | ||
1716 | 453 | :param str expiredate: Set when user's account will no longer be | ||
1717 | 454 | accessible in YYYY-MM-DD format. | ||
1718 | 455 | -1 will remove an account expiration date. | ||
1719 | 456 | :param str inactive: Set the number of days of inactivity after a password | ||
1720 | 457 | has expired before the account is locked. | ||
1721 | 458 | -1 will remove an account's inactivity. | ||
1722 | 459 | :param str mindays: Set the minimum number of days between password | ||
1723 | 460 | changes to MIN_DAYS. | ||
1724 | 461 | 0 indicates the password can be changed anytime. | ||
1725 | 462 | :param str maxdays: Set the maximum number of days during which a | ||
1726 | 463 | password is valid. | ||
1727 | 464 | -1 as MAX_DAYS will remove checking maxdays | ||
1728 | 465 | :param str root: Apply changes in the CHROOT_DIR directory | ||
1729 | 466 | :param str warndays: Set the number of days of warning before a password | ||
1730 | 467 | change is required | ||
1731 | 468 | :raises subprocess.CalledProcessError: if call to chage fails | ||
1732 | 469 | """ | ||
1733 | 470 | cmd = ['chage'] | ||
1734 | 471 | if root: | ||
1735 | 472 | cmd.extend(['--root', root]) | ||
1736 | 473 | if lastday: | ||
1737 | 474 | cmd.extend(['--lastday', lastday]) | ||
1738 | 475 | if expiredate: | ||
1739 | 476 | cmd.extend(['--expiredate', expiredate]) | ||
1740 | 477 | if inactive: | ||
1741 | 478 | cmd.extend(['--inactive', inactive]) | ||
1742 | 479 | if mindays: | ||
1743 | 480 | cmd.extend(['--mindays', mindays]) | ||
1744 | 481 | if maxdays: | ||
1745 | 482 | cmd.extend(['--maxdays', maxdays]) | ||
1746 | 483 | if warndays: | ||
1747 | 484 | cmd.extend(['--warndays', warndays]) | ||
1748 | 485 | cmd.append(username) | ||
1749 | 486 | subprocess.check_call(cmd) | ||
1750 | 487 | |||
1751 | 488 | |||
1752 | 489 | remove_password_expiry = functools.partial(chage, expiredate='-1', inactive='-1', mindays='0', maxdays='-1') | ||
1753 | 490 | |||
1754 | 491 | |||
1755 | 444 | def rsync(from_path, to_path, flags='-r', options=None, timeout=None): | 492 | def rsync(from_path, to_path, flags='-r', options=None, timeout=None): |
1756 | 445 | """Replicate the contents of a path""" | 493 | """Replicate the contents of a path""" |
1757 | 446 | options = options or ['--delete', '--executability'] | 494 | options = options or ['--delete', '--executability'] |
1758 | @@ -492,13 +540,15 @@ def write_file(path, content, owner='root', group='root', perms=0o444): | |||
1759 | 492 | # lets see if we can grab the file and compare the content, to avoid doing | 540 | # lets see if we can grab the file and compare the content, to avoid doing |
1760 | 493 | # a write. | 541 | # a write. |
1761 | 494 | existing_content = None | 542 | existing_content = None |
1763 | 495 | existing_uid, existing_gid = None, None | 543 | existing_uid, existing_gid, existing_perms = None, None, None |
1764 | 496 | try: | 544 | try: |
1765 | 497 | with open(path, 'rb') as target: | 545 | with open(path, 'rb') as target: |
1766 | 498 | existing_content = target.read() | 546 | existing_content = target.read() |
1767 | 499 | stat = os.stat(path) | 547 | stat = os.stat(path) |
1770 | 500 | existing_uid, existing_gid = stat.st_uid, stat.st_gid | 548 | existing_uid, existing_gid, existing_perms = ( |
1771 | 501 | except: | 549 | stat.st_uid, stat.st_gid, stat.st_mode |
1772 | 550 | ) | ||
1773 | 551 | except Exception: | ||
1774 | 502 | pass | 552 | pass |
1775 | 503 | if content != existing_content: | 553 | if content != existing_content: |
1776 | 504 | log("Writing file {} {}:{} {:o}".format(path, owner, group, perms), | 554 | log("Writing file {} {}:{} {:o}".format(path, owner, group, perms), |
1777 | @@ -506,10 +556,12 @@ def write_file(path, content, owner='root', group='root', perms=0o444): | |||
1778 | 506 | with open(path, 'wb') as target: | 556 | with open(path, 'wb') as target: |
1779 | 507 | os.fchown(target.fileno(), uid, gid) | 557 | os.fchown(target.fileno(), uid, gid) |
1780 | 508 | os.fchmod(target.fileno(), perms) | 558 | os.fchmod(target.fileno(), perms) |
1781 | 559 | if six.PY3 and isinstance(content, six.string_types): | ||
1782 | 560 | content = content.encode('UTF-8') | ||
1783 | 509 | target.write(content) | 561 | target.write(content) |
1784 | 510 | return | 562 | return |
1785 | 511 | # the contents were the same, but we might still need to change the | 563 | # the contents were the same, but we might still need to change the |
1787 | 512 | # ownership. | 564 | # ownership or permissions. |
1788 | 513 | if existing_uid != uid: | 565 | if existing_uid != uid: |
1789 | 514 | log("Changing uid on already existing content: {} -> {}" | 566 | log("Changing uid on already existing content: {} -> {}" |
1790 | 515 | .format(existing_uid, uid), level=DEBUG) | 567 | .format(existing_uid, uid), level=DEBUG) |
1791 | @@ -518,6 +570,10 @@ def write_file(path, content, owner='root', group='root', perms=0o444): | |||
1792 | 518 | log("Changing gid on already existing content: {} -> {}" | 570 | log("Changing gid on already existing content: {} -> {}" |
1793 | 519 | .format(existing_gid, gid), level=DEBUG) | 571 | .format(existing_gid, gid), level=DEBUG) |
1794 | 520 | os.chown(path, -1, gid) | 572 | os.chown(path, -1, gid) |
1795 | 573 | if existing_perms != perms: | ||
1796 | 574 | log("Changing permissions on existing content: {} -> {}" | ||
1797 | 575 | .format(existing_perms, perms), level=DEBUG) | ||
1798 | 576 | os.chmod(path, perms) | ||
1799 | 521 | 577 | ||
1800 | 522 | 578 | ||
1801 | 523 | def fstab_remove(mp): | 579 | def fstab_remove(mp): |
1802 | @@ -782,7 +838,7 @@ def list_nics(nic_type=None): | |||
1803 | 782 | ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n') | 838 | ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n') |
1804 | 783 | ip_output = (line.strip() for line in ip_output if line) | 839 | ip_output = (line.strip() for line in ip_output if line) |
1805 | 784 | 840 | ||
1807 | 785 | key = re.compile('^[0-9]+:\s+(.+):') | 841 | key = re.compile(r'^[0-9]+:\s+(.+):') |
1808 | 786 | for line in ip_output: | 842 | for line in ip_output: |
1809 | 787 | matched = re.search(key, line) | 843 | matched = re.search(key, line) |
1810 | 788 | if matched: | 844 | if matched: |
1811 | @@ -927,6 +983,20 @@ def is_container(): | |||
1812 | 927 | 983 | ||
1813 | 928 | 984 | ||
1814 | 929 | def add_to_updatedb_prunepath(path, updatedb_path=UPDATEDB_PATH): | 985 | def add_to_updatedb_prunepath(path, updatedb_path=UPDATEDB_PATH): |
1815 | 986 | """Adds the specified path to mlocate's updatedb.conf PRUNEPATHS list. | ||
1816 | 987 | |||
1817 | 988 | This method has no effect if the path specified by updatedb_path does not | ||
1818 | 989 | exist or is not a file. | ||
1819 | 990 | |||
1820 | 991 | @param path: string the path to add to the updatedb.conf PRUNEPATHS value | ||
1821 | 992 | @param updatedb_path: the path to the updatedb.conf file | ||
1822 | 993 | """ | ||
1823 | 994 | if not os.path.exists(updatedb_path) or os.path.isdir(updatedb_path): | ||
1824 | 995 | # If the updatedb.conf file doesn't exist then don't attempt to update | ||
1825 | 996 | # the file as the package providing mlocate may not be installed on | ||
1826 | 997 | # the local system | ||
1827 | 998 | return | ||
1828 | 999 | |||
1829 | 930 | with open(updatedb_path, 'r+') as f_id: | 1000 | with open(updatedb_path, 'r+') as f_id: |
1830 | 931 | updatedb_text = f_id.read() | 1001 | updatedb_text = f_id.read() |
1831 | 932 | output = updatedb(updatedb_text, path) | 1002 | output = updatedb(updatedb_text, path) |
1832 | @@ -946,3 +1016,89 @@ def updatedb(updatedb_text, new_path): | |||
1833 | 946 | lines[i] = 'PRUNEPATHS="{}"'.format(' '.join(paths)) | 1016 | lines[i] = 'PRUNEPATHS="{}"'.format(' '.join(paths)) |
1834 | 947 | output = "\n".join(lines) | 1017 | output = "\n".join(lines) |
1835 | 948 | return output | 1018 | return output |
1836 | 1019 | |||
1837 | 1020 | |||
1838 | 1021 | def modulo_distribution(modulo=3, wait=30, non_zero_wait=False): | ||
1839 | 1022 | """ Modulo distribution | ||
1840 | 1023 | |||
1841 | 1024 | This helper uses the unit number, a modulo value and a constant wait time | ||
1842 | 1025 | to produce a calculated wait time distribution. This is useful in large | ||
1843 | 1026 | scale deployments to distribute load during an expensive operation such as | ||
1844 | 1027 | service restarts. | ||
1845 | 1028 | |||
1846 | 1029 | If you have 1000 nodes that need to restart 100 at a time 1 minute at a | ||
1847 | 1030 | time: | ||
1848 | 1031 | |||
1849 | 1032 | time.wait(modulo_distribution(modulo=100, wait=60)) | ||
1850 | 1033 | restart() | ||
1851 | 1034 | |||
1852 | 1035 | If you need restarts to happen serially set modulo to the exact number of | ||
1853 | 1036 | nodes and set a high constant wait time: | ||
1854 | 1037 | |||
1855 | 1038 | time.wait(modulo_distribution(modulo=10, wait=120)) | ||
1856 | 1039 | restart() | ||
1857 | 1040 | |||
1858 | 1041 | @param modulo: int The modulo number creates the group distribution | ||
1859 | 1042 | @param wait: int The constant time wait value | ||
1860 | 1043 | @param non_zero_wait: boolean Override unit % modulo == 0, | ||
1861 | 1044 | return modulo * wait. Used to avoid collisions with | ||
1862 | 1045 | leader nodes which are often given priority. | ||
1863 | 1046 | @return: int Calculated time to wait for unit operation | ||
1864 | 1047 | """ | ||
1865 | 1048 | unit_number = int(local_unit().split('/')[1]) | ||
1866 | 1049 | calculated_wait_time = (unit_number % modulo) * wait | ||
1867 | 1050 | if non_zero_wait and calculated_wait_time == 0: | ||
1868 | 1051 | return modulo * wait | ||
1869 | 1052 | else: | ||
1870 | 1053 | return calculated_wait_time | ||
1871 | 1054 | |||
1872 | 1055 | |||
1873 | 1056 | def install_ca_cert(ca_cert, name=None): | ||
1874 | 1057 | """ | ||
1875 | 1058 | Install the given cert as a trusted CA. | ||
1876 | 1059 | |||
1877 | 1060 | The ``name`` is the stem of the filename where the cert is written, and if | ||
1878 | 1061 | not provided, it will default to ``juju-{charm_name}``. | ||
1879 | 1062 | |||
1880 | 1063 | If the cert is empty or None, or is unchanged, nothing is done. | ||
1881 | 1064 | """ | ||
1882 | 1065 | if not ca_cert: | ||
1883 | 1066 | return | ||
1884 | 1067 | if not isinstance(ca_cert, bytes): | ||
1885 | 1068 | ca_cert = ca_cert.encode('utf8') | ||
1886 | 1069 | if not name: | ||
1887 | 1070 | name = 'juju-{}'.format(charm_name()) | ||
1888 | 1071 | cert_file = '/usr/local/share/ca-certificates/{}.crt'.format(name) | ||
1889 | 1072 | new_hash = hashlib.md5(ca_cert).hexdigest() | ||
1890 | 1073 | if file_hash(cert_file) == new_hash: | ||
1891 | 1074 | return | ||
1892 | 1075 | log("Installing new CA cert at: {}".format(cert_file), level=INFO) | ||
1893 | 1076 | write_file(cert_file, ca_cert) | ||
1894 | 1077 | subprocess.check_call(['update-ca-certificates', '--fresh']) | ||
1895 | 1078 | |||
1896 | 1079 | |||
1897 | 1080 | def get_system_env(key, default=None): | ||
1898 | 1081 | """Get data from system environment as represented in ``/etc/environment``. | ||
1899 | 1082 | |||
1900 | 1083 | :param key: Key to look up | ||
1901 | 1084 | :type key: str | ||
1902 | 1085 | :param default: Value to return if key is not found | ||
1903 | 1086 | :type default: any | ||
1904 | 1087 | :returns: Value for key if found or contents of default parameter | ||
1905 | 1088 | :rtype: any | ||
1906 | 1089 | :raises: subprocess.CalledProcessError | ||
1907 | 1090 | """ | ||
1908 | 1091 | env_file = '/etc/environment' | ||
1909 | 1092 | # use the shell and env(1) to parse the global environments file. This is | ||
1910 | 1093 | # done to get the correct result even if the user has shell variable | ||
1911 | 1094 | # substitutions or other shell logic in that file. | ||
1912 | 1095 | output = subprocess.check_output( | ||
1913 | 1096 | ['env', '-i', '/bin/bash', '-c', | ||
1914 | 1097 | 'set -a && source {} && env'.format(env_file)], | ||
1915 | 1098 | universal_newlines=True) | ||
1916 | 1099 | for k, v in (line.split('=', 1) | ||
1917 | 1100 | for line in output.splitlines() if '=' in line): | ||
1918 | 1101 | if k == key: | ||
1919 | 1102 | return v | ||
1920 | 1103 | else: | ||
1921 | 1104 | return default | ||
1922 | diff --git a/hooks/charmhelpers/core/host_factory/ubuntu.py b/hooks/charmhelpers/core/host_factory/ubuntu.py | |||
1923 | index d8dc378..3edc068 100644 | |||
1924 | --- a/hooks/charmhelpers/core/host_factory/ubuntu.py | |||
1925 | +++ b/hooks/charmhelpers/core/host_factory/ubuntu.py | |||
1926 | @@ -1,5 +1,6 @@ | |||
1927 | 1 | import subprocess | 1 | import subprocess |
1928 | 2 | 2 | ||
1929 | 3 | from charmhelpers.core.hookenv import cached | ||
1930 | 3 | from charmhelpers.core.strutils import BasicStringComparator | 4 | from charmhelpers.core.strutils import BasicStringComparator |
1931 | 4 | 5 | ||
1932 | 5 | 6 | ||
1933 | @@ -20,6 +21,11 @@ UBUNTU_RELEASES = ( | |||
1934 | 20 | 'yakkety', | 21 | 'yakkety', |
1935 | 21 | 'zesty', | 22 | 'zesty', |
1936 | 22 | 'artful', | 23 | 'artful', |
1937 | 24 | 'bionic', | ||
1938 | 25 | 'cosmic', | ||
1939 | 26 | 'disco', | ||
1940 | 27 | 'eoan', | ||
1941 | 28 | 'focal' | ||
1942 | 23 | ) | 29 | ) |
1943 | 24 | 30 | ||
1944 | 25 | 31 | ||
1945 | @@ -70,6 +76,14 @@ def lsb_release(): | |||
1946 | 70 | return d | 76 | return d |
1947 | 71 | 77 | ||
1948 | 72 | 78 | ||
1949 | 79 | def get_distrib_codename(): | ||
1950 | 80 | """Return the codename of the distribution | ||
1951 | 81 | :returns: The codename | ||
1952 | 82 | :rtype: str | ||
1953 | 83 | """ | ||
1954 | 84 | return lsb_release()['DISTRIB_CODENAME'].lower() | ||
1955 | 85 | |||
1956 | 86 | |||
1957 | 73 | def cmp_pkgrevno(package, revno, pkgcache=None): | 87 | def cmp_pkgrevno(package, revno, pkgcache=None): |
1958 | 74 | """Compare supplied revno with the revno of the installed package. | 88 | """Compare supplied revno with the revno of the installed package. |
1959 | 75 | 89 | ||
1960 | @@ -81,9 +95,22 @@ def cmp_pkgrevno(package, revno, pkgcache=None): | |||
1961 | 81 | the pkgcache argument is None. Be sure to add charmhelpers.fetch if | 95 | the pkgcache argument is None. Be sure to add charmhelpers.fetch if |
1962 | 82 | you call this function, or pass an apt_pkg.Cache() instance. | 96 | you call this function, or pass an apt_pkg.Cache() instance. |
1963 | 83 | """ | 97 | """ |
1965 | 84 | import apt_pkg | 98 | from charmhelpers.fetch import apt_pkg |
1966 | 85 | if not pkgcache: | 99 | if not pkgcache: |
1967 | 86 | from charmhelpers.fetch import apt_cache | 100 | from charmhelpers.fetch import apt_cache |
1968 | 87 | pkgcache = apt_cache() | 101 | pkgcache = apt_cache() |
1969 | 88 | pkg = pkgcache[package] | 102 | pkg = pkgcache[package] |
1970 | 89 | return apt_pkg.version_compare(pkg.current_ver.ver_str, revno) | 103 | return apt_pkg.version_compare(pkg.current_ver.ver_str, revno) |
1971 | 104 | |||
1972 | 105 | |||
1973 | 106 | @cached | ||
1974 | 107 | def arch(): | ||
1975 | 108 | """Return the package architecture as a string. | ||
1976 | 109 | |||
1977 | 110 | :returns: the architecture | ||
1978 | 111 | :rtype: str | ||
1979 | 112 | :raises: subprocess.CalledProcessError if dpkg command fails | ||
1980 | 113 | """ | ||
1981 | 114 | return subprocess.check_output( | ||
1982 | 115 | ['dpkg', '--print-architecture'] | ||
1983 | 116 | ).rstrip().decode('UTF-8') | ||
1984 | diff --git a/hooks/charmhelpers/core/kernel.py b/hooks/charmhelpers/core/kernel.py | |||
1985 | index 2d40452..e01f4f8 100644 | |||
1986 | --- a/hooks/charmhelpers/core/kernel.py | |||
1987 | +++ b/hooks/charmhelpers/core/kernel.py | |||
1988 | @@ -26,12 +26,12 @@ from charmhelpers.core.hookenv import ( | |||
1989 | 26 | 26 | ||
1990 | 27 | __platform__ = get_platform() | 27 | __platform__ = get_platform() |
1991 | 28 | if __platform__ == "ubuntu": | 28 | if __platform__ == "ubuntu": |
1993 | 29 | from charmhelpers.core.kernel_factory.ubuntu import ( | 29 | from charmhelpers.core.kernel_factory.ubuntu import ( # NOQA:F401 |
1994 | 30 | persistent_modprobe, | 30 | persistent_modprobe, |
1995 | 31 | update_initramfs, | 31 | update_initramfs, |
1996 | 32 | ) # flake8: noqa -- ignore F401 for this import | 32 | ) # flake8: noqa -- ignore F401 for this import |
1997 | 33 | elif __platform__ == "centos": | 33 | elif __platform__ == "centos": |
1999 | 34 | from charmhelpers.core.kernel_factory.centos import ( | 34 | from charmhelpers.core.kernel_factory.centos import ( # NOQA:F401 |
2000 | 35 | persistent_modprobe, | 35 | persistent_modprobe, |
2001 | 36 | update_initramfs, | 36 | update_initramfs, |
2002 | 37 | ) # flake8: noqa -- ignore F401 for this import | 37 | ) # flake8: noqa -- ignore F401 for this import |
2003 | diff --git a/hooks/charmhelpers/core/services/base.py b/hooks/charmhelpers/core/services/base.py | |||
2004 | index ca9dc99..179ad4f 100644 | |||
2005 | --- a/hooks/charmhelpers/core/services/base.py | |||
2006 | +++ b/hooks/charmhelpers/core/services/base.py | |||
2007 | @@ -307,23 +307,34 @@ class PortManagerCallback(ManagerCallback): | |||
2008 | 307 | """ | 307 | """ |
2009 | 308 | def __call__(self, manager, service_name, event_name): | 308 | def __call__(self, manager, service_name, event_name): |
2010 | 309 | service = manager.get_service(service_name) | 309 | service = manager.get_service(service_name) |
2012 | 310 | new_ports = service.get('ports', []) | 310 | # turn this generator into a list, |
2013 | 311 | # as we'll be going over it multiple times | ||
2014 | 312 | new_ports = list(service.get('ports', [])) | ||
2015 | 311 | port_file = os.path.join(hookenv.charm_dir(), '.{}.ports'.format(service_name)) | 313 | port_file = os.path.join(hookenv.charm_dir(), '.{}.ports'.format(service_name)) |
2016 | 312 | if os.path.exists(port_file): | 314 | if os.path.exists(port_file): |
2017 | 313 | with open(port_file) as fp: | 315 | with open(port_file) as fp: |
2018 | 314 | old_ports = fp.read().split(',') | 316 | old_ports = fp.read().split(',') |
2019 | 315 | for old_port in old_ports: | 317 | for old_port in old_ports: |
2024 | 316 | if bool(old_port): | 318 | if bool(old_port) and not self.ports_contains(old_port, new_ports): |
2025 | 317 | old_port = int(old_port) | 319 | hookenv.close_port(old_port) |
2022 | 318 | if old_port not in new_ports: | ||
2023 | 319 | hookenv.close_port(old_port) | ||
2026 | 320 | with open(port_file, 'w') as fp: | 320 | with open(port_file, 'w') as fp: |
2027 | 321 | fp.write(','.join(str(port) for port in new_ports)) | 321 | fp.write(','.join(str(port) for port in new_ports)) |
2028 | 322 | for port in new_ports: | 322 | for port in new_ports: |
2029 | 323 | # A port is either a number or 'ICMP' | ||
2030 | 324 | protocol = 'TCP' | ||
2031 | 325 | if str(port).upper() == 'ICMP': | ||
2032 | 326 | protocol = 'ICMP' | ||
2033 | 323 | if event_name == 'start': | 327 | if event_name == 'start': |
2035 | 324 | hookenv.open_port(port) | 328 | hookenv.open_port(port, protocol) |
2036 | 325 | elif event_name == 'stop': | 329 | elif event_name == 'stop': |
2038 | 326 | hookenv.close_port(port) | 330 | hookenv.close_port(port, protocol) |
2039 | 331 | |||
2040 | 332 | def ports_contains(self, port, ports): | ||
2041 | 333 | if not bool(port): | ||
2042 | 334 | return False | ||
2043 | 335 | if str(port).upper() != 'ICMP': | ||
2044 | 336 | port = int(port) | ||
2045 | 337 | return port in ports | ||
2046 | 327 | 338 | ||
2047 | 328 | 339 | ||
2048 | 329 | def service_stop(service_name): | 340 | def service_stop(service_name): |
2049 | diff --git a/hooks/charmhelpers/core/strutils.py b/hooks/charmhelpers/core/strutils.py | |||
2050 | index 685dabd..e8df045 100644 | |||
2051 | --- a/hooks/charmhelpers/core/strutils.py | |||
2052 | +++ b/hooks/charmhelpers/core/strutils.py | |||
2053 | @@ -61,13 +61,19 @@ def bytes_from_string(value): | |||
2054 | 61 | if isinstance(value, six.string_types): | 61 | if isinstance(value, six.string_types): |
2055 | 62 | value = six.text_type(value) | 62 | value = six.text_type(value) |
2056 | 63 | else: | 63 | else: |
2058 | 64 | msg = "Unable to interpret non-string value '%s' as boolean" % (value) | 64 | msg = "Unable to interpret non-string value '%s' as bytes" % (value) |
2059 | 65 | raise ValueError(msg) | 65 | raise ValueError(msg) |
2060 | 66 | matches = re.match("([0-9]+)([a-zA-Z]+)", value) | 66 | matches = re.match("([0-9]+)([a-zA-Z]+)", value) |
2065 | 67 | if not matches: | 67 | if matches: |
2066 | 68 | msg = "Unable to interpret string value '%s' as bytes" % (value) | 68 | size = int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)]) |
2067 | 69 | raise ValueError(msg) | 69 | else: |
2068 | 70 | return int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)]) | 70 | # Assume that value passed in is bytes |
2069 | 71 | try: | ||
2070 | 72 | size = int(value) | ||
2071 | 73 | except ValueError: | ||
2072 | 74 | msg = "Unable to interpret string value '%s' as bytes" % (value) | ||
2073 | 75 | raise ValueError(msg) | ||
2074 | 76 | return size | ||
2075 | 71 | 77 | ||
2076 | 72 | 78 | ||
2077 | 73 | class BasicStringComparator(object): | 79 | class BasicStringComparator(object): |
2078 | diff --git a/hooks/charmhelpers/core/sysctl.py b/hooks/charmhelpers/core/sysctl.py | |||
2079 | index 6e413e3..386428d 100644 | |||
2080 | --- a/hooks/charmhelpers/core/sysctl.py | |||
2081 | +++ b/hooks/charmhelpers/core/sysctl.py | |||
2082 | @@ -17,38 +17,59 @@ | |||
2083 | 17 | 17 | ||
2084 | 18 | import yaml | 18 | import yaml |
2085 | 19 | 19 | ||
2087 | 20 | from subprocess import check_call | 20 | from subprocess import check_call, CalledProcessError |
2088 | 21 | 21 | ||
2089 | 22 | from charmhelpers.core.hookenv import ( | 22 | from charmhelpers.core.hookenv import ( |
2090 | 23 | log, | 23 | log, |
2091 | 24 | DEBUG, | 24 | DEBUG, |
2092 | 25 | ERROR, | 25 | ERROR, |
2093 | 26 | WARNING, | ||
2094 | 26 | ) | 27 | ) |
2095 | 27 | 28 | ||
2096 | 29 | from charmhelpers.core.host import is_container | ||
2097 | 30 | |||
2098 | 28 | __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>' | 31 | __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>' |
2099 | 29 | 32 | ||
2100 | 30 | 33 | ||
2102 | 31 | def create(sysctl_dict, sysctl_file): | 34 | def create(sysctl_dict, sysctl_file, ignore=False): |
2103 | 32 | """Creates a sysctl.conf file from a YAML associative array | 35 | """Creates a sysctl.conf file from a YAML associative array |
2104 | 33 | 36 | ||
2106 | 34 | :param sysctl_dict: a YAML-formatted string of sysctl options eg "{ 'kernel.max_pid': 1337 }" | 37 | :param sysctl_dict: a dict or YAML-formatted string of sysctl |
2107 | 38 | options eg "{ 'kernel.max_pid': 1337 }" | ||
2108 | 35 | :type sysctl_dict: str | 39 | :type sysctl_dict: str |
2109 | 36 | :param sysctl_file: path to the sysctl file to be saved | 40 | :param sysctl_file: path to the sysctl file to be saved |
2110 | 37 | :type sysctl_file: str or unicode | 41 | :type sysctl_file: str or unicode |
2111 | 42 | :param ignore: If True, ignore "unknown variable" errors. | ||
2112 | 43 | :type ignore: bool | ||
2113 | 38 | :returns: None | 44 | :returns: None |
2114 | 39 | """ | 45 | """ |
2121 | 40 | try: | 46 | if type(sysctl_dict) is not dict: |
2122 | 41 | sysctl_dict_parsed = yaml.safe_load(sysctl_dict) | 47 | try: |
2123 | 42 | except yaml.YAMLError: | 48 | sysctl_dict_parsed = yaml.safe_load(sysctl_dict) |
2124 | 43 | log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict), | 49 | except yaml.YAMLError: |
2125 | 44 | level=ERROR) | 50 | log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict), |
2126 | 45 | return | 51 | level=ERROR) |
2127 | 52 | return | ||
2128 | 53 | else: | ||
2129 | 54 | sysctl_dict_parsed = sysctl_dict | ||
2130 | 46 | 55 | ||
2131 | 47 | with open(sysctl_file, "w") as fd: | 56 | with open(sysctl_file, "w") as fd: |
2132 | 48 | for key, value in sysctl_dict_parsed.items(): | 57 | for key, value in sysctl_dict_parsed.items(): |
2133 | 49 | fd.write("{}={}\n".format(key, value)) | 58 | fd.write("{}={}\n".format(key, value)) |
2134 | 50 | 59 | ||
2136 | 51 | log("Updating sysctl_file: %s values: %s" % (sysctl_file, sysctl_dict_parsed), | 60 | log("Updating sysctl_file: {} values: {}".format(sysctl_file, |
2137 | 61 | sysctl_dict_parsed), | ||
2138 | 52 | level=DEBUG) | 62 | level=DEBUG) |
2139 | 53 | 63 | ||
2141 | 54 | check_call(["sysctl", "-p", sysctl_file]) | 64 | call = ["sysctl", "-p", sysctl_file] |
2142 | 65 | if ignore: | ||
2143 | 66 | call.append("-e") | ||
2144 | 67 | |||
2145 | 68 | try: | ||
2146 | 69 | check_call(call) | ||
2147 | 70 | except CalledProcessError as e: | ||
2148 | 71 | if is_container(): | ||
2149 | 72 | log("Error setting some sysctl keys in this container: {}".format(e.output), | ||
2150 | 73 | level=WARNING) | ||
2151 | 74 | else: | ||
2152 | 75 | raise e | ||
2153 | diff --git a/hooks/charmhelpers/core/templating.py b/hooks/charmhelpers/core/templating.py | |||
2154 | index 7b801a3..9014015 100644 | |||
2155 | --- a/hooks/charmhelpers/core/templating.py | |||
2156 | +++ b/hooks/charmhelpers/core/templating.py | |||
2157 | @@ -20,7 +20,8 @@ from charmhelpers.core import hookenv | |||
2158 | 20 | 20 | ||
2159 | 21 | 21 | ||
2160 | 22 | def render(source, target, context, owner='root', group='root', | 22 | def render(source, target, context, owner='root', group='root', |
2162 | 23 | perms=0o444, templates_dir=None, encoding='UTF-8', template_loader=None): | 23 | perms=0o444, templates_dir=None, encoding='UTF-8', |
2163 | 24 | template_loader=None, config_template=None): | ||
2164 | 24 | """ | 25 | """ |
2165 | 25 | Render a template. | 26 | Render a template. |
2166 | 26 | 27 | ||
2167 | @@ -32,6 +33,9 @@ def render(source, target, context, owner='root', group='root', | |||
2168 | 32 | The context should be a dict containing the values to be replaced in the | 33 | The context should be a dict containing the values to be replaced in the |
2169 | 33 | template. | 34 | template. |
2170 | 34 | 35 | ||
2171 | 36 | config_template may be provided to render from a provided template instead | ||
2172 | 37 | of loading from a file. | ||
2173 | 38 | |||
2174 | 35 | The `owner`, `group`, and `perms` options will be passed to `write_file`. | 39 | The `owner`, `group`, and `perms` options will be passed to `write_file`. |
2175 | 36 | 40 | ||
2176 | 37 | If omitted, `templates_dir` defaults to the `templates` folder in the charm. | 41 | If omitted, `templates_dir` defaults to the `templates` folder in the charm. |
2177 | @@ -65,14 +69,19 @@ def render(source, target, context, owner='root', group='root', | |||
2178 | 65 | if templates_dir is None: | 69 | if templates_dir is None: |
2179 | 66 | templates_dir = os.path.join(hookenv.charm_dir(), 'templates') | 70 | templates_dir = os.path.join(hookenv.charm_dir(), 'templates') |
2180 | 67 | template_env = Environment(loader=FileSystemLoader(templates_dir)) | 71 | template_env = Environment(loader=FileSystemLoader(templates_dir)) |
2189 | 68 | try: | 72 | |
2190 | 69 | source = source | 73 | # load from a string if provided explicitly |
2191 | 70 | template = template_env.get_template(source) | 74 | if config_template is not None: |
2192 | 71 | except exceptions.TemplateNotFound as e: | 75 | template = template_env.from_string(config_template) |
2193 | 72 | hookenv.log('Could not load template %s from %s.' % | 76 | else: |
2194 | 73 | (source, templates_dir), | 77 | try: |
2195 | 74 | level=hookenv.ERROR) | 78 | source = source |
2196 | 75 | raise e | 79 | template = template_env.get_template(source) |
2197 | 80 | except exceptions.TemplateNotFound as e: | ||
2198 | 81 | hookenv.log('Could not load template %s from %s.' % | ||
2199 | 82 | (source, templates_dir), | ||
2200 | 83 | level=hookenv.ERROR) | ||
2201 | 84 | raise e | ||
2202 | 76 | content = template.render(context) | 85 | content = template.render(context) |
2203 | 77 | if target is not None: | 86 | if target is not None: |
2204 | 78 | target_dir = os.path.dirname(target) | 87 | target_dir = os.path.dirname(target) |
2205 | diff --git a/hooks/charmhelpers/core/unitdata.py b/hooks/charmhelpers/core/unitdata.py | |||
2206 | index 54ec969..ab55432 100644 | |||
2207 | --- a/hooks/charmhelpers/core/unitdata.py | |||
2208 | +++ b/hooks/charmhelpers/core/unitdata.py | |||
2209 | @@ -166,6 +166,10 @@ class Storage(object): | |||
2210 | 166 | 166 | ||
2211 | 167 | To support dicts, lists, integer, floats, and booleans values | 167 | To support dicts, lists, integer, floats, and booleans values |
2212 | 168 | are automatically json encoded/decoded. | 168 | are automatically json encoded/decoded. |
2213 | 169 | |||
2214 | 170 | Note: to facilitate unit testing, ':memory:' can be passed as the | ||
2215 | 171 | path parameter which causes sqlite3 to only build the db in memory. | ||
2216 | 172 | This should only be used for testing purposes. | ||
2217 | 169 | """ | 173 | """ |
2218 | 170 | def __init__(self, path=None): | 174 | def __init__(self, path=None): |
2219 | 171 | self.db_path = path | 175 | self.db_path = path |
2220 | @@ -175,6 +179,9 @@ class Storage(object): | |||
2221 | 175 | else: | 179 | else: |
2222 | 176 | self.db_path = os.path.join( | 180 | self.db_path = os.path.join( |
2223 | 177 | os.environ.get('CHARM_DIR', ''), '.unit-state.db') | 181 | os.environ.get('CHARM_DIR', ''), '.unit-state.db') |
2224 | 182 | if self.db_path != ':memory:': | ||
2225 | 183 | with open(self.db_path, 'a') as f: | ||
2226 | 184 | os.fchmod(f.fileno(), 0o600) | ||
2227 | 178 | self.conn = sqlite3.connect('%s' % self.db_path) | 185 | self.conn = sqlite3.connect('%s' % self.db_path) |
2228 | 179 | self.cursor = self.conn.cursor() | 186 | self.cursor = self.conn.cursor() |
2229 | 180 | self.revision = None | 187 | self.revision = None |
2230 | @@ -358,7 +365,7 @@ class Storage(object): | |||
2231 | 358 | try: | 365 | try: |
2232 | 359 | yield self.revision | 366 | yield self.revision |
2233 | 360 | self.revision = None | 367 | self.revision = None |
2235 | 361 | except: | 368 | except Exception: |
2236 | 362 | self.flush(False) | 369 | self.flush(False) |
2237 | 363 | self.revision = None | 370 | self.revision = None |
2238 | 364 | raise | 371 | raise |
2239 | diff --git a/hooks/charmhelpers/fetch/__init__.py b/hooks/charmhelpers/fetch/__init__.py | |||
2240 | index 480a627..0cc7fc8 100644 | |||
2241 | --- a/hooks/charmhelpers/fetch/__init__.py | |||
2242 | +++ b/hooks/charmhelpers/fetch/__init__.py | |||
2243 | @@ -84,6 +84,7 @@ module = "charmhelpers.fetch.%s" % __platform__ | |||
2244 | 84 | fetch = importlib.import_module(module) | 84 | fetch = importlib.import_module(module) |
2245 | 85 | 85 | ||
2246 | 86 | filter_installed_packages = fetch.filter_installed_packages | 86 | filter_installed_packages = fetch.filter_installed_packages |
2247 | 87 | filter_missing_packages = fetch.filter_missing_packages | ||
2248 | 87 | install = fetch.apt_install | 88 | install = fetch.apt_install |
2249 | 88 | upgrade = fetch.apt_upgrade | 89 | upgrade = fetch.apt_upgrade |
2250 | 89 | update = _fetch_update = fetch.apt_update | 90 | update = _fetch_update = fetch.apt_update |
2251 | @@ -96,11 +97,14 @@ if __platform__ == "ubuntu": | |||
2252 | 96 | apt_update = fetch.apt_update | 97 | apt_update = fetch.apt_update |
2253 | 97 | apt_upgrade = fetch.apt_upgrade | 98 | apt_upgrade = fetch.apt_upgrade |
2254 | 98 | apt_purge = fetch.apt_purge | 99 | apt_purge = fetch.apt_purge |
2255 | 100 | apt_autoremove = fetch.apt_autoremove | ||
2256 | 99 | apt_mark = fetch.apt_mark | 101 | apt_mark = fetch.apt_mark |
2257 | 100 | apt_hold = fetch.apt_hold | 102 | apt_hold = fetch.apt_hold |
2258 | 101 | apt_unhold = fetch.apt_unhold | 103 | apt_unhold = fetch.apt_unhold |
2259 | 102 | import_key = fetch.import_key | 104 | import_key = fetch.import_key |
2260 | 103 | get_upstream_version = fetch.get_upstream_version | 105 | get_upstream_version = fetch.get_upstream_version |
2261 | 106 | apt_pkg = fetch.ubuntu_apt_pkg | ||
2262 | 107 | get_apt_dpkg_env = fetch.get_apt_dpkg_env | ||
2263 | 104 | elif __platform__ == "centos": | 108 | elif __platform__ == "centos": |
2264 | 105 | yum_search = fetch.yum_search | 109 | yum_search = fetch.yum_search |
2265 | 106 | 110 | ||
2266 | diff --git a/hooks/charmhelpers/fetch/archiveurl.py b/hooks/charmhelpers/fetch/archiveurl.py | |||
2267 | index dd24f9e..d25587a 100644 | |||
2268 | --- a/hooks/charmhelpers/fetch/archiveurl.py | |||
2269 | +++ b/hooks/charmhelpers/fetch/archiveurl.py | |||
2270 | @@ -89,7 +89,7 @@ class ArchiveUrlFetchHandler(BaseFetchHandler): | |||
2271 | 89 | :param str source: URL pointing to an archive file. | 89 | :param str source: URL pointing to an archive file. |
2272 | 90 | :param str dest: Local path location to download archive file to. | 90 | :param str dest: Local path location to download archive file to. |
2273 | 91 | """ | 91 | """ |
2275 | 92 | # propogate all exceptions | 92 | # propagate all exceptions |
2276 | 93 | # URLError, OSError, etc | 93 | # URLError, OSError, etc |
2277 | 94 | proto, netloc, path, params, query, fragment = urlparse(source) | 94 | proto, netloc, path, params, query, fragment = urlparse(source) |
2278 | 95 | if proto in ('http', 'https'): | 95 | if proto in ('http', 'https'): |
2279 | diff --git a/hooks/charmhelpers/fetch/bzrurl.py b/hooks/charmhelpers/fetch/bzrurl.py | |||
2280 | index 07cd029..c4ab3ff 100644 | |||
2281 | --- a/hooks/charmhelpers/fetch/bzrurl.py | |||
2282 | +++ b/hooks/charmhelpers/fetch/bzrurl.py | |||
2283 | @@ -13,7 +13,7 @@ | |||
2284 | 13 | # limitations under the License. | 13 | # limitations under the License. |
2285 | 14 | 14 | ||
2286 | 15 | import os | 15 | import os |
2288 | 16 | from subprocess import check_call | 16 | from subprocess import STDOUT, check_output |
2289 | 17 | from charmhelpers.fetch import ( | 17 | from charmhelpers.fetch import ( |
2290 | 18 | BaseFetchHandler, | 18 | BaseFetchHandler, |
2291 | 19 | UnhandledSource, | 19 | UnhandledSource, |
2292 | @@ -55,7 +55,7 @@ class BzrUrlFetchHandler(BaseFetchHandler): | |||
2293 | 55 | cmd = ['bzr', 'branch'] | 55 | cmd = ['bzr', 'branch'] |
2294 | 56 | cmd += cmd_opts | 56 | cmd += cmd_opts |
2295 | 57 | cmd += [source, dest] | 57 | cmd += [source, dest] |
2297 | 58 | check_call(cmd) | 58 | check_output(cmd, stderr=STDOUT) |
2298 | 59 | 59 | ||
2299 | 60 | def install(self, source, dest=None, revno=None): | 60 | def install(self, source, dest=None, revno=None): |
2300 | 61 | url_parts = self.parse_url(source) | 61 | url_parts = self.parse_url(source) |
2301 | diff --git a/hooks/charmhelpers/fetch/giturl.py b/hooks/charmhelpers/fetch/giturl.py | |||
2302 | index 4cf21bc..070ca9b 100644 | |||
2303 | --- a/hooks/charmhelpers/fetch/giturl.py | |||
2304 | +++ b/hooks/charmhelpers/fetch/giturl.py | |||
2305 | @@ -13,7 +13,7 @@ | |||
2306 | 13 | # limitations under the License. | 13 | # limitations under the License. |
2307 | 14 | 14 | ||
2308 | 15 | import os | 15 | import os |
2310 | 16 | from subprocess import check_call, CalledProcessError | 16 | from subprocess import check_output, CalledProcessError, STDOUT |
2311 | 17 | from charmhelpers.fetch import ( | 17 | from charmhelpers.fetch import ( |
2312 | 18 | BaseFetchHandler, | 18 | BaseFetchHandler, |
2313 | 19 | UnhandledSource, | 19 | UnhandledSource, |
2314 | @@ -50,7 +50,7 @@ class GitUrlFetchHandler(BaseFetchHandler): | |||
2315 | 50 | cmd = ['git', 'clone', source, dest, '--branch', branch] | 50 | cmd = ['git', 'clone', source, dest, '--branch', branch] |
2316 | 51 | if depth: | 51 | if depth: |
2317 | 52 | cmd.extend(['--depth', depth]) | 52 | cmd.extend(['--depth', depth]) |
2319 | 53 | check_call(cmd) | 53 | check_output(cmd, stderr=STDOUT) |
2320 | 54 | 54 | ||
2321 | 55 | def install(self, source, branch="master", dest=None, depth=None): | 55 | def install(self, source, branch="master", dest=None, depth=None): |
2322 | 56 | url_parts = self.parse_url(source) | 56 | url_parts = self.parse_url(source) |
2323 | diff --git a/hooks/charmhelpers/fetch/python/__init__.py b/hooks/charmhelpers/fetch/python/__init__.py | |||
2324 | 57 | new file mode 100644 | 57 | new file mode 100644 |
2325 | index 0000000..bff99dc | |||
2326 | --- /dev/null | |||
2327 | +++ b/hooks/charmhelpers/fetch/python/__init__.py | |||
2328 | @@ -0,0 +1,13 @@ | |||
2329 | 1 | # Copyright 2014-2019 Canonical Limited. | ||
2330 | 2 | # | ||
2331 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
2332 | 4 | # you may not use this file except in compliance with the License. | ||
2333 | 5 | # You may obtain a copy of the License at | ||
2334 | 6 | # | ||
2335 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
2336 | 8 | # | ||
2337 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
2338 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
2339 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
2340 | 12 | # See the License for the specific language governing permissions and | ||
2341 | 13 | # limitations under the License. | ||
2342 | diff --git a/hooks/charmhelpers/fetch/python/debug.py b/hooks/charmhelpers/fetch/python/debug.py | |||
2343 | 0 | new file mode 100644 | 14 | new file mode 100644 |
2344 | index 0000000..757135e | |||
2345 | --- /dev/null | |||
2346 | +++ b/hooks/charmhelpers/fetch/python/debug.py | |||
2347 | @@ -0,0 +1,54 @@ | |||
2348 | 1 | #!/usr/bin/env python | ||
2349 | 2 | # coding: utf-8 | ||
2350 | 3 | |||
2351 | 4 | # Copyright 2014-2015 Canonical Limited. | ||
2352 | 5 | # | ||
2353 | 6 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
2354 | 7 | # you may not use this file except in compliance with the License. | ||
2355 | 8 | # You may obtain a copy of the License at | ||
2356 | 9 | # | ||
2357 | 10 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
2358 | 11 | # | ||
2359 | 12 | # Unless required by applicable law or agreed to in writing, software | ||
2360 | 13 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
2361 | 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
2362 | 15 | # See the License for the specific language governing permissions and | ||
2363 | 16 | # limitations under the License. | ||
2364 | 17 | |||
2365 | 18 | from __future__ import print_function | ||
2366 | 19 | |||
2367 | 20 | import atexit | ||
2368 | 21 | import sys | ||
2369 | 22 | |||
2370 | 23 | from charmhelpers.fetch.python.rpdb import Rpdb | ||
2371 | 24 | from charmhelpers.core.hookenv import ( | ||
2372 | 25 | open_port, | ||
2373 | 26 | close_port, | ||
2374 | 27 | ERROR, | ||
2375 | 28 | log | ||
2376 | 29 | ) | ||
2377 | 30 | |||
2378 | 31 | __author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" | ||
2379 | 32 | |||
2380 | 33 | DEFAULT_ADDR = "0.0.0.0" | ||
2381 | 34 | DEFAULT_PORT = 4444 | ||
2382 | 35 | |||
2383 | 36 | |||
2384 | 37 | def _error(message): | ||
2385 | 38 | log(message, level=ERROR) | ||
2386 | 39 | |||
2387 | 40 | |||
2388 | 41 | def set_trace(addr=DEFAULT_ADDR, port=DEFAULT_PORT): | ||
2389 | 42 | """ | ||
2390 | 43 | Set a trace point using the remote debugger | ||
2391 | 44 | """ | ||
2392 | 45 | atexit.register(close_port, port) | ||
2393 | 46 | try: | ||
2394 | 47 | log("Starting a remote python debugger session on %s:%s" % (addr, | ||
2395 | 48 | port)) | ||
2396 | 49 | open_port(port) | ||
2397 | 50 | debugger = Rpdb(addr=addr, port=port) | ||
2398 | 51 | debugger.set_trace(sys._getframe().f_back) | ||
2399 | 52 | except Exception: | ||
2400 | 53 | _error("Cannot start a remote debug session on %s:%s" % (addr, | ||
2401 | 54 | port)) | ||
2402 | diff --git a/hooks/charmhelpers/fetch/python/packages.py b/hooks/charmhelpers/fetch/python/packages.py | |||
2403 | 0 | new file mode 100644 | 55 | new file mode 100644 |
2404 | index 0000000..6e95028 | |||
2405 | --- /dev/null | |||
2406 | +++ b/hooks/charmhelpers/fetch/python/packages.py | |||
2407 | @@ -0,0 +1,154 @@ | |||
2408 | 1 | #!/usr/bin/env python | ||
2409 | 2 | # coding: utf-8 | ||
2410 | 3 | |||
2411 | 4 | # Copyright 2014-2015 Canonical Limited. | ||
2412 | 5 | # | ||
2413 | 6 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
2414 | 7 | # you may not use this file except in compliance with the License. | ||
2415 | 8 | # You may obtain a copy of the License at | ||
2416 | 9 | # | ||
2417 | 10 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
2418 | 11 | # | ||
2419 | 12 | # Unless required by applicable law or agreed to in writing, software | ||
2420 | 13 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
2421 | 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
2422 | 15 | # See the License for the specific language governing permissions and | ||
2423 | 16 | # limitations under the License. | ||
2424 | 17 | |||
2425 | 18 | import os | ||
2426 | 19 | import six | ||
2427 | 20 | import subprocess | ||
2428 | 21 | import sys | ||
2429 | 22 | |||
2430 | 23 | from charmhelpers.fetch import apt_install, apt_update | ||
2431 | 24 | from charmhelpers.core.hookenv import charm_dir, log | ||
2432 | 25 | |||
2433 | 26 | __author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" | ||
2434 | 27 | |||
2435 | 28 | |||
2436 | 29 | def pip_execute(*args, **kwargs): | ||
2437 | 30 | """Overriden pip_execute() to stop sys.path being changed. | ||
2438 | 31 | |||
2439 | 32 | The act of importing main from the pip module seems to cause add wheels | ||
2440 | 33 | from the /usr/share/python-wheels which are installed by various tools. | ||
2441 | 34 | This function ensures that sys.path remains the same after the call is | ||
2442 | 35 | executed. | ||
2443 | 36 | """ | ||
2444 | 37 | try: | ||
2445 | 38 | _path = sys.path | ||
2446 | 39 | try: | ||
2447 | 40 | from pip import main as _pip_execute | ||
2448 | 41 | except ImportError: | ||
2449 | 42 | apt_update() | ||
2450 | 43 | if six.PY2: | ||
2451 | 44 | apt_install('python-pip') | ||
2452 | 45 | else: | ||
2453 | 46 | apt_install('python3-pip') | ||
2454 | 47 | from pip import main as _pip_execute | ||
2455 | 48 | _pip_execute(*args, **kwargs) | ||
2456 | 49 | finally: | ||
2457 | 50 | sys.path = _path | ||
2458 | 51 | |||
2459 | 52 | |||
2460 | 53 | def parse_options(given, available): | ||
2461 | 54 | """Given a set of options, check if available""" | ||
2462 | 55 | for key, value in sorted(given.items()): | ||
2463 | 56 | if not value: | ||
2464 | 57 | continue | ||
2465 | 58 | if key in available: | ||
2466 | 59 | yield "--{0}={1}".format(key, value) | ||
2467 | 60 | |||
2468 | 61 | |||
2469 | 62 | def pip_install_requirements(requirements, constraints=None, **options): | ||
2470 | 63 | """Install a requirements file. | ||
2471 | 64 | |||
2472 | 65 | :param constraints: Path to pip constraints file. | ||
2473 | 66 | http://pip.readthedocs.org/en/stable/user_guide/#constraints-files | ||
2474 | 67 | """ | ||
2475 | 68 | command = ["install"] | ||
2476 | 69 | |||
2477 | 70 | available_options = ('proxy', 'src', 'log', ) | ||
2478 | 71 | for option in parse_options(options, available_options): | ||
2479 | 72 | command.append(option) | ||
2480 | 73 | |||
2481 | 74 | command.append("-r {0}".format(requirements)) | ||
2482 | 75 | if constraints: | ||
2483 | 76 | command.append("-c {0}".format(constraints)) | ||
2484 | 77 | log("Installing from file: {} with constraints {} " | ||
2485 | 78 | "and options: {}".format(requirements, constraints, command)) | ||
2486 | 79 | else: | ||
2487 | 80 | log("Installing from file: {} with options: {}".format(requirements, | ||
2488 | 81 | command)) | ||
2489 | 82 | pip_execute(command) | ||
2490 | 83 | |||
2491 | 84 | |||
2492 | 85 | def pip_install(package, fatal=False, upgrade=False, venv=None, | ||
2493 | 86 | constraints=None, **options): | ||
2494 | 87 | """Install a python package""" | ||
2495 | 88 | if venv: | ||
2496 | 89 | venv_python = os.path.join(venv, 'bin/pip') | ||
2497 | 90 | command = [venv_python, "install"] | ||
2498 | 91 | else: | ||
2499 | 92 | command = ["install"] | ||
2500 | 93 | |||
2501 | 94 | available_options = ('proxy', 'src', 'log', 'index-url', ) | ||
2502 | 95 | for option in parse_options(options, available_options): | ||
2503 | 96 | command.append(option) | ||
2504 | 97 | |||
2505 | 98 | if upgrade: | ||
2506 | 99 | command.append('--upgrade') | ||
2507 | 100 | |||
2508 | 101 | if constraints: | ||
2509 | 102 | command.extend(['-c', constraints]) | ||
2510 | 103 | |||
2511 | 104 | if isinstance(package, list): | ||
2512 | 105 | command.extend(package) | ||
2513 | 106 | else: | ||
2514 | 107 | command.append(package) | ||
2515 | 108 | |||
2516 | 109 | log("Installing {} package with options: {}".format(package, | ||
2517 | 110 | command)) | ||
2518 | 111 | if venv: | ||
2519 | 112 | subprocess.check_call(command) | ||
2520 | 113 | else: | ||
2521 | 114 | pip_execute(command) | ||
2522 | 115 | |||
2523 | 116 | |||
2524 | 117 | def pip_uninstall(package, **options): | ||
2525 | 118 | """Uninstall a python package""" | ||
2526 | 119 | command = ["uninstall", "-q", "-y"] | ||
2527 | 120 | |||
2528 | 121 | available_options = ('proxy', 'log', ) | ||
2529 | 122 | for option in parse_options(options, available_options): | ||
2530 | 123 | command.append(option) | ||
2531 | 124 | |||
2532 | 125 | if isinstance(package, list): | ||
2533 | 126 | command.extend(package) | ||
2534 | 127 | else: | ||
2535 | 128 | command.append(package) | ||
2536 | 129 | |||
2537 | 130 | log("Uninstalling {} package with options: {}".format(package, | ||
2538 | 131 | command)) | ||
2539 | 132 | pip_execute(command) | ||
2540 | 133 | |||
2541 | 134 | |||
2542 | 135 | def pip_list(): | ||
2543 | 136 | """Returns the list of current python installed packages | ||
2544 | 137 | """ | ||
2545 | 138 | return pip_execute(["list"]) | ||
2546 | 139 | |||
2547 | 140 | |||
2548 | 141 | def pip_create_virtualenv(path=None): | ||
2549 | 142 | """Create an isolated Python environment.""" | ||
2550 | 143 | if six.PY2: | ||
2551 | 144 | apt_install('python-virtualenv') | ||
2552 | 145 | else: | ||
2553 | 146 | apt_install('python3-virtualenv') | ||
2554 | 147 | |||
2555 | 148 | if path: | ||
2556 | 149 | venv_path = path | ||
2557 | 150 | else: | ||
2558 | 151 | venv_path = os.path.join(charm_dir(), 'venv') | ||
2559 | 152 | |||
2560 | 153 | if not os.path.exists(venv_path): | ||
2561 | 154 | subprocess.check_call(['virtualenv', venv_path]) | ||
2562 | diff --git a/hooks/charmhelpers/fetch/python/rpdb.py b/hooks/charmhelpers/fetch/python/rpdb.py | |||
2563 | 0 | new file mode 100644 | 155 | new file mode 100644 |
2564 | index 0000000..9b31610 | |||
2565 | --- /dev/null | |||
2566 | +++ b/hooks/charmhelpers/fetch/python/rpdb.py | |||
2567 | @@ -0,0 +1,56 @@ | |||
2568 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
2569 | 2 | # | ||
2570 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
2571 | 4 | # you may not use this file except in compliance with the License. | ||
2572 | 5 | # You may obtain a copy of the License at | ||
2573 | 6 | # | ||
2574 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
2575 | 8 | # | ||
2576 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
2577 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
2578 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
2579 | 12 | # See the License for the specific language governing permissions and | ||
2580 | 13 | # limitations under the License. | ||
2581 | 14 | |||
2582 | 15 | """Remote Python Debugger (pdb wrapper).""" | ||
2583 | 16 | |||
2584 | 17 | import pdb | ||
2585 | 18 | import socket | ||
2586 | 19 | import sys | ||
2587 | 20 | |||
2588 | 21 | __author__ = "Bertrand Janin <b@janin.com>" | ||
2589 | 22 | __version__ = "0.1.3" | ||
2590 | 23 | |||
2591 | 24 | |||
2592 | 25 | class Rpdb(pdb.Pdb): | ||
2593 | 26 | |||
2594 | 27 | def __init__(self, addr="127.0.0.1", port=4444): | ||
2595 | 28 | """Initialize the socket and initialize pdb.""" | ||
2596 | 29 | |||
2597 | 30 | # Backup stdin and stdout before replacing them by the socket handle | ||
2598 | 31 | self.old_stdout = sys.stdout | ||
2599 | 32 | self.old_stdin = sys.stdin | ||
2600 | 33 | |||
2601 | 34 | # Open a 'reusable' socket to let the webapp reload on the same port | ||
2602 | 35 | self.skt = socket.socket(socket.AF_INET, socket.SOCK_STREAM) | ||
2603 | 36 | self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True) | ||
2604 | 37 | self.skt.bind((addr, port)) | ||
2605 | 38 | self.skt.listen(1) | ||
2606 | 39 | (clientsocket, address) = self.skt.accept() | ||
2607 | 40 | handle = clientsocket.makefile('rw') | ||
2608 | 41 | pdb.Pdb.__init__(self, completekey='tab', stdin=handle, stdout=handle) | ||
2609 | 42 | sys.stdout = sys.stdin = handle | ||
2610 | 43 | |||
2611 | 44 | def shutdown(self): | ||
2612 | 45 | """Revert stdin and stdout, close the socket.""" | ||
2613 | 46 | sys.stdout = self.old_stdout | ||
2614 | 47 | sys.stdin = self.old_stdin | ||
2615 | 48 | self.skt.close() | ||
2616 | 49 | self.set_continue() | ||
2617 | 50 | |||
2618 | 51 | def do_continue(self, arg): | ||
2619 | 52 | """Stop all operation on ``continue``.""" | ||
2620 | 53 | self.shutdown() | ||
2621 | 54 | return 1 | ||
2622 | 55 | |||
2623 | 56 | do_EOF = do_quit = do_exit = do_c = do_cont = do_continue | ||
2624 | diff --git a/hooks/charmhelpers/fetch/python/version.py b/hooks/charmhelpers/fetch/python/version.py | |||
2625 | 0 | new file mode 100644 | 57 | new file mode 100644 |
2626 | index 0000000..3eb4210 | |||
2627 | --- /dev/null | |||
2628 | +++ b/hooks/charmhelpers/fetch/python/version.py | |||
2629 | @@ -0,0 +1,32 @@ | |||
2630 | 1 | #!/usr/bin/env python | ||
2631 | 2 | # coding: utf-8 | ||
2632 | 3 | |||
2633 | 4 | # Copyright 2014-2015 Canonical Limited. | ||
2634 | 5 | # | ||
2635 | 6 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
2636 | 7 | # you may not use this file except in compliance with the License. | ||
2637 | 8 | # You may obtain a copy of the License at | ||
2638 | 9 | # | ||
2639 | 10 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
2640 | 11 | # | ||
2641 | 12 | # Unless required by applicable law or agreed to in writing, software | ||
2642 | 13 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
2643 | 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
2644 | 15 | # See the License for the specific language governing permissions and | ||
2645 | 16 | # limitations under the License. | ||
2646 | 17 | |||
2647 | 18 | import sys | ||
2648 | 19 | |||
2649 | 20 | __author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" | ||
2650 | 21 | |||
2651 | 22 | |||
2652 | 23 | def current_version(): | ||
2653 | 24 | """Current system python version""" | ||
2654 | 25 | return sys.version_info | ||
2655 | 26 | |||
2656 | 27 | |||
2657 | 28 | def current_version_string(): | ||
2658 | 29 | """Current system python version as string major.minor.micro""" | ||
2659 | 30 | return "{0}.{1}.{2}".format(sys.version_info.major, | ||
2660 | 31 | sys.version_info.minor, | ||
2661 | 32 | sys.version_info.micro) | ||
2662 | diff --git a/hooks/charmhelpers/fetch/snap.py b/hooks/charmhelpers/fetch/snap.py | |||
2663 | index 112a54c..fc70aa9 100644 | |||
2664 | --- a/hooks/charmhelpers/fetch/snap.py | |||
2665 | +++ b/hooks/charmhelpers/fetch/snap.py | |||
2666 | @@ -41,6 +41,10 @@ class CouldNotAcquireLockException(Exception): | |||
2667 | 41 | pass | 41 | pass |
2668 | 42 | 42 | ||
2669 | 43 | 43 | ||
2670 | 44 | class InvalidSnapChannel(Exception): | ||
2671 | 45 | pass | ||
2672 | 46 | |||
2673 | 47 | |||
2674 | 44 | def _snap_exec(commands): | 48 | def _snap_exec(commands): |
2675 | 45 | """ | 49 | """ |
2676 | 46 | Execute snap commands. | 50 | Execute snap commands. |
2677 | @@ -65,7 +69,7 @@ def _snap_exec(commands): | |||
2678 | 65 | .format(SNAP_NO_LOCK_RETRY_COUNT)) | 69 | .format(SNAP_NO_LOCK_RETRY_COUNT)) |
2679 | 66 | return_code = e.returncode | 70 | return_code = e.returncode |
2680 | 67 | log('Snap failed to acquire lock, trying again in {} seconds.' | 71 | log('Snap failed to acquire lock, trying again in {} seconds.' |
2682 | 68 | .format(SNAP_NO_LOCK_RETRY_DELAY, level='WARN')) | 72 | .format(SNAP_NO_LOCK_RETRY_DELAY), level='WARN') |
2683 | 69 | sleep(SNAP_NO_LOCK_RETRY_DELAY) | 73 | sleep(SNAP_NO_LOCK_RETRY_DELAY) |
2684 | 70 | 74 | ||
2685 | 71 | return return_code | 75 | return return_code |
2686 | @@ -132,3 +136,15 @@ def snap_refresh(packages, *flags): | |||
2687 | 132 | 136 | ||
2688 | 133 | log(message, level='INFO') | 137 | log(message, level='INFO') |
2689 | 134 | return _snap_exec(['refresh'] + flags + packages) | 138 | return _snap_exec(['refresh'] + flags + packages) |
2690 | 139 | |||
2691 | 140 | |||
2692 | 141 | def valid_snap_channel(channel): | ||
2693 | 142 | """ Validate snap channel exists | ||
2694 | 143 | |||
2695 | 144 | :raises InvalidSnapChannel: When channel does not exist | ||
2696 | 145 | :return: Boolean | ||
2697 | 146 | """ | ||
2698 | 147 | if channel.lower() in SNAP_CHANNELS: | ||
2699 | 148 | return True | ||
2700 | 149 | else: | ||
2701 | 150 | raise InvalidSnapChannel("Invalid Snap Channel: {}".format(channel)) | ||
2702 | diff --git a/hooks/charmhelpers/fetch/ubuntu.py b/hooks/charmhelpers/fetch/ubuntu.py | |||
2703 | index 40e1cb5..3ddaf0d 100644 | |||
2704 | --- a/hooks/charmhelpers/fetch/ubuntu.py | |||
2705 | +++ b/hooks/charmhelpers/fetch/ubuntu.py | |||
2706 | @@ -13,23 +13,23 @@ | |||
2707 | 13 | # limitations under the License. | 13 | # limitations under the License. |
2708 | 14 | 14 | ||
2709 | 15 | from collections import OrderedDict | 15 | from collections import OrderedDict |
2710 | 16 | import os | ||
2711 | 17 | import platform | 16 | import platform |
2712 | 18 | import re | 17 | import re |
2713 | 19 | import six | 18 | import six |
2714 | 20 | import time | ||
2715 | 21 | import subprocess | 19 | import subprocess |
2717 | 22 | from tempfile import NamedTemporaryFile | 20 | import sys |
2718 | 21 | import time | ||
2719 | 22 | |||
2720 | 23 | from charmhelpers.core.host import get_distrib_codename, get_system_env | ||
2721 | 23 | 24 | ||
2722 | 24 | from charmhelpers.core.host import ( | ||
2723 | 25 | lsb_release | ||
2724 | 26 | ) | ||
2725 | 27 | from charmhelpers.core.hookenv import ( | 25 | from charmhelpers.core.hookenv import ( |
2726 | 28 | log, | 26 | log, |
2727 | 29 | DEBUG, | 27 | DEBUG, |
2728 | 30 | WARNING, | 28 | WARNING, |
2729 | 29 | env_proxy_settings, | ||
2730 | 31 | ) | 30 | ) |
2731 | 32 | from charmhelpers.fetch import SourceConfigError, GPGKeyError | 31 | from charmhelpers.fetch import SourceConfigError, GPGKeyError |
2732 | 32 | from charmhelpers.fetch import ubuntu_apt_pkg | ||
2733 | 33 | 33 | ||
2734 | 34 | PROPOSED_POCKET = ( | 34 | PROPOSED_POCKET = ( |
2735 | 35 | "# Proposed\n" | 35 | "# Proposed\n" |
2736 | @@ -44,6 +44,7 @@ ARCH_TO_PROPOSED_POCKET = { | |||
2737 | 44 | 'x86_64': PROPOSED_POCKET, | 44 | 'x86_64': PROPOSED_POCKET, |
2738 | 45 | 'ppc64le': PROPOSED_PORTS_POCKET, | 45 | 'ppc64le': PROPOSED_PORTS_POCKET, |
2739 | 46 | 'aarch64': PROPOSED_PORTS_POCKET, | 46 | 'aarch64': PROPOSED_PORTS_POCKET, |
2740 | 47 | 's390x': PROPOSED_PORTS_POCKET, | ||
2741 | 47 | } | 48 | } |
2742 | 48 | CLOUD_ARCHIVE_URL = "http://ubuntu-cloud.archive.canonical.com/ubuntu" | 49 | CLOUD_ARCHIVE_URL = "http://ubuntu-cloud.archive.canonical.com/ubuntu" |
2743 | 49 | CLOUD_ARCHIVE_KEY_ID = '5EDB1B62EC4926EA' | 50 | CLOUD_ARCHIVE_KEY_ID = '5EDB1B62EC4926EA' |
2744 | @@ -157,6 +158,38 @@ CLOUD_ARCHIVE_POCKETS = { | |||
2745 | 157 | 'queens/proposed': 'xenial-proposed/queens', | 158 | 'queens/proposed': 'xenial-proposed/queens', |
2746 | 158 | 'xenial-queens/proposed': 'xenial-proposed/queens', | 159 | 'xenial-queens/proposed': 'xenial-proposed/queens', |
2747 | 159 | 'xenial-proposed/queens': 'xenial-proposed/queens', | 160 | 'xenial-proposed/queens': 'xenial-proposed/queens', |
2748 | 161 | # Rocky | ||
2749 | 162 | 'rocky': 'bionic-updates/rocky', | ||
2750 | 163 | 'bionic-rocky': 'bionic-updates/rocky', | ||
2751 | 164 | 'bionic-rocky/updates': 'bionic-updates/rocky', | ||
2752 | 165 | 'bionic-updates/rocky': 'bionic-updates/rocky', | ||
2753 | 166 | 'rocky/proposed': 'bionic-proposed/rocky', | ||
2754 | 167 | 'bionic-rocky/proposed': 'bionic-proposed/rocky', | ||
2755 | 168 | 'bionic-proposed/rocky': 'bionic-proposed/rocky', | ||
2756 | 169 | # Stein | ||
2757 | 170 | 'stein': 'bionic-updates/stein', | ||
2758 | 171 | 'bionic-stein': 'bionic-updates/stein', | ||
2759 | 172 | 'bionic-stein/updates': 'bionic-updates/stein', | ||
2760 | 173 | 'bionic-updates/stein': 'bionic-updates/stein', | ||
2761 | 174 | 'stein/proposed': 'bionic-proposed/stein', | ||
2762 | 175 | 'bionic-stein/proposed': 'bionic-proposed/stein', | ||
2763 | 176 | 'bionic-proposed/stein': 'bionic-proposed/stein', | ||
2764 | 177 | # Train | ||
2765 | 178 | 'train': 'bionic-updates/train', | ||
2766 | 179 | 'bionic-train': 'bionic-updates/train', | ||
2767 | 180 | 'bionic-train/updates': 'bionic-updates/train', | ||
2768 | 181 | 'bionic-updates/train': 'bionic-updates/train', | ||
2769 | 182 | 'train/proposed': 'bionic-proposed/train', | ||
2770 | 183 | 'bionic-train/proposed': 'bionic-proposed/train', | ||
2771 | 184 | 'bionic-proposed/train': 'bionic-proposed/train', | ||
2772 | 185 | # Ussuri | ||
2773 | 186 | 'ussuri': 'bionic-updates/ussuri', | ||
2774 | 187 | 'bionic-ussuri': 'bionic-updates/ussuri', | ||
2775 | 188 | 'bionic-ussuri/updates': 'bionic-updates/ussuri', | ||
2776 | 189 | 'bionic-updates/ussuri': 'bionic-updates/ussuri', | ||
2777 | 190 | 'ussuri/proposed': 'bionic-proposed/ussuri', | ||
2778 | 191 | 'bionic-ussuri/proposed': 'bionic-proposed/ussuri', | ||
2779 | 192 | 'bionic-proposed/ussuri': 'bionic-proposed/ussuri', | ||
2780 | 160 | } | 193 | } |
2781 | 161 | 194 | ||
2782 | 162 | 195 | ||
2783 | @@ -180,18 +213,54 @@ def filter_installed_packages(packages): | |||
2784 | 180 | return _pkgs | 213 | return _pkgs |
2785 | 181 | 214 | ||
2786 | 182 | 215 | ||
2795 | 183 | def apt_cache(in_memory=True, progress=None): | 216 | def filter_missing_packages(packages): |
2796 | 184 | """Build and return an apt cache.""" | 217 | """Return a list of packages that are installed. |
2797 | 185 | from apt import apt_pkg | 218 | |
2798 | 186 | apt_pkg.init() | 219 | :param packages: list of packages to evaluate. |
2799 | 187 | if in_memory: | 220 | :returns list: Packages that are installed. |
2800 | 188 | apt_pkg.config.set("Dir::Cache::pkgcache", "") | 221 | """ |
2801 | 189 | apt_pkg.config.set("Dir::Cache::srcpkgcache", "") | 222 | return list( |
2802 | 190 | return apt_pkg.Cache(progress) | 223 | set(packages) - |
2803 | 224 | set(filter_installed_packages(packages)) | ||
2804 | 225 | ) | ||
2805 | 226 | |||
2806 | 227 | |||
2807 | 228 | def apt_cache(*_, **__): | ||
2808 | 229 | """Shim returning an object simulating the apt_pkg Cache. | ||
2809 | 230 | |||
2810 | 231 | :param _: Accept arguments for compability, not used. | ||
2811 | 232 | :type _: any | ||
2812 | 233 | :param __: Accept keyword arguments for compability, not used. | ||
2813 | 234 | :type __: any | ||
2814 | 235 | :returns:Object used to interrogate the system apt and dpkg databases. | ||
2815 | 236 | :rtype:ubuntu_apt_pkg.Cache | ||
2816 | 237 | """ | ||
2817 | 238 | if 'apt_pkg' in sys.modules: | ||
2818 | 239 | # NOTE(fnordahl): When our consumer use the upstream ``apt_pkg`` module | ||
2819 | 240 | # in conjunction with the apt_cache helper function, they may expect us | ||
2820 | 241 | # to call ``apt_pkg.init()`` for them. | ||
2821 | 242 | # | ||
2822 | 243 | # Detect this situation, log a warning and make the call to | ||
2823 | 244 | # ``apt_pkg.init()`` to avoid the consumer Python interpreter from | ||
2824 | 245 | # crashing with a segmentation fault. | ||
2825 | 246 | log('Support for use of upstream ``apt_pkg`` module in conjunction' | ||
2826 | 247 | 'with charm-helpers is deprecated since 2019-06-25', level=WARNING) | ||
2827 | 248 | sys.modules['apt_pkg'].init() | ||
2828 | 249 | return ubuntu_apt_pkg.Cache() | ||
2829 | 191 | 250 | ||
2830 | 192 | 251 | ||
2831 | 193 | def apt_install(packages, options=None, fatal=False): | 252 | def apt_install(packages, options=None, fatal=False): |
2833 | 194 | """Install one or more packages.""" | 253 | """Install one or more packages. |
2834 | 254 | |||
2835 | 255 | :param packages: Package(s) to install | ||
2836 | 256 | :type packages: Option[str, List[str]] | ||
2837 | 257 | :param options: Options to pass on to apt-get | ||
2838 | 258 | :type options: Option[None, List[str]] | ||
2839 | 259 | :param fatal: Whether the command's output should be checked and | ||
2840 | 260 | retried. | ||
2841 | 261 | :type fatal: bool | ||
2842 | 262 | :raises: subprocess.CalledProcessError | ||
2843 | 263 | """ | ||
2844 | 195 | if options is None: | 264 | if options is None: |
2845 | 196 | options = ['--option=Dpkg::Options::=--force-confold'] | 265 | options = ['--option=Dpkg::Options::=--force-confold'] |
2846 | 197 | 266 | ||
2847 | @@ -208,7 +277,17 @@ def apt_install(packages, options=None, fatal=False): | |||
2848 | 208 | 277 | ||
2849 | 209 | 278 | ||
2850 | 210 | def apt_upgrade(options=None, fatal=False, dist=False): | 279 | def apt_upgrade(options=None, fatal=False, dist=False): |
2852 | 211 | """Upgrade all packages.""" | 280 | """Upgrade all packages. |
2853 | 281 | |||
2854 | 282 | :param options: Options to pass on to apt-get | ||
2855 | 283 | :type options: Option[None, List[str]] | ||
2856 | 284 | :param fatal: Whether the command's output should be checked and | ||
2857 | 285 | retried. | ||
2858 | 286 | :type fatal: bool | ||
2859 | 287 | :param dist: Whether ``dist-upgrade`` should be used over ``upgrade`` | ||
2860 | 288 | :type dist: bool | ||
2861 | 289 | :raises: subprocess.CalledProcessError | ||
2862 | 290 | """ | ||
2863 | 212 | if options is None: | 291 | if options is None: |
2864 | 213 | options = ['--option=Dpkg::Options::=--force-confold'] | 292 | options = ['--option=Dpkg::Options::=--force-confold'] |
2865 | 214 | 293 | ||
2866 | @@ -229,7 +308,15 @@ def apt_update(fatal=False): | |||
2867 | 229 | 308 | ||
2868 | 230 | 309 | ||
2869 | 231 | def apt_purge(packages, fatal=False): | 310 | def apt_purge(packages, fatal=False): |
2871 | 232 | """Purge one or more packages.""" | 311 | """Purge one or more packages. |
2872 | 312 | |||
2873 | 313 | :param packages: Package(s) to install | ||
2874 | 314 | :type packages: Option[str, List[str]] | ||
2875 | 315 | :param fatal: Whether the command's output should be checked and | ||
2876 | 316 | retried. | ||
2877 | 317 | :type fatal: bool | ||
2878 | 318 | :raises: subprocess.CalledProcessError | ||
2879 | 319 | """ | ||
2880 | 233 | cmd = ['apt-get', '--assume-yes', 'purge'] | 320 | cmd = ['apt-get', '--assume-yes', 'purge'] |
2881 | 234 | if isinstance(packages, six.string_types): | 321 | if isinstance(packages, six.string_types): |
2882 | 235 | cmd.append(packages) | 322 | cmd.append(packages) |
2883 | @@ -239,6 +326,21 @@ def apt_purge(packages, fatal=False): | |||
2884 | 239 | _run_apt_command(cmd, fatal) | 326 | _run_apt_command(cmd, fatal) |
2885 | 240 | 327 | ||
2886 | 241 | 328 | ||
2887 | 329 | def apt_autoremove(purge=True, fatal=False): | ||
2888 | 330 | """Purge one or more packages. | ||
2889 | 331 | :param purge: Whether the ``--purge`` option should be passed on or not. | ||
2890 | 332 | :type purge: bool | ||
2891 | 333 | :param fatal: Whether the command's output should be checked and | ||
2892 | 334 | retried. | ||
2893 | 335 | :type fatal: bool | ||
2894 | 336 | :raises: subprocess.CalledProcessError | ||
2895 | 337 | """ | ||
2896 | 338 | cmd = ['apt-get', '--assume-yes', 'autoremove'] | ||
2897 | 339 | if purge: | ||
2898 | 340 | cmd.append('--purge') | ||
2899 | 341 | _run_apt_command(cmd, fatal) | ||
2900 | 342 | |||
2901 | 343 | |||
2902 | 242 | def apt_mark(packages, mark, fatal=False): | 344 | def apt_mark(packages, mark, fatal=False): |
2903 | 243 | """Flag one or more packages using apt-mark.""" | 345 | """Flag one or more packages using apt-mark.""" |
2904 | 244 | log("Marking {} as {}".format(packages, mark)) | 346 | log("Marking {} as {}".format(packages, mark)) |
2905 | @@ -265,13 +367,18 @@ def apt_unhold(packages, fatal=False): | |||
2906 | 265 | def import_key(key): | 367 | def import_key(key): |
2907 | 266 | """Import an ASCII Armor key. | 368 | """Import an ASCII Armor key. |
2908 | 267 | 369 | ||
2916 | 268 | /!\ A Radix64 format keyid is also supported for backwards | 370 | A Radix64 format keyid is also supported for backwards |
2917 | 269 | compatibility, but should never be used; the key retrieval | 371 | compatibility. In this case Ubuntu keyserver will be |
2918 | 270 | mechanism is insecure and subject to man-in-the-middle attacks | 372 | queried for a key via HTTPS by its keyid. This method |
2919 | 271 | voiding all signature checks using that key. | 373 | is less preferrable because https proxy servers may |
2920 | 272 | 374 | require traffic decryption which is equivalent to a | |
2921 | 273 | :param keyid: The key in ASCII armor format, | 375 | man-in-the-middle attack (a proxy server impersonates |
2922 | 274 | including BEGIN and END markers. | 376 | keyserver TLS certificates and has to be explicitly |
2923 | 377 | trusted by the system). | ||
2924 | 378 | |||
2925 | 379 | :param key: A GPG key in ASCII armor format, | ||
2926 | 380 | including BEGIN and END markers or a keyid. | ||
2927 | 381 | :type key: (bytes, str) | ||
2928 | 275 | :raises: GPGKeyError if the key could not be imported | 382 | :raises: GPGKeyError if the key could not be imported |
2929 | 276 | """ | 383 | """ |
2930 | 277 | key = key.strip() | 384 | key = key.strip() |
2931 | @@ -282,35 +389,131 @@ def import_key(key): | |||
2932 | 282 | log("PGP key found (looks like ASCII Armor format)", level=DEBUG) | 389 | log("PGP key found (looks like ASCII Armor format)", level=DEBUG) |
2933 | 283 | if ('-----BEGIN PGP PUBLIC KEY BLOCK-----' in key and | 390 | if ('-----BEGIN PGP PUBLIC KEY BLOCK-----' in key and |
2934 | 284 | '-----END PGP PUBLIC KEY BLOCK-----' in key): | 391 | '-----END PGP PUBLIC KEY BLOCK-----' in key): |
2947 | 285 | log("Importing ASCII Armor PGP key", level=DEBUG) | 392 | log("Writing provided PGP key in the binary format", level=DEBUG) |
2948 | 286 | with NamedTemporaryFile() as keyfile: | 393 | if six.PY3: |
2949 | 287 | with open(keyfile.name, 'w') as fd: | 394 | key_bytes = key.encode('utf-8') |
2950 | 288 | fd.write(key) | 395 | else: |
2951 | 289 | fd.write("\n") | 396 | key_bytes = key |
2952 | 290 | cmd = ['apt-key', 'add', keyfile.name] | 397 | key_name = _get_keyid_by_gpg_key(key_bytes) |
2953 | 291 | try: | 398 | key_gpg = _dearmor_gpg_key(key_bytes) |
2954 | 292 | subprocess.check_call(cmd) | 399 | _write_apt_gpg_keyfile(key_name=key_name, key_material=key_gpg) |
2943 | 293 | except subprocess.CalledProcessError: | ||
2944 | 294 | error = "Error importing PGP key '{}'".format(key) | ||
2945 | 295 | log(error) | ||
2946 | 296 | raise GPGKeyError(error) | ||
2955 | 297 | else: | 400 | else: |
2956 | 298 | raise GPGKeyError("ASCII armor markers missing from GPG key") | 401 | raise GPGKeyError("ASCII armor markers missing from GPG key") |
2957 | 299 | else: | 402 | else: |
2958 | 300 | # We should only send things obviously not a keyid offsite | ||
2959 | 301 | # via this unsecured protocol, as it may be a secret or part | ||
2960 | 302 | # of one. | ||
2961 | 303 | log("PGP key found (looks like Radix64 format)", level=WARNING) | 403 | log("PGP key found (looks like Radix64 format)", level=WARNING) |
2963 | 304 | log("INSECURLY importing PGP key from keyserver; " | 404 | log("SECURELY importing PGP key from keyserver; " |
2964 | 305 | "full key not provided.", level=WARNING) | 405 | "full key not provided.", level=WARNING) |
2973 | 306 | cmd = ['apt-key', 'adv', '--keyserver', | 406 | # as of bionic add-apt-repository uses curl with an HTTPS keyserver URL |
2974 | 307 | 'hkp://keyserver.ubuntu.com:80', '--recv-keys', key] | 407 | # to retrieve GPG keys. `apt-key adv` command is deprecated as is |
2975 | 308 | try: | 408 | # apt-key in general as noted in its manpage. See lp:1433761 for more |
2976 | 309 | subprocess.check_call(cmd) | 409 | # history. Instead, /etc/apt/trusted.gpg.d is used directly to drop |
2977 | 310 | except subprocess.CalledProcessError: | 410 | # gpg |
2978 | 311 | error = "Error importing PGP key '{}'".format(key) | 411 | key_asc = _get_key_by_keyid(key) |
2979 | 312 | log(error) | 412 | # write the key in GPG format so that apt-key list shows it |
2980 | 313 | raise GPGKeyError(error) | 413 | key_gpg = _dearmor_gpg_key(key_asc) |
2981 | 414 | _write_apt_gpg_keyfile(key_name=key, key_material=key_gpg) | ||
2982 | 415 | |||
2983 | 416 | |||
2984 | 417 | def _get_keyid_by_gpg_key(key_material): | ||
2985 | 418 | """Get a GPG key fingerprint by GPG key material. | ||
2986 | 419 | Gets a GPG key fingerprint (40-digit, 160-bit) by the ASCII armor-encoded | ||
2987 | 420 | or binary GPG key material. Can be used, for example, to generate file | ||
2988 | 421 | names for keys passed via charm options. | ||
2989 | 422 | |||
2990 | 423 | :param key_material: ASCII armor-encoded or binary GPG key material | ||
2991 | 424 | :type key_material: bytes | ||
2992 | 425 | :raises: GPGKeyError if invalid key material has been provided | ||
2993 | 426 | :returns: A GPG key fingerprint | ||
2994 | 427 | :rtype: str | ||
2995 | 428 | """ | ||
2996 | 429 | # Use the same gpg command for both Xenial and Bionic | ||
2997 | 430 | cmd = 'gpg --with-colons --with-fingerprint' | ||
2998 | 431 | ps = subprocess.Popen(cmd.split(), | ||
2999 | 432 | stdout=subprocess.PIPE, | ||
3000 | 433 | stderr=subprocess.PIPE, | ||
3001 | 434 | stdin=subprocess.PIPE) | ||
3002 | 435 | out, err = ps.communicate(input=key_material) | ||
3003 | 436 | if six.PY3: | ||
3004 | 437 | out = out.decode('utf-8') | ||
3005 | 438 | err = err.decode('utf-8') | ||
3006 | 439 | if 'gpg: no valid OpenPGP data found.' in err: | ||
3007 | 440 | raise GPGKeyError('Invalid GPG key material provided') | ||
3008 | 441 | # from gnupg2 docs: fpr :: Fingerprint (fingerprint is in field 10) | ||
3009 | 442 | return re.search(r"^fpr:{9}([0-9A-F]{40}):$", out, re.MULTILINE).group(1) | ||
3010 | 443 | |||
3011 | 444 | |||
3012 | 445 | def _get_key_by_keyid(keyid): | ||
3013 | 446 | """Get a key via HTTPS from the Ubuntu keyserver. | ||
3014 | 447 | Different key ID formats are supported by SKS keyservers (the longer ones | ||
3015 | 448 | are more secure, see "dead beef attack" and https://evil32.com/). Since | ||
3016 | 449 | HTTPS is used, if SSLBump-like HTTPS proxies are in place, they will | ||
3017 | 450 | impersonate keyserver.ubuntu.com and generate a certificate with | ||
3018 | 451 | keyserver.ubuntu.com in the CN field or in SubjAltName fields of a | ||
3019 | 452 | certificate. If such proxy behavior is expected it is necessary to add the | ||
3020 | 453 | CA certificate chain containing the intermediate CA of the SSLBump proxy to | ||
3021 | 454 | every machine that this code runs on via ca-certs cloud-init directive (via | ||
3022 | 455 | cloudinit-userdata model-config) or via other means (such as through a | ||
3023 | 456 | custom charm option). Also note that DNS resolution for the hostname in a | ||
3024 | 457 | URL is done at a proxy server - not at the client side. | ||
3025 | 458 | |||
3026 | 459 | 8-digit (32 bit) key ID | ||
3027 | 460 | https://keyserver.ubuntu.com/pks/lookup?search=0x4652B4E6 | ||
3028 | 461 | 16-digit (64 bit) key ID | ||
3029 | 462 | https://keyserver.ubuntu.com/pks/lookup?search=0x6E85A86E4652B4E6 | ||
3030 | 463 | 40-digit key ID: | ||
3031 | 464 | https://keyserver.ubuntu.com/pks/lookup?search=0x35F77D63B5CEC106C577ED856E85A86E4652B4E6 | ||
3032 | 465 | |||
3033 | 466 | :param keyid: An 8, 16 or 40 hex digit keyid to find a key for | ||
3034 | 467 | :type keyid: (bytes, str) | ||
3035 | 468 | :returns: A key material for the specified GPG key id | ||
3036 | 469 | :rtype: (str, bytes) | ||
3037 | 470 | :raises: subprocess.CalledProcessError | ||
3038 | 471 | """ | ||
3039 | 472 | # options=mr - machine-readable output (disables html wrappers) | ||
3040 | 473 | keyserver_url = ('https://keyserver.ubuntu.com' | ||
3041 | 474 | '/pks/lookup?op=get&options=mr&exact=on&search=0x{}') | ||
3042 | 475 | curl_cmd = ['curl', keyserver_url.format(keyid)] | ||
3043 | 476 | # use proxy server settings in order to retrieve the key | ||
3044 | 477 | return subprocess.check_output(curl_cmd, | ||
3045 | 478 | env=env_proxy_settings(['https'])) | ||
3046 | 479 | |||
3047 | 480 | |||
3048 | 481 | def _dearmor_gpg_key(key_asc): | ||
3049 | 482 | """Converts a GPG key in the ASCII armor format to the binary format. | ||
3050 | 483 | |||
3051 | 484 | :param key_asc: A GPG key in ASCII armor format. | ||
3052 | 485 | :type key_asc: (str, bytes) | ||
3053 | 486 | :returns: A GPG key in binary format | ||
3054 | 487 | :rtype: (str, bytes) | ||
3055 | 488 | :raises: GPGKeyError | ||
3056 | 489 | """ | ||
3057 | 490 | ps = subprocess.Popen(['gpg', '--dearmor'], | ||
3058 | 491 | stdout=subprocess.PIPE, | ||
3059 | 492 | stderr=subprocess.PIPE, | ||
3060 | 493 | stdin=subprocess.PIPE) | ||
3061 | 494 | out, err = ps.communicate(input=key_asc) | ||
3062 | 495 | # no need to decode output as it is binary (invalid utf-8), only error | ||
3063 | 496 | if six.PY3: | ||
3064 | 497 | err = err.decode('utf-8') | ||
3065 | 498 | if 'gpg: no valid OpenPGP data found.' in err: | ||
3066 | 499 | raise GPGKeyError('Invalid GPG key material. Check your network setup' | ||
3067 | 500 | ' (MTU, routing, DNS) and/or proxy server settings' | ||
3068 | 501 | ' as well as destination keyserver status.') | ||
3069 | 502 | else: | ||
3070 | 503 | return out | ||
3071 | 504 | |||
3072 | 505 | |||
3073 | 506 | def _write_apt_gpg_keyfile(key_name, key_material): | ||
3074 | 507 | """Writes GPG key material into a file at a provided path. | ||
3075 | 508 | |||
3076 | 509 | :param key_name: A key name to use for a key file (could be a fingerprint) | ||
3077 | 510 | :type key_name: str | ||
3078 | 511 | :param key_material: A GPG key material (binary) | ||
3079 | 512 | :type key_material: (str, bytes) | ||
3080 | 513 | """ | ||
3081 | 514 | with open('/etc/apt/trusted.gpg.d/{}.gpg'.format(key_name), | ||
3082 | 515 | 'wb') as keyf: | ||
3083 | 516 | keyf.write(key_material) | ||
3084 | 314 | 517 | ||
3085 | 315 | 518 | ||
3086 | 316 | def add_source(source, key=None, fail_invalid=False): | 519 | def add_source(source, key=None, fail_invalid=False): |
3087 | @@ -385,14 +588,16 @@ def add_source(source, key=None, fail_invalid=False): | |||
3088 | 385 | for r, fn in six.iteritems(_mapping): | 588 | for r, fn in six.iteritems(_mapping): |
3089 | 386 | m = re.match(r, source) | 589 | m = re.match(r, source) |
3090 | 387 | if m: | 590 | if m: |
3091 | 388 | # call the assoicated function with the captured groups | ||
3092 | 389 | # raises SourceConfigError on error. | ||
3093 | 390 | fn(*m.groups()) | ||
3094 | 391 | if key: | 591 | if key: |
3095 | 592 | # Import key before adding the source which depends on it, | ||
3096 | 593 | # as refreshing packages could fail otherwise. | ||
3097 | 392 | try: | 594 | try: |
3098 | 393 | import_key(key) | 595 | import_key(key) |
3099 | 394 | except GPGKeyError as e: | 596 | except GPGKeyError as e: |
3100 | 395 | raise SourceConfigError(str(e)) | 597 | raise SourceConfigError(str(e)) |
3101 | 598 | # call the associated function with the captured groups | ||
3102 | 599 | # raises SourceConfigError on error. | ||
3103 | 600 | fn(*m.groups()) | ||
3104 | 396 | break | 601 | break |
3105 | 397 | else: | 602 | else: |
3106 | 398 | # nothing matched. log an error and maybe sys.exit | 603 | # nothing matched. log an error and maybe sys.exit |
3107 | @@ -405,13 +610,13 @@ def add_source(source, key=None, fail_invalid=False): | |||
3108 | 405 | def _add_proposed(): | 610 | def _add_proposed(): |
3109 | 406 | """Add the PROPOSED_POCKET as /etc/apt/source.list.d/proposed.list | 611 | """Add the PROPOSED_POCKET as /etc/apt/source.list.d/proposed.list |
3110 | 407 | 612 | ||
3112 | 408 | Uses lsb_release()['DISTRIB_CODENAME'] to determine the correct staza for | 613 | Uses get_distrib_codename to determine the correct stanza for |
3113 | 409 | the deb line. | 614 | the deb line. |
3114 | 410 | 615 | ||
3115 | 411 | For intel architecutres PROPOSED_POCKET is used for the release, but for | 616 | For intel architecutres PROPOSED_POCKET is used for the release, but for |
3116 | 412 | other architectures PROPOSED_PORTS_POCKET is used for the release. | 617 | other architectures PROPOSED_PORTS_POCKET is used for the release. |
3117 | 413 | """ | 618 | """ |
3119 | 414 | release = lsb_release()['DISTRIB_CODENAME'] | 619 | release = get_distrib_codename() |
3120 | 415 | arch = platform.machine() | 620 | arch = platform.machine() |
3121 | 416 | if arch not in six.iterkeys(ARCH_TO_PROPOSED_POCKET): | 621 | if arch not in six.iterkeys(ARCH_TO_PROPOSED_POCKET): |
3122 | 417 | raise SourceConfigError("Arch {} not supported for (distro-)proposed" | 622 | raise SourceConfigError("Arch {} not supported for (distro-)proposed" |
3123 | @@ -424,8 +629,16 @@ def _add_apt_repository(spec): | |||
3124 | 424 | """Add the spec using add_apt_repository | 629 | """Add the spec using add_apt_repository |
3125 | 425 | 630 | ||
3126 | 426 | :param spec: the parameter to pass to add_apt_repository | 631 | :param spec: the parameter to pass to add_apt_repository |
3127 | 632 | :type spec: str | ||
3128 | 427 | """ | 633 | """ |
3130 | 428 | _run_with_retries(['add-apt-repository', '--yes', spec]) | 634 | if '{series}' in spec: |
3131 | 635 | series = get_distrib_codename() | ||
3132 | 636 | spec = spec.replace('{series}', series) | ||
3133 | 637 | # software-properties package for bionic properly reacts to proxy settings | ||
3134 | 638 | # passed as environment variables (See lp:1433761). This is not the case | ||
3135 | 639 | # LTS and non-LTS releases below bionic. | ||
3136 | 640 | _run_with_retries(['add-apt-repository', '--yes', spec], | ||
3137 | 641 | cmd_env=env_proxy_settings(['https'])) | ||
3138 | 429 | 642 | ||
3139 | 430 | 643 | ||
3140 | 431 | def _add_cloud_pocket(pocket): | 644 | def _add_cloud_pocket(pocket): |
3141 | @@ -494,7 +707,7 @@ def _verify_is_ubuntu_rel(release, os_release): | |||
3142 | 494 | :raises: SourceConfigError if the release is not the same as the ubuntu | 707 | :raises: SourceConfigError if the release is not the same as the ubuntu |
3143 | 495 | release. | 708 | release. |
3144 | 496 | """ | 709 | """ |
3146 | 497 | ubuntu_rel = lsb_release()['DISTRIB_CODENAME'] | 710 | ubuntu_rel = get_distrib_codename() |
3147 | 498 | if release != ubuntu_rel: | 711 | if release != ubuntu_rel: |
3148 | 499 | raise SourceConfigError( | 712 | raise SourceConfigError( |
3149 | 500 | 'Invalid Cloud Archive release specified: {}-{} on this Ubuntu' | 713 | 'Invalid Cloud Archive release specified: {}-{} on this Ubuntu' |
3150 | @@ -505,21 +718,22 @@ def _run_with_retries(cmd, max_retries=CMD_RETRY_COUNT, retry_exitcodes=(1,), | |||
3151 | 505 | retry_message="", cmd_env=None): | 718 | retry_message="", cmd_env=None): |
3152 | 506 | """Run a command and retry until success or max_retries is reached. | 719 | """Run a command and retry until success or max_retries is reached. |
3153 | 507 | 720 | ||
3161 | 508 | :param: cmd: str: The apt command to run. | 721 | :param cmd: The apt command to run. |
3162 | 509 | :param: max_retries: int: The number of retries to attempt on a fatal | 722 | :type cmd: str |
3163 | 510 | command. Defaults to CMD_RETRY_COUNT. | 723 | :param max_retries: The number of retries to attempt on a fatal |
3164 | 511 | :param: retry_exitcodes: tuple: Optional additional exit codes to retry. | 724 | command. Defaults to CMD_RETRY_COUNT. |
3165 | 512 | Defaults to retry on exit code 1. | 725 | :type max_retries: int |
3166 | 513 | :param: retry_message: str: Optional log prefix emitted during retries. | 726 | :param retry_exitcodes: Optional additional exit codes to retry. |
3167 | 514 | :param: cmd_env: dict: Environment variables to add to the command run. | 727 | Defaults to retry on exit code 1. |
3168 | 728 | :type retry_exitcodes: tuple | ||
3169 | 729 | :param retry_message: Optional log prefix emitted during retries. | ||
3170 | 730 | :type retry_message: str | ||
3171 | 731 | :param: cmd_env: Environment variables to add to the command run. | ||
3172 | 732 | :type cmd_env: Option[None, Dict[str, str]] | ||
3173 | 515 | """ | 733 | """ |
3177 | 516 | 734 | env = get_apt_dpkg_env() | |
3175 | 517 | env = None | ||
3176 | 518 | kwargs = {} | ||
3178 | 519 | if cmd_env: | 735 | if cmd_env: |
3179 | 520 | env = os.environ.copy() | ||
3180 | 521 | env.update(cmd_env) | 736 | env.update(cmd_env) |
3181 | 522 | kwargs['env'] = env | ||
3182 | 523 | 737 | ||
3183 | 524 | if not retry_message: | 738 | if not retry_message: |
3184 | 525 | retry_message = "Failed executing '{}'".format(" ".join(cmd)) | 739 | retry_message = "Failed executing '{}'".format(" ".join(cmd)) |
3185 | @@ -531,8 +745,7 @@ def _run_with_retries(cmd, max_retries=CMD_RETRY_COUNT, retry_exitcodes=(1,), | |||
3186 | 531 | retry_results = (None,) + retry_exitcodes | 745 | retry_results = (None,) + retry_exitcodes |
3187 | 532 | while result in retry_results: | 746 | while result in retry_results: |
3188 | 533 | try: | 747 | try: |
3191 | 534 | # result = subprocess.check_call(cmd, env=env) | 748 | result = subprocess.check_call(cmd, env=env) |
3190 | 535 | result = subprocess.check_call(cmd, **kwargs) | ||
3192 | 536 | except subprocess.CalledProcessError as e: | 749 | except subprocess.CalledProcessError as e: |
3193 | 537 | retry_count = retry_count + 1 | 750 | retry_count = retry_count + 1 |
3194 | 538 | if retry_count > max_retries: | 751 | if retry_count > max_retries: |
3195 | @@ -545,22 +758,18 @@ def _run_with_retries(cmd, max_retries=CMD_RETRY_COUNT, retry_exitcodes=(1,), | |||
3196 | 545 | def _run_apt_command(cmd, fatal=False): | 758 | def _run_apt_command(cmd, fatal=False): |
3197 | 546 | """Run an apt command with optional retries. | 759 | """Run an apt command with optional retries. |
3198 | 547 | 760 | ||
3202 | 548 | :param: cmd: str: The apt command to run. | 761 | :param cmd: The apt command to run. |
3203 | 549 | :param: fatal: bool: Whether the command's output should be checked and | 762 | :type cmd: str |
3204 | 550 | retried. | 763 | :param fatal: Whether the command's output should be checked and |
3205 | 764 | retried. | ||
3206 | 765 | :type fatal: bool | ||
3207 | 551 | """ | 766 | """ |
3208 | 552 | # Provide DEBIAN_FRONTEND=noninteractive if not present in the environment. | ||
3209 | 553 | cmd_env = { | ||
3210 | 554 | 'DEBIAN_FRONTEND': os.environ.get('DEBIAN_FRONTEND', 'noninteractive')} | ||
3211 | 555 | |||
3212 | 556 | if fatal: | 767 | if fatal: |
3213 | 557 | _run_with_retries( | 768 | _run_with_retries( |
3215 | 558 | cmd, cmd_env=cmd_env, retry_exitcodes=(1, APT_NO_LOCK,), | 769 | cmd, retry_exitcodes=(1, APT_NO_LOCK,), |
3216 | 559 | retry_message="Couldn't acquire DPKG lock") | 770 | retry_message="Couldn't acquire DPKG lock") |
3217 | 560 | else: | 771 | else: |
3221 | 561 | env = os.environ.copy() | 772 | subprocess.call(cmd, env=get_apt_dpkg_env()) |
3219 | 562 | env.update(cmd_env) | ||
3220 | 563 | subprocess.call(cmd, env=env) | ||
3222 | 564 | 773 | ||
3223 | 565 | 774 | ||
3224 | 566 | def get_upstream_version(package): | 775 | def get_upstream_version(package): |
3225 | @@ -568,11 +777,10 @@ def get_upstream_version(package): | |||
3226 | 568 | 777 | ||
3227 | 569 | @returns None (if not installed) or the upstream version | 778 | @returns None (if not installed) or the upstream version |
3228 | 570 | """ | 779 | """ |
3229 | 571 | import apt_pkg | ||
3230 | 572 | cache = apt_cache() | 780 | cache = apt_cache() |
3231 | 573 | try: | 781 | try: |
3232 | 574 | pkg = cache[package] | 782 | pkg = cache[package] |
3234 | 575 | except: | 783 | except Exception: |
3235 | 576 | # the package is unknown to the current apt cache. | 784 | # the package is unknown to the current apt cache. |
3236 | 577 | return None | 785 | return None |
3237 | 578 | 786 | ||
3238 | @@ -580,4 +788,18 @@ def get_upstream_version(package): | |||
3239 | 580 | # package is known, but no version is currently installed. | 788 | # package is known, but no version is currently installed. |
3240 | 581 | return None | 789 | return None |
3241 | 582 | 790 | ||
3243 | 583 | return apt_pkg.upstream_version(pkg.current_ver.ver_str) | 791 | return ubuntu_apt_pkg.upstream_version(pkg.current_ver.ver_str) |
3244 | 792 | |||
3245 | 793 | |||
3246 | 794 | def get_apt_dpkg_env(): | ||
3247 | 795 | """Get environment suitable for execution of APT and DPKG tools. | ||
3248 | 796 | |||
3249 | 797 | We keep this in a helper function instead of in a global constant to | ||
3250 | 798 | avoid execution on import of the library. | ||
3251 | 799 | :returns: Environment suitable for execution of APT and DPKG tools. | ||
3252 | 800 | :rtype: Dict[str, str] | ||
3253 | 801 | """ | ||
3254 | 802 | # The fallback is used in the event of ``/etc/environment`` not containing | ||
3255 | 803 | # avalid PATH variable. | ||
3256 | 804 | return {'DEBIAN_FRONTEND': 'noninteractive', | ||
3257 | 805 | 'PATH': get_system_env('PATH', '/usr/sbin:/usr/bin:/sbin:/bin')} | ||
3258 | diff --git a/hooks/charmhelpers/fetch/ubuntu_apt_pkg.py b/hooks/charmhelpers/fetch/ubuntu_apt_pkg.py | |||
3259 | 584 | new file mode 100644 | 806 | new file mode 100644 |
3260 | index 0000000..929a75d | |||
3261 | --- /dev/null | |||
3262 | +++ b/hooks/charmhelpers/fetch/ubuntu_apt_pkg.py | |||
3263 | @@ -0,0 +1,267 @@ | |||
3264 | 1 | # Copyright 2019 Canonical Ltd | ||
3265 | 2 | # | ||
3266 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
3267 | 4 | # you may not use this file except in compliance with the License. | ||
3268 | 5 | # You may obtain a copy of the License at | ||
3269 | 6 | # | ||
3270 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
3271 | 8 | # | ||
3272 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
3273 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
3274 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
3275 | 12 | # See the License for the specific language governing permissions and | ||
3276 | 13 | # limitations under the License. | ||
3277 | 14 | |||
3278 | 15 | """Provide a subset of the ``python-apt`` module API. | ||
3279 | 16 | |||
3280 | 17 | Data collection is done through subprocess calls to ``apt-cache`` and | ||
3281 | 18 | ``dpkg-query`` commands. | ||
3282 | 19 | |||
3283 | 20 | The main purpose for this module is to avoid dependency on the | ||
3284 | 21 | ``python-apt`` python module. | ||
3285 | 22 | |||
3286 | 23 | The indicated python module is a wrapper around the ``apt`` C++ library | ||
3287 | 24 | which is tightly connected to the version of the distribution it was | ||
3288 | 25 | shipped on. It is not developed in a backward/forward compatible manner. | ||
3289 | 26 | |||
3290 | 27 | This in turn makes it incredibly hard to distribute as a wheel for a piece | ||
3291 | 28 | of python software that supports a span of distro releases [0][1]. | ||
3292 | 29 | |||
3293 | 30 | Upstream feedback like [2] does not give confidence in this ever changing, | ||
3294 | 31 | so with this we get rid of the dependency. | ||
3295 | 32 | |||
3296 | 33 | 0: https://github.com/juju-solutions/layer-basic/pull/135 | ||
3297 | 34 | 1: https://bugs.launchpad.net/charm-octavia/+bug/1824112 | ||
3298 | 35 | 2: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=845330#10 | ||
3299 | 36 | """ | ||
3300 | 37 | |||
3301 | 38 | import locale | ||
3302 | 39 | import os | ||
3303 | 40 | import subprocess | ||
3304 | 41 | import sys | ||
3305 | 42 | |||
3306 | 43 | |||
3307 | 44 | class _container(dict): | ||
3308 | 45 | """Simple container for attributes.""" | ||
3309 | 46 | __getattr__ = dict.__getitem__ | ||
3310 | 47 | __setattr__ = dict.__setitem__ | ||
3311 | 48 | |||
3312 | 49 | |||
3313 | 50 | class Package(_container): | ||
3314 | 51 | """Simple container for package attributes.""" | ||
3315 | 52 | |||
3316 | 53 | |||
3317 | 54 | class Version(_container): | ||
3318 | 55 | """Simple container for version attributes.""" | ||
3319 | 56 | |||
3320 | 57 | |||
3321 | 58 | class Cache(object): | ||
3322 | 59 | """Simulation of ``apt_pkg`` Cache object.""" | ||
3323 | 60 | def __init__(self, progress=None): | ||
3324 | 61 | pass | ||
3325 | 62 | |||
3326 | 63 | def __contains__(self, package): | ||
3327 | 64 | try: | ||
3328 | 65 | pkg = self.__getitem__(package) | ||
3329 | 66 | return pkg is not None | ||
3330 | 67 | except KeyError: | ||
3331 | 68 | return False | ||
3332 | 69 | |||
3333 | 70 | def __getitem__(self, package): | ||
3334 | 71 | """Get information about a package from apt and dpkg databases. | ||
3335 | 72 | |||
3336 | 73 | :param package: Name of package | ||
3337 | 74 | :type package: str | ||
3338 | 75 | :returns: Package object | ||
3339 | 76 | :rtype: object | ||
3340 | 77 | :raises: KeyError, subprocess.CalledProcessError | ||
3341 | 78 | """ | ||
3342 | 79 | apt_result = self._apt_cache_show([package])[package] | ||
3343 | 80 | apt_result['name'] = apt_result.pop('package') | ||
3344 | 81 | pkg = Package(apt_result) | ||
3345 | 82 | dpkg_result = self._dpkg_list([package]).get(package, {}) | ||
3346 | 83 | current_ver = None | ||
3347 | 84 | installed_version = dpkg_result.get('version') | ||
3348 | 85 | if installed_version: | ||
3349 | 86 | current_ver = Version({'ver_str': installed_version}) | ||
3350 | 87 | pkg.current_ver = current_ver | ||
3351 | 88 | pkg.architecture = dpkg_result.get('architecture') | ||
3352 | 89 | return pkg | ||
3353 | 90 | |||
3354 | 91 | def _dpkg_list(self, packages): | ||
3355 | 92 | """Get data from system dpkg database for package. | ||
3356 | 93 | |||
3357 | 94 | :param packages: Packages to get data from | ||
3358 | 95 | :type packages: List[str] | ||
3359 | 96 | :returns: Structured data about installed packages, keys like | ||
3360 | 97 | ``dpkg-query --list`` | ||
3361 | 98 | :rtype: dict | ||
3362 | 99 | :raises: subprocess.CalledProcessError | ||
3363 | 100 | """ | ||
3364 | 101 | pkgs = {} | ||
3365 | 102 | cmd = ['dpkg-query', '--list'] | ||
3366 | 103 | cmd.extend(packages) | ||
3367 | 104 | if locale.getlocale() == (None, None): | ||
3368 | 105 | # subprocess calls out to locale.getpreferredencoding(False) to | ||
3369 | 106 | # determine encoding. Workaround for Trusty where the | ||
3370 | 107 | # environment appears to not be set up correctly. | ||
3371 | 108 | locale.setlocale(locale.LC_ALL, 'en_US.UTF-8') | ||
3372 | 109 | try: | ||
3373 | 110 | output = subprocess.check_output(cmd, | ||
3374 | 111 | stderr=subprocess.STDOUT, | ||
3375 | 112 | universal_newlines=True) | ||
3376 | 113 | except subprocess.CalledProcessError as cp: | ||
3377 | 114 | # ``dpkg-query`` may return error and at the same time have | ||
3378 | 115 | # produced useful output, for example when asked for multiple | ||
3379 | 116 | # packages where some are not installed | ||
3380 | 117 | if cp.returncode != 1: | ||
3381 | 118 | raise | ||
3382 | 119 | output = cp.output | ||
3383 | 120 | headings = [] | ||
3384 | 121 | for line in output.splitlines(): | ||
3385 | 122 | if line.startswith('||/'): | ||
3386 | 123 | headings = line.split() | ||
3387 | 124 | headings.pop(0) | ||
3388 | 125 | continue | ||
3389 | 126 | elif (line.startswith('|') or line.startswith('+') or | ||
3390 | 127 | line.startswith('dpkg-query:')): | ||
3391 | 128 | continue | ||
3392 | 129 | else: | ||
3393 | 130 | data = line.split(None, 4) | ||
3394 | 131 | status = data.pop(0) | ||
3395 | 132 | if status != 'ii': | ||
3396 | 133 | continue | ||
3397 | 134 | pkg = {} | ||
3398 | 135 | pkg.update({k.lower(): v for k, v in zip(headings, data)}) | ||
3399 | 136 | if 'name' in pkg: | ||
3400 | 137 | pkgs.update({pkg['name']: pkg}) | ||
3401 | 138 | return pkgs | ||
3402 | 139 | |||
3403 | 140 | def _apt_cache_show(self, packages): | ||
3404 | 141 | """Get data from system apt cache for package. | ||
3405 | 142 | |||
3406 | 143 | :param packages: Packages to get data from | ||
3407 | 144 | :type packages: List[str] | ||
3408 | 145 | :returns: Structured data about package, keys like | ||
3409 | 146 | ``apt-cache show`` | ||
3410 | 147 | :rtype: dict | ||
3411 | 148 | :raises: subprocess.CalledProcessError | ||
3412 | 149 | """ | ||
3413 | 150 | pkgs = {} | ||
3414 | 151 | cmd = ['apt-cache', 'show', '--no-all-versions'] | ||
3415 | 152 | cmd.extend(packages) | ||
3416 | 153 | if locale.getlocale() == (None, None): | ||
3417 | 154 | # subprocess calls out to locale.getpreferredencoding(False) to | ||
3418 | 155 | # determine encoding. Workaround for Trusty where the | ||
3419 | 156 | # environment appears to not be set up correctly. | ||
3420 | 157 | locale.setlocale(locale.LC_ALL, 'en_US.UTF-8') | ||
3421 | 158 | try: | ||
3422 | 159 | output = subprocess.check_output(cmd, | ||
3423 | 160 | stderr=subprocess.STDOUT, | ||
3424 | 161 | universal_newlines=True) | ||
3425 | 162 | previous = None | ||
3426 | 163 | pkg = {} | ||
3427 | 164 | for line in output.splitlines(): | ||
3428 | 165 | if not line: | ||
3429 | 166 | if 'package' in pkg: | ||
3430 | 167 | pkgs.update({pkg['package']: pkg}) | ||
3431 | 168 | pkg = {} | ||
3432 | 169 | continue | ||
3433 | 170 | if line.startswith(' '): | ||
3434 | 171 | if previous and previous in pkg: | ||
3435 | 172 | pkg[previous] += os.linesep + line.lstrip() | ||
3436 | 173 | continue | ||
3437 | 174 | if ':' in line: | ||
3438 | 175 | kv = line.split(':', 1) | ||
3439 | 176 | key = kv[0].lower() | ||
3440 | 177 | if key == 'n': | ||
3441 | 178 | continue | ||
3442 | 179 | previous = key | ||
3443 | 180 | pkg.update({key: kv[1].lstrip()}) | ||
3444 | 181 | except subprocess.CalledProcessError as cp: | ||
3445 | 182 | # ``apt-cache`` returns 100 if none of the packages asked for | ||
3446 | 183 | # exist in the apt cache. | ||
3447 | 184 | if cp.returncode != 100: | ||
3448 | 185 | raise | ||
3449 | 186 | return pkgs | ||
3450 | 187 | |||
3451 | 188 | |||
3452 | 189 | class Config(_container): | ||
3453 | 190 | def __init__(self): | ||
3454 | 191 | super(Config, self).__init__(self._populate()) | ||
3455 | 192 | |||
3456 | 193 | def _populate(self): | ||
3457 | 194 | cfgs = {} | ||
3458 | 195 | cmd = ['apt-config', 'dump'] | ||
3459 | 196 | output = subprocess.check_output(cmd, | ||
3460 | 197 | stderr=subprocess.STDOUT, | ||
3461 | 198 | universal_newlines=True) | ||
3462 | 199 | for line in output.splitlines(): | ||
3463 | 200 | if not line.startswith("CommandLine"): | ||
3464 | 201 | k, v = line.split(" ", 1) | ||
3465 | 202 | cfgs[k] = v.strip(";").strip("\"") | ||
3466 | 203 | |||
3467 | 204 | return cfgs | ||
3468 | 205 | |||
3469 | 206 | |||
3470 | 207 | # Backwards compatibility with old apt_pkg module | ||
3471 | 208 | sys.modules[__name__].config = Config() | ||
3472 | 209 | |||
3473 | 210 | |||
3474 | 211 | def init(): | ||
3475 | 212 | """Compability shim that does nothing.""" | ||
3476 | 213 | pass | ||
3477 | 214 | |||
3478 | 215 | |||
3479 | 216 | def upstream_version(version): | ||
3480 | 217 | """Extracts upstream version from a version string. | ||
3481 | 218 | |||
3482 | 219 | Upstream reference: https://salsa.debian.org/apt-team/apt/blob/master/ | ||
3483 | 220 | apt-pkg/deb/debversion.cc#L259 | ||
3484 | 221 | |||
3485 | 222 | :param version: Version string | ||
3486 | 223 | :type version: str | ||
3487 | 224 | :returns: Upstream version | ||
3488 | 225 | :rtype: str | ||
3489 | 226 | """ | ||
3490 | 227 | if version: | ||
3491 | 228 | version = version.split(':')[-1] | ||
3492 | 229 | version = version.split('-')[0] | ||
3493 | 230 | return version | ||
3494 | 231 | |||
3495 | 232 | |||
3496 | 233 | def version_compare(a, b): | ||
3497 | 234 | """Compare the given versions. | ||
3498 | 235 | |||
3499 | 236 | Call out to ``dpkg`` to make sure the code doing the comparison is | ||
3500 | 237 | compatible with what the ``apt`` library would do. Mimic the return | ||
3501 | 238 | values. | ||
3502 | 239 | |||
3503 | 240 | Upstream reference: | ||
3504 | 241 | https://apt-team.pages.debian.net/python-apt/library/apt_pkg.html | ||
3505 | 242 | ?highlight=version_compare#apt_pkg.version_compare | ||
3506 | 243 | |||
3507 | 244 | :param a: version string | ||
3508 | 245 | :type a: str | ||
3509 | 246 | :param b: version string | ||
3510 | 247 | :type b: str | ||
3511 | 248 | :returns: >0 if ``a`` is greater than ``b``, 0 if a equals b, | ||
3512 | 249 | <0 if ``a`` is smaller than ``b`` | ||
3513 | 250 | :rtype: int | ||
3514 | 251 | :raises: subprocess.CalledProcessError, RuntimeError | ||
3515 | 252 | """ | ||
3516 | 253 | for op in ('gt', 1), ('eq', 0), ('lt', -1): | ||
3517 | 254 | try: | ||
3518 | 255 | subprocess.check_call(['dpkg', '--compare-versions', | ||
3519 | 256 | a, op[0], b], | ||
3520 | 257 | stderr=subprocess.STDOUT, | ||
3521 | 258 | universal_newlines=True) | ||
3522 | 259 | return op[1] | ||
3523 | 260 | except subprocess.CalledProcessError as cp: | ||
3524 | 261 | if cp.returncode == 1: | ||
3525 | 262 | continue | ||
3526 | 263 | raise | ||
3527 | 264 | else: | ||
3528 | 265 | raise RuntimeError('Unable to compare "{}" and "{}", according to ' | ||
3529 | 266 | 'our logic they are neither greater, equal nor ' | ||
3530 | 267 | 'less than each other.') | ||
3531 | diff --git a/hooks/charmhelpers/osplatform.py b/hooks/charmhelpers/osplatform.py | |||
3532 | index d9a4d5c..78c81af 100644 | |||
3533 | --- a/hooks/charmhelpers/osplatform.py | |||
3534 | +++ b/hooks/charmhelpers/osplatform.py | |||
3535 | @@ -1,4 +1,5 @@ | |||
3536 | 1 | import platform | 1 | import platform |
3537 | 2 | import os | ||
3538 | 2 | 3 | ||
3539 | 3 | 4 | ||
3540 | 4 | def get_platform(): | 5 | def get_platform(): |
3541 | @@ -9,9 +10,13 @@ def get_platform(): | |||
3542 | 9 | This string is used to decide which platform module should be imported. | 10 | This string is used to decide which platform module should be imported. |
3543 | 10 | """ | 11 | """ |
3544 | 11 | # linux_distribution is deprecated and will be removed in Python 3.7 | 12 | # linux_distribution is deprecated and will be removed in Python 3.7 |
3548 | 12 | # Warings *not* disabled, as we certainly need to fix this. | 13 | # Warnings *not* disabled, as we certainly need to fix this. |
3549 | 13 | tuple_platform = platform.linux_distribution() | 14 | if hasattr(platform, 'linux_distribution'): |
3550 | 14 | current_platform = tuple_platform[0] | 15 | tuple_platform = platform.linux_distribution() |
3551 | 16 | current_platform = tuple_platform[0] | ||
3552 | 17 | else: | ||
3553 | 18 | current_platform = _get_platform_from_fs() | ||
3554 | 19 | |||
3555 | 15 | if "Ubuntu" in current_platform: | 20 | if "Ubuntu" in current_platform: |
3556 | 16 | return "ubuntu" | 21 | return "ubuntu" |
3557 | 17 | elif "CentOS" in current_platform: | 22 | elif "CentOS" in current_platform: |
3558 | @@ -20,6 +25,22 @@ def get_platform(): | |||
3559 | 20 | # Stock Python does not detect Ubuntu and instead returns debian. | 25 | # Stock Python does not detect Ubuntu and instead returns debian. |
3560 | 21 | # Or at least it does in some build environments like Travis CI | 26 | # Or at least it does in some build environments like Travis CI |
3561 | 22 | return "ubuntu" | 27 | return "ubuntu" |
3562 | 28 | elif "elementary" in current_platform: | ||
3563 | 29 | # ElementaryOS fails to run tests locally without this. | ||
3564 | 30 | return "ubuntu" | ||
3565 | 23 | else: | 31 | else: |
3566 | 24 | raise RuntimeError("This module is not supported on {}." | 32 | raise RuntimeError("This module is not supported on {}." |
3567 | 25 | .format(current_platform)) | 33 | .format(current_platform)) |
3568 | 34 | |||
3569 | 35 | |||
3570 | 36 | def _get_platform_from_fs(): | ||
3571 | 37 | """Get Platform from /etc/os-release.""" | ||
3572 | 38 | with open(os.path.join(os.sep, 'etc', 'os-release')) as fin: | ||
3573 | 39 | content = dict( | ||
3574 | 40 | line.split('=', 1) | ||
3575 | 41 | for line in fin.read().splitlines() | ||
3576 | 42 | if '=' in line | ||
3577 | 43 | ) | ||
3578 | 44 | for k, v in content.items(): | ||
3579 | 45 | content[k] = v.strip('"') | ||
3580 | 46 | return content["NAME"] | ||
3581 | diff --git a/hooks/common.py b/hooks/common.py | |||
3582 | index 66d41ec..c2280a3 100644 | |||
3583 | --- a/hooks/common.py | |||
3584 | +++ b/hooks/common.py | |||
3585 | @@ -43,6 +43,12 @@ def check_ip(n): | |||
3586 | 43 | return False | 43 | return False |
3587 | 44 | 44 | ||
3588 | 45 | 45 | ||
def ingress_address(relation_data):
    """Return the address a remote unit published on the relation.

    Prefers the network-aware 'ingress-address' field, falling back to
    the legacy 'private-address' field when it is absent.
    """
    try:
        return relation_data['ingress-address']
    except KeyError:
        return relation_data['private-address']
3593 | 50 | |||
3594 | 51 | |||
3595 | 46 | def get_local_ingress_address(binding='website'): | 52 | def get_local_ingress_address(binding='website'): |
3596 | 47 | # using network-get to retrieve the address details if available. | 53 | # using network-get to retrieve the address details if available. |
3597 | 48 | log('Getting hostname for binding %s' % binding) | 54 | log('Getting hostname for binding %s' % binding) |
3598 | @@ -342,21 +348,6 @@ def apply_host_policy(target_id, owner_unit, owner_relation): | |||
3599 | 342 | ssh_service.save() | 348 | ssh_service.save() |
3600 | 343 | 349 | ||
3601 | 344 | 350 | ||
3602 | 345 | def get_valid_relations(): | ||
3603 | 346 | for x in subprocess.Popen(['relation-ids', 'monitors'], | ||
3604 | 347 | stdout=subprocess.PIPE).stdout: | ||
3605 | 348 | yield x.strip() | ||
3606 | 349 | for x in subprocess.Popen(['relation-ids', 'nagios'], | ||
3607 | 350 | stdout=subprocess.PIPE).stdout: | ||
3608 | 351 | yield x.strip() | ||
3609 | 352 | |||
3610 | 353 | |||
3611 | 354 | def get_valid_units(relation_id): | ||
3612 | 355 | for x in subprocess.Popen(['relation-list', '-r', relation_id], | ||
3613 | 356 | stdout=subprocess.PIPE).stdout: | ||
3614 | 357 | yield x.strip() | ||
3615 | 358 | |||
3616 | 359 | |||
3617 | 360 | def _replace_in_config(find_me, replacement): | 351 | def _replace_in_config(find_me, replacement): |
3618 | 361 | with open(INPROGRESS_CFG) as cf: | 352 | with open(INPROGRESS_CFG) as cf: |
3619 | 362 | with tempfile.NamedTemporaryFile(dir=INPROGRESS_DIR, delete=False) as new_cf: | 353 | with tempfile.NamedTemporaryFile(dir=INPROGRESS_DIR, delete=False) as new_cf: |
3620 | diff --git a/hooks/install b/hooks/install | |||
3621 | index f002e46..a8900a3 100755 | |||
3622 | --- a/hooks/install | |||
3623 | +++ b/hooks/install | |||
3624 | @@ -29,7 +29,7 @@ echo nagios3-cgi nagios3/adminpassword password $PASSWORD | debconf-set-selectio | |||
3625 | 29 | echo nagios3-cgi nagios3/adminpassword-repeat password $PASSWORD | debconf-set-selections | 29 | echo nagios3-cgi nagios3/adminpassword-repeat password $PASSWORD | debconf-set-selections |
3626 | 30 | 30 | ||
3627 | 31 | DEBIAN_FRONTEND=noninteractive apt-get -qy \ | 31 | DEBIAN_FRONTEND=noninteractive apt-get -qy \ |
3629 | 32 | install nagios3 nagios-plugins python-cheetah python-jinja2 dnsutils debconf-utils nagios-nrpe-plugin pynag python-apt python-yaml | 32 | install nagios3 nagios-plugins python-cheetah python-jinja2 dnsutils debconf-utils nagios-nrpe-plugin pynag python-apt python-yaml python-enum34 |
3630 | 33 | 33 | ||
3631 | 34 | scripts/postfix_loopback_only.sh | 34 | scripts/postfix_loopback_only.sh |
3632 | 35 | 35 | ||
3633 | diff --git a/hooks/monitors-relation-changed b/hooks/monitors-relation-changed | |||
3634 | index 13cb96c..e16589d 100755 | |||
3635 | --- a/hooks/monitors-relation-changed | |||
3636 | +++ b/hooks/monitors-relation-changed | |||
3637 | @@ -18,17 +18,77 @@ | |||
3638 | 18 | 18 | ||
3639 | 19 | import sys | 19 | import sys |
3640 | 20 | import os | 20 | import os |
3641 | 21 | import subprocess | ||
3642 | 22 | import yaml | 21 | import yaml |
3643 | 23 | import json | ||
3644 | 24 | import re | 22 | import re |
3652 | 25 | 23 | from collections import defaultdict | |
3653 | 26 | 24 | ||
3654 | 27 | from common import (customize_service, get_pynag_host, | 25 | from charmhelpers.core.hookenv import ( |
3655 | 28 | get_pynag_service, refresh_hostgroups, | 26 | relation_get, |
3656 | 29 | get_valid_relations, get_valid_units, | 27 | ingress_address, |
3657 | 30 | initialize_inprogress_config, flush_inprogress_config, | 28 | related_units, |
3658 | 31 | get_local_ingress_address) | 29 | relation_ids, |
3659 | 30 | log, | ||
3660 | 31 | DEBUG | ||
3661 | 32 | ) | ||
3662 | 33 | |||
3663 | 34 | from common import ( | ||
3664 | 35 | customize_service, | ||
3665 | 36 | get_pynag_host, | ||
3666 | 37 | get_pynag_service, | ||
3667 | 38 | refresh_hostgroups, | ||
3668 | 39 | initialize_inprogress_config, | ||
3669 | 40 | flush_inprogress_config | ||
3670 | 41 | ) | ||
3671 | 42 | |||
3672 | 43 | |||
# Keys that must be present (and truthy) in a unit's relation data for
# the unit to be accepted as a monitoring target; units missing any of
# these are skipped by _prepare_relation_data.
REQUIRED_REL_DATA_KEYS = [
    'target-address',
    'monitors',
    'target-id',
]
3678 | 49 | |||
3679 | 50 | |||
def _prepare_relation_data(unit, rid):
    """Fetch and normalise the relation data published by *unit* on *rid*.

    Fills in synthetic values for the generic 'nagios' relation and a
    missing 'target-address'.  Returns the resulting dict, or an empty
    dict when the unit published nothing usable (the reason is logged at
    DEBUG level).
    """
    data = relation_get(unit=unit, rid=rid)

    if not data:
        log('no relation data found for unit {} in relation {} - '
            'skipping'.format(unit, rid), level=DEBUG)
        return {}

    if rid.split(':')[0] == 'nagios':
        # The generic 'nagios' relation carries no monitor payload, so
        # synthesise a target id and an empty remote-monitors structure.
        data['target-id'] = unit.replace('/', '-')
        data['monitors'] = {'monitors': {'remote': {}}}

    if not data.get('target-address'):
        data['target-address'] = ingress_address(unit=unit, rid=rid)

    for required in REQUIRED_REL_DATA_KEYS:
        if not data.get(required):
            log('{} not found for unit {} in relation {} - '
                'skipping'.format(required, unit, rid), level=DEBUG)
            return {}

    return data
3709 | 80 | |||
3710 | 81 | |||
def _collect_relation_data():
    """Gather usable relation data for every monitored unit.

    Walks both the 'nagios' and 'monitors' relations and returns a
    mapping of relation id -> {unit name -> prepared relation data},
    omitting units whose data fails validation.
    """
    collected = defaultdict(dict)
    for relation_name in ('nagios', 'monitors'):
        for relation_id in relation_ids(relation_name):
            for unit_name in related_units(relation_id):
                unit_data = _prepare_relation_data(unit=unit_name,
                                                   rid=relation_id)
                if unit_data:
                    collected[relation_id][unit_name] = unit_data

    return collected
3721 | 32 | 92 | ||
3722 | 33 | 93 | ||
3723 | 34 | def main(argv): | 94 | def main(argv): |
3724 | @@ -43,35 +103,7 @@ def main(argv): | |||
3725 | 43 | relation_settings['target-address'] = argv[3] | 103 | relation_settings['target-address'] = argv[3] |
3726 | 44 | all_relations = {'monitors:99': {'testing/0': relation_settings}} | 104 | all_relations = {'monitors:99': {'testing/0': relation_settings}} |
3727 | 45 | else: | 105 | else: |
3757 | 46 | all_relations = {} | 106 | all_relations = _collect_relation_data() |
3729 | 47 | for relid in get_valid_relations(): | ||
3730 | 48 | (relname, relnum) = relid.split(':') | ||
3731 | 49 | for unit in get_valid_units(relid): | ||
3732 | 50 | relation_settings = json.loads( | ||
3733 | 51 | subprocess.check_output(['relation-get', '--format=json', | ||
3734 | 52 | '-r', relid, | ||
3735 | 53 | '-',unit]).strip()) | ||
3736 | 54 | |||
3737 | 55 | if relation_settings is None or relation_settings == '': | ||
3738 | 56 | continue | ||
3739 | 57 | |||
3740 | 58 | if relname == 'monitors': | ||
3741 | 59 | if ('monitors' not in relation_settings | ||
3742 | 60 | or 'target-id' not in relation_settings): | ||
3743 | 61 | continue | ||
3744 | 62 | if ('target-id' in relation_settings and 'target-address' not in relation_settings): | ||
3745 | 63 | relation_settings['target-address'] = get_local_ingress_address('monitors') | ||
3746 | 64 | |||
3747 | 65 | else: | ||
3748 | 66 | # Fake it for the more generic 'nagios' relation' | ||
3749 | 67 | relation_settings['target-id'] = unit.replace('/','-') | ||
3750 | 68 | relation_settings['target-address'] = get_local_ingress_address('monitors') | ||
3751 | 69 | relation_settings['monitors'] = {'monitors': {'remote': {} } } | ||
3752 | 70 | |||
3753 | 71 | if relid not in all_relations: | ||
3754 | 72 | all_relations[relid] = {} | ||
3755 | 73 | |||
3756 | 74 | all_relations[relid][unit] = relation_settings | ||
3758 | 75 | 107 | ||
3759 | 76 | # Hack to work around http://pad.lv/1025478 | 108 | # Hack to work around http://pad.lv/1025478 |
3760 | 77 | targets_with_addresses = set() | 109 | targets_with_addresses = set() |
LGTM — a few nits, but nothing blocking in my opinion.