Merge ~xavpaice/charm-thruk-agent:update_charmhelpers into ~nagios-charmers/charm-thruk-agent:master
- Git
- lp:~xavpaice/charm-thruk-agent
- update_charmhelpers
- Merge into master
Proposed by
Xav Paice
Status: | Merged |
---|---|
Approved by: | Wouter van Bommel |
Approved revision: | 66c19905c8c565dfb495a14e83a809681496ac46 |
Merged at revision: | 3a9512547f97be9b44dacbc79f31818b441be4ec |
Proposed branch: | ~xavpaice/charm-thruk-agent:update_charmhelpers |
Merge into: | ~nagios-charmers/charm-thruk-agent:master |
Diff against target: |
2206 lines (+1317/-155) 23 files modified
bin/charm_helpers_sync.py (+30/-22) hooks/actions.py (+2/-1) hooks/charmhelpers/__init__.py (+4/-4) hooks/charmhelpers/core/hookenv.py (+449/-27) hooks/charmhelpers/core/host.py (+164/-11) hooks/charmhelpers/core/host_factory/ubuntu.py (+25/-0) hooks/charmhelpers/core/kernel.py (+2/-2) hooks/charmhelpers/core/services/base.py (+18/-7) hooks/charmhelpers/core/strutils.py (+11/-5) hooks/charmhelpers/core/sysctl.py (+21/-10) hooks/charmhelpers/core/templating.py (+18/-9) hooks/charmhelpers/core/unitdata.py (+8/-1) hooks/charmhelpers/fetch/__init__.py (+2/-0) hooks/charmhelpers/fetch/archiveurl.py (+1/-1) hooks/charmhelpers/fetch/bzrurl.py (+2/-2) hooks/charmhelpers/fetch/giturl.py (+2/-2) hooks/charmhelpers/fetch/python/__init__.py (+13/-0) hooks/charmhelpers/fetch/python/debug.py (+54/-0) hooks/charmhelpers/fetch/python/packages.py (+154/-0) hooks/charmhelpers/fetch/python/rpdb.py (+56/-0) hooks/charmhelpers/fetch/python/version.py (+32/-0) hooks/charmhelpers/fetch/snap.py (+33/-5) hooks/charmhelpers/fetch/ubuntu.py (+216/-46) |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Wouter van Bommel (community) | Approve | ||
Canonical IS Reviewers | Pending | ||
Review via email: mp+368892@code.launchpad.net |
Commit message
Update to allow the use of the newer juju proxy settings for apt installations.
Description of the change
To post a comment you must log in.
Revision history for this message
🤖 Canonical IS Merge Bot (canonical-is-mergebot) wrote : | # |
Revision history for this message
Wouter van Bommel (woutervb) wrote : | # |
Upgrade of charmhelpers
review:
Approve
Revision history for this message
🤖 Canonical IS Merge Bot (canonical-is-mergebot) wrote : | # |
Change successfully merged at revision 3a9512547f97be9b44dacbc79f31818b441be4ec
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | diff --git a/bin/charm_helpers_sync.py b/bin/charm_helpers_sync.py |
2 | index f67fdb9..7c0c194 100644 |
3 | --- a/bin/charm_helpers_sync.py |
4 | +++ b/bin/charm_helpers_sync.py |
5 | @@ -2,19 +2,17 @@ |
6 | |
7 | # Copyright 2014-2015 Canonical Limited. |
8 | # |
9 | -# This file is part of charm-helpers. |
10 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
11 | +# you may not use this file except in compliance with the License. |
12 | +# You may obtain a copy of the License at |
13 | # |
14 | -# charm-helpers is free software: you can redistribute it and/or modify |
15 | -# it under the terms of the GNU Lesser General Public License version 3 as |
16 | -# published by the Free Software Foundation. |
17 | +# http://www.apache.org/licenses/LICENSE-2.0 |
18 | # |
19 | -# charm-helpers is distributed in the hope that it will be useful, |
20 | -# but WITHOUT ANY WARRANTY; without even the implied warranty of |
21 | -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
22 | -# GNU Lesser General Public License for more details. |
23 | -# |
24 | -# You should have received a copy of the GNU Lesser General Public License |
25 | -# along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. |
26 | +# Unless required by applicable law or agreed to in writing, software |
27 | +# distributed under the License is distributed on an "AS IS" BASIS, |
28 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
29 | +# See the License for the specific language governing permissions and |
30 | +# limitations under the License. |
31 | |
32 | # Authors: |
33 | # Adam Gandelman <adamg@ubuntu.com> |
34 | @@ -31,7 +29,7 @@ from fnmatch import fnmatch |
35 | |
36 | import six |
37 | |
38 | -CHARM_HELPERS_BRANCH = 'lp:charm-helpers' |
39 | +CHARM_HELPERS_REPO = 'https://github.com/juju/charm-helpers' |
40 | |
41 | |
42 | def parse_config(conf_file): |
43 | @@ -41,10 +39,16 @@ def parse_config(conf_file): |
44 | return yaml.load(open(conf_file).read()) |
45 | |
46 | |
47 | -def clone_helpers(work_dir, branch): |
48 | +def clone_helpers(work_dir, repo): |
49 | dest = os.path.join(work_dir, 'charm-helpers') |
50 | - logging.info('Checking out %s to %s.' % (branch, dest)) |
51 | - cmd = ['bzr', 'checkout', '--lightweight', branch, dest] |
52 | + logging.info('Cloning out %s to %s.' % (repo, dest)) |
53 | + branch = None |
54 | + if '@' in repo: |
55 | + repo, branch = repo.split('@', 1) |
56 | + cmd = ['git', 'clone', '--depth=1'] |
57 | + if branch is not None: |
58 | + cmd += ['--branch', branch] |
59 | + cmd += [repo, dest] |
60 | subprocess.check_call(cmd) |
61 | return dest |
62 | |
63 | @@ -176,6 +180,9 @@ def extract_options(inc, global_options=None): |
64 | |
65 | |
66 | def sync_helpers(include, src, dest, options=None): |
67 | + if os.path.exists(dest): |
68 | + logging.debug('Removing existing directory: %s' % dest) |
69 | + shutil.rmtree(dest) |
70 | if not os.path.isdir(dest): |
71 | os.makedirs(dest) |
72 | |
73 | @@ -193,14 +200,15 @@ def sync_helpers(include, src, dest, options=None): |
74 | inc, opts = extract_options(m, global_options) |
75 | sync(src, dest, '%s.%s' % (k, inc), opts) |
76 | |
77 | + |
78 | if __name__ == '__main__': |
79 | parser = optparse.OptionParser() |
80 | parser.add_option('-c', '--config', action='store', dest='config', |
81 | default=None, help='helper config file') |
82 | parser.add_option('-D', '--debug', action='store_true', dest='debug', |
83 | default=False, help='debug') |
84 | - parser.add_option('-b', '--branch', action='store', dest='branch', |
85 | - help='charm-helpers bzr branch (overrides config)') |
86 | + parser.add_option('-r', '--repository', action='store', dest='repo', |
87 | + help='charm-helpers git repository (overrides config)') |
88 | parser.add_option('-d', '--destination', action='store', dest='dest_dir', |
89 | help='sync destination dir (overrides config)') |
90 | (opts, args) = parser.parse_args() |
91 | @@ -219,10 +227,10 @@ if __name__ == '__main__': |
92 | else: |
93 | config = {} |
94 | |
95 | - if 'branch' not in config: |
96 | - config['branch'] = CHARM_HELPERS_BRANCH |
97 | - if opts.branch: |
98 | - config['branch'] = opts.branch |
99 | + if 'repo' not in config: |
100 | + config['repo'] = CHARM_HELPERS_REPO |
101 | + if opts.repo: |
102 | + config['repo'] = opts.repo |
103 | if opts.dest_dir: |
104 | config['destination'] = opts.dest_dir |
105 | |
106 | @@ -242,7 +250,7 @@ if __name__ == '__main__': |
107 | sync_options = config['options'] |
108 | tmpd = tempfile.mkdtemp() |
109 | try: |
110 | - checkout = clone_helpers(tmpd, config['branch']) |
111 | + checkout = clone_helpers(tmpd, config['repo']) |
112 | sync_helpers(config['include'], checkout, config['destination'], |
113 | options=sync_options) |
114 | except Exception as e: |
115 | diff --git a/hooks/actions.py b/hooks/actions.py |
116 | index a72e9a6..1593b51 100644 |
117 | --- a/hooks/actions.py |
118 | +++ b/hooks/actions.py |
119 | @@ -93,7 +93,8 @@ def update_ppa(service_name): |
120 | prev_source = config.previous('source') |
121 | if prev_source is not None and prev_source != new_source: |
122 | subprocess.check_call(['add-apt-repository', |
123 | - '--yes', '--remove', prev_source]) |
124 | + '--yes', '--remove', prev_source], |
125 | + env=hookenv.env_proxy_settings(['https'])) |
126 | add_source(config.get('source'), config.get('key', None)) |
127 | apt_update(fatal=True) |
128 | package_list = ["thruk", "pwgen", "apache2-utils"] |
129 | diff --git a/hooks/charmhelpers/__init__.py b/hooks/charmhelpers/__init__.py |
130 | index e7aa471..61ef907 100644 |
131 | --- a/hooks/charmhelpers/__init__.py |
132 | +++ b/hooks/charmhelpers/__init__.py |
133 | @@ -23,22 +23,22 @@ import subprocess |
134 | import sys |
135 | |
136 | try: |
137 | - import six # flake8: noqa |
138 | + import six # NOQA:F401 |
139 | except ImportError: |
140 | if sys.version_info.major == 2: |
141 | subprocess.check_call(['apt-get', 'install', '-y', 'python-six']) |
142 | else: |
143 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-six']) |
144 | - import six # flake8: noqa |
145 | + import six # NOQA:F401 |
146 | |
147 | try: |
148 | - import yaml # flake8: noqa |
149 | + import yaml # NOQA:F401 |
150 | except ImportError: |
151 | if sys.version_info.major == 2: |
152 | subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml']) |
153 | else: |
154 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml']) |
155 | - import yaml # flake8: noqa |
156 | + import yaml # NOQA:F401 |
157 | |
158 | |
159 | # Holds a list of mapping of mangled function names that have been deprecated |
160 | diff --git a/hooks/charmhelpers/core/hookenv.py b/hooks/charmhelpers/core/hookenv.py |
161 | index e44e22b..4744eb4 100644 |
162 | --- a/hooks/charmhelpers/core/hookenv.py |
163 | +++ b/hooks/charmhelpers/core/hookenv.py |
164 | @@ -22,10 +22,12 @@ from __future__ import print_function |
165 | import copy |
166 | from distutils.version import LooseVersion |
167 | from functools import wraps |
168 | +from collections import namedtuple |
169 | import glob |
170 | import os |
171 | import json |
172 | import yaml |
173 | +import re |
174 | import subprocess |
175 | import sys |
176 | import errno |
177 | @@ -38,12 +40,20 @@ if not six.PY3: |
178 | else: |
179 | from collections import UserDict |
180 | |
181 | + |
182 | CRITICAL = "CRITICAL" |
183 | ERROR = "ERROR" |
184 | WARNING = "WARNING" |
185 | INFO = "INFO" |
186 | DEBUG = "DEBUG" |
187 | +TRACE = "TRACE" |
188 | MARKER = object() |
189 | +SH_MAX_ARG = 131071 |
190 | + |
191 | + |
192 | +RANGE_WARNING = ('Passing NO_PROXY string that includes a cidr. ' |
193 | + 'This may not be compatible with software you are ' |
194 | + 'running in your shell.') |
195 | |
196 | cache = {} |
197 | |
198 | @@ -64,7 +74,7 @@ def cached(func): |
199 | @wraps(func) |
200 | def wrapper(*args, **kwargs): |
201 | global cache |
202 | - key = str((func, args, kwargs)) |
203 | + key = json.dumps((func, args, kwargs), sort_keys=True, default=str) |
204 | try: |
205 | return cache[key] |
206 | except KeyError: |
207 | @@ -94,7 +104,7 @@ def log(message, level=None): |
208 | command += ['-l', level] |
209 | if not isinstance(message, six.string_types): |
210 | message = repr(message) |
211 | - command += [message] |
212 | + command += [message[:SH_MAX_ARG]] |
213 | # Missing juju-log should not cause failures in unit tests |
214 | # Send log output to stderr |
215 | try: |
216 | @@ -197,11 +207,58 @@ def remote_unit(): |
217 | return os.environ.get('JUJU_REMOTE_UNIT', None) |
218 | |
219 | |
220 | -def service_name(): |
221 | - """The name service group this unit belongs to""" |
222 | +def application_name(): |
223 | + """ |
224 | + The name of the deployed application this unit belongs to. |
225 | + """ |
226 | return local_unit().split('/')[0] |
227 | |
228 | |
229 | +def service_name(): |
230 | + """ |
231 | + .. deprecated:: 0.19.1 |
232 | + Alias for :func:`application_name`. |
233 | + """ |
234 | + return application_name() |
235 | + |
236 | + |
237 | +def model_name(): |
238 | + """ |
239 | + Name of the model that this unit is deployed in. |
240 | + """ |
241 | + return os.environ['JUJU_MODEL_NAME'] |
242 | + |
243 | + |
244 | +def model_uuid(): |
245 | + """ |
246 | + UUID of the model that this unit is deployed in. |
247 | + """ |
248 | + return os.environ['JUJU_MODEL_UUID'] |
249 | + |
250 | + |
251 | +def principal_unit(): |
252 | + """Returns the principal unit of this unit, otherwise None""" |
253 | + # Juju 2.2 and above provides JUJU_PRINCIPAL_UNIT |
254 | + principal_unit = os.environ.get('JUJU_PRINCIPAL_UNIT', None) |
255 | + # If it's empty, then this unit is the principal |
256 | + if principal_unit == '': |
257 | + return os.environ['JUJU_UNIT_NAME'] |
258 | + elif principal_unit is not None: |
259 | + return principal_unit |
260 | + # For Juju 2.1 and below, let's try work out the principle unit by |
261 | + # the various charms' metadata.yaml. |
262 | + for reltype in relation_types(): |
263 | + for rid in relation_ids(reltype): |
264 | + for unit in related_units(rid): |
265 | + md = _metadata_unit(unit) |
266 | + if not md: |
267 | + continue |
268 | + subordinate = md.pop('subordinate', None) |
269 | + if not subordinate: |
270 | + return unit |
271 | + return None |
272 | + |
273 | + |
274 | @cached |
275 | def remote_service_name(relid=None): |
276 | """The remote service name for a given relation-id (or the current relation)""" |
277 | @@ -263,7 +320,7 @@ class Config(dict): |
278 | self.implicit_save = True |
279 | self._prev_dict = None |
280 | self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME) |
281 | - if os.path.exists(self.path): |
282 | + if os.path.exists(self.path) and os.stat(self.path).st_size: |
283 | self.load_previous() |
284 | atexit(self._implicit_save) |
285 | |
286 | @@ -283,7 +340,11 @@ class Config(dict): |
287 | """ |
288 | self.path = path or self.path |
289 | with open(self.path) as f: |
290 | - self._prev_dict = json.load(f) |
291 | + try: |
292 | + self._prev_dict = json.load(f) |
293 | + except ValueError as e: |
294 | + log('Unable to parse previous config data - {}'.format(str(e)), |
295 | + level=ERROR) |
296 | for k, v in copy.deepcopy(self._prev_dict).items(): |
297 | if k not in self: |
298 | self[k] = v |
299 | @@ -319,6 +380,7 @@ class Config(dict): |
300 | |
301 | """ |
302 | with open(self.path, 'w') as f: |
303 | + os.fchmod(f.fileno(), 0o600) |
304 | json.dump(self, f) |
305 | |
306 | def _implicit_save(self): |
307 | @@ -326,22 +388,40 @@ class Config(dict): |
308 | self.save() |
309 | |
310 | |
311 | -@cached |
312 | +_cache_config = None |
313 | + |
314 | + |
315 | def config(scope=None): |
316 | - """Juju charm configuration""" |
317 | - config_cmd_line = ['config-get'] |
318 | - if scope is not None: |
319 | - config_cmd_line.append(scope) |
320 | - else: |
321 | - config_cmd_line.append('--all') |
322 | - config_cmd_line.append('--format=json') |
323 | + """ |
324 | + Get the juju charm configuration (scope==None) or individual key, |
325 | + (scope=str). The returned value is a Python data structure loaded as |
326 | + JSON from the Juju config command. |
327 | + |
328 | + :param scope: If set, return the value for the specified key. |
329 | + :type scope: Optional[str] |
330 | + :returns: Either the whole config as a Config, or a key from it. |
331 | + :rtype: Any |
332 | + """ |
333 | + global _cache_config |
334 | + config_cmd_line = ['config-get', '--all', '--format=json'] |
335 | try: |
336 | - config_data = json.loads( |
337 | - subprocess.check_output(config_cmd_line).decode('UTF-8')) |
338 | + # JSON Decode Exception for Python3.5+ |
339 | + exc_json = json.decoder.JSONDecodeError |
340 | + except AttributeError: |
341 | + # JSON Decode Exception for Python2.7 through Python3.4 |
342 | + exc_json = ValueError |
343 | + try: |
344 | + if _cache_config is None: |
345 | + config_data = json.loads( |
346 | + subprocess.check_output(config_cmd_line).decode('UTF-8')) |
347 | + _cache_config = Config(config_data) |
348 | if scope is not None: |
349 | - return config_data |
350 | - return Config(config_data) |
351 | - except ValueError: |
352 | + return _cache_config.get(scope) |
353 | + return _cache_config |
354 | + except (exc_json, UnicodeDecodeError) as e: |
355 | + log('Unable to parse output from config-get: config_cmd_line="{}" ' |
356 | + 'message="{}"' |
357 | + .format(config_cmd_line, str(e)), level=ERROR) |
358 | return None |
359 | |
360 | |
361 | @@ -435,6 +515,67 @@ def related_units(relid=None): |
362 | subprocess.check_output(units_cmd_line).decode('UTF-8')) or [] |
363 | |
364 | |
365 | +def expected_peer_units(): |
366 | + """Get a generator for units we expect to join peer relation based on |
367 | + goal-state. |
368 | + |
369 | + The local unit is excluded from the result to make it easy to gauge |
370 | + completion of all peers joining the relation with existing hook tools. |
371 | + |
372 | + Example usage: |
373 | + log('peer {} of {} joined peer relation' |
374 | + .format(len(related_units()), |
375 | + len(list(expected_peer_units())))) |
376 | + |
377 | + This function will raise NotImplementedError if used with juju versions |
378 | + without goal-state support. |
379 | + |
380 | + :returns: iterator |
381 | + :rtype: types.GeneratorType |
382 | + :raises: NotImplementedError |
383 | + """ |
384 | + if not has_juju_version("2.4.0"): |
385 | + # goal-state first appeared in 2.4.0. |
386 | + raise NotImplementedError("goal-state") |
387 | + _goal_state = goal_state() |
388 | + return (key for key in _goal_state['units'] |
389 | + if '/' in key and key != local_unit()) |
390 | + |
391 | + |
392 | +def expected_related_units(reltype=None): |
393 | + """Get a generator for units we expect to join relation based on |
394 | + goal-state. |
395 | + |
396 | + Note that you can not use this function for the peer relation, take a look |
397 | + at expected_peer_units() for that. |
398 | + |
399 | + This function will raise KeyError if you request information for a |
400 | + relation type for which juju goal-state does not have information. It will |
401 | + raise NotImplementedError if used with juju versions without goal-state |
402 | + support. |
403 | + |
404 | + Example usage: |
405 | + log('participant {} of {} joined relation {}' |
406 | + .format(len(related_units()), |
407 | + len(list(expected_related_units())), |
408 | + relation_type())) |
409 | + |
410 | + :param reltype: Relation type to list data for, default is to list data for |
411 | + the realtion type we are currently executing a hook for. |
412 | + :type reltype: str |
413 | + :returns: iterator |
414 | + :rtype: types.GeneratorType |
415 | + :raises: KeyError, NotImplementedError |
416 | + """ |
417 | + if not has_juju_version("2.4.4"): |
418 | + # goal-state existed in 2.4.0, but did not list individual units to |
419 | + # join a relation in 2.4.1 through 2.4.3. (LP: #1794739) |
420 | + raise NotImplementedError("goal-state relation unit count") |
421 | + reltype = reltype or relation_type() |
422 | + _goal_state = goal_state() |
423 | + return (key for key in _goal_state['relations'][reltype] if '/' in key) |
424 | + |
425 | + |
426 | @cached |
427 | def relation_for_unit(unit=None, rid=None): |
428 | """Get the json represenation of a unit's relation""" |
429 | @@ -478,6 +619,24 @@ def metadata(): |
430 | return yaml.safe_load(md) |
431 | |
432 | |
433 | +def _metadata_unit(unit): |
434 | + """Given the name of a unit (e.g. apache2/0), get the unit charm's |
435 | + metadata.yaml. Very similar to metadata() but allows us to inspect |
436 | + other units. Unit needs to be co-located, such as a subordinate or |
437 | + principal/primary. |
438 | + |
439 | + :returns: metadata.yaml as a python object. |
440 | + |
441 | + """ |
442 | + basedir = os.sep.join(charm_dir().split(os.sep)[:-2]) |
443 | + unitdir = 'unit-{}'.format(unit.replace(os.sep, '-')) |
444 | + joineddir = os.path.join(basedir, unitdir, 'charm', 'metadata.yaml') |
445 | + if not os.path.exists(joineddir): |
446 | + return None |
447 | + with open(joineddir) as md: |
448 | + return yaml.safe_load(md) |
449 | + |
450 | + |
451 | @cached |
452 | def relation_types(): |
453 | """Get a list of relation types supported by this charm""" |
454 | @@ -602,18 +761,31 @@ def is_relation_made(relation, keys='private-address'): |
455 | return False |
456 | |
457 | |
458 | +def _port_op(op_name, port, protocol="TCP"): |
459 | + """Open or close a service network port""" |
460 | + _args = [op_name] |
461 | + icmp = protocol.upper() == "ICMP" |
462 | + if icmp: |
463 | + _args.append(protocol) |
464 | + else: |
465 | + _args.append('{}/{}'.format(port, protocol)) |
466 | + try: |
467 | + subprocess.check_call(_args) |
468 | + except subprocess.CalledProcessError: |
469 | + # Older Juju pre 2.3 doesn't support ICMP |
470 | + # so treat it as a no-op if it fails. |
471 | + if not icmp: |
472 | + raise |
473 | + |
474 | + |
475 | def open_port(port, protocol="TCP"): |
476 | """Open a service network port""" |
477 | - _args = ['open-port'] |
478 | - _args.append('{}/{}'.format(port, protocol)) |
479 | - subprocess.check_call(_args) |
480 | + _port_op('open-port', port, protocol) |
481 | |
482 | |
483 | def close_port(port, protocol="TCP"): |
484 | """Close a service network port""" |
485 | - _args = ['close-port'] |
486 | - _args.append('{}/{}'.format(port, protocol)) |
487 | - subprocess.check_call(_args) |
488 | + _port_op('close-port', port, protocol) |
489 | |
490 | |
491 | def open_ports(start, end, protocol="TCP"): |
492 | @@ -630,6 +802,17 @@ def close_ports(start, end, protocol="TCP"): |
493 | subprocess.check_call(_args) |
494 | |
495 | |
496 | +def opened_ports(): |
497 | + """Get the opened ports |
498 | + |
499 | + *Note that this will only show ports opened in a previous hook* |
500 | + |
501 | + :returns: Opened ports as a list of strings: ``['8080/tcp', '8081-8083/tcp']`` |
502 | + """ |
503 | + _args = ['opened-ports', '--format=json'] |
504 | + return json.loads(subprocess.check_output(_args).decode('UTF-8')) |
505 | + |
506 | + |
507 | @cached |
508 | def unit_get(attribute): |
509 | """Get the unit ID for the remote unit""" |
510 | @@ -751,8 +934,15 @@ class Hooks(object): |
511 | return wrapper |
512 | |
513 | |
514 | +class NoNetworkBinding(Exception): |
515 | + pass |
516 | + |
517 | + |
518 | def charm_dir(): |
519 | """Return the root directory of the current charm""" |
520 | + d = os.environ.get('JUJU_CHARM_DIR') |
521 | + if d is not None: |
522 | + return d |
523 | return os.environ.get('CHARM_DIR') |
524 | |
525 | |
526 | @@ -874,6 +1064,14 @@ def application_version_set(version): |
527 | |
528 | |
529 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) |
530 | +@cached |
531 | +def goal_state(): |
532 | + """Juju goal state values""" |
533 | + cmd = ['goal-state', '--format=json'] |
534 | + return json.loads(subprocess.check_output(cmd).decode('UTF-8')) |
535 | + |
536 | + |
537 | +@translate_exc(from_exc=OSError, to_exc=NotImplementedError) |
538 | def is_leader(): |
539 | """Does the current unit hold the juju leadership |
540 | |
541 | @@ -967,7 +1165,6 @@ def juju_version(): |
542 | universal_newlines=True).strip() |
543 | |
544 | |
545 | -@cached |
546 | def has_juju_version(minimum_version): |
547 | """Return True if the Juju version is at least the provided version""" |
548 | return LooseVersion(juju_version()) >= LooseVersion(minimum_version) |
549 | @@ -1027,6 +1224,8 @@ def _run_atexit(): |
550 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) |
551 | def network_get_primary_address(binding): |
552 | ''' |
553 | + Deprecated since Juju 2.3; use network_get() |
554 | + |
555 | Retrieve the primary network address for a named binding |
556 | |
557 | :param binding: string. The name of a relation of extra-binding |
558 | @@ -1034,7 +1233,41 @@ def network_get_primary_address(binding): |
559 | :raise: NotImplementedError if run on Juju < 2.0 |
560 | ''' |
561 | cmd = ['network-get', '--primary-address', binding] |
562 | - return subprocess.check_output(cmd).decode('UTF-8').strip() |
563 | + try: |
564 | + response = subprocess.check_output( |
565 | + cmd, |
566 | + stderr=subprocess.STDOUT).decode('UTF-8').strip() |
567 | + except CalledProcessError as e: |
568 | + if 'no network config found for binding' in e.output.decode('UTF-8'): |
569 | + raise NoNetworkBinding("No network binding for {}" |
570 | + .format(binding)) |
571 | + else: |
572 | + raise |
573 | + return response |
574 | + |
575 | + |
576 | +def network_get(endpoint, relation_id=None): |
577 | + """ |
578 | + Retrieve the network details for a relation endpoint |
579 | + |
580 | + :param endpoint: string. The name of a relation endpoint |
581 | + :param relation_id: int. The ID of the relation for the current context. |
582 | + :return: dict. The loaded YAML output of the network-get query. |
583 | + :raise: NotImplementedError if request not supported by the Juju version. |
584 | + """ |
585 | + if not has_juju_version('2.2'): |
586 | + raise NotImplementedError(juju_version()) # earlier versions require --primary-address |
587 | + if relation_id and not has_juju_version('2.3'): |
588 | + raise NotImplementedError # 2.3 added the -r option |
589 | + |
590 | + cmd = ['network-get', endpoint, '--format', 'yaml'] |
591 | + if relation_id: |
592 | + cmd.append('-r') |
593 | + cmd.append(relation_id) |
594 | + response = subprocess.check_output( |
595 | + cmd, |
596 | + stderr=subprocess.STDOUT).decode('UTF-8').strip() |
597 | + return yaml.safe_load(response) |
598 | |
599 | |
600 | def add_metric(*args, **kwargs): |
601 | @@ -1066,3 +1299,192 @@ def meter_info(): |
602 | """Get the meter status information, if running in the meter-status-changed |
603 | hook.""" |
604 | return os.environ.get('JUJU_METER_INFO') |
605 | + |
606 | + |
607 | +def iter_units_for_relation_name(relation_name): |
608 | + """Iterate through all units in a relation |
609 | + |
610 | + Generator that iterates through all the units in a relation and yields |
611 | + a named tuple with rid and unit field names. |
612 | + |
613 | + Usage: |
614 | + data = [(u.rid, u.unit) |
615 | + for u in iter_units_for_relation_name(relation_name)] |
616 | + |
617 | + :param relation_name: string relation name |
618 | + :yield: Named Tuple with rid and unit field names |
619 | + """ |
620 | + RelatedUnit = namedtuple('RelatedUnit', 'rid, unit') |
621 | + for rid in relation_ids(relation_name): |
622 | + for unit in related_units(rid): |
623 | + yield RelatedUnit(rid, unit) |
624 | + |
625 | + |
626 | +def ingress_address(rid=None, unit=None): |
627 | + """ |
628 | + Retrieve the ingress-address from a relation when available. |
629 | + Otherwise, return the private-address. |
630 | + |
631 | + When used on the consuming side of the relation (unit is a remote |
632 | + unit), the ingress-address is the IP address that this unit needs |
633 | + to use to reach the provided service on the remote unit. |
634 | + |
635 | + When used on the providing side of the relation (unit == local_unit()), |
636 | + the ingress-address is the IP address that is advertised to remote |
637 | + units on this relation. Remote units need to use this address to |
638 | + reach the local provided service on this unit. |
639 | + |
640 | + Note that charms may document some other method to use in |
641 | + preference to the ingress_address(), such as an address provided |
642 | + on a different relation attribute or a service discovery mechanism. |
643 | + This allows charms to redirect inbound connections to their peers |
644 | + or different applications such as load balancers. |
645 | + |
646 | + Usage: |
647 | + addresses = [ingress_address(rid=u.rid, unit=u.unit) |
648 | + for u in iter_units_for_relation_name(relation_name)] |
649 | + |
650 | + :param rid: string relation id |
651 | + :param unit: string unit name |
652 | + :side effect: calls relation_get |
653 | + :return: string IP address |
654 | + """ |
655 | + settings = relation_get(rid=rid, unit=unit) |
656 | + return (settings.get('ingress-address') or |
657 | + settings.get('private-address')) |
658 | + |
659 | + |
660 | +def egress_subnets(rid=None, unit=None): |
661 | + """ |
662 | + Retrieve the egress-subnets from a relation. |
663 | + |
664 | + This function is to be used on the providing side of the |
665 | + relation, and provides the ranges of addresses that client |
666 | + connections may come from. The result is uninteresting on |
667 | + the consuming side of a relation (unit == local_unit()). |
668 | + |
669 | + Returns a stable list of subnets in CIDR format. |
670 | + eg. ['192.168.1.0/24', '2001::F00F/128'] |
671 | + |
672 | + If egress-subnets is not available, falls back to using the published |
673 | + ingress-address, or finally private-address. |
674 | + |
675 | + :param rid: string relation id |
676 | + :param unit: string unit name |
677 | + :side effect: calls relation_get |
678 | + :return: list of subnets in CIDR format. eg. ['192.168.1.0/24', '2001::F00F/128'] |
679 | + """ |
680 | + def _to_range(addr): |
681 | + if re.search(r'^(?:\d{1,3}\.){3}\d{1,3}$', addr) is not None: |
682 | + addr += '/32' |
683 | + elif ':' in addr and '/' not in addr: # IPv6 |
684 | + addr += '/128' |
685 | + return addr |
686 | + |
687 | + settings = relation_get(rid=rid, unit=unit) |
688 | + if 'egress-subnets' in settings: |
689 | + return [n.strip() for n in settings['egress-subnets'].split(',') if n.strip()] |
690 | + if 'ingress-address' in settings: |
691 | + return [_to_range(settings['ingress-address'])] |
692 | + if 'private-address' in settings: |
693 | + return [_to_range(settings['private-address'])] |
694 | + return [] # Should never happen |
695 | + |
696 | + |
697 | +def unit_doomed(unit=None): |
698 | + """Determines if the unit is being removed from the model |
699 | + |
700 | + Requires Juju 2.4.1. |
701 | + |
702 | + :param unit: string unit name, defaults to local_unit |
703 | + :side effect: calls goal_state |
704 | + :side effect: calls local_unit |
705 | + :side effect: calls has_juju_version |
706 | + :return: True if the unit is being removed, already gone, or never existed |
707 | + """ |
708 | + if not has_juju_version("2.4.1"): |
709 | + # We cannot risk blindly returning False for 'we don't know', |
710 | + # because that could cause data loss; if call sites don't |
711 | + # need an accurate answer, they likely don't need this helper |
712 | + # at all. |
713 | + # goal-state existed in 2.4.0, but did not handle removals |
714 | + # correctly until 2.4.1. |
715 | + raise NotImplementedError("is_doomed") |
716 | + if unit is None: |
717 | + unit = local_unit() |
718 | + gs = goal_state() |
719 | + units = gs.get('units', {}) |
720 | + if unit not in units: |
721 | + return True |
722 | + # I don't think 'dead' units ever show up in the goal-state, but |
723 | + # check anyway in addition to 'dying'. |
724 | + return units[unit]['status'] in ('dying', 'dead') |
725 | + |
726 | + |
727 | +def env_proxy_settings(selected_settings=None): |
728 | + """Get proxy settings from process environment variables. |
729 | + |
730 | + Get charm proxy settings from environment variables that correspond to |
731 | + juju-http-proxy, juju-https-proxy and juju-no-proxy (available as of 2.4.2, |
732 | + see lp:1782236) in a format suitable for passing to an application that |
733 | + reacts to proxy settings passed as environment variables. Some applications |
734 | + support lowercase or uppercase notation (e.g. curl), some support only |
735 | + lowercase (e.g. wget), there are also subjectively rare cases of only |
736 | + uppercase notation support. no_proxy CIDR and wildcard support also varies |
737 | + between runtimes and applications as there is no enforced standard. |
738 | + |
739 | + Some applications may connect to multiple destinations and expose config |
740 | + options that would affect only proxy settings for a specific destination |
741 | + these should be handled in charms in an application-specific manner. |
742 | + |
743 | + :param selected_settings: format only a subset of possible settings |
744 | + :type selected_settings: list |
745 | + :rtype: Option(None, dict[str, str]) |
746 | + """ |
747 | + SUPPORTED_SETTINGS = { |
748 | + 'http': 'HTTP_PROXY', |
749 | + 'https': 'HTTPS_PROXY', |
750 | + 'no_proxy': 'NO_PROXY', |
751 | + 'ftp': 'FTP_PROXY' |
752 | + } |
753 | + if selected_settings is None: |
754 | + selected_settings = SUPPORTED_SETTINGS |
755 | + |
756 | + selected_vars = [v for k, v in SUPPORTED_SETTINGS.items() |
757 | + if k in selected_settings] |
758 | + proxy_settings = {} |
759 | + for var in selected_vars: |
760 | + var_val = os.getenv(var) |
761 | + if var_val: |
762 | + proxy_settings[var] = var_val |
763 | + proxy_settings[var.lower()] = var_val |
764 | + # Now handle juju-prefixed environment variables. The legacy vs new |
765 | + # environment variable usage is mutually exclusive |
766 | + charm_var_val = os.getenv('JUJU_CHARM_{}'.format(var)) |
767 | + if charm_var_val: |
768 | + proxy_settings[var] = charm_var_val |
769 | + proxy_settings[var.lower()] = charm_var_val |
770 | + if 'no_proxy' in proxy_settings: |
771 | + if _contains_range(proxy_settings['no_proxy']): |
772 | + log(RANGE_WARNING, level=WARNING) |
773 | + return proxy_settings if proxy_settings else None |
774 | + |
775 | + |
776 | +def _contains_range(addresses): |
777 | + """Check for cidr or wildcard domain in a string. |
778 | + |
779 | + Given a string comprising a comma separated list of ip addresses
780 | + and domain names, determine whether the string contains IP ranges |
781 | + or wildcard domains. |
782 | + |
783 | + :param addresses: comma separated list of domains and ip addresses.
784 | + :type addresses: str |
785 | + """ |
786 | + return ( |
787 | + # Test for cidr (e.g. 10.20.20.0/24) |
788 | + "/" in addresses or |
789 | + # Test for wildcard domains (*.foo.com or .foo.com) |
790 | + "*" in addresses or |
791 | + addresses.startswith(".") or |
792 | + ",." in addresses or |
793 | + " ." in addresses) |
794 | diff --git a/hooks/charmhelpers/core/host.py b/hooks/charmhelpers/core/host.py |
795 | index b0043cb..32754ff 100644 |
796 | --- a/hooks/charmhelpers/core/host.py |
797 | +++ b/hooks/charmhelpers/core/host.py |
798 | @@ -34,21 +34,23 @@ import six |
799 | |
800 | from contextlib import contextmanager |
801 | from collections import OrderedDict |
802 | -from .hookenv import log |
803 | +from .hookenv import log, INFO, DEBUG, local_unit, charm_name |
804 | from .fstab import Fstab |
805 | from charmhelpers.osplatform import get_platform |
806 | |
807 | __platform__ = get_platform() |
808 | if __platform__ == "ubuntu": |
809 | - from charmhelpers.core.host_factory.ubuntu import ( |
810 | + from charmhelpers.core.host_factory.ubuntu import ( # NOQA:F401 |
811 | service_available, |
812 | add_new_group, |
813 | lsb_release, |
814 | cmp_pkgrevno, |
815 | CompareHostReleases, |
816 | + get_distrib_codename, |
817 | + arch |
818 | ) # flake8: noqa -- ignore F401 for this import |
819 | elif __platform__ == "centos": |
820 | - from charmhelpers.core.host_factory.centos import ( |
821 | + from charmhelpers.core.host_factory.centos import ( # NOQA:F401 |
822 | service_available, |
823 | add_new_group, |
824 | lsb_release, |
825 | @@ -58,6 +60,7 @@ elif __platform__ == "centos": |
826 | |
827 | UPDATEDB_PATH = '/etc/updatedb.conf' |
828 | |
829 | + |
830 | def service_start(service_name, **kwargs): |
831 | """Start a system service. |
832 | |
833 | @@ -287,8 +290,8 @@ def service_running(service_name, **kwargs): |
834 | for key, value in six.iteritems(kwargs): |
835 | parameter = '%s=%s' % (key, value) |
836 | cmd.append(parameter) |
837 | - output = subprocess.check_output(cmd, |
838 | - stderr=subprocess.STDOUT).decode('UTF-8') |
839 | + output = subprocess.check_output( |
840 | + cmd, stderr=subprocess.STDOUT).decode('UTF-8') |
841 | except subprocess.CalledProcessError: |
842 | return False |
843 | else: |
844 | @@ -441,6 +444,51 @@ def add_user_to_group(username, group): |
845 | subprocess.check_call(cmd) |
846 | |
847 | |
848 | +def chage(username, lastday=None, expiredate=None, inactive=None, |
849 | + mindays=None, maxdays=None, root=None, warndays=None): |
850 | + """Change user password expiry information |
851 | + |
852 | + :param str username: User to update |
853 | + :param str lastday: Set when password was changed in YYYY-MM-DD format |
854 | + :param str expiredate: Set when user's account will no longer be |
855 | + accessible in YYYY-MM-DD format. |
856 | + -1 will remove an account expiration date. |
857 | + :param str inactive: Set the number of days of inactivity after a password |
858 | + has expired before the account is locked. |
859 | + -1 will remove an account's inactivity. |
860 | + :param str mindays: Set the minimum number of days between password |
861 | + changes to MIN_DAYS. |
862 | + 0 indicates the password can be changed anytime. |
863 | + :param str maxdays: Set the maximum number of days during which a |
864 | + password is valid. |
865 | + -1 as MAX_DAYS will remove checking maxdays |
866 | + :param str root: Apply changes in the CHROOT_DIR directory |
867 | + :param str warndays: Set the number of days of warning before a password |
868 | + change is required |
869 | + :raises subprocess.CalledProcessError: if call to chage fails |
870 | + """ |
871 | + cmd = ['chage'] |
872 | + if root: |
873 | + cmd.extend(['--root', root]) |
874 | + if lastday: |
875 | + cmd.extend(['--lastday', lastday]) |
876 | + if expiredate: |
877 | + cmd.extend(['--expiredate', expiredate]) |
878 | + if inactive: |
879 | + cmd.extend(['--inactive', inactive]) |
880 | + if mindays: |
881 | + cmd.extend(['--mindays', mindays]) |
882 | + if maxdays: |
883 | + cmd.extend(['--maxdays', maxdays]) |
884 | + if warndays: |
885 | + cmd.extend(['--warndays', warndays]) |
886 | + cmd.append(username) |
887 | + subprocess.check_call(cmd) |
888 | + |
889 | + |
890 | +remove_password_expiry = functools.partial(chage, expiredate='-1', inactive='-1', mindays='0', maxdays='-1') |
891 | + |
892 | + |
893 | def rsync(from_path, to_path, flags='-r', options=None, timeout=None): |
894 | """Replicate the contents of a path""" |
895 | options = options or ['--delete', '--executability'] |
896 | @@ -487,13 +535,45 @@ def mkdir(path, owner='root', group='root', perms=0o555, force=False): |
897 | |
898 | def write_file(path, content, owner='root', group='root', perms=0o444): |
899 | """Create or overwrite a file with the contents of a byte string.""" |
900 | - log("Writing file {} {}:{} {:o}".format(path, owner, group, perms)) |
901 | uid = pwd.getpwnam(owner).pw_uid |
902 | gid = grp.getgrnam(group).gr_gid |
903 | - with open(path, 'wb') as target: |
904 | - os.fchown(target.fileno(), uid, gid) |
905 | - os.fchmod(target.fileno(), perms) |
906 | - target.write(content) |
907 | + # let's see if we can grab the file and compare the content, to avoid doing
908 | + # a write. |
909 | + existing_content = None |
910 | + existing_uid, existing_gid, existing_perms = None, None, None |
911 | + try: |
912 | + with open(path, 'rb') as target: |
913 | + existing_content = target.read() |
914 | + stat = os.stat(path) |
915 | + existing_uid, existing_gid, existing_perms = ( |
916 | + stat.st_uid, stat.st_gid, stat.st_mode |
917 | + ) |
918 | + except Exception: |
919 | + pass |
920 | + if content != existing_content: |
921 | + log("Writing file {} {}:{} {:o}".format(path, owner, group, perms), |
922 | + level=DEBUG) |
923 | + with open(path, 'wb') as target: |
924 | + os.fchown(target.fileno(), uid, gid) |
925 | + os.fchmod(target.fileno(), perms) |
926 | + if six.PY3 and isinstance(content, six.string_types): |
927 | + content = content.encode('UTF-8') |
928 | + target.write(content) |
929 | + return |
930 | + # the contents were the same, but we might still need to change the |
931 | + # ownership or permissions. |
932 | + if existing_uid != uid: |
933 | + log("Changing uid on already existing content: {} -> {}" |
934 | + .format(existing_uid, uid), level=DEBUG) |
935 | + os.chown(path, uid, -1) |
936 | + if existing_gid != gid: |
937 | + log("Changing gid on already existing content: {} -> {}" |
938 | + .format(existing_gid, gid), level=DEBUG) |
939 | + os.chown(path, -1, gid) |
940 | + if existing_perms != perms: |
941 | + log("Changing permissions on existing content: {} -> {}" |
942 | + .format(existing_perms, perms), level=DEBUG) |
943 | + os.chmod(path, perms) |
944 | |
945 | |
946 | def fstab_remove(mp): |
947 | @@ -758,7 +838,7 @@ def list_nics(nic_type=None): |
948 | ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n') |
949 | ip_output = (line.strip() for line in ip_output if line) |
950 | |
951 | - key = re.compile('^[0-9]+:\s+(.+):') |
952 | + key = re.compile(r'^[0-9]+:\s+(.+):') |
953 | for line in ip_output: |
954 | matched = re.search(key, line) |
955 | if matched: |
956 | @@ -903,6 +983,20 @@ def is_container(): |
957 | |
958 | |
959 | def add_to_updatedb_prunepath(path, updatedb_path=UPDATEDB_PATH): |
960 | + """Adds the specified path to the mlocate's updatedb.conf PRUNEPATH list.
961 | + |
962 | + This method has no effect if the path specified by updatedb_path does not |
963 | + exist or is not a file. |
964 | + |
965 | + @param path: string the path to add to the updatedb.conf PRUNEPATHS value |
966 | + @param updatedb_path: the path the updatedb.conf file |
967 | + """ |
968 | + if not os.path.exists(updatedb_path) or os.path.isdir(updatedb_path): |
969 | + # If the updatedb.conf file doesn't exist then don't attempt to update |
970 | + # the file as the package providing mlocate may not be installed on |
971 | + # the local system |
972 | + return |
973 | + |
974 | with open(updatedb_path, 'r+') as f_id: |
975 | updatedb_text = f_id.read() |
976 | output = updatedb(updatedb_text, path) |
977 | @@ -922,3 +1016,62 @@ def updatedb(updatedb_text, new_path): |
978 | lines[i] = 'PRUNEPATHS="{}"'.format(' '.join(paths)) |
979 | output = "\n".join(lines) |
980 | return output |
981 | + |
982 | + |
983 | +def modulo_distribution(modulo=3, wait=30, non_zero_wait=False): |
984 | + """ Modulo distribution |
985 | + |
986 | + This helper uses the unit number, a modulo value and a constant wait time |
987 | + to produce a calculated wait time distribution. This is useful in large |
988 | + scale deployments to distribute load during an expensive operation such as |
989 | + service restarts. |
990 | + |
991 | + If you have 1000 nodes that need to restart 100 at a time 1 minute at a |
992 | + time: |
993 | + |
994 | + time.wait(modulo_distribution(modulo=100, wait=60)) |
995 | + restart() |
996 | + |
997 | + If you need restarts to happen serially set modulo to the exact number of |
998 | + nodes and set a high constant wait time: |
999 | + |
1000 | + time.wait(modulo_distribution(modulo=10, wait=120)) |
1001 | + restart() |
1002 | + |
1003 | + @param modulo: int The modulo number creates the group distribution |
1004 | + @param wait: int The constant time wait value |
1005 | + @param non_zero_wait: boolean Override unit % modulo == 0, |
1006 | + return modulo * wait. Used to avoid collisions with |
1007 | + leader nodes which are often given priority. |
1008 | + @return: int Calculated time to wait for unit operation |
1009 | + """ |
1010 | + unit_number = int(local_unit().split('/')[1]) |
1011 | + calculated_wait_time = (unit_number % modulo) * wait |
1012 | + if non_zero_wait and calculated_wait_time == 0: |
1013 | + return modulo * wait |
1014 | + else: |
1015 | + return calculated_wait_time |
1016 | + |
1017 | + |
1018 | +def install_ca_cert(ca_cert, name=None): |
1019 | + """ |
1020 | + Install the given cert as a trusted CA. |
1021 | + |
1022 | + The ``name`` is the stem of the filename where the cert is written, and if |
1023 | + not provided, it will default to ``juju-{charm_name}``. |
1024 | + |
1025 | + If the cert is empty or None, or is unchanged, nothing is done. |
1026 | + """ |
1027 | + if not ca_cert: |
1028 | + return |
1029 | + if not isinstance(ca_cert, bytes): |
1030 | + ca_cert = ca_cert.encode('utf8') |
1031 | + if not name: |
1032 | + name = 'juju-{}'.format(charm_name()) |
1033 | + cert_file = '/usr/local/share/ca-certificates/{}.crt'.format(name) |
1034 | + new_hash = hashlib.md5(ca_cert).hexdigest() |
1035 | + if file_hash(cert_file) == new_hash: |
1036 | + return |
1037 | + log("Installing new CA cert at: {}".format(cert_file), level=INFO) |
1038 | + write_file(cert_file, ca_cert) |
1039 | + subprocess.check_call(['update-ca-certificates', '--fresh']) |
1040 | diff --git a/hooks/charmhelpers/core/host_factory/ubuntu.py b/hooks/charmhelpers/core/host_factory/ubuntu.py |
1041 | index d8dc378..0ee2b66 100644 |
1042 | --- a/hooks/charmhelpers/core/host_factory/ubuntu.py |
1043 | +++ b/hooks/charmhelpers/core/host_factory/ubuntu.py |
1044 | @@ -1,5 +1,6 @@ |
1045 | import subprocess |
1046 | |
1047 | +from charmhelpers.core.hookenv import cached |
1048 | from charmhelpers.core.strutils import BasicStringComparator |
1049 | |
1050 | |
1051 | @@ -20,6 +21,9 @@ UBUNTU_RELEASES = ( |
1052 | 'yakkety', |
1053 | 'zesty', |
1054 | 'artful', |
1055 | + 'bionic', |
1056 | + 'cosmic', |
1057 | + 'disco', |
1058 | ) |
1059 | |
1060 | |
1061 | @@ -70,6 +74,14 @@ def lsb_release(): |
1062 | return d |
1063 | |
1064 | |
1065 | +def get_distrib_codename(): |
1066 | + """Return the codename of the distribution |
1067 | + :returns: The codename |
1068 | + :rtype: str |
1069 | + """ |
1070 | + return lsb_release()['DISTRIB_CODENAME'].lower() |
1071 | + |
1072 | + |
1073 | def cmp_pkgrevno(package, revno, pkgcache=None): |
1074 | """Compare supplied revno with the revno of the installed package. |
1075 | |
1076 | @@ -87,3 +99,16 @@ def cmp_pkgrevno(package, revno, pkgcache=None): |
1077 | pkgcache = apt_cache() |
1078 | pkg = pkgcache[package] |
1079 | return apt_pkg.version_compare(pkg.current_ver.ver_str, revno) |
1080 | + |
1081 | + |
1082 | +@cached |
1083 | +def arch(): |
1084 | + """Return the package architecture as a string. |
1085 | + |
1086 | + :returns: the architecture |
1087 | + :rtype: str |
1088 | + :raises: subprocess.CalledProcessError if dpkg command fails |
1089 | + """ |
1090 | + return subprocess.check_output( |
1091 | + ['dpkg', '--print-architecture'] |
1092 | + ).rstrip().decode('UTF-8') |
1093 | diff --git a/hooks/charmhelpers/core/kernel.py b/hooks/charmhelpers/core/kernel.py |
1094 | index 2d40452..e01f4f8 100644 |
1095 | --- a/hooks/charmhelpers/core/kernel.py |
1096 | +++ b/hooks/charmhelpers/core/kernel.py |
1097 | @@ -26,12 +26,12 @@ from charmhelpers.core.hookenv import ( |
1098 | |
1099 | __platform__ = get_platform() |
1100 | if __platform__ == "ubuntu": |
1101 | - from charmhelpers.core.kernel_factory.ubuntu import ( |
1102 | + from charmhelpers.core.kernel_factory.ubuntu import ( # NOQA:F401 |
1103 | persistent_modprobe, |
1104 | update_initramfs, |
1105 | ) # flake8: noqa -- ignore F401 for this import |
1106 | elif __platform__ == "centos": |
1107 | - from charmhelpers.core.kernel_factory.centos import ( |
1108 | + from charmhelpers.core.kernel_factory.centos import ( # NOQA:F401 |
1109 | persistent_modprobe, |
1110 | update_initramfs, |
1111 | ) # flake8: noqa -- ignore F401 for this import |
1112 | diff --git a/hooks/charmhelpers/core/services/base.py b/hooks/charmhelpers/core/services/base.py |
1113 | index ca9dc99..179ad4f 100644 |
1114 | --- a/hooks/charmhelpers/core/services/base.py |
1115 | +++ b/hooks/charmhelpers/core/services/base.py |
1116 | @@ -307,23 +307,34 @@ class PortManagerCallback(ManagerCallback): |
1117 | """ |
1118 | def __call__(self, manager, service_name, event_name): |
1119 | service = manager.get_service(service_name) |
1120 | - new_ports = service.get('ports', []) |
1121 | + # turn this generator into a list, |
1122 | + # as we'll be going over it multiple times |
1123 | + new_ports = list(service.get('ports', [])) |
1124 | port_file = os.path.join(hookenv.charm_dir(), '.{}.ports'.format(service_name)) |
1125 | if os.path.exists(port_file): |
1126 | with open(port_file) as fp: |
1127 | old_ports = fp.read().split(',') |
1128 | for old_port in old_ports: |
1129 | - if bool(old_port): |
1130 | - old_port = int(old_port) |
1131 | - if old_port not in new_ports: |
1132 | - hookenv.close_port(old_port) |
1133 | + if bool(old_port) and not self.ports_contains(old_port, new_ports): |
1134 | + hookenv.close_port(old_port) |
1135 | with open(port_file, 'w') as fp: |
1136 | fp.write(','.join(str(port) for port in new_ports)) |
1137 | for port in new_ports: |
1138 | + # A port is either a number or 'ICMP' |
1139 | + protocol = 'TCP' |
1140 | + if str(port).upper() == 'ICMP': |
1141 | + protocol = 'ICMP' |
1142 | if event_name == 'start': |
1143 | - hookenv.open_port(port) |
1144 | + hookenv.open_port(port, protocol) |
1145 | elif event_name == 'stop': |
1146 | - hookenv.close_port(port) |
1147 | + hookenv.close_port(port, protocol) |
1148 | + |
1149 | + def ports_contains(self, port, ports): |
1150 | + if not bool(port): |
1151 | + return False |
1152 | + if str(port).upper() != 'ICMP': |
1153 | + port = int(port) |
1154 | + return port in ports |
1155 | |
1156 | |
1157 | def service_stop(service_name): |
1158 | diff --git a/hooks/charmhelpers/core/strutils.py b/hooks/charmhelpers/core/strutils.py |
1159 | index 685dabd..e8df045 100644 |
1160 | --- a/hooks/charmhelpers/core/strutils.py |
1161 | +++ b/hooks/charmhelpers/core/strutils.py |
1162 | @@ -61,13 +61,19 @@ def bytes_from_string(value): |
1163 | if isinstance(value, six.string_types): |
1164 | value = six.text_type(value) |
1165 | else: |
1166 | - msg = "Unable to interpret non-string value '%s' as boolean" % (value) |
1167 | + msg = "Unable to interpret non-string value '%s' as bytes" % (value) |
1168 | raise ValueError(msg) |
1169 | matches = re.match("([0-9]+)([a-zA-Z]+)", value) |
1170 | - if not matches: |
1171 | - msg = "Unable to interpret string value '%s' as bytes" % (value) |
1172 | - raise ValueError(msg) |
1173 | - return int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)]) |
1174 | + if matches: |
1175 | + size = int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)]) |
1176 | + else: |
1177 | + # Assume that value passed in is bytes |
1178 | + try: |
1179 | + size = int(value) |
1180 | + except ValueError: |
1181 | + msg = "Unable to interpret string value '%s' as bytes" % (value) |
1182 | + raise ValueError(msg) |
1183 | + return size |
1184 | |
1185 | |
1186 | class BasicStringComparator(object): |
1187 | diff --git a/hooks/charmhelpers/core/sysctl.py b/hooks/charmhelpers/core/sysctl.py |
1188 | index 6e413e3..f1f4a28 100644 |
1189 | --- a/hooks/charmhelpers/core/sysctl.py |
1190 | +++ b/hooks/charmhelpers/core/sysctl.py |
1191 | @@ -28,27 +28,38 @@ from charmhelpers.core.hookenv import ( |
1192 | __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>' |
1193 | |
1194 | |
1195 | -def create(sysctl_dict, sysctl_file): |
1196 | +def create(sysctl_dict, sysctl_file, ignore=False): |
1197 | """Creates a sysctl.conf file from a YAML associative array |
1198 | |
1199 | - :param sysctl_dict: a YAML-formatted string of sysctl options eg "{ 'kernel.max_pid': 1337 }" |
1200 | + :param sysctl_dict: a dict or YAML-formatted string of sysctl |
1201 | + options eg "{ 'kernel.max_pid': 1337 }" |
1202 | :type sysctl_dict: str |
1203 | :param sysctl_file: path to the sysctl file to be saved |
1204 | :type sysctl_file: str or unicode |
1205 | + :param ignore: If True, ignore "unknown variable" errors. |
1206 | + :type ignore: bool |
1207 | :returns: None |
1208 | """ |
1209 | - try: |
1210 | - sysctl_dict_parsed = yaml.safe_load(sysctl_dict) |
1211 | - except yaml.YAMLError: |
1212 | - log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict), |
1213 | - level=ERROR) |
1214 | - return |
1215 | + if type(sysctl_dict) is not dict: |
1216 | + try: |
1217 | + sysctl_dict_parsed = yaml.safe_load(sysctl_dict) |
1218 | + except yaml.YAMLError: |
1219 | + log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict), |
1220 | + level=ERROR) |
1221 | + return |
1222 | + else: |
1223 | + sysctl_dict_parsed = sysctl_dict |
1224 | |
1225 | with open(sysctl_file, "w") as fd: |
1226 | for key, value in sysctl_dict_parsed.items(): |
1227 | fd.write("{}={}\n".format(key, value)) |
1228 | |
1229 | - log("Updating sysctl_file: %s values: %s" % (sysctl_file, sysctl_dict_parsed), |
1230 | + log("Updating sysctl_file: {} values: {}".format(sysctl_file, |
1231 | + sysctl_dict_parsed), |
1232 | level=DEBUG) |
1233 | |
1234 | - check_call(["sysctl", "-p", sysctl_file]) |
1235 | + call = ["sysctl", "-p", sysctl_file] |
1236 | + if ignore: |
1237 | + call.append("-e") |
1238 | + |
1239 | + check_call(call) |
1240 | diff --git a/hooks/charmhelpers/core/templating.py b/hooks/charmhelpers/core/templating.py |
1241 | index 7b801a3..9014015 100644 |
1242 | --- a/hooks/charmhelpers/core/templating.py |
1243 | +++ b/hooks/charmhelpers/core/templating.py |
1244 | @@ -20,7 +20,8 @@ from charmhelpers.core import hookenv |
1245 | |
1246 | |
1247 | def render(source, target, context, owner='root', group='root', |
1248 | - perms=0o444, templates_dir=None, encoding='UTF-8', template_loader=None): |
1249 | + perms=0o444, templates_dir=None, encoding='UTF-8', |
1250 | + template_loader=None, config_template=None): |
1251 | """ |
1252 | Render a template. |
1253 | |
1254 | @@ -32,6 +33,9 @@ def render(source, target, context, owner='root', group='root', |
1255 | The context should be a dict containing the values to be replaced in the |
1256 | template. |
1257 | |
1258 | + config_template may be provided to render from a provided template instead |
1259 | + of loading from a file. |
1260 | + |
1261 | The `owner`, `group`, and `perms` options will be passed to `write_file`. |
1262 | |
1263 | If omitted, `templates_dir` defaults to the `templates` folder in the charm. |
1264 | @@ -65,14 +69,19 @@ def render(source, target, context, owner='root', group='root', |
1265 | if templates_dir is None: |
1266 | templates_dir = os.path.join(hookenv.charm_dir(), 'templates') |
1267 | template_env = Environment(loader=FileSystemLoader(templates_dir)) |
1268 | - try: |
1269 | - source = source |
1270 | - template = template_env.get_template(source) |
1271 | - except exceptions.TemplateNotFound as e: |
1272 | - hookenv.log('Could not load template %s from %s.' % |
1273 | - (source, templates_dir), |
1274 | - level=hookenv.ERROR) |
1275 | - raise e |
1276 | + |
1277 | + # load from a string if provided explicitly |
1278 | + if config_template is not None: |
1279 | + template = template_env.from_string(config_template) |
1280 | + else: |
1281 | + try: |
1282 | + source = source |
1283 | + template = template_env.get_template(source) |
1284 | + except exceptions.TemplateNotFound as e: |
1285 | + hookenv.log('Could not load template %s from %s.' % |
1286 | + (source, templates_dir), |
1287 | + level=hookenv.ERROR) |
1288 | + raise e |
1289 | content = template.render(context) |
1290 | if target is not None: |
1291 | target_dir = os.path.dirname(target) |
1292 | diff --git a/hooks/charmhelpers/core/unitdata.py b/hooks/charmhelpers/core/unitdata.py |
1293 | index 54ec969..ab55432 100644 |
1294 | --- a/hooks/charmhelpers/core/unitdata.py |
1295 | +++ b/hooks/charmhelpers/core/unitdata.py |
1296 | @@ -166,6 +166,10 @@ class Storage(object): |
1297 | |
1298 | To support dicts, lists, integer, floats, and booleans values |
1299 | are automatically json encoded/decoded. |
1300 | + |
1301 | + Note: to facilitate unit testing, ':memory:' can be passed as the |
1302 | + path parameter which causes sqlite3 to only build the db in memory. |
1303 | + This should only be used for testing purposes. |
1304 | """ |
1305 | def __init__(self, path=None): |
1306 | self.db_path = path |
1307 | @@ -175,6 +179,9 @@ class Storage(object): |
1308 | else: |
1309 | self.db_path = os.path.join( |
1310 | os.environ.get('CHARM_DIR', ''), '.unit-state.db') |
1311 | + if self.db_path != ':memory:': |
1312 | + with open(self.db_path, 'a') as f: |
1313 | + os.fchmod(f.fileno(), 0o600) |
1314 | self.conn = sqlite3.connect('%s' % self.db_path) |
1315 | self.cursor = self.conn.cursor() |
1316 | self.revision = None |
1317 | @@ -358,7 +365,7 @@ class Storage(object): |
1318 | try: |
1319 | yield self.revision |
1320 | self.revision = None |
1321 | - except: |
1322 | + except Exception: |
1323 | self.flush(False) |
1324 | self.revision = None |
1325 | raise |
1326 | diff --git a/hooks/charmhelpers/fetch/__init__.py b/hooks/charmhelpers/fetch/__init__.py |
1327 | index 480a627..8572d34 100644 |
1328 | --- a/hooks/charmhelpers/fetch/__init__.py |
1329 | +++ b/hooks/charmhelpers/fetch/__init__.py |
1330 | @@ -84,6 +84,7 @@ module = "charmhelpers.fetch.%s" % __platform__ |
1331 | fetch = importlib.import_module(module) |
1332 | |
1333 | filter_installed_packages = fetch.filter_installed_packages |
1334 | +filter_missing_packages = fetch.filter_missing_packages |
1335 | install = fetch.apt_install |
1336 | upgrade = fetch.apt_upgrade |
1337 | update = _fetch_update = fetch.apt_update |
1338 | @@ -96,6 +97,7 @@ if __platform__ == "ubuntu": |
1339 | apt_update = fetch.apt_update |
1340 | apt_upgrade = fetch.apt_upgrade |
1341 | apt_purge = fetch.apt_purge |
1342 | + apt_autoremove = fetch.apt_autoremove |
1343 | apt_mark = fetch.apt_mark |
1344 | apt_hold = fetch.apt_hold |
1345 | apt_unhold = fetch.apt_unhold |
1346 | diff --git a/hooks/charmhelpers/fetch/archiveurl.py b/hooks/charmhelpers/fetch/archiveurl.py |
1347 | index dd24f9e..d25587a 100644 |
1348 | --- a/hooks/charmhelpers/fetch/archiveurl.py |
1349 | +++ b/hooks/charmhelpers/fetch/archiveurl.py |
1350 | @@ -89,7 +89,7 @@ class ArchiveUrlFetchHandler(BaseFetchHandler): |
1351 | :param str source: URL pointing to an archive file. |
1352 | :param str dest: Local path location to download archive file to. |
1353 | """ |
1354 | - # propogate all exceptions |
1355 | + # propagate all exceptions |
1356 | # URLError, OSError, etc |
1357 | proto, netloc, path, params, query, fragment = urlparse(source) |
1358 | if proto in ('http', 'https'): |
1359 | diff --git a/hooks/charmhelpers/fetch/bzrurl.py b/hooks/charmhelpers/fetch/bzrurl.py |
1360 | index 07cd029..c4ab3ff 100644 |
1361 | --- a/hooks/charmhelpers/fetch/bzrurl.py |
1362 | +++ b/hooks/charmhelpers/fetch/bzrurl.py |
1363 | @@ -13,7 +13,7 @@ |
1364 | # limitations under the License. |
1365 | |
1366 | import os |
1367 | -from subprocess import check_call |
1368 | +from subprocess import STDOUT, check_output |
1369 | from charmhelpers.fetch import ( |
1370 | BaseFetchHandler, |
1371 | UnhandledSource, |
1372 | @@ -55,7 +55,7 @@ class BzrUrlFetchHandler(BaseFetchHandler): |
1373 | cmd = ['bzr', 'branch'] |
1374 | cmd += cmd_opts |
1375 | cmd += [source, dest] |
1376 | - check_call(cmd) |
1377 | + check_output(cmd, stderr=STDOUT) |
1378 | |
1379 | def install(self, source, dest=None, revno=None): |
1380 | url_parts = self.parse_url(source) |
1381 | diff --git a/hooks/charmhelpers/fetch/giturl.py b/hooks/charmhelpers/fetch/giturl.py |
1382 | index 4cf21bc..070ca9b 100644 |
1383 | --- a/hooks/charmhelpers/fetch/giturl.py |
1384 | +++ b/hooks/charmhelpers/fetch/giturl.py |
1385 | @@ -13,7 +13,7 @@ |
1386 | # limitations under the License. |
1387 | |
1388 | import os |
1389 | -from subprocess import check_call, CalledProcessError |
1390 | +from subprocess import check_output, CalledProcessError, STDOUT |
1391 | from charmhelpers.fetch import ( |
1392 | BaseFetchHandler, |
1393 | UnhandledSource, |
1394 | @@ -50,7 +50,7 @@ class GitUrlFetchHandler(BaseFetchHandler): |
1395 | cmd = ['git', 'clone', source, dest, '--branch', branch] |
1396 | if depth: |
1397 | cmd.extend(['--depth', depth]) |
1398 | - check_call(cmd) |
1399 | + check_output(cmd, stderr=STDOUT) |
1400 | |
1401 | def install(self, source, branch="master", dest=None, depth=None): |
1402 | url_parts = self.parse_url(source) |
1403 | diff --git a/hooks/charmhelpers/fetch/python/__init__.py b/hooks/charmhelpers/fetch/python/__init__.py |
1404 | new file mode 100644 |
1405 | index 0000000..bff99dc |
1406 | --- /dev/null |
1407 | +++ b/hooks/charmhelpers/fetch/python/__init__.py |
1408 | @@ -0,0 +1,13 @@ |
1409 | +# Copyright 2014-2019 Canonical Limited. |
1410 | +# |
1411 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
1412 | +# you may not use this file except in compliance with the License. |
1413 | +# You may obtain a copy of the License at |
1414 | +# |
1415 | +# http://www.apache.org/licenses/LICENSE-2.0 |
1416 | +# |
1417 | +# Unless required by applicable law or agreed to in writing, software |
1418 | +# distributed under the License is distributed on an "AS IS" BASIS, |
1419 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
1420 | +# See the License for the specific language governing permissions and |
1421 | +# limitations under the License. |
1422 | diff --git a/hooks/charmhelpers/fetch/python/debug.py b/hooks/charmhelpers/fetch/python/debug.py |
1423 | new file mode 100644 |
1424 | index 0000000..757135e |
1425 | --- /dev/null |
1426 | +++ b/hooks/charmhelpers/fetch/python/debug.py |
1427 | @@ -0,0 +1,54 @@ |
1428 | +#!/usr/bin/env python |
1429 | +# coding: utf-8 |
1430 | + |
1431 | +# Copyright 2014-2015 Canonical Limited. |
1432 | +# |
1433 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
1434 | +# you may not use this file except in compliance with the License. |
1435 | +# You may obtain a copy of the License at |
1436 | +# |
1437 | +# http://www.apache.org/licenses/LICENSE-2.0 |
1438 | +# |
1439 | +# Unless required by applicable law or agreed to in writing, software |
1440 | +# distributed under the License is distributed on an "AS IS" BASIS, |
1441 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
1442 | +# See the License for the specific language governing permissions and |
1443 | +# limitations under the License. |
1444 | + |
1445 | +from __future__ import print_function |
1446 | + |
1447 | +import atexit |
1448 | +import sys |
1449 | + |
1450 | +from charmhelpers.fetch.python.rpdb import Rpdb |
1451 | +from charmhelpers.core.hookenv import ( |
1452 | + open_port, |
1453 | + close_port, |
1454 | + ERROR, |
1455 | + log |
1456 | +) |
1457 | + |
1458 | +__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" |
1459 | + |
1460 | +DEFAULT_ADDR = "0.0.0.0" |
1461 | +DEFAULT_PORT = 4444 |
1462 | + |
1463 | + |
1464 | +def _error(message): |
1465 | + log(message, level=ERROR) |
1466 | + |
1467 | + |
1468 | +def set_trace(addr=DEFAULT_ADDR, port=DEFAULT_PORT): |
1469 | + """ |
1470 | + Set a trace point using the remote debugger |
1471 | + """ |
1472 | + atexit.register(close_port, port) |
1473 | + try: |
1474 | + log("Starting a remote python debugger session on %s:%s" % (addr, |
1475 | + port)) |
1476 | + open_port(port) |
1477 | + debugger = Rpdb(addr=addr, port=port) |
1478 | + debugger.set_trace(sys._getframe().f_back) |
1479 | + except Exception: |
1480 | + _error("Cannot start a remote debug session on %s:%s" % (addr, |
1481 | + port)) |
1482 | diff --git a/hooks/charmhelpers/fetch/python/packages.py b/hooks/charmhelpers/fetch/python/packages.py |
1483 | new file mode 100644 |
1484 | index 0000000..6e95028 |
1485 | --- /dev/null |
1486 | +++ b/hooks/charmhelpers/fetch/python/packages.py |
1487 | @@ -0,0 +1,154 @@ |
1488 | +#!/usr/bin/env python |
1489 | +# coding: utf-8 |
1490 | + |
1491 | +# Copyright 2014-2015 Canonical Limited. |
1492 | +# |
1493 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
1494 | +# you may not use this file except in compliance with the License. |
1495 | +# You may obtain a copy of the License at |
1496 | +# |
1497 | +# http://www.apache.org/licenses/LICENSE-2.0 |
1498 | +# |
1499 | +# Unless required by applicable law or agreed to in writing, software |
1500 | +# distributed under the License is distributed on an "AS IS" BASIS, |
1501 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
1502 | +# See the License for the specific language governing permissions and |
1503 | +# limitations under the License. |
1504 | + |
1505 | +import os |
1506 | +import six |
1507 | +import subprocess |
1508 | +import sys |
1509 | + |
1510 | +from charmhelpers.fetch import apt_install, apt_update |
1511 | +from charmhelpers.core.hookenv import charm_dir, log |
1512 | + |
1513 | +__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" |
1514 | + |
1515 | + |
1516 | +def pip_execute(*args, **kwargs): |
1517 | +    """Overridden pip_execute() to stop sys.path being changed. |
1518 | + |
1519 | +    The act of importing main from the pip module seems to add wheels |
1520 | +    from /usr/share/python-wheels which are installed by various tools. |
1521 | + This function ensures that sys.path remains the same after the call is |
1522 | + executed. |
1523 | + """ |
1524 | + try: |
1525 | + _path = sys.path |
1526 | + try: |
1527 | + from pip import main as _pip_execute |
1528 | + except ImportError: |
1529 | + apt_update() |
1530 | + if six.PY2: |
1531 | + apt_install('python-pip') |
1532 | + else: |
1533 | + apt_install('python3-pip') |
1534 | + from pip import main as _pip_execute |
1535 | + _pip_execute(*args, **kwargs) |
1536 | + finally: |
1537 | + sys.path = _path |
1538 | + |
1539 | + |
1540 | +def parse_options(given, available): |
1541 | + """Given a set of options, check if available""" |
1542 | + for key, value in sorted(given.items()): |
1543 | + if not value: |
1544 | + continue |
1545 | + if key in available: |
1546 | + yield "--{0}={1}".format(key, value) |
1547 | + |
1548 | + |
1549 | +def pip_install_requirements(requirements, constraints=None, **options): |
1550 | + """Install a requirements file. |
1551 | + |
1552 | + :param constraints: Path to pip constraints file. |
1553 | + http://pip.readthedocs.org/en/stable/user_guide/#constraints-files |
1554 | + """ |
1555 | + command = ["install"] |
1556 | + |
1557 | + available_options = ('proxy', 'src', 'log', ) |
1558 | + for option in parse_options(options, available_options): |
1559 | + command.append(option) |
1560 | + |
1561 | + command.append("-r {0}".format(requirements)) |
1562 | + if constraints: |
1563 | + command.append("-c {0}".format(constraints)) |
1564 | + log("Installing from file: {} with constraints {} " |
1565 | + "and options: {}".format(requirements, constraints, command)) |
1566 | + else: |
1567 | + log("Installing from file: {} with options: {}".format(requirements, |
1568 | + command)) |
1569 | + pip_execute(command) |
1570 | + |
1571 | + |
1572 | +def pip_install(package, fatal=False, upgrade=False, venv=None, |
1573 | + constraints=None, **options): |
1574 | + """Install a python package""" |
1575 | + if venv: |
1576 | + venv_python = os.path.join(venv, 'bin/pip') |
1577 | + command = [venv_python, "install"] |
1578 | + else: |
1579 | + command = ["install"] |
1580 | + |
1581 | + available_options = ('proxy', 'src', 'log', 'index-url', ) |
1582 | + for option in parse_options(options, available_options): |
1583 | + command.append(option) |
1584 | + |
1585 | + if upgrade: |
1586 | + command.append('--upgrade') |
1587 | + |
1588 | + if constraints: |
1589 | + command.extend(['-c', constraints]) |
1590 | + |
1591 | + if isinstance(package, list): |
1592 | + command.extend(package) |
1593 | + else: |
1594 | + command.append(package) |
1595 | + |
1596 | + log("Installing {} package with options: {}".format(package, |
1597 | + command)) |
1598 | + if venv: |
1599 | + subprocess.check_call(command) |
1600 | + else: |
1601 | + pip_execute(command) |
1602 | + |
1603 | + |
1604 | +def pip_uninstall(package, **options): |
1605 | + """Uninstall a python package""" |
1606 | + command = ["uninstall", "-q", "-y"] |
1607 | + |
1608 | + available_options = ('proxy', 'log', ) |
1609 | + for option in parse_options(options, available_options): |
1610 | + command.append(option) |
1611 | + |
1612 | + if isinstance(package, list): |
1613 | + command.extend(package) |
1614 | + else: |
1615 | + command.append(package) |
1616 | + |
1617 | + log("Uninstalling {} package with options: {}".format(package, |
1618 | + command)) |
1619 | + pip_execute(command) |
1620 | + |
1621 | + |
1622 | +def pip_list(): |
1623 | + """Returns the list of current python installed packages |
1624 | + """ |
1625 | + return pip_execute(["list"]) |
1626 | + |
1627 | + |
1628 | +def pip_create_virtualenv(path=None): |
1629 | + """Create an isolated Python environment.""" |
1630 | + if six.PY2: |
1631 | + apt_install('python-virtualenv') |
1632 | + else: |
1633 | + apt_install('python3-virtualenv') |
1634 | + |
1635 | + if path: |
1636 | + venv_path = path |
1637 | + else: |
1638 | + venv_path = os.path.join(charm_dir(), 'venv') |
1639 | + |
1640 | + if not os.path.exists(venv_path): |
1641 | + subprocess.check_call(['virtualenv', venv_path]) |
1642 | diff --git a/hooks/charmhelpers/fetch/python/rpdb.py b/hooks/charmhelpers/fetch/python/rpdb.py |
1643 | new file mode 100644 |
1644 | index 0000000..9b31610 |
1645 | --- /dev/null |
1646 | +++ b/hooks/charmhelpers/fetch/python/rpdb.py |
1647 | @@ -0,0 +1,56 @@ |
1648 | +# Copyright 2014-2015 Canonical Limited. |
1649 | +# |
1650 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
1651 | +# you may not use this file except in compliance with the License. |
1652 | +# You may obtain a copy of the License at |
1653 | +# |
1654 | +# http://www.apache.org/licenses/LICENSE-2.0 |
1655 | +# |
1656 | +# Unless required by applicable law or agreed to in writing, software |
1657 | +# distributed under the License is distributed on an "AS IS" BASIS, |
1658 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
1659 | +# See the License for the specific language governing permissions and |
1660 | +# limitations under the License. |
1661 | + |
1662 | +"""Remote Python Debugger (pdb wrapper).""" |
1663 | + |
1664 | +import pdb |
1665 | +import socket |
1666 | +import sys |
1667 | + |
1668 | +__author__ = "Bertrand Janin <b@janin.com>" |
1669 | +__version__ = "0.1.3" |
1670 | + |
1671 | + |
1672 | +class Rpdb(pdb.Pdb): |
1673 | + |
1674 | + def __init__(self, addr="127.0.0.1", port=4444): |
1675 | + """Initialize the socket and initialize pdb.""" |
1676 | + |
1677 | + # Backup stdin and stdout before replacing them by the socket handle |
1678 | + self.old_stdout = sys.stdout |
1679 | + self.old_stdin = sys.stdin |
1680 | + |
1681 | + # Open a 'reusable' socket to let the webapp reload on the same port |
1682 | + self.skt = socket.socket(socket.AF_INET, socket.SOCK_STREAM) |
1683 | + self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True) |
1684 | + self.skt.bind((addr, port)) |
1685 | + self.skt.listen(1) |
1686 | + (clientsocket, address) = self.skt.accept() |
1687 | + handle = clientsocket.makefile('rw') |
1688 | + pdb.Pdb.__init__(self, completekey='tab', stdin=handle, stdout=handle) |
1689 | + sys.stdout = sys.stdin = handle |
1690 | + |
1691 | + def shutdown(self): |
1692 | + """Revert stdin and stdout, close the socket.""" |
1693 | + sys.stdout = self.old_stdout |
1694 | + sys.stdin = self.old_stdin |
1695 | + self.skt.close() |
1696 | + self.set_continue() |
1697 | + |
1698 | + def do_continue(self, arg): |
1699 | + """Stop all operation on ``continue``.""" |
1700 | + self.shutdown() |
1701 | + return 1 |
1702 | + |
1703 | + do_EOF = do_quit = do_exit = do_c = do_cont = do_continue |
1704 | diff --git a/hooks/charmhelpers/fetch/python/version.py b/hooks/charmhelpers/fetch/python/version.py |
1705 | new file mode 100644 |
1706 | index 0000000..3eb4210 |
1707 | --- /dev/null |
1708 | +++ b/hooks/charmhelpers/fetch/python/version.py |
1709 | @@ -0,0 +1,32 @@ |
1710 | +#!/usr/bin/env python |
1711 | +# coding: utf-8 |
1712 | + |
1713 | +# Copyright 2014-2015 Canonical Limited. |
1714 | +# |
1715 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
1716 | +# you may not use this file except in compliance with the License. |
1717 | +# You may obtain a copy of the License at |
1718 | +# |
1719 | +# http://www.apache.org/licenses/LICENSE-2.0 |
1720 | +# |
1721 | +# Unless required by applicable law or agreed to in writing, software |
1722 | +# distributed under the License is distributed on an "AS IS" BASIS, |
1723 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
1724 | +# See the License for the specific language governing permissions and |
1725 | +# limitations under the License. |
1726 | + |
1727 | +import sys |
1728 | + |
1729 | +__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" |
1730 | + |
1731 | + |
1732 | +def current_version(): |
1733 | + """Current system python version""" |
1734 | + return sys.version_info |
1735 | + |
1736 | + |
1737 | +def current_version_string(): |
1738 | + """Current system python version as string major.minor.micro""" |
1739 | + return "{0}.{1}.{2}".format(sys.version_info.major, |
1740 | + sys.version_info.minor, |
1741 | + sys.version_info.micro) |
1742 | diff --git a/hooks/charmhelpers/fetch/snap.py b/hooks/charmhelpers/fetch/snap.py |
1743 | index 23c707b..395836c 100644 |
1744 | --- a/hooks/charmhelpers/fetch/snap.py |
1745 | +++ b/hooks/charmhelpers/fetch/snap.py |
1746 | @@ -18,21 +18,33 @@ If writing reactive charms, use the snap layer: |
1747 | https://lists.ubuntu.com/archives/snapcraft/2016-September/001114.html |
1748 | """ |
1749 | import subprocess |
1750 | -from os import environ |
1751 | +import os |
1752 | from time import sleep |
1753 | from charmhelpers.core.hookenv import log |
1754 | |
1755 | __author__ = 'Joseph Borg <joseph.borg@canonical.com>' |
1756 | |
1757 | -SNAP_NO_LOCK = 1 # The return code for "couldn't acquire lock" in Snap (hopefully this will be improved). |
1758 | +# The return code for "couldn't acquire lock" in Snap |
1759 | +# (hopefully this will be improved). |
1760 | +SNAP_NO_LOCK = 1 |
1761 | SNAP_NO_LOCK_RETRY_DELAY = 10 # Wait X seconds between Snap lock checks. |
1762 | SNAP_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times. |
1763 | +SNAP_CHANNELS = [ |
1764 | + 'edge', |
1765 | + 'beta', |
1766 | + 'candidate', |
1767 | + 'stable', |
1768 | +] |
1769 | |
1770 | |
1771 | class CouldNotAcquireLockException(Exception): |
1772 | pass |
1773 | |
1774 | |
1775 | +class InvalidSnapChannel(Exception): |
1776 | + pass |
1777 | + |
1778 | + |
1779 | def _snap_exec(commands): |
1780 | """ |
1781 | Execute snap commands. |
1782 | @@ -47,13 +59,17 @@ def _snap_exec(commands): |
1783 | |
1784 | while return_code is None or return_code == SNAP_NO_LOCK: |
1785 | try: |
1786 | - return_code = subprocess.check_call(['snap'] + commands, env=environ) |
1787 | + return_code = subprocess.check_call(['snap'] + commands, |
1788 | + env=os.environ) |
1789 | except subprocess.CalledProcessError as e: |
1790 | retry_count += + 1 |
1791 | if retry_count > SNAP_NO_LOCK_RETRY_COUNT: |
1792 | - raise CouldNotAcquireLockException('Could not aquire lock after %s attempts' % SNAP_NO_LOCK_RETRY_COUNT) |
1793 | + raise CouldNotAcquireLockException( |
1794 | + 'Could not aquire lock after {} attempts' |
1795 | + .format(SNAP_NO_LOCK_RETRY_COUNT)) |
1796 | return_code = e.returncode |
1797 | - log('Snap failed to acquire lock, trying again in %s seconds.' % SNAP_NO_LOCK_RETRY_DELAY, level='WARN') |
1798 | + log('Snap failed to acquire lock, trying again in {} seconds.' |
1799 | + .format(SNAP_NO_LOCK_RETRY_DELAY, level='WARN')) |
1800 | sleep(SNAP_NO_LOCK_RETRY_DELAY) |
1801 | |
1802 | return return_code |
1803 | @@ -120,3 +136,15 @@ def snap_refresh(packages, *flags): |
1804 | |
1805 | log(message, level='INFO') |
1806 | return _snap_exec(['refresh'] + flags + packages) |
1807 | + |
1808 | + |
1809 | +def valid_snap_channel(channel): |
1810 | + """ Validate snap channel exists |
1811 | + |
1812 | + :raises InvalidSnapChannel: When channel does not exist |
1813 | + :return: Boolean |
1814 | + """ |
1815 | + if channel.lower() in SNAP_CHANNELS: |
1816 | + return True |
1817 | + else: |
1818 | + raise InvalidSnapChannel("Invalid Snap Channel: {}".format(channel)) |
1819 | diff --git a/hooks/charmhelpers/fetch/ubuntu.py b/hooks/charmhelpers/fetch/ubuntu.py |
1820 | index 57b5fb6..24c76e3 100644 |
1821 | --- a/hooks/charmhelpers/fetch/ubuntu.py |
1822 | +++ b/hooks/charmhelpers/fetch/ubuntu.py |
1823 | @@ -19,14 +19,14 @@ import re |
1824 | import six |
1825 | import time |
1826 | import subprocess |
1827 | -from tempfile import NamedTemporaryFile |
1828 | |
1829 | -from charmhelpers.core.host import ( |
1830 | - lsb_release |
1831 | -) |
1832 | +from charmhelpers.core.host import get_distrib_codename |
1833 | + |
1834 | from charmhelpers.core.hookenv import ( |
1835 | log, |
1836 | DEBUG, |
1837 | + WARNING, |
1838 | + env_proxy_settings, |
1839 | ) |
1840 | from charmhelpers.fetch import SourceConfigError, GPGKeyError |
1841 | |
1842 | @@ -43,6 +43,7 @@ ARCH_TO_PROPOSED_POCKET = { |
1843 | 'x86_64': PROPOSED_POCKET, |
1844 | 'ppc64le': PROPOSED_PORTS_POCKET, |
1845 | 'aarch64': PROPOSED_PORTS_POCKET, |
1846 | + 's390x': PROPOSED_PORTS_POCKET, |
1847 | } |
1848 | CLOUD_ARCHIVE_URL = "http://ubuntu-cloud.archive.canonical.com/ubuntu" |
1849 | CLOUD_ARCHIVE_KEY_ID = '5EDB1B62EC4926EA' |
1850 | @@ -139,7 +140,7 @@ CLOUD_ARCHIVE_POCKETS = { |
1851 | 'xenial-updates/ocata': 'xenial-updates/ocata', |
1852 | 'ocata/proposed': 'xenial-proposed/ocata', |
1853 | 'xenial-ocata/proposed': 'xenial-proposed/ocata', |
1854 | - 'xenial-ocata/newton': 'xenial-proposed/ocata', |
1855 | + 'xenial-proposed/ocata': 'xenial-proposed/ocata', |
1856 | # Pike |
1857 | 'pike': 'xenial-updates/pike', |
1858 | 'xenial-pike': 'xenial-updates/pike', |
1859 | @@ -147,7 +148,7 @@ CLOUD_ARCHIVE_POCKETS = { |
1860 | 'xenial-updates/pike': 'xenial-updates/pike', |
1861 | 'pike/proposed': 'xenial-proposed/pike', |
1862 | 'xenial-pike/proposed': 'xenial-proposed/pike', |
1863 | - 'xenial-pike/newton': 'xenial-proposed/pike', |
1864 | + 'xenial-proposed/pike': 'xenial-proposed/pike', |
1865 | # Queens |
1866 | 'queens': 'xenial-updates/queens', |
1867 | 'xenial-queens': 'xenial-updates/queens', |
1868 | @@ -155,13 +156,37 @@ CLOUD_ARCHIVE_POCKETS = { |
1869 | 'xenial-updates/queens': 'xenial-updates/queens', |
1870 | 'queens/proposed': 'xenial-proposed/queens', |
1871 | 'xenial-queens/proposed': 'xenial-proposed/queens', |
1872 | - 'xenial-queens/newton': 'xenial-proposed/queens', |
1873 | + 'xenial-proposed/queens': 'xenial-proposed/queens', |
1874 | + # Rocky |
1875 | + 'rocky': 'bionic-updates/rocky', |
1876 | + 'bionic-rocky': 'bionic-updates/rocky', |
1877 | + 'bionic-rocky/updates': 'bionic-updates/rocky', |
1878 | + 'bionic-updates/rocky': 'bionic-updates/rocky', |
1879 | + 'rocky/proposed': 'bionic-proposed/rocky', |
1880 | + 'bionic-rocky/proposed': 'bionic-proposed/rocky', |
1881 | + 'bionic-proposed/rocky': 'bionic-proposed/rocky', |
1882 | + # Stein |
1883 | + 'stein': 'bionic-updates/stein', |
1884 | + 'bionic-stein': 'bionic-updates/stein', |
1885 | + 'bionic-stein/updates': 'bionic-updates/stein', |
1886 | + 'bionic-updates/stein': 'bionic-updates/stein', |
1887 | + 'stein/proposed': 'bionic-proposed/stein', |
1888 | + 'bionic-stein/proposed': 'bionic-proposed/stein', |
1889 | + 'bionic-proposed/stein': 'bionic-proposed/stein', |
1890 | + # Train |
1891 | + 'train': 'bionic-updates/train', |
1892 | + 'bionic-train': 'bionic-updates/train', |
1893 | + 'bionic-train/updates': 'bionic-updates/train', |
1894 | + 'bionic-updates/train': 'bionic-updates/train', |
1895 | + 'train/proposed': 'bionic-proposed/train', |
1896 | + 'bionic-train/proposed': 'bionic-proposed/train', |
1897 | + 'bionic-proposed/train': 'bionic-proposed/train', |
1898 | } |
1899 | |
1900 | |
1901 | APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT. |
1902 | CMD_RETRY_DELAY = 10 # Wait 10 seconds between command retries. |
1903 | -CMD_RETRY_COUNT = 30 # Retry a failing fatal command X times. |
1904 | +CMD_RETRY_COUNT = 3 # Retry a failing fatal command X times. |
1905 | |
1906 | |
1907 | def filter_installed_packages(packages): |
1908 | @@ -179,6 +204,18 @@ def filter_installed_packages(packages): |
1909 | return _pkgs |
1910 | |
1911 | |
1912 | +def filter_missing_packages(packages): |
1913 | + """Return a list of packages that are installed. |
1914 | + |
1915 | + :param packages: list of packages to evaluate. |
1916 | + :returns list: Packages that are installed. |
1917 | + """ |
1918 | + return list( |
1919 | + set(packages) - |
1920 | + set(filter_installed_packages(packages)) |
1921 | + ) |
1922 | + |
1923 | + |
1924 | def apt_cache(in_memory=True, progress=None): |
1925 | """Build and return an apt cache.""" |
1926 | from apt import apt_pkg |
1927 | @@ -238,6 +275,14 @@ def apt_purge(packages, fatal=False): |
1928 | _run_apt_command(cmd, fatal) |
1929 | |
1930 | |
1931 | +def apt_autoremove(purge=True, fatal=False): |
1932 | + """Purge one or more packages.""" |
1933 | +    """Remove packages that were automatically installed and are no longer required.""" |
1934 | + if purge: |
1935 | + cmd.append('--purge') |
1936 | + _run_apt_command(cmd, fatal) |
1937 | + |
1938 | + |
1939 | def apt_mark(packages, mark, fatal=False): |
1940 | """Flag one or more packages using apt-mark.""" |
1941 | log("Marking {} as {}".format(packages, mark)) |
1942 | @@ -261,42 +306,156 @@ def apt_unhold(packages, fatal=False): |
1943 | return apt_mark(packages, 'unhold', fatal=fatal) |
1944 | |
1945 | |
1946 | -def import_key(keyid): |
1947 | - """Import a key in either ASCII Armor or Radix64 format. |
1948 | +def import_key(key): |
1949 | + """Import an ASCII Armor key. |
1950 | |
1951 | - `keyid` is either the keyid to fetch from a PGP server, or |
1952 | - the key in ASCII armor foramt. |
1953 | + A Radix64 format keyid is also supported for backwards |
1954 | + compatibility. In this case Ubuntu keyserver will be |
1955 | + queried for a key via HTTPS by its keyid. This method |
1956 | +    is less preferable because https proxy servers may |
1957 | + require traffic decryption which is equivalent to a |
1958 | + man-in-the-middle attack (a proxy server impersonates |
1959 | + keyserver TLS certificates and has to be explicitly |
1960 | + trusted by the system). |
1961 | |
1962 | - :param keyid: String of key (or key id). |
1963 | + :param key: A GPG key in ASCII armor format, |
1964 | + including BEGIN and END markers or a keyid. |
1965 | + :type key: (bytes, str) |
1966 | :raises: GPGKeyError if the key could not be imported |
1967 | """ |
1968 | - key = keyid.strip() |
1969 | - if (key.startswith('-----BEGIN PGP PUBLIC KEY BLOCK-----') and |
1970 | - key.endswith('-----END PGP PUBLIC KEY BLOCK-----')): |
1971 | + key = key.strip() |
1972 | + if '-' in key or '\n' in key: |
1973 | + # Send everything not obviously a keyid to GPG to import, as |
1974 | + # we trust its validation better than our own. eg. handling |
1975 | + # comments before the key. |
1976 | log("PGP key found (looks like ASCII Armor format)", level=DEBUG) |
1977 | - log("Importing ASCII Armor PGP key", level=DEBUG) |
1978 | - with NamedTemporaryFile() as keyfile: |
1979 | - with open(keyfile.name, 'w') as fd: |
1980 | - fd.write(key) |
1981 | - fd.write("\n") |
1982 | - cmd = ['apt-key', 'add', keyfile.name] |
1983 | - try: |
1984 | - subprocess.check_call(cmd) |
1985 | - except subprocess.CalledProcessError: |
1986 | - error = "Error importing PGP key '{}'".format(key) |
1987 | - log(error) |
1988 | - raise GPGKeyError(error) |
1989 | + if ('-----BEGIN PGP PUBLIC KEY BLOCK-----' in key and |
1990 | + '-----END PGP PUBLIC KEY BLOCK-----' in key): |
1991 | + log("Writing provided PGP key in the binary format", level=DEBUG) |
1992 | + if six.PY3: |
1993 | + key_bytes = key.encode('utf-8') |
1994 | + else: |
1995 | + key_bytes = key |
1996 | + key_name = _get_keyid_by_gpg_key(key_bytes) |
1997 | + key_gpg = _dearmor_gpg_key(key_bytes) |
1998 | + _write_apt_gpg_keyfile(key_name=key_name, key_material=key_gpg) |
1999 | + else: |
2000 | + raise GPGKeyError("ASCII armor markers missing from GPG key") |
2001 | else: |
2002 | - log("PGP key found (looks like Radix64 format)", level=DEBUG) |
2003 | - log("Importing PGP key from keyserver", level=DEBUG) |
2004 | - cmd = ['apt-key', 'adv', '--keyserver', |
2005 | - 'hkp://keyserver.ubuntu.com:80', '--recv-keys', key] |
2006 | - try: |
2007 | - subprocess.check_call(cmd) |
2008 | - except subprocess.CalledProcessError: |
2009 | - error = "Error importing PGP key '{}'".format(key) |
2010 | - log(error) |
2011 | - raise GPGKeyError(error) |
2012 | + log("PGP key found (looks like Radix64 format)", level=WARNING) |
2013 | + log("SECURELY importing PGP key from keyserver; " |
2014 | + "full key not provided.", level=WARNING) |
2015 | + # as of bionic add-apt-repository uses curl with an HTTPS keyserver URL |
2016 | + # to retrieve GPG keys. `apt-key adv` command is deprecated as is |
2017 | + # apt-key in general as noted in its manpage. See lp:1433761 for more |
2018 | + # history. Instead, /etc/apt/trusted.gpg.d is used directly to drop |
2019 | + # gpg |
2020 | + key_asc = _get_key_by_keyid(key) |
2021 | + # write the key in GPG format so that apt-key list shows it |
2022 | + key_gpg = _dearmor_gpg_key(key_asc) |
2023 | + _write_apt_gpg_keyfile(key_name=key, key_material=key_gpg) |
2024 | + |
2025 | + |
2026 | +def _get_keyid_by_gpg_key(key_material): |
2027 | + """Get a GPG key fingerprint by GPG key material. |
2028 | + Gets a GPG key fingerprint (40-digit, 160-bit) by the ASCII armor-encoded |
2029 | + or binary GPG key material. Can be used, for example, to generate file |
2030 | + names for keys passed via charm options. |
2031 | + |
2032 | + :param key_material: ASCII armor-encoded or binary GPG key material |
2033 | + :type key_material: bytes |
2034 | + :raises: GPGKeyError if invalid key material has been provided |
2035 | + :returns: A GPG key fingerprint |
2036 | + :rtype: str |
2037 | + """ |
2038 | + # Use the same gpg command for both Xenial and Bionic |
2039 | + cmd = 'gpg --with-colons --with-fingerprint' |
2040 | + ps = subprocess.Popen(cmd.split(), |
2041 | + stdout=subprocess.PIPE, |
2042 | + stderr=subprocess.PIPE, |
2043 | + stdin=subprocess.PIPE) |
2044 | + out, err = ps.communicate(input=key_material) |
2045 | + if six.PY3: |
2046 | + out = out.decode('utf-8') |
2047 | + err = err.decode('utf-8') |
2048 | + if 'gpg: no valid OpenPGP data found.' in err: |
2049 | + raise GPGKeyError('Invalid GPG key material provided') |
2050 | + # from gnupg2 docs: fpr :: Fingerprint (fingerprint is in field 10) |
2051 | + return re.search(r"^fpr:{9}([0-9A-F]{40}):$", out, re.MULTILINE).group(1) |
2052 | + |
2053 | + |
2054 | +def _get_key_by_keyid(keyid): |
2055 | + """Get a key via HTTPS from the Ubuntu keyserver. |
2056 | + Different key ID formats are supported by SKS keyservers (the longer ones |
2057 | + are more secure, see "dead beef attack" and https://evil32.com/). Since |
2058 | + HTTPS is used, if SSLBump-like HTTPS proxies are in place, they will |
2059 | + impersonate keyserver.ubuntu.com and generate a certificate with |
2060 | + keyserver.ubuntu.com in the CN field or in SubjAltName fields of a |
2061 | + certificate. If such proxy behavior is expected it is necessary to add the |
2062 | + CA certificate chain containing the intermediate CA of the SSLBump proxy to |
2063 | + every machine that this code runs on via ca-certs cloud-init directive (via |
2064 | + cloudinit-userdata model-config) or via other means (such as through a |
2065 | + custom charm option). Also note that DNS resolution for the hostname in a |
2066 | + URL is done at a proxy server - not at the client side. |
2067 | + |
2068 | + 8-digit (32 bit) key ID |
2069 | + https://keyserver.ubuntu.com/pks/lookup?search=0x4652B4E6 |
2070 | + 16-digit (64 bit) key ID |
2071 | + https://keyserver.ubuntu.com/pks/lookup?search=0x6E85A86E4652B4E6 |
2072 | + 40-digit key ID: |
2073 | + https://keyserver.ubuntu.com/pks/lookup?search=0x35F77D63B5CEC106C577ED856E85A86E4652B4E6 |
2074 | + |
2075 | + :param keyid: An 8, 16 or 40 hex digit keyid to find a key for |
2076 | + :type keyid: (bytes, str) |
2077 | + :returns: A key material for the specified GPG key id |
2078 | + :rtype: (str, bytes) |
2079 | + :raises: subprocess.CalledProcessError |
2080 | + """ |
2081 | + # options=mr - machine-readable output (disables html wrappers) |
2082 | + keyserver_url = ('https://keyserver.ubuntu.com' |
2083 | + '/pks/lookup?op=get&options=mr&exact=on&search=0x{}') |
2084 | + curl_cmd = ['curl', keyserver_url.format(keyid)] |
2085 | + # use proxy server settings in order to retrieve the key |
2086 | + return subprocess.check_output(curl_cmd, |
2087 | + env=env_proxy_settings(['https'])) |
2088 | + |
2089 | + |
2090 | +def _dearmor_gpg_key(key_asc): |
2091 | + """Converts a GPG key in the ASCII armor format to the binary format. |
2092 | + |
2093 | + :param key_asc: A GPG key in ASCII armor format. |
2094 | + :type key_asc: (str, bytes) |
2095 | + :returns: A GPG key in binary format |
2096 | + :rtype: (str, bytes) |
2097 | + :raises: GPGKeyError |
2098 | + """ |
2099 | + ps = subprocess.Popen(['gpg', '--dearmor'], |
2100 | + stdout=subprocess.PIPE, |
2101 | + stderr=subprocess.PIPE, |
2102 | + stdin=subprocess.PIPE) |
2103 | + out, err = ps.communicate(input=key_asc) |
2104 | + # no need to decode output as it is binary (invalid utf-8), only error |
2105 | + if six.PY3: |
2106 | + err = err.decode('utf-8') |
2107 | + if 'gpg: no valid OpenPGP data found.' in err: |
2108 | + raise GPGKeyError('Invalid GPG key material. Check your network setup' |
2109 | + ' (MTU, routing, DNS) and/or proxy server settings' |
2110 | + ' as well as destination keyserver status.') |
2111 | + else: |
2112 | + return out |
2113 | + |
2114 | + |
2115 | +def _write_apt_gpg_keyfile(key_name, key_material): |
2116 | + """Writes GPG key material into a file at a provided path. |
2117 | + |
2118 | + :param key_name: A key name to use for a key file (could be a fingerprint) |
2119 | + :type key_name: str |
2120 | + :param key_material: A GPG key material (binary) |
2121 | + :type key_material: (str, bytes) |
2122 | + """ |
2123 | + with open('/etc/apt/trusted.gpg.d/{}.gpg'.format(key_name), |
2124 | + 'wb') as keyf: |
2125 | + keyf.write(key_material) |
2126 | |
2127 | |
2128 | def add_source(source, key=None, fail_invalid=False): |
2129 | @@ -364,20 +523,23 @@ def add_source(source, key=None, fail_invalid=False): |
2130 | (r"^cloud:(.*)-(.*)\/staging$", _add_cloud_staging), |
2131 | (r"^cloud:(.*)-(.*)$", _add_cloud_distro_check), |
2132 | (r"^cloud:(.*)$", _add_cloud_pocket), |
2133 | + (r"^snap:.*-(.*)-(.*)$", _add_cloud_distro_check), |
2134 | ]) |
2135 | if source is None: |
2136 | source = '' |
2137 | for r, fn in six.iteritems(_mapping): |
2138 | m = re.match(r, source) |
2139 | if m: |
2140 | - # call the assoicated function with the captured groups |
2141 | - # raises SourceConfigError on error. |
2142 | - fn(*m.groups()) |
2143 | if key: |
2144 | + # Import key before adding the source which depends on it, |
2145 | + # as refreshing packages could fail otherwise. |
2146 | try: |
2147 | import_key(key) |
2148 | except GPGKeyError as e: |
2149 | raise SourceConfigError(str(e)) |
2150 | + # call the associated function with the captured groups |
2151 | + # raises SourceConfigError on error. |
2152 | + fn(*m.groups()) |
2153 | break |
2154 | else: |
2155 | # nothing matched. log an error and maybe sys.exit |
2156 | @@ -390,13 +552,13 @@ def add_source(source, key=None, fail_invalid=False): |
2157 | def _add_proposed(): |
2158 | """Add the PROPOSED_POCKET as /etc/apt/source.list.d/proposed.list |
2159 | |
2160 | - Uses lsb_release()['DISTRIB_CODENAME'] to determine the correct staza for |
2161 | + Uses get_distrib_codename to determine the correct stanza for |
2162 | the deb line. |
2163 | |
2164 | For intel architecutres PROPOSED_POCKET is used for the release, but for |
2165 | other architectures PROPOSED_PORTS_POCKET is used for the release. |
2166 | """ |
2167 | - release = lsb_release()['DISTRIB_CODENAME'] |
2168 | + release = get_distrib_codename() |
2169 | arch = platform.machine() |
2170 | if arch not in six.iterkeys(ARCH_TO_PROPOSED_POCKET): |
2171 | raise SourceConfigError("Arch {} not supported for (distro-)proposed" |
2172 | @@ -409,8 +571,16 @@ def _add_apt_repository(spec): |
2173 | """Add the spec using add_apt_repository |
2174 | |
2175 | :param spec: the parameter to pass to add_apt_repository |
2176 | + :type spec: str |
2177 | """ |
2178 | - _run_with_retries(['add-apt-repository', '--yes', spec]) |
2179 | + if '{series}' in spec: |
2180 | + series = get_distrib_codename() |
2181 | + spec = spec.replace('{series}', series) |
2182 | + # software-properties package for bionic properly reacts to proxy settings |
2183 | +    # passed as environment variables (See lp:1433761). This is not the case |
2184 | +    # for LTS and non-LTS releases below bionic. |
2185 | + _run_with_retries(['add-apt-repository', '--yes', spec], |
2186 | + cmd_env=env_proxy_settings(['https'])) |
2187 | |
2188 | |
2189 | def _add_cloud_pocket(pocket): |
2190 | @@ -479,7 +649,7 @@ def _verify_is_ubuntu_rel(release, os_release): |
2191 | :raises: SourceConfigError if the release is not the same as the ubuntu |
2192 | release. |
2193 | """ |
2194 | - ubuntu_rel = lsb_release()['DISTRIB_CODENAME'] |
2195 | + ubuntu_rel = get_distrib_codename() |
2196 | if release != ubuntu_rel: |
2197 | raise SourceConfigError( |
2198 | 'Invalid Cloud Archive release specified: {}-{} on this Ubuntu' |
2199 | @@ -557,7 +727,7 @@ def get_upstream_version(package): |
2200 | cache = apt_cache() |
2201 | try: |
2202 | pkg = cache[package] |
2203 | - except: |
2204 | + except Exception: |
2205 | # the package is unknown to the current apt cache. |
2206 | return None |
2207 |
This merge proposal is being monitored by mergebot. Change the status to Approved to merge.