Merge ~hloeung/container-log-archive-charm:charm-helpers-sync into container-log-archive-charm:master
- Git
- lp:~hloeung/container-log-archive-charm
- charm-helpers-sync
- Merge into master
Proposed by
Haw Loeung
Status: | Merged |
---|---|
Approved by: | Paul Collins |
Approved revision: | b8906d4b6ad560aecfc09283be83f55faa857af8 |
Merged at revision: | 1a17c428de43b0bc96741bbd35a4ab91f486bdc2 |
Proposed branch: | ~hloeung/container-log-archive-charm:charm-helpers-sync |
Merge into: | container-log-archive-charm:master |
Diff against target: |
2158 lines (+1396/-133) 19 files modified
lib/charmhelpers/__init__.py (+4/-4) lib/charmhelpers/core/hookenv.py (+334/-20) lib/charmhelpers/core/host.py (+87/-11) lib/charmhelpers/core/host_factory/ubuntu.py (+27/-1) lib/charmhelpers/core/kernel.py (+2/-2) lib/charmhelpers/core/sysctl.py (+21/-4) lib/charmhelpers/fetch/__init__.py (+4/-0) lib/charmhelpers/fetch/archiveurl.py (+1/-1) lib/charmhelpers/fetch/bzrurl.py (+2/-2) lib/charmhelpers/fetch/giturl.py (+2/-2) lib/charmhelpers/fetch/python/__init__.py (+13/-0) lib/charmhelpers/fetch/python/debug.py (+54/-0) lib/charmhelpers/fetch/python/packages.py (+154/-0) lib/charmhelpers/fetch/python/rpdb.py (+56/-0) lib/charmhelpers/fetch/python/version.py (+32/-0) lib/charmhelpers/fetch/snap.py (+1/-1) lib/charmhelpers/fetch/ubuntu.py (+311/-82) lib/charmhelpers/fetch/ubuntu_apt_pkg.py (+267/-0) lib/charmhelpers/osplatform.py (+24/-3) |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Paul Collins | Approve | ||
Canonical IS Reviewers | Pending | ||
Canonical IS Reviewers | Pending | ||
Review via email: mp+387401@code.launchpad.net |
This proposal supersedes a proposal from 2020-07-14.
Commit message
charm-helpers sync
Description of the change
To post a comment you must log in.
Revision history for this message
🤖 Canonical IS Merge Bot (canonical-is-mergebot) wrote : Posted in a previous version of this proposal | # |
Revision history for this message
🤖 Canonical IS Merge Bot (canonical-is-mergebot) wrote : | # |
This merge proposal is being monitored by mergebot. Change the status to Approved to merge.
Revision history for this message
Paul Collins (pjdc) wrote : | # |
I would have just trivialled this.
review:
Approve
Revision history for this message
🤖 Canonical IS Merge Bot (canonical-is-mergebot) wrote : | # |
Change successfully merged at revision 1a17c428de43b0bc96741bbd35a4ab91f486bdc2
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | diff --git a/lib/charmhelpers/__init__.py b/lib/charmhelpers/__init__.py |
2 | index e7aa471..61ef907 100644 |
3 | --- a/lib/charmhelpers/__init__.py |
4 | +++ b/lib/charmhelpers/__init__.py |
5 | @@ -23,22 +23,22 @@ import subprocess |
6 | import sys |
7 | |
8 | try: |
9 | - import six # flake8: noqa |
10 | + import six # NOQA:F401 |
11 | except ImportError: |
12 | if sys.version_info.major == 2: |
13 | subprocess.check_call(['apt-get', 'install', '-y', 'python-six']) |
14 | else: |
15 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-six']) |
16 | - import six # flake8: noqa |
17 | + import six # NOQA:F401 |
18 | |
19 | try: |
20 | - import yaml # flake8: noqa |
21 | + import yaml # NOQA:F401 |
22 | except ImportError: |
23 | if sys.version_info.major == 2: |
24 | subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml']) |
25 | else: |
26 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml']) |
27 | - import yaml # flake8: noqa |
28 | + import yaml # NOQA:F401 |
29 | |
30 | |
31 | # Holds a list of mapping of mangled function names that have been deprecated |
32 | diff --git a/lib/charmhelpers/core/hookenv.py b/lib/charmhelpers/core/hookenv.py |
33 | index ed7af39..db7ce72 100644 |
34 | --- a/lib/charmhelpers/core/hookenv.py |
35 | +++ b/lib/charmhelpers/core/hookenv.py |
36 | @@ -21,6 +21,7 @@ |
37 | from __future__ import print_function |
38 | import copy |
39 | from distutils.version import LooseVersion |
40 | +from enum import Enum |
41 | from functools import wraps |
42 | from collections import namedtuple |
43 | import glob |
44 | @@ -34,6 +35,8 @@ import errno |
45 | import tempfile |
46 | from subprocess import CalledProcessError |
47 | |
48 | +from charmhelpers import deprecate |
49 | + |
50 | import six |
51 | if not six.PY3: |
52 | from UserDict import UserDict |
53 | @@ -48,6 +51,20 @@ INFO = "INFO" |
54 | DEBUG = "DEBUG" |
55 | TRACE = "TRACE" |
56 | MARKER = object() |
57 | +SH_MAX_ARG = 131071 |
58 | + |
59 | + |
60 | +RANGE_WARNING = ('Passing NO_PROXY string that includes a cidr. ' |
61 | + 'This may not be compatible with software you are ' |
62 | + 'running in your shell.') |
63 | + |
64 | + |
65 | +class WORKLOAD_STATES(Enum): |
66 | + ACTIVE = 'active' |
67 | + BLOCKED = 'blocked' |
68 | + MAINTENANCE = 'maintenance' |
69 | + WAITING = 'waiting' |
70 | + |
71 | |
72 | cache = {} |
73 | |
74 | @@ -98,7 +115,7 @@ def log(message, level=None): |
75 | command += ['-l', level] |
76 | if not isinstance(message, six.string_types): |
77 | message = repr(message) |
78 | - command += [message] |
79 | + command += [message[:SH_MAX_ARG]] |
80 | # Missing juju-log should not cause failures in unit tests |
81 | # Send log output to stderr |
82 | try: |
83 | @@ -113,6 +130,24 @@ def log(message, level=None): |
84 | raise |
85 | |
86 | |
87 | +def function_log(message): |
88 | + """Write a function progress message""" |
89 | + command = ['function-log'] |
90 | + if not isinstance(message, six.string_types): |
91 | + message = repr(message) |
92 | + command += [message[:SH_MAX_ARG]] |
93 | + # Missing function-log should not cause failures in unit tests |
94 | + # Send function_log output to stderr |
95 | + try: |
96 | + subprocess.call(command) |
97 | + except OSError as e: |
98 | + if e.errno == errno.ENOENT: |
99 | + message = "function-log: {}".format(message) |
100 | + print(message, file=sys.stderr) |
101 | + else: |
102 | + raise |
103 | + |
104 | + |
105 | class Serializable(UserDict): |
106 | """Wrapper, an object that can be serialized to yaml or json""" |
107 | |
108 | @@ -201,11 +236,35 @@ def remote_unit(): |
109 | return os.environ.get('JUJU_REMOTE_UNIT', None) |
110 | |
111 | |
112 | -def service_name(): |
113 | - """The name service group this unit belongs to""" |
114 | +def application_name(): |
115 | + """ |
116 | + The name of the deployed application this unit belongs to. |
117 | + """ |
118 | return local_unit().split('/')[0] |
119 | |
120 | |
121 | +def service_name(): |
122 | + """ |
123 | + .. deprecated:: 0.19.1 |
124 | + Alias for :func:`application_name`. |
125 | + """ |
126 | + return application_name() |
127 | + |
128 | + |
129 | +def model_name(): |
130 | + """ |
131 | + Name of the model that this unit is deployed in. |
132 | + """ |
133 | + return os.environ['JUJU_MODEL_NAME'] |
134 | + |
135 | + |
136 | +def model_uuid(): |
137 | + """ |
138 | + UUID of the model that this unit is deployed in. |
139 | + """ |
140 | + return os.environ['JUJU_MODEL_UUID'] |
141 | + |
142 | + |
143 | def principal_unit(): |
144 | """Returns the principal unit of this unit, otherwise None""" |
145 | # Juju 2.2 and above provides JUJU_PRINCIPAL_UNIT |
146 | @@ -313,8 +372,10 @@ class Config(dict): |
147 | try: |
148 | self._prev_dict = json.load(f) |
149 | except ValueError as e: |
150 | - log('Unable to parse previous config data - {}'.format(str(e)), |
151 | - level=ERROR) |
152 | + log('Found but was unable to parse previous config data, ' |
153 | + 'ignoring which will report all values as changed - {}' |
154 | + .format(str(e)), level=ERROR) |
155 | + return |
156 | for k, v in copy.deepcopy(self._prev_dict).items(): |
157 | if k not in self: |
158 | self[k] = v |
159 | @@ -485,6 +546,67 @@ def related_units(relid=None): |
160 | subprocess.check_output(units_cmd_line).decode('UTF-8')) or [] |
161 | |
162 | |
163 | +def expected_peer_units(): |
164 | + """Get a generator for units we expect to join peer relation based on |
165 | + goal-state. |
166 | + |
167 | + The local unit is excluded from the result to make it easy to gauge |
168 | + completion of all peers joining the relation with existing hook tools. |
169 | + |
170 | + Example usage: |
171 | + log('peer {} of {} joined peer relation' |
172 | + .format(len(related_units()), |
173 | + len(list(expected_peer_units())))) |
174 | + |
175 | + This function will raise NotImplementedError if used with juju versions |
176 | + without goal-state support. |
177 | + |
178 | + :returns: iterator |
179 | + :rtype: types.GeneratorType |
180 | + :raises: NotImplementedError |
181 | + """ |
182 | + if not has_juju_version("2.4.0"): |
183 | + # goal-state first appeared in 2.4.0. |
184 | + raise NotImplementedError("goal-state") |
185 | + _goal_state = goal_state() |
186 | + return (key for key in _goal_state['units'] |
187 | + if '/' in key and key != local_unit()) |
188 | + |
189 | + |
190 | +def expected_related_units(reltype=None): |
191 | + """Get a generator for units we expect to join relation based on |
192 | + goal-state. |
193 | + |
194 | + Note that you can not use this function for the peer relation, take a look |
195 | + at expected_peer_units() for that. |
196 | + |
197 | + This function will raise KeyError if you request information for a |
198 | + relation type for which juju goal-state does not have information. It will |
199 | + raise NotImplementedError if used with juju versions without goal-state |
200 | + support. |
201 | + |
202 | + Example usage: |
203 | + log('participant {} of {} joined relation {}' |
204 | + .format(len(related_units()), |
205 | + len(list(expected_related_units())), |
206 | + relation_type())) |
207 | + |
208 | + :param reltype: Relation type to list data for, default is to list data for |
209 | + the relation type we are currently executing a hook for. |
210 | + :type reltype: str |
211 | + :returns: iterator |
212 | + :rtype: types.GeneratorType |
213 | + :raises: KeyError, NotImplementedError |
214 | + """ |
215 | + if not has_juju_version("2.4.4"): |
216 | + # goal-state existed in 2.4.0, but did not list individual units to |
217 | + # join a relation in 2.4.1 through 2.4.3. (LP: #1794739) |
218 | + raise NotImplementedError("goal-state relation unit count") |
219 | + reltype = reltype or relation_type() |
220 | + _goal_state = goal_state() |
221 | + return (key for key in _goal_state['relations'][reltype] if '/' in key) |
222 | + |
223 | + |
224 | @cached |
225 | def relation_for_unit(unit=None, rid=None): |
226 | """Get the json represenation of a unit's relation""" |
227 | @@ -855,9 +977,23 @@ def charm_dir(): |
228 | return os.environ.get('CHARM_DIR') |
229 | |
230 | |
231 | +def cmd_exists(cmd): |
232 | + """Return True if the specified cmd exists in the path""" |
233 | + return any( |
234 | + os.access(os.path.join(path, cmd), os.X_OK) |
235 | + for path in os.environ["PATH"].split(os.pathsep) |
236 | + ) |
237 | + |
238 | + |
239 | @cached |
240 | +@deprecate("moved to function_get()", log=log) |
241 | def action_get(key=None): |
242 | - """Gets the value of an action parameter, or all key/value param pairs""" |
243 | + """ |
244 | + .. deprecated:: 0.20.7 |
245 | + Alias for :func:`function_get`. |
246 | + |
247 | + Gets the value of an action parameter, or all key/value param pairs. |
248 | + """ |
249 | cmd = ['action-get'] |
250 | if key is not None: |
251 | cmd.append(key) |
252 | @@ -866,52 +1002,130 @@ def action_get(key=None): |
253 | return action_data |
254 | |
255 | |
256 | +@cached |
257 | +def function_get(key=None): |
258 | + """Gets the value of an action parameter, or all key/value param pairs""" |
259 | + cmd = ['function-get'] |
260 | + # Fallback for older charms. |
261 | + if not cmd_exists('function-get'): |
262 | + cmd = ['action-get'] |
263 | + |
264 | + if key is not None: |
265 | + cmd.append(key) |
266 | + cmd.append('--format=json') |
267 | + function_data = json.loads(subprocess.check_output(cmd).decode('UTF-8')) |
268 | + return function_data |
269 | + |
270 | + |
271 | +@deprecate("moved to function_set()", log=log) |
272 | def action_set(values): |
273 | - """Sets the values to be returned after the action finishes""" |
274 | + """ |
275 | + .. deprecated:: 0.20.7 |
276 | + Alias for :func:`function_set`. |
277 | + |
278 | + Sets the values to be returned after the action finishes. |
279 | + """ |
280 | cmd = ['action-set'] |
281 | for k, v in list(values.items()): |
282 | cmd.append('{}={}'.format(k, v)) |
283 | subprocess.check_call(cmd) |
284 | |
285 | |
286 | +def function_set(values): |
287 | + """Sets the values to be returned after the function finishes""" |
288 | + cmd = ['function-set'] |
289 | + # Fallback for older charms. |
290 | + if not cmd_exists('function-get'): |
291 | + cmd = ['action-set'] |
292 | + |
293 | + for k, v in list(values.items()): |
294 | + cmd.append('{}={}'.format(k, v)) |
295 | + subprocess.check_call(cmd) |
296 | + |
297 | + |
298 | +@deprecate("moved to function_fail()", log=log) |
299 | def action_fail(message): |
300 | - """Sets the action status to failed and sets the error message. |
301 | + """ |
302 | + .. deprecated:: 0.20.7 |
303 | + Alias for :func:`function_fail`. |
304 | + |
305 | + Sets the action status to failed and sets the error message. |
306 | |
307 | - The results set by action_set are preserved.""" |
308 | + The results set by action_set are preserved. |
309 | + """ |
310 | subprocess.check_call(['action-fail', message]) |
311 | |
312 | |
313 | +def function_fail(message): |
314 | + """Sets the function status to failed and sets the error message. |
315 | + |
316 | + The results set by function_set are preserved.""" |
317 | + cmd = ['function-fail'] |
318 | + # Fallback for older charms. |
319 | + if not cmd_exists('function-fail'): |
320 | + cmd = ['action-fail'] |
321 | + cmd.append(message) |
322 | + |
323 | + subprocess.check_call(cmd) |
324 | + |
325 | + |
326 | def action_name(): |
327 | """Get the name of the currently executing action.""" |
328 | return os.environ.get('JUJU_ACTION_NAME') |
329 | |
330 | |
331 | +def function_name(): |
332 | + """Get the name of the currently executing function.""" |
333 | + return os.environ.get('JUJU_FUNCTION_NAME') or action_name() |
334 | + |
335 | + |
336 | def action_uuid(): |
337 | """Get the UUID of the currently executing action.""" |
338 | return os.environ.get('JUJU_ACTION_UUID') |
339 | |
340 | |
341 | +def function_id(): |
342 | + """Get the ID of the currently executing function.""" |
343 | + return os.environ.get('JUJU_FUNCTION_ID') or action_uuid() |
344 | + |
345 | + |
346 | def action_tag(): |
347 | """Get the tag for the currently executing action.""" |
348 | return os.environ.get('JUJU_ACTION_TAG') |
349 | |
350 | |
351 | -def status_set(workload_state, message): |
352 | +def function_tag(): |
353 | + """Get the tag for the currently executing function.""" |
354 | + return os.environ.get('JUJU_FUNCTION_TAG') or action_tag() |
355 | + |
356 | + |
357 | +def status_set(workload_state, message, application=False): |
358 | """Set the workload state with a message |
359 | |
360 | Use status-set to set the workload state with a message which is visible |
361 | to the user via juju status. If the status-set command is not found then |
362 | - assume this is juju < 1.23 and juju-log the message unstead. |
363 | + assume this is juju < 1.23 and juju-log the message instead. |
364 | |
365 | - workload_state -- valid juju workload state. |
366 | - message -- status update message |
367 | + workload_state -- valid juju workload state. str or WORKLOAD_STATES |
368 | + message -- status update message |
369 | + application -- Whether this is an application state set |
370 | """ |
371 | - valid_states = ['maintenance', 'blocked', 'waiting', 'active'] |
372 | - if workload_state not in valid_states: |
373 | - raise ValueError( |
374 | - '{!r} is not a valid workload state'.format(workload_state) |
375 | - ) |
376 | - cmd = ['status-set', workload_state, message] |
377 | + bad_state_msg = '{!r} is not a valid workload state' |
378 | + |
379 | + if isinstance(workload_state, str): |
380 | + try: |
381 | + # Convert string to enum. |
382 | + workload_state = WORKLOAD_STATES[workload_state.upper()] |
383 | + except KeyError: |
384 | + raise ValueError(bad_state_msg.format(workload_state)) |
385 | + |
386 | + if workload_state not in WORKLOAD_STATES: |
387 | + raise ValueError(bad_state_msg.format(workload_state)) |
388 | + |
389 | + cmd = ['status-set'] |
390 | + if application: |
391 | + cmd.append('--application') |
392 | + cmd.extend([workload_state.value, message]) |
393 | try: |
394 | ret = subprocess.call(cmd) |
395 | if ret == 0: |
396 | @@ -919,7 +1133,7 @@ def status_set(workload_state, message): |
397 | except OSError as e: |
398 | if e.errno != errno.ENOENT: |
399 | raise |
400 | - log_message = 'status-set failed: {} {}'.format(workload_state, |
401 | + log_message = 'status-set failed: {} {}'.format(workload_state.value, |
402 | message) |
403 | log(log_message, level='INFO') |
404 | |
405 | @@ -973,6 +1187,7 @@ def application_version_set(version): |
406 | |
407 | |
408 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) |
409 | +@cached |
410 | def goal_state(): |
411 | """Juju goal state values""" |
412 | cmd = ['goal-state', '--format=json'] |
413 | @@ -1297,3 +1512,102 @@ def egress_subnets(rid=None, unit=None): |
414 | if 'private-address' in settings: |
415 | return [_to_range(settings['private-address'])] |
416 | return [] # Should never happen |
417 | + |
418 | + |
419 | +def unit_doomed(unit=None): |
420 | + """Determines if the unit is being removed from the model |
421 | + |
422 | + Requires Juju 2.4.1. |
423 | + |
424 | + :param unit: string unit name, defaults to local_unit |
425 | + :side effect: calls goal_state |
426 | + :side effect: calls local_unit |
427 | + :side effect: calls has_juju_version |
428 | + :return: True if the unit is being removed, already gone, or never existed |
429 | + """ |
430 | + if not has_juju_version("2.4.1"): |
431 | + # We cannot risk blindly returning False for 'we don't know', |
432 | + # because that could cause data loss; if call sites don't |
433 | + # need an accurate answer, they likely don't need this helper |
434 | + # at all. |
435 | + # goal-state existed in 2.4.0, but did not handle removals |
436 | + # correctly until 2.4.1. |
437 | + raise NotImplementedError("is_doomed") |
438 | + if unit is None: |
439 | + unit = local_unit() |
440 | + gs = goal_state() |
441 | + units = gs.get('units', {}) |
442 | + if unit not in units: |
443 | + return True |
444 | + # I don't think 'dead' units ever show up in the goal-state, but |
445 | + # check anyway in addition to 'dying'. |
446 | + return units[unit]['status'] in ('dying', 'dead') |
447 | + |
448 | + |
449 | +def env_proxy_settings(selected_settings=None): |
450 | + """Get proxy settings from process environment variables. |
451 | + |
452 | + Get charm proxy settings from environment variables that correspond to |
453 | + juju-http-proxy, juju-https-proxy juju-no-proxy (available as of 2.4.2, see |
454 | + lp:1782236) and juju-ftp-proxy in a format suitable for passing to an |
455 | + application that reacts to proxy settings passed as environment variables. |
456 | + Some applications support lowercase or uppercase notation (e.g. curl), some |
457 | + support only lowercase (e.g. wget), there are also subjectively rare cases |
458 | + of only uppercase notation support. no_proxy CIDR and wildcard support also |
459 | + varies between runtimes and applications as there is no enforced standard. |
460 | + |
461 | + Some applications may connect to multiple destinations and expose config |
462 | + options that would affect only proxy settings for a specific destination |
463 | + these should be handled in charms in an application-specific manner. |
464 | + |
465 | + :param selected_settings: format only a subset of possible settings |
466 | + :type selected_settings: list |
467 | + :rtype: Option(None, dict[str, str]) |
468 | + """ |
469 | + SUPPORTED_SETTINGS = { |
470 | + 'http': 'HTTP_PROXY', |
471 | + 'https': 'HTTPS_PROXY', |
472 | + 'no_proxy': 'NO_PROXY', |
473 | + 'ftp': 'FTP_PROXY' |
474 | + } |
475 | + if selected_settings is None: |
476 | + selected_settings = SUPPORTED_SETTINGS |
477 | + |
478 | + selected_vars = [v for k, v in SUPPORTED_SETTINGS.items() |
479 | + if k in selected_settings] |
480 | + proxy_settings = {} |
481 | + for var in selected_vars: |
482 | + var_val = os.getenv(var) |
483 | + if var_val: |
484 | + proxy_settings[var] = var_val |
485 | + proxy_settings[var.lower()] = var_val |
486 | + # Now handle juju-prefixed environment variables. The legacy vs new |
487 | + # environment variable usage is mutually exclusive |
488 | + charm_var_val = os.getenv('JUJU_CHARM_{}'.format(var)) |
489 | + if charm_var_val: |
490 | + proxy_settings[var] = charm_var_val |
491 | + proxy_settings[var.lower()] = charm_var_val |
492 | + if 'no_proxy' in proxy_settings: |
493 | + if _contains_range(proxy_settings['no_proxy']): |
494 | + log(RANGE_WARNING, level=WARNING) |
495 | + return proxy_settings if proxy_settings else None |
496 | + |
497 | + |
498 | +def _contains_range(addresses): |
499 | + """Check for cidr or wildcard domain in a string. |
500 | + |
501 | + Given a string comprising a comma separated list of ip addresses |
502 | + and domain names, determine whether the string contains IP ranges |
503 | + or wildcard domains. |
504 | + |
505 | + :param addresses: comma separated list of domains and ip addresses. |
506 | + :type addresses: str |
507 | + """ |
508 | + return ( |
509 | + # Test for cidr (e.g. 10.20.20.0/24) |
510 | + "/" in addresses or |
511 | + # Test for wildcard domains (*.foo.com or .foo.com) |
512 | + "*" in addresses or |
513 | + addresses.startswith(".") or |
514 | + ",." in addresses or |
515 | + " ." in addresses) |
516 | diff --git a/lib/charmhelpers/core/host.py b/lib/charmhelpers/core/host.py |
517 | index 322ab2a..b33ac90 100644 |
518 | --- a/lib/charmhelpers/core/host.py |
519 | +++ b/lib/charmhelpers/core/host.py |
520 | @@ -34,21 +34,23 @@ import six |
521 | |
522 | from contextlib import contextmanager |
523 | from collections import OrderedDict |
524 | -from .hookenv import log, DEBUG, local_unit |
525 | +from .hookenv import log, INFO, DEBUG, local_unit, charm_name |
526 | from .fstab import Fstab |
527 | from charmhelpers.osplatform import get_platform |
528 | |
529 | __platform__ = get_platform() |
530 | if __platform__ == "ubuntu": |
531 | - from charmhelpers.core.host_factory.ubuntu import ( |
532 | + from charmhelpers.core.host_factory.ubuntu import ( # NOQA:F401 |
533 | service_available, |
534 | add_new_group, |
535 | lsb_release, |
536 | cmp_pkgrevno, |
537 | CompareHostReleases, |
538 | + get_distrib_codename, |
539 | + arch |
540 | ) # flake8: noqa -- ignore F401 for this import |
541 | elif __platform__ == "centos": |
542 | - from charmhelpers.core.host_factory.centos import ( |
543 | + from charmhelpers.core.host_factory.centos import ( # NOQA:F401 |
544 | service_available, |
545 | add_new_group, |
546 | lsb_release, |
547 | @@ -58,6 +60,7 @@ elif __platform__ == "centos": |
548 | |
549 | UPDATEDB_PATH = '/etc/updatedb.conf' |
550 | |
551 | + |
552 | def service_start(service_name, **kwargs): |
553 | """Start a system service. |
554 | |
555 | @@ -287,8 +290,8 @@ def service_running(service_name, **kwargs): |
556 | for key, value in six.iteritems(kwargs): |
557 | parameter = '%s=%s' % (key, value) |
558 | cmd.append(parameter) |
559 | - output = subprocess.check_output(cmd, |
560 | - stderr=subprocess.STDOUT).decode('UTF-8') |
561 | + output = subprocess.check_output( |
562 | + cmd, stderr=subprocess.STDOUT).decode('UTF-8') |
563 | except subprocess.CalledProcessError: |
564 | return False |
565 | else: |
566 | @@ -442,7 +445,7 @@ def add_user_to_group(username, group): |
567 | |
568 | |
569 | def chage(username, lastday=None, expiredate=None, inactive=None, |
570 | - mindays=None, maxdays=None, root=None, warndays=None): |
571 | + mindays=None, maxdays=None, root=None, warndays=None): |
572 | """Change user password expiry information |
573 | |
574 | :param str username: User to update |
575 | @@ -482,8 +485,10 @@ def chage(username, lastday=None, expiredate=None, inactive=None, |
576 | cmd.append(username) |
577 | subprocess.check_call(cmd) |
578 | |
579 | + |
580 | remove_password_expiry = functools.partial(chage, expiredate='-1', inactive='-1', mindays='0', maxdays='-1') |
581 | |
582 | + |
583 | def rsync(from_path, to_path, flags='-r', options=None, timeout=None): |
584 | """Replicate the contents of a path""" |
585 | options = options or ['--delete', '--executability'] |
586 | @@ -535,13 +540,15 @@ def write_file(path, content, owner='root', group='root', perms=0o444): |
587 | # lets see if we can grab the file and compare the context, to avoid doing |
588 | # a write. |
589 | existing_content = None |
590 | - existing_uid, existing_gid = None, None |
591 | + existing_uid, existing_gid, existing_perms = None, None, None |
592 | try: |
593 | with open(path, 'rb') as target: |
594 | existing_content = target.read() |
595 | stat = os.stat(path) |
596 | - existing_uid, existing_gid = stat.st_uid, stat.st_gid |
597 | - except: |
598 | + existing_uid, existing_gid, existing_perms = ( |
599 | + stat.st_uid, stat.st_gid, stat.st_mode |
600 | + ) |
601 | + except Exception: |
602 | pass |
603 | if content != existing_content: |
604 | log("Writing file {} {}:{} {:o}".format(path, owner, group, perms), |
605 | @@ -554,7 +561,7 @@ def write_file(path, content, owner='root', group='root', perms=0o444): |
606 | target.write(content) |
607 | return |
608 | # the contents were the same, but we might still need to change the |
609 | - # ownership. |
610 | + # ownership or permissions. |
611 | if existing_uid != uid: |
612 | log("Changing uid on already existing content: {} -> {}" |
613 | .format(existing_uid, uid), level=DEBUG) |
614 | @@ -563,6 +570,10 @@ def write_file(path, content, owner='root', group='root', perms=0o444): |
615 | log("Changing gid on already existing content: {} -> {}" |
616 | .format(existing_gid, gid), level=DEBUG) |
617 | os.chown(path, -1, gid) |
618 | + if existing_perms != perms: |
619 | + log("Changing permissions on existing content: {} -> {}" |
620 | + .format(existing_perms, perms), level=DEBUG) |
621 | + os.chmod(path, perms) |
622 | |
623 | |
624 | def fstab_remove(mp): |
625 | @@ -827,7 +838,7 @@ def list_nics(nic_type=None): |
626 | ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n') |
627 | ip_output = (line.strip() for line in ip_output if line) |
628 | |
629 | - key = re.compile('^[0-9]+:\s+(.+):') |
630 | + key = re.compile(r'^[0-9]+:\s+(.+):') |
631 | for line in ip_output: |
632 | matched = re.search(key, line) |
633 | if matched: |
634 | @@ -972,6 +983,20 @@ def is_container(): |
635 | |
636 | |
637 | def add_to_updatedb_prunepath(path, updatedb_path=UPDATEDB_PATH): |
638 | + """Adds the specified path to the mlocate's udpatedb.conf PRUNEPATH list. |
639 | + |
640 | + This method has no effect if the path specified by updatedb_path does not |
641 | + exist or is not a file. |
642 | + |
643 | + @param path: string the path to add to the updatedb.conf PRUNEPATHS value |
644 | + @param updatedb_path: the path to the updatedb.conf file |
645 | + """ |
646 | + if not os.path.exists(updatedb_path) or os.path.isdir(updatedb_path): |
647 | + # If the updatedb.conf file doesn't exist then don't attempt to update |
648 | + # the file as the package providing mlocate may not be installed on |
649 | + # the local system |
650 | + return |
651 | + |
652 | with open(updatedb_path, 'r+') as f_id: |
653 | updatedb_text = f_id.read() |
654 | output = updatedb(updatedb_text, path) |
655 | @@ -1026,3 +1051,54 @@ def modulo_distribution(modulo=3, wait=30, non_zero_wait=False): |
656 | return modulo * wait |
657 | else: |
658 | return calculated_wait_time |
659 | + |
660 | + |
661 | +def install_ca_cert(ca_cert, name=None): |
662 | + """ |
663 | + Install the given cert as a trusted CA. |
664 | + |
665 | + The ``name`` is the stem of the filename where the cert is written, and if |
666 | + not provided, it will default to ``juju-{charm_name}``. |
667 | + |
668 | + If the cert is empty or None, or is unchanged, nothing is done. |
669 | + """ |
670 | + if not ca_cert: |
671 | + return |
672 | + if not isinstance(ca_cert, bytes): |
673 | + ca_cert = ca_cert.encode('utf8') |
674 | + if not name: |
675 | + name = 'juju-{}'.format(charm_name()) |
676 | + cert_file = '/usr/local/share/ca-certificates/{}.crt'.format(name) |
677 | + new_hash = hashlib.md5(ca_cert).hexdigest() |
678 | + if file_hash(cert_file) == new_hash: |
679 | + return |
680 | + log("Installing new CA cert at: {}".format(cert_file), level=INFO) |
681 | + write_file(cert_file, ca_cert) |
682 | + subprocess.check_call(['update-ca-certificates', '--fresh']) |
683 | + |
684 | + |
685 | +def get_system_env(key, default=None): |
686 | + """Get data from system environment as represented in ``/etc/environment``. |
687 | + |
688 | + :param key: Key to look up |
689 | + :type key: str |
690 | + :param default: Value to return if key is not found |
691 | + :type default: any |
692 | + :returns: Value for key if found or contents of default parameter |
693 | + :rtype: any |
694 | + :raises: subprocess.CalledProcessError |
695 | + """ |
696 | + env_file = '/etc/environment' |
697 | + # use the shell and env(1) to parse the global environments file. This is |
698 | + # done to get the correct result even if the user has shell variable |
699 | + # substitutions or other shell logic in that file. |
700 | + output = subprocess.check_output( |
701 | + ['env', '-i', '/bin/bash', '-c', |
702 | + 'set -a && source {} && env'.format(env_file)], |
703 | + universal_newlines=True) |
704 | + for k, v in (line.split('=', 1) |
705 | + for line in output.splitlines() if '=' in line): |
706 | + if k == key: |
707 | + return v |
708 | + else: |
709 | + return default |
710 | diff --git a/lib/charmhelpers/core/host_factory/ubuntu.py b/lib/charmhelpers/core/host_factory/ubuntu.py |
711 | index 99451b5..3edc068 100644 |
712 | --- a/lib/charmhelpers/core/host_factory/ubuntu.py |
713 | +++ b/lib/charmhelpers/core/host_factory/ubuntu.py |
714 | @@ -1,5 +1,6 @@ |
715 | import subprocess |
716 | |
717 | +from charmhelpers.core.hookenv import cached |
718 | from charmhelpers.core.strutils import BasicStringComparator |
719 | |
720 | |
721 | @@ -21,6 +22,10 @@ UBUNTU_RELEASES = ( |
722 | 'zesty', |
723 | 'artful', |
724 | 'bionic', |
725 | + 'cosmic', |
726 | + 'disco', |
727 | + 'eoan', |
728 | + 'focal' |
729 | ) |
730 | |
731 | |
732 | @@ -71,6 +76,14 @@ def lsb_release(): |
733 | return d |
734 | |
735 | |
736 | +def get_distrib_codename(): |
737 | + """Return the codename of the distribution |
738 | + :returns: The codename |
739 | + :rtype: str |
740 | + """ |
741 | + return lsb_release()['DISTRIB_CODENAME'].lower() |
742 | + |
743 | + |
744 | def cmp_pkgrevno(package, revno, pkgcache=None): |
745 | """Compare supplied revno with the revno of the installed package. |
746 | |
747 | @@ -82,9 +95,22 @@ def cmp_pkgrevno(package, revno, pkgcache=None): |
748 | the pkgcache argument is None. Be sure to add charmhelpers.fetch if |
749 | you call this function, or pass an apt_pkg.Cache() instance. |
750 | """ |
751 | - import apt_pkg |
752 | + from charmhelpers.fetch import apt_pkg |
753 | if not pkgcache: |
754 | from charmhelpers.fetch import apt_cache |
755 | pkgcache = apt_cache() |
756 | pkg = pkgcache[package] |
757 | return apt_pkg.version_compare(pkg.current_ver.ver_str, revno) |
758 | + |
759 | + |
760 | +@cached |
761 | +def arch(): |
762 | + """Return the package architecture as a string. |
763 | + |
764 | + :returns: the architecture |
765 | + :rtype: str |
766 | + :raises: subprocess.CalledProcessError if dpkg command fails |
767 | + """ |
768 | + return subprocess.check_output( |
769 | + ['dpkg', '--print-architecture'] |
770 | + ).rstrip().decode('UTF-8') |
771 | diff --git a/lib/charmhelpers/core/kernel.py b/lib/charmhelpers/core/kernel.py |
772 | index 2d40452..e01f4f8 100644 |
773 | --- a/lib/charmhelpers/core/kernel.py |
774 | +++ b/lib/charmhelpers/core/kernel.py |
775 | @@ -26,12 +26,12 @@ from charmhelpers.core.hookenv import ( |
776 | |
777 | __platform__ = get_platform() |
778 | if __platform__ == "ubuntu": |
779 | - from charmhelpers.core.kernel_factory.ubuntu import ( |
780 | + from charmhelpers.core.kernel_factory.ubuntu import ( # NOQA:F401 |
781 | persistent_modprobe, |
782 | update_initramfs, |
783 | ) # flake8: noqa -- ignore F401 for this import |
784 | elif __platform__ == "centos": |
785 | - from charmhelpers.core.kernel_factory.centos import ( |
786 | + from charmhelpers.core.kernel_factory.centos import ( # NOQA:F401 |
787 | persistent_modprobe, |
788 | update_initramfs, |
789 | ) # flake8: noqa -- ignore F401 for this import |
790 | diff --git a/lib/charmhelpers/core/sysctl.py b/lib/charmhelpers/core/sysctl.py |
791 | index 1f188d8..386428d 100644 |
792 | --- a/lib/charmhelpers/core/sysctl.py |
793 | +++ b/lib/charmhelpers/core/sysctl.py |
794 | @@ -17,18 +17,21 @@ |
795 | |
796 | import yaml |
797 | |
798 | -from subprocess import check_call |
799 | +from subprocess import check_call, CalledProcessError |
800 | |
801 | from charmhelpers.core.hookenv import ( |
802 | log, |
803 | DEBUG, |
804 | ERROR, |
805 | + WARNING, |
806 | ) |
807 | |
808 | +from charmhelpers.core.host import is_container |
809 | + |
810 | __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>' |
811 | |
812 | |
813 | -def create(sysctl_dict, sysctl_file): |
814 | +def create(sysctl_dict, sysctl_file, ignore=False): |
815 | """Creates a sysctl.conf file from a YAML associative array |
816 | |
817 | :param sysctl_dict: a dict or YAML-formatted string of sysctl |
818 | @@ -36,6 +39,8 @@ def create(sysctl_dict, sysctl_file): |
819 | :type sysctl_dict: str |
820 | :param sysctl_file: path to the sysctl file to be saved |
821 | :type sysctl_file: str or unicode |
822 | + :param ignore: If True, ignore "unknown variable" errors. |
823 | + :type ignore: bool |
824 | :returns: None |
825 | """ |
826 | if type(sysctl_dict) is not dict: |
827 | @@ -52,7 +57,19 @@ def create(sysctl_dict, sysctl_file): |
828 | for key, value in sysctl_dict_parsed.items(): |
829 | fd.write("{}={}\n".format(key, value)) |
830 | |
831 | - log("Updating sysctl_file: %s values: %s" % (sysctl_file, sysctl_dict_parsed), |
832 | + log("Updating sysctl_file: {} values: {}".format(sysctl_file, |
833 | + sysctl_dict_parsed), |
834 | level=DEBUG) |
835 | |
836 | - check_call(["sysctl", "-p", sysctl_file]) |
837 | + call = ["sysctl", "-p", sysctl_file] |
838 | + if ignore: |
839 | + call.append("-e") |
840 | + |
841 | + try: |
842 | + check_call(call) |
843 | + except CalledProcessError as e: |
844 | + if is_container(): |
845 | + log("Error setting some sysctl keys in this container: {}".format(e.output), |
846 | + level=WARNING) |
847 | + else: |
848 | + raise e |
849 | diff --git a/lib/charmhelpers/fetch/__init__.py b/lib/charmhelpers/fetch/__init__.py |
850 | index 480a627..0cc7fc8 100644 |
851 | --- a/lib/charmhelpers/fetch/__init__.py |
852 | +++ b/lib/charmhelpers/fetch/__init__.py |
853 | @@ -84,6 +84,7 @@ module = "charmhelpers.fetch.%s" % __platform__ |
854 | fetch = importlib.import_module(module) |
855 | |
856 | filter_installed_packages = fetch.filter_installed_packages |
857 | +filter_missing_packages = fetch.filter_missing_packages |
858 | install = fetch.apt_install |
859 | upgrade = fetch.apt_upgrade |
860 | update = _fetch_update = fetch.apt_update |
861 | @@ -96,11 +97,14 @@ if __platform__ == "ubuntu": |
862 | apt_update = fetch.apt_update |
863 | apt_upgrade = fetch.apt_upgrade |
864 | apt_purge = fetch.apt_purge |
865 | + apt_autoremove = fetch.apt_autoremove |
866 | apt_mark = fetch.apt_mark |
867 | apt_hold = fetch.apt_hold |
868 | apt_unhold = fetch.apt_unhold |
869 | import_key = fetch.import_key |
870 | get_upstream_version = fetch.get_upstream_version |
871 | + apt_pkg = fetch.ubuntu_apt_pkg |
872 | + get_apt_dpkg_env = fetch.get_apt_dpkg_env |
873 | elif __platform__ == "centos": |
874 | yum_search = fetch.yum_search |
875 | |
876 | diff --git a/lib/charmhelpers/fetch/archiveurl.py b/lib/charmhelpers/fetch/archiveurl.py |
877 | index dd24f9e..d25587a 100644 |
878 | --- a/lib/charmhelpers/fetch/archiveurl.py |
879 | +++ b/lib/charmhelpers/fetch/archiveurl.py |
880 | @@ -89,7 +89,7 @@ class ArchiveUrlFetchHandler(BaseFetchHandler): |
881 | :param str source: URL pointing to an archive file. |
882 | :param str dest: Local path location to download archive file to. |
883 | """ |
884 | - # propogate all exceptions |
885 | + # propagate all exceptions |
886 | # URLError, OSError, etc |
887 | proto, netloc, path, params, query, fragment = urlparse(source) |
888 | if proto in ('http', 'https'): |
889 | diff --git a/lib/charmhelpers/fetch/bzrurl.py b/lib/charmhelpers/fetch/bzrurl.py |
890 | index 07cd029..c4ab3ff 100644 |
891 | --- a/lib/charmhelpers/fetch/bzrurl.py |
892 | +++ b/lib/charmhelpers/fetch/bzrurl.py |
893 | @@ -13,7 +13,7 @@ |
894 | # limitations under the License. |
895 | |
896 | import os |
897 | -from subprocess import check_call |
898 | +from subprocess import STDOUT, check_output |
899 | from charmhelpers.fetch import ( |
900 | BaseFetchHandler, |
901 | UnhandledSource, |
902 | @@ -55,7 +55,7 @@ class BzrUrlFetchHandler(BaseFetchHandler): |
903 | cmd = ['bzr', 'branch'] |
904 | cmd += cmd_opts |
905 | cmd += [source, dest] |
906 | - check_call(cmd) |
907 | + check_output(cmd, stderr=STDOUT) |
908 | |
909 | def install(self, source, dest=None, revno=None): |
910 | url_parts = self.parse_url(source) |
911 | diff --git a/lib/charmhelpers/fetch/giturl.py b/lib/charmhelpers/fetch/giturl.py |
912 | index 4cf21bc..070ca9b 100644 |
913 | --- a/lib/charmhelpers/fetch/giturl.py |
914 | +++ b/lib/charmhelpers/fetch/giturl.py |
915 | @@ -13,7 +13,7 @@ |
916 | # limitations under the License. |
917 | |
918 | import os |
919 | -from subprocess import check_call, CalledProcessError |
920 | +from subprocess import check_output, CalledProcessError, STDOUT |
921 | from charmhelpers.fetch import ( |
922 | BaseFetchHandler, |
923 | UnhandledSource, |
924 | @@ -50,7 +50,7 @@ class GitUrlFetchHandler(BaseFetchHandler): |
925 | cmd = ['git', 'clone', source, dest, '--branch', branch] |
926 | if depth: |
927 | cmd.extend(['--depth', depth]) |
928 | - check_call(cmd) |
929 | + check_output(cmd, stderr=STDOUT) |
930 | |
931 | def install(self, source, branch="master", dest=None, depth=None): |
932 | url_parts = self.parse_url(source) |
933 | diff --git a/lib/charmhelpers/fetch/python/__init__.py b/lib/charmhelpers/fetch/python/__init__.py |
934 | new file mode 100644 |
935 | index 0000000..bff99dc |
936 | --- /dev/null |
937 | +++ b/lib/charmhelpers/fetch/python/__init__.py |
938 | @@ -0,0 +1,13 @@ |
939 | +# Copyright 2014-2019 Canonical Limited. |
940 | +# |
941 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
942 | +# you may not use this file except in compliance with the License. |
943 | +# You may obtain a copy of the License at |
944 | +# |
945 | +# http://www.apache.org/licenses/LICENSE-2.0 |
946 | +# |
947 | +# Unless required by applicable law or agreed to in writing, software |
948 | +# distributed under the License is distributed on an "AS IS" BASIS, |
949 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
950 | +# See the License for the specific language governing permissions and |
951 | +# limitations under the License. |
952 | diff --git a/lib/charmhelpers/fetch/python/debug.py b/lib/charmhelpers/fetch/python/debug.py |
953 | new file mode 100644 |
954 | index 0000000..757135e |
955 | --- /dev/null |
956 | +++ b/lib/charmhelpers/fetch/python/debug.py |
957 | @@ -0,0 +1,54 @@ |
958 | +#!/usr/bin/env python |
959 | +# coding: utf-8 |
960 | + |
961 | +# Copyright 2014-2015 Canonical Limited. |
962 | +# |
963 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
964 | +# you may not use this file except in compliance with the License. |
965 | +# You may obtain a copy of the License at |
966 | +# |
967 | +# http://www.apache.org/licenses/LICENSE-2.0 |
968 | +# |
969 | +# Unless required by applicable law or agreed to in writing, software |
970 | +# distributed under the License is distributed on an "AS IS" BASIS, |
971 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
972 | +# See the License for the specific language governing permissions and |
973 | +# limitations under the License. |
974 | + |
975 | +from __future__ import print_function |
976 | + |
977 | +import atexit |
978 | +import sys |
979 | + |
980 | +from charmhelpers.fetch.python.rpdb import Rpdb |
981 | +from charmhelpers.core.hookenv import ( |
982 | + open_port, |
983 | + close_port, |
984 | + ERROR, |
985 | + log |
986 | +) |
987 | + |
988 | +__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" |
989 | + |
990 | +DEFAULT_ADDR = "0.0.0.0" |
991 | +DEFAULT_PORT = 4444 |
992 | + |
993 | + |
994 | +def _error(message): |
995 | + log(message, level=ERROR) |
996 | + |
997 | + |
998 | +def set_trace(addr=DEFAULT_ADDR, port=DEFAULT_PORT): |
999 | + """ |
1000 | + Set a trace point using the remote debugger |
1001 | + """ |
1002 | + atexit.register(close_port, port) |
1003 | + try: |
1004 | + log("Starting a remote python debugger session on %s:%s" % (addr, |
1005 | + port)) |
1006 | + open_port(port) |
1007 | + debugger = Rpdb(addr=addr, port=port) |
1008 | + debugger.set_trace(sys._getframe().f_back) |
1009 | + except Exception: |
1010 | + _error("Cannot start a remote debug session on %s:%s" % (addr, |
1011 | + port)) |
1012 | diff --git a/lib/charmhelpers/fetch/python/packages.py b/lib/charmhelpers/fetch/python/packages.py |
1013 | new file mode 100644 |
1014 | index 0000000..6e95028 |
1015 | --- /dev/null |
1016 | +++ b/lib/charmhelpers/fetch/python/packages.py |
1017 | @@ -0,0 +1,154 @@ |
1018 | +#!/usr/bin/env python |
1019 | +# coding: utf-8 |
1020 | + |
1021 | +# Copyright 2014-2015 Canonical Limited. |
1022 | +# |
1023 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
1024 | +# you may not use this file except in compliance with the License. |
1025 | +# You may obtain a copy of the License at |
1026 | +# |
1027 | +# http://www.apache.org/licenses/LICENSE-2.0 |
1028 | +# |
1029 | +# Unless required by applicable law or agreed to in writing, software |
1030 | +# distributed under the License is distributed on an "AS IS" BASIS, |
1031 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
1032 | +# See the License for the specific language governing permissions and |
1033 | +# limitations under the License. |
1034 | + |
1035 | +import os |
1036 | +import six |
1037 | +import subprocess |
1038 | +import sys |
1039 | + |
1040 | +from charmhelpers.fetch import apt_install, apt_update |
1041 | +from charmhelpers.core.hookenv import charm_dir, log |
1042 | + |
1043 | +__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" |
1044 | + |
1045 | + |
1046 | +def pip_execute(*args, **kwargs): |
1047 | + """Overridden pip_execute() to stop sys.path being changed. |
1048 | + |
1049 | + The act of importing main from the pip module seems to add wheels |
1050 | + from the /usr/share/python-wheels which are installed by various tools. |
1051 | + This function ensures that sys.path remains the same after the call is |
1052 | + executed. |
1053 | + """ |
1054 | + try: |
1055 | + _path = sys.path |
1056 | + try: |
1057 | + from pip import main as _pip_execute |
1058 | + except ImportError: |
1059 | + apt_update() |
1060 | + if six.PY2: |
1061 | + apt_install('python-pip') |
1062 | + else: |
1063 | + apt_install('python3-pip') |
1064 | + from pip import main as _pip_execute |
1065 | + _pip_execute(*args, **kwargs) |
1066 | + finally: |
1067 | + sys.path = _path |
1068 | + |
1069 | + |
1070 | +def parse_options(given, available): |
1071 | + """Given a set of options, check if available""" |
1072 | + for key, value in sorted(given.items()): |
1073 | + if not value: |
1074 | + continue |
1075 | + if key in available: |
1076 | + yield "--{0}={1}".format(key, value) |
1077 | + |
1078 | + |
1079 | +def pip_install_requirements(requirements, constraints=None, **options): |
1080 | + """Install a requirements file. |
1081 | + |
1082 | + :param constraints: Path to pip constraints file. |
1083 | + http://pip.readthedocs.org/en/stable/user_guide/#constraints-files |
1084 | + """ |
1085 | + command = ["install"] |
1086 | + |
1087 | + available_options = ('proxy', 'src', 'log', ) |
1088 | + for option in parse_options(options, available_options): |
1089 | + command.append(option) |
1090 | + |
1091 | + command.append("-r {0}".format(requirements)) |
1092 | + if constraints: |
1093 | + command.append("-c {0}".format(constraints)) |
1094 | + log("Installing from file: {} with constraints {} " |
1095 | + "and options: {}".format(requirements, constraints, command)) |
1096 | + else: |
1097 | + log("Installing from file: {} with options: {}".format(requirements, |
1098 | + command)) |
1099 | + pip_execute(command) |
1100 | + |
1101 | + |
1102 | +def pip_install(package, fatal=False, upgrade=False, venv=None, |
1103 | + constraints=None, **options): |
1104 | + """Install a python package""" |
1105 | + if venv: |
1106 | + venv_python = os.path.join(venv, 'bin/pip') |
1107 | + command = [venv_python, "install"] |
1108 | + else: |
1109 | + command = ["install"] |
1110 | + |
1111 | + available_options = ('proxy', 'src', 'log', 'index-url', ) |
1112 | + for option in parse_options(options, available_options): |
1113 | + command.append(option) |
1114 | + |
1115 | + if upgrade: |
1116 | + command.append('--upgrade') |
1117 | + |
1118 | + if constraints: |
1119 | + command.extend(['-c', constraints]) |
1120 | + |
1121 | + if isinstance(package, list): |
1122 | + command.extend(package) |
1123 | + else: |
1124 | + command.append(package) |
1125 | + |
1126 | + log("Installing {} package with options: {}".format(package, |
1127 | + command)) |
1128 | + if venv: |
1129 | + subprocess.check_call(command) |
1130 | + else: |
1131 | + pip_execute(command) |
1132 | + |
1133 | + |
1134 | +def pip_uninstall(package, **options): |
1135 | + """Uninstall a python package""" |
1136 | + command = ["uninstall", "-q", "-y"] |
1137 | + |
1138 | + available_options = ('proxy', 'log', ) |
1139 | + for option in parse_options(options, available_options): |
1140 | + command.append(option) |
1141 | + |
1142 | + if isinstance(package, list): |
1143 | + command.extend(package) |
1144 | + else: |
1145 | + command.append(package) |
1146 | + |
1147 | + log("Uninstalling {} package with options: {}".format(package, |
1148 | + command)) |
1149 | + pip_execute(command) |
1150 | + |
1151 | + |
1152 | +def pip_list(): |
1153 | + """Returns the list of current python installed packages |
1154 | + """ |
1155 | + return pip_execute(["list"]) |
1156 | + |
1157 | + |
1158 | +def pip_create_virtualenv(path=None): |
1159 | + """Create an isolated Python environment.""" |
1160 | + if six.PY2: |
1161 | + apt_install('python-virtualenv') |
1162 | + else: |
1163 | + apt_install('python3-virtualenv') |
1164 | + |
1165 | + if path: |
1166 | + venv_path = path |
1167 | + else: |
1168 | + venv_path = os.path.join(charm_dir(), 'venv') |
1169 | + |
1170 | + if not os.path.exists(venv_path): |
1171 | + subprocess.check_call(['virtualenv', venv_path]) |
1172 | diff --git a/lib/charmhelpers/fetch/python/rpdb.py b/lib/charmhelpers/fetch/python/rpdb.py |
1173 | new file mode 100644 |
1174 | index 0000000..9b31610 |
1175 | --- /dev/null |
1176 | +++ b/lib/charmhelpers/fetch/python/rpdb.py |
1177 | @@ -0,0 +1,56 @@ |
1178 | +# Copyright 2014-2015 Canonical Limited. |
1179 | +# |
1180 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
1181 | +# you may not use this file except in compliance with the License. |
1182 | +# You may obtain a copy of the License at |
1183 | +# |
1184 | +# http://www.apache.org/licenses/LICENSE-2.0 |
1185 | +# |
1186 | +# Unless required by applicable law or agreed to in writing, software |
1187 | +# distributed under the License is distributed on an "AS IS" BASIS, |
1188 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
1189 | +# See the License for the specific language governing permissions and |
1190 | +# limitations under the License. |
1191 | + |
1192 | +"""Remote Python Debugger (pdb wrapper).""" |
1193 | + |
1194 | +import pdb |
1195 | +import socket |
1196 | +import sys |
1197 | + |
1198 | +__author__ = "Bertrand Janin <b@janin.com>" |
1199 | +__version__ = "0.1.3" |
1200 | + |
1201 | + |
1202 | +class Rpdb(pdb.Pdb): |
1203 | + |
1204 | + def __init__(self, addr="127.0.0.1", port=4444): |
1205 | + """Initialize the socket and initialize pdb.""" |
1206 | + |
1207 | + # Backup stdin and stdout before replacing them by the socket handle |
1208 | + self.old_stdout = sys.stdout |
1209 | + self.old_stdin = sys.stdin |
1210 | + |
1211 | + # Open a 'reusable' socket to let the webapp reload on the same port |
1212 | + self.skt = socket.socket(socket.AF_INET, socket.SOCK_STREAM) |
1213 | + self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True) |
1214 | + self.skt.bind((addr, port)) |
1215 | + self.skt.listen(1) |
1216 | + (clientsocket, address) = self.skt.accept() |
1217 | + handle = clientsocket.makefile('rw') |
1218 | + pdb.Pdb.__init__(self, completekey='tab', stdin=handle, stdout=handle) |
1219 | + sys.stdout = sys.stdin = handle |
1220 | + |
1221 | + def shutdown(self): |
1222 | + """Revert stdin and stdout, close the socket.""" |
1223 | + sys.stdout = self.old_stdout |
1224 | + sys.stdin = self.old_stdin |
1225 | + self.skt.close() |
1226 | + self.set_continue() |
1227 | + |
1228 | + def do_continue(self, arg): |
1229 | + """Stop all operation on ``continue``.""" |
1230 | + self.shutdown() |
1231 | + return 1 |
1232 | + |
1233 | + do_EOF = do_quit = do_exit = do_c = do_cont = do_continue |
1234 | diff --git a/lib/charmhelpers/fetch/python/version.py b/lib/charmhelpers/fetch/python/version.py |
1235 | new file mode 100644 |
1236 | index 0000000..3eb4210 |
1237 | --- /dev/null |
1238 | +++ b/lib/charmhelpers/fetch/python/version.py |
1239 | @@ -0,0 +1,32 @@ |
1240 | +#!/usr/bin/env python |
1241 | +# coding: utf-8 |
1242 | + |
1243 | +# Copyright 2014-2015 Canonical Limited. |
1244 | +# |
1245 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
1246 | +# you may not use this file except in compliance with the License. |
1247 | +# You may obtain a copy of the License at |
1248 | +# |
1249 | +# http://www.apache.org/licenses/LICENSE-2.0 |
1250 | +# |
1251 | +# Unless required by applicable law or agreed to in writing, software |
1252 | +# distributed under the License is distributed on an "AS IS" BASIS, |
1253 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
1254 | +# See the License for the specific language governing permissions and |
1255 | +# limitations under the License. |
1256 | + |
1257 | +import sys |
1258 | + |
1259 | +__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" |
1260 | + |
1261 | + |
1262 | +def current_version(): |
1263 | + """Current system python version""" |
1264 | + return sys.version_info |
1265 | + |
1266 | + |
1267 | +def current_version_string(): |
1268 | + """Current system python version as string major.minor.micro""" |
1269 | + return "{0}.{1}.{2}".format(sys.version_info.major, |
1270 | + sys.version_info.minor, |
1271 | + sys.version_info.micro) |
1272 | diff --git a/lib/charmhelpers/fetch/snap.py b/lib/charmhelpers/fetch/snap.py |
1273 | index 395836c..fc70aa9 100644 |
1274 | --- a/lib/charmhelpers/fetch/snap.py |
1275 | +++ b/lib/charmhelpers/fetch/snap.py |
1276 | @@ -69,7 +69,7 @@ def _snap_exec(commands): |
1277 | .format(SNAP_NO_LOCK_RETRY_COUNT)) |
1278 | return_code = e.returncode |
1279 | log('Snap failed to acquire lock, trying again in {} seconds.' |
1280 | - .format(SNAP_NO_LOCK_RETRY_DELAY, level='WARN')) |
1281 | + .format(SNAP_NO_LOCK_RETRY_DELAY), level='WARN') |
1282 | sleep(SNAP_NO_LOCK_RETRY_DELAY) |
1283 | |
1284 | return return_code |
1285 | diff --git a/lib/charmhelpers/fetch/ubuntu.py b/lib/charmhelpers/fetch/ubuntu.py |
1286 | index 653d58f..3315284 100644 |
1287 | --- a/lib/charmhelpers/fetch/ubuntu.py |
1288 | +++ b/lib/charmhelpers/fetch/ubuntu.py |
1289 | @@ -13,23 +13,23 @@ |
1290 | # limitations under the License. |
1291 | |
1292 | from collections import OrderedDict |
1293 | -import os |
1294 | import platform |
1295 | import re |
1296 | import six |
1297 | -import time |
1298 | import subprocess |
1299 | -from tempfile import NamedTemporaryFile |
1300 | +import sys |
1301 | +import time |
1302 | + |
1303 | +from charmhelpers.core.host import get_distrib_codename, get_system_env |
1304 | |
1305 | -from charmhelpers.core.host import ( |
1306 | - lsb_release |
1307 | -) |
1308 | from charmhelpers.core.hookenv import ( |
1309 | log, |
1310 | DEBUG, |
1311 | WARNING, |
1312 | + env_proxy_settings, |
1313 | ) |
1314 | from charmhelpers.fetch import SourceConfigError, GPGKeyError |
1315 | +from charmhelpers.fetch import ubuntu_apt_pkg |
1316 | |
1317 | PROPOSED_POCKET = ( |
1318 | "# Proposed\n" |
1319 | @@ -158,6 +158,46 @@ CLOUD_ARCHIVE_POCKETS = { |
1320 | 'queens/proposed': 'xenial-proposed/queens', |
1321 | 'xenial-queens/proposed': 'xenial-proposed/queens', |
1322 | 'xenial-proposed/queens': 'xenial-proposed/queens', |
1323 | + # Rocky |
1324 | + 'rocky': 'bionic-updates/rocky', |
1325 | + 'bionic-rocky': 'bionic-updates/rocky', |
1326 | + 'bionic-rocky/updates': 'bionic-updates/rocky', |
1327 | + 'bionic-updates/rocky': 'bionic-updates/rocky', |
1328 | + 'rocky/proposed': 'bionic-proposed/rocky', |
1329 | + 'bionic-rocky/proposed': 'bionic-proposed/rocky', |
1330 | + 'bionic-proposed/rocky': 'bionic-proposed/rocky', |
1331 | + # Stein |
1332 | + 'stein': 'bionic-updates/stein', |
1333 | + 'bionic-stein': 'bionic-updates/stein', |
1334 | + 'bionic-stein/updates': 'bionic-updates/stein', |
1335 | + 'bionic-updates/stein': 'bionic-updates/stein', |
1336 | + 'stein/proposed': 'bionic-proposed/stein', |
1337 | + 'bionic-stein/proposed': 'bionic-proposed/stein', |
1338 | + 'bionic-proposed/stein': 'bionic-proposed/stein', |
1339 | + # Train |
1340 | + 'train': 'bionic-updates/train', |
1341 | + 'bionic-train': 'bionic-updates/train', |
1342 | + 'bionic-train/updates': 'bionic-updates/train', |
1343 | + 'bionic-updates/train': 'bionic-updates/train', |
1344 | + 'train/proposed': 'bionic-proposed/train', |
1345 | + 'bionic-train/proposed': 'bionic-proposed/train', |
1346 | + 'bionic-proposed/train': 'bionic-proposed/train', |
1347 | + # Ussuri |
1348 | + 'ussuri': 'bionic-updates/ussuri', |
1349 | + 'bionic-ussuri': 'bionic-updates/ussuri', |
1350 | + 'bionic-ussuri/updates': 'bionic-updates/ussuri', |
1351 | + 'bionic-updates/ussuri': 'bionic-updates/ussuri', |
1352 | + 'ussuri/proposed': 'bionic-proposed/ussuri', |
1353 | + 'bionic-ussuri/proposed': 'bionic-proposed/ussuri', |
1354 | + 'bionic-proposed/ussuri': 'bionic-proposed/ussuri', |
1355 | + # Victoria |
1356 | + 'victoria': 'focal-updates/victoria', |
1357 | + 'focal-victoria': 'focal-updates/victoria', |
1358 | + 'focal-victoria/updates': 'focal-updates/victoria', |
1359 | + 'focal-updates/victoria': 'focal-updates/victoria', |
1360 | + 'victoria/proposed': 'focal-proposed/victoria', |
1361 | + 'focal-victoria/proposed': 'focal-proposed/victoria', |
1362 | + 'focal-proposed/victoria': 'focal-proposed/victoria', |
1363 | } |
1364 | |
1365 | |
1366 | @@ -181,18 +221,54 @@ def filter_installed_packages(packages): |
1367 | return _pkgs |
1368 | |
1369 | |
1370 | -def apt_cache(in_memory=True, progress=None): |
1371 | - """Build and return an apt cache.""" |
1372 | - from apt import apt_pkg |
1373 | - apt_pkg.init() |
1374 | - if in_memory: |
1375 | - apt_pkg.config.set("Dir::Cache::pkgcache", "") |
1376 | - apt_pkg.config.set("Dir::Cache::srcpkgcache", "") |
1377 | - return apt_pkg.Cache(progress) |
1378 | +def filter_missing_packages(packages): |
1379 | + """Return a list of packages that are installed. |
1380 | + |
1381 | + :param packages: list of packages to evaluate. |
1382 | + :returns list: Packages that are installed. |
1383 | + """ |
1384 | + return list( |
1385 | + set(packages) - |
1386 | + set(filter_installed_packages(packages)) |
1387 | + ) |
1388 | + |
1389 | + |
1390 | +def apt_cache(*_, **__): |
1391 | + """Shim returning an object simulating the apt_pkg Cache. |
1392 | + |
1393 | + :param _: Accept arguments for compatibility, not used. |
1394 | + :type _: any |
1395 | + :param __: Accept keyword arguments for compatibility, not used. |
1396 | + :type __: any |
1397 | + :returns: Object used to interrogate the system apt and dpkg databases. |
1398 | + :rtype: ubuntu_apt_pkg.Cache |
1399 | + """ |
1400 | + if 'apt_pkg' in sys.modules: |
1401 | + # NOTE(fnordahl): When our consumers use the upstream ``apt_pkg`` module |
1402 | + # in conjunction with the apt_cache helper function, they may expect us |
1403 | + # to call ``apt_pkg.init()`` for them. |
1404 | + # |
1405 | + # Detect this situation, log a warning and make the call to |
1406 | + # ``apt_pkg.init()`` to avoid the consumer Python interpreter from |
1407 | + # crashing with a segmentation fault. |
1408 | + log('Support for use of upstream ``apt_pkg`` module in conjunction ' |
1409 | + 'with charm-helpers is deprecated since 2019-06-25', level=WARNING) |
1410 | + sys.modules['apt_pkg'].init() |
1411 | + return ubuntu_apt_pkg.Cache() |
1412 | |
1413 | |
1414 | def apt_install(packages, options=None, fatal=False): |
1415 | - """Install one or more packages.""" |
1416 | + """Install one or more packages. |
1417 | + |
1418 | + :param packages: Package(s) to install |
1419 | + :type packages: Option[str, List[str]] |
1420 | + :param options: Options to pass on to apt-get |
1421 | + :type options: Option[None, List[str]] |
1422 | + :param fatal: Whether the command's output should be checked and |
1423 | + retried. |
1424 | + :type fatal: bool |
1425 | + :raises: subprocess.CalledProcessError |
1426 | + """ |
1427 | if options is None: |
1428 | options = ['--option=Dpkg::Options::=--force-confold'] |
1429 | |
1430 | @@ -209,7 +285,17 @@ def apt_install(packages, options=None, fatal=False): |
1431 | |
1432 | |
1433 | def apt_upgrade(options=None, fatal=False, dist=False): |
1434 | - """Upgrade all packages.""" |
1435 | + """Upgrade all packages. |
1436 | + |
1437 | + :param options: Options to pass on to apt-get |
1438 | + :type options: Option[None, List[str]] |
1439 | + :param fatal: Whether the command's output should be checked and |
1440 | + retried. |
1441 | + :type fatal: bool |
1442 | + :param dist: Whether ``dist-upgrade`` should be used over ``upgrade`` |
1443 | + :type dist: bool |
1444 | + :raises: subprocess.CalledProcessError |
1445 | + """ |
1446 | if options is None: |
1447 | options = ['--option=Dpkg::Options::=--force-confold'] |
1448 | |
1449 | @@ -230,7 +316,15 @@ def apt_update(fatal=False): |
1450 | |
1451 | |
1452 | def apt_purge(packages, fatal=False): |
1453 | - """Purge one or more packages.""" |
1454 | + """Purge one or more packages. |
1455 | + |
1456 | + :param packages: Package(s) to purge |
1457 | + :type packages: Option[str, List[str]] |
1458 | + :param fatal: Whether the command's output should be checked and |
1459 | + retried. |
1460 | + :type fatal: bool |
1461 | + :raises: subprocess.CalledProcessError |
1462 | + """ |
1463 | cmd = ['apt-get', '--assume-yes', 'purge'] |
1464 | if isinstance(packages, six.string_types): |
1465 | cmd.append(packages) |
1466 | @@ -240,6 +334,21 @@ def apt_purge(packages, fatal=False): |
1467 | _run_apt_command(cmd, fatal) |
1468 | |
1469 | |
1470 | +def apt_autoremove(purge=True, fatal=False): |
1471 | + """Remove packages that are no longer required. |
1472 | + :param purge: Whether the ``--purge`` option should be passed on or not. |
1473 | + :type purge: bool |
1474 | + :param fatal: Whether the command's output should be checked and |
1475 | + retried. |
1476 | + :type fatal: bool |
1477 | + :raises: subprocess.CalledProcessError |
1478 | + """ |
1479 | + cmd = ['apt-get', '--assume-yes', 'autoremove'] |
1480 | + if purge: |
1481 | + cmd.append('--purge') |
1482 | + _run_apt_command(cmd, fatal) |
1483 | + |
1484 | + |
1485 | def apt_mark(packages, mark, fatal=False): |
1486 | """Flag one or more packages using apt-mark.""" |
1487 | log("Marking {} as {}".format(packages, mark)) |
1488 | @@ -266,13 +375,18 @@ def apt_unhold(packages, fatal=False): |
1489 | def import_key(key): |
1490 | """Import an ASCII Armor key. |
1491 | |
1492 | - /!\ A Radix64 format keyid is also supported for backwards |
1493 | - compatibility, but should never be used; the key retrieval |
1494 | - mechanism is insecure and subject to man-in-the-middle attacks |
1495 | - voiding all signature checks using that key. |
1496 | - |
1497 | - :param keyid: The key in ASCII armor format, |
1498 | - including BEGIN and END markers. |
1499 | + A Radix64 format keyid is also supported for backwards |
1500 | + compatibility. In this case Ubuntu keyserver will be |
1501 | + queried for a key via HTTPS by its keyid. This method |
1502 | + is less preferable because https proxy servers may |
1503 | + require traffic decryption which is equivalent to a |
1504 | + man-in-the-middle attack (a proxy server impersonates |
1505 | + keyserver TLS certificates and has to be explicitly |
1506 | + trusted by the system). |
1507 | + |
1508 | + :param key: A GPG key in ASCII armor format, |
1509 | + including BEGIN and END markers or a keyid. |
1510 | + :type key: (bytes, str) |
1511 | :raises: GPGKeyError if the key could not be imported |
1512 | """ |
1513 | key = key.strip() |
1514 | @@ -283,35 +397,131 @@ def import_key(key): |
1515 | log("PGP key found (looks like ASCII Armor format)", level=DEBUG) |
1516 | if ('-----BEGIN PGP PUBLIC KEY BLOCK-----' in key and |
1517 | '-----END PGP PUBLIC KEY BLOCK-----' in key): |
1518 | - log("Importing ASCII Armor PGP key", level=DEBUG) |
1519 | - with NamedTemporaryFile() as keyfile: |
1520 | - with open(keyfile.name, 'w') as fd: |
1521 | - fd.write(key) |
1522 | - fd.write("\n") |
1523 | - cmd = ['apt-key', 'add', keyfile.name] |
1524 | - try: |
1525 | - subprocess.check_call(cmd) |
1526 | - except subprocess.CalledProcessError: |
1527 | - error = "Error importing PGP key '{}'".format(key) |
1528 | - log(error) |
1529 | - raise GPGKeyError(error) |
1530 | + log("Writing provided PGP key in the binary format", level=DEBUG) |
1531 | + if six.PY3: |
1532 | + key_bytes = key.encode('utf-8') |
1533 | + else: |
1534 | + key_bytes = key |
1535 | + key_name = _get_keyid_by_gpg_key(key_bytes) |
1536 | + key_gpg = _dearmor_gpg_key(key_bytes) |
1537 | + _write_apt_gpg_keyfile(key_name=key_name, key_material=key_gpg) |
1538 | else: |
1539 | raise GPGKeyError("ASCII armor markers missing from GPG key") |
1540 | else: |
1541 | - # We should only send things obviously not a keyid offsite |
1542 | - # via this unsecured protocol, as it may be a secret or part |
1543 | - # of one. |
1544 | log("PGP key found (looks like Radix64 format)", level=WARNING) |
1545 | - log("INSECURLY importing PGP key from keyserver; " |
1546 | + log("SECURELY importing PGP key from keyserver; " |
1547 | "full key not provided.", level=WARNING) |
1548 | - cmd = ['apt-key', 'adv', '--keyserver', |
1549 | - 'hkp://keyserver.ubuntu.com:80', '--recv-keys', key] |
1550 | - try: |
1551 | - subprocess.check_call(cmd) |
1552 | - except subprocess.CalledProcessError: |
1553 | - error = "Error importing PGP key '{}'".format(key) |
1554 | - log(error) |
1555 | - raise GPGKeyError(error) |
1556 | + # as of bionic add-apt-repository uses curl with an HTTPS keyserver URL |
1557 | + # to retrieve GPG keys. `apt-key adv` command is deprecated as is |
1558 | + # apt-key in general as noted in its manpage. See lp:1433761 for more |
1559 | + # history. Instead, /etc/apt/trusted.gpg.d is used directly to drop |
1560 | + # gpg |
1561 | + key_asc = _get_key_by_keyid(key) |
1562 | + # write the key in GPG format so that apt-key list shows it |
1563 | + key_gpg = _dearmor_gpg_key(key_asc) |
1564 | + _write_apt_gpg_keyfile(key_name=key, key_material=key_gpg) |
1565 | + |
1566 | + |
1567 | +def _get_keyid_by_gpg_key(key_material): |
1568 | + """Get a GPG key fingerprint by GPG key material. |
1569 | + Gets a GPG key fingerprint (40-digit, 160-bit) by the ASCII armor-encoded |
1570 | + or binary GPG key material. Can be used, for example, to generate file |
1571 | + names for keys passed via charm options. |
1572 | + |
1573 | + :param key_material: ASCII armor-encoded or binary GPG key material |
1574 | + :type key_material: bytes |
1575 | + :raises: GPGKeyError if invalid key material has been provided |
1576 | + :returns: A GPG key fingerprint |
1577 | + :rtype: str |
1578 | + """ |
1579 | + # Use the same gpg command for both Xenial and Bionic |
1580 | + cmd = 'gpg --with-colons --with-fingerprint' |
1581 | + ps = subprocess.Popen(cmd.split(), |
1582 | + stdout=subprocess.PIPE, |
1583 | + stderr=subprocess.PIPE, |
1584 | + stdin=subprocess.PIPE) |
1585 | + out, err = ps.communicate(input=key_material) |
1586 | + if six.PY3: |
1587 | + out = out.decode('utf-8') |
1588 | + err = err.decode('utf-8') |
1589 | + if 'gpg: no valid OpenPGP data found.' in err: |
1590 | + raise GPGKeyError('Invalid GPG key material provided') |
1591 | + # from gnupg2 docs: fpr :: Fingerprint (fingerprint is in field 10) |
1592 | + return re.search(r"^fpr:{9}([0-9A-F]{40}):$", out, re.MULTILINE).group(1) |
1593 | + |
1594 | + |
1595 | +def _get_key_by_keyid(keyid): |
1596 | + """Get a key via HTTPS from the Ubuntu keyserver. |
1597 | + Different key ID formats are supported by SKS keyservers (the longer ones |
1598 | + are more secure, see "dead beef attack" and https://evil32.com/). Since |
1599 | + HTTPS is used, if SSLBump-like HTTPS proxies are in place, they will |
1600 | + impersonate keyserver.ubuntu.com and generate a certificate with |
1601 | + keyserver.ubuntu.com in the CN field or in SubjAltName fields of a |
1602 | + certificate. If such proxy behavior is expected it is necessary to add the |
1603 | + CA certificate chain containing the intermediate CA of the SSLBump proxy to |
1604 | + every machine that this code runs on via ca-certs cloud-init directive (via |
1605 | + cloudinit-userdata model-config) or via other means (such as through a |
1606 | + custom charm option). Also note that DNS resolution for the hostname in a |
1607 | + URL is done at a proxy server - not at the client side. |
1608 | + |
1609 | + 8-digit (32 bit) key ID |
1610 | + https://keyserver.ubuntu.com/pks/lookup?search=0x4652B4E6 |
1611 | + 16-digit (64 bit) key ID |
1612 | + https://keyserver.ubuntu.com/pks/lookup?search=0x6E85A86E4652B4E6 |
1613 | + 40-digit key ID: |
1614 | + https://keyserver.ubuntu.com/pks/lookup?search=0x35F77D63B5CEC106C577ED856E85A86E4652B4E6 |
1615 | + |
1616 | + :param keyid: An 8, 16 or 40 hex digit keyid to find a key for |
1617 | + :type keyid: (bytes, str) |
1618 | + :returns: A key material for the specified GPG key id |
1619 | + :rtype: (str, bytes) |
1620 | + :raises: subprocess.CalledProcessError |
1621 | + """ |
1622 | + # options=mr - machine-readable output (disables html wrappers) |
1623 | + keyserver_url = ('https://keyserver.ubuntu.com' |
1624 | + '/pks/lookup?op=get&options=mr&exact=on&search=0x{}') |
1625 | + curl_cmd = ['curl', keyserver_url.format(keyid)] |
1626 | + # use proxy server settings in order to retrieve the key |
1627 | + return subprocess.check_output(curl_cmd, |
1628 | + env=env_proxy_settings(['https'])) |
1629 | + |
1630 | + |
1631 | +def _dearmor_gpg_key(key_asc): |
1632 | + """Converts a GPG key in the ASCII armor format to the binary format. |
1633 | + |
1634 | + :param key_asc: A GPG key in ASCII armor format. |
1635 | + :type key_asc: (str, bytes) |
1636 | + :returns: A GPG key in binary format |
1637 | + :rtype: (str, bytes) |
1638 | + :raises: GPGKeyError |
1639 | + """ |
1640 | + ps = subprocess.Popen(['gpg', '--dearmor'], |
1641 | + stdout=subprocess.PIPE, |
1642 | + stderr=subprocess.PIPE, |
1643 | + stdin=subprocess.PIPE) |
1644 | + out, err = ps.communicate(input=key_asc) |
1645 | + # no need to decode output as it is binary (invalid utf-8), only error |
1646 | + if six.PY3: |
1647 | + err = err.decode('utf-8') |
1648 | + if 'gpg: no valid OpenPGP data found.' in err: |
1649 | + raise GPGKeyError('Invalid GPG key material. Check your network setup' |
1650 | + ' (MTU, routing, DNS) and/or proxy server settings' |
1651 | + ' as well as destination keyserver status.') |
1652 | + else: |
1653 | + return out |
1654 | + |
1655 | + |
1656 | +def _write_apt_gpg_keyfile(key_name, key_material): |
1657 | + """Writes GPG key material into a file at a provided path. |
1658 | + |
1659 | + :param key_name: A key name to use for a key file (could be a fingerprint) |
1660 | + :type key_name: str |
1661 | + :param key_material: A GPG key material (binary) |
1662 | + :type key_material: (str, bytes) |
1663 | + """ |
1664 | + with open('/etc/apt/trusted.gpg.d/{}.gpg'.format(key_name), |
1665 | + 'wb') as keyf: |
1666 | + keyf.write(key_material) |
1667 | |
1668 | |
1669 | def add_source(source, key=None, fail_invalid=False): |
1670 | @@ -386,14 +596,16 @@ def add_source(source, key=None, fail_invalid=False): |
1671 | for r, fn in six.iteritems(_mapping): |
1672 | m = re.match(r, source) |
1673 | if m: |
1674 | - # call the assoicated function with the captured groups |
1675 | - # raises SourceConfigError on error. |
1676 | - fn(*m.groups()) |
1677 | if key: |
1678 | + # Import key before adding the source which depends on it, |
1679 | + # as refreshing packages could fail otherwise. |
1680 | try: |
1681 | import_key(key) |
1682 | except GPGKeyError as e: |
1683 | raise SourceConfigError(str(e)) |
1684 | + # call the associated function with the captured groups |
1685 | + # raises SourceConfigError on error. |
1686 | + fn(*m.groups()) |
1687 | break |
1688 | else: |
1689 | # nothing matched. log an error and maybe sys.exit |
1690 | @@ -406,13 +618,13 @@ def add_source(source, key=None, fail_invalid=False): |
1691 | def _add_proposed(): |
1692 | """Add the PROPOSED_POCKET as /etc/apt/source.list.d/proposed.list |
1693 | |
1694 | - Uses lsb_release()['DISTRIB_CODENAME'] to determine the correct staza for |
1695 | + Uses get_distrib_codename to determine the correct stanza for |
1696 | the deb line. |
1697 | |
1698 | For intel architecutres PROPOSED_POCKET is used for the release, but for |
1699 | other architectures PROPOSED_PORTS_POCKET is used for the release. |
1700 | """ |
1701 | - release = lsb_release()['DISTRIB_CODENAME'] |
1702 | + release = get_distrib_codename() |
1703 | arch = platform.machine() |
1704 | if arch not in six.iterkeys(ARCH_TO_PROPOSED_POCKET): |
1705 | raise SourceConfigError("Arch {} not supported for (distro-)proposed" |
1706 | @@ -425,8 +637,16 @@ def _add_apt_repository(spec): |
1707 | """Add the spec using add_apt_repository |
1708 | |
1709 | :param spec: the parameter to pass to add_apt_repository |
1710 | + :type spec: str |
1711 | """ |
1712 | - _run_with_retries(['add-apt-repository', '--yes', spec]) |
1713 | + if '{series}' in spec: |
1714 | + series = get_distrib_codename() |
1715 | + spec = spec.replace('{series}', series) |
1716 | + # software-properties package for bionic properly reacts to proxy settings |
1717 | + # passed as environment variables (See lp:1433761). This is not the case |
1718 | + # LTS and non-LTS releases below bionic. |
1719 | + _run_with_retries(['add-apt-repository', '--yes', spec], |
1720 | + cmd_env=env_proxy_settings(['https'])) |
1721 | |
1722 | |
1723 | def _add_cloud_pocket(pocket): |
1724 | @@ -495,7 +715,7 @@ def _verify_is_ubuntu_rel(release, os_release): |
1725 | :raises: SourceConfigError if the release is not the same as the ubuntu |
1726 | release. |
1727 | """ |
1728 | - ubuntu_rel = lsb_release()['DISTRIB_CODENAME'] |
1729 | + ubuntu_rel = get_distrib_codename() |
1730 | if release != ubuntu_rel: |
1731 | raise SourceConfigError( |
1732 | 'Invalid Cloud Archive release specified: {}-{} on this Ubuntu' |
1733 | @@ -506,21 +726,22 @@ def _run_with_retries(cmd, max_retries=CMD_RETRY_COUNT, retry_exitcodes=(1,), |
1734 | retry_message="", cmd_env=None): |
1735 | """Run a command and retry until success or max_retries is reached. |
1736 | |
1737 | - :param: cmd: str: The apt command to run. |
1738 | - :param: max_retries: int: The number of retries to attempt on a fatal |
1739 | - command. Defaults to CMD_RETRY_COUNT. |
1740 | - :param: retry_exitcodes: tuple: Optional additional exit codes to retry. |
1741 | - Defaults to retry on exit code 1. |
1742 | - :param: retry_message: str: Optional log prefix emitted during retries. |
1743 | - :param: cmd_env: dict: Environment variables to add to the command run. |
1744 | + :param cmd: The apt command to run. |
1745 | + :type cmd: str |
1746 | + :param max_retries: The number of retries to attempt on a fatal |
1747 | + command. Defaults to CMD_RETRY_COUNT. |
1748 | + :type max_retries: int |
1749 | + :param retry_exitcodes: Optional additional exit codes to retry. |
1750 | + Defaults to retry on exit code 1. |
1751 | + :type retry_exitcodes: tuple |
1752 | + :param retry_message: Optional log prefix emitted during retries. |
1753 | + :type retry_message: str |
1754 | + :param: cmd_env: Environment variables to add to the command run. |
1755 | + :type cmd_env: Option[None, Dict[str, str]] |
1756 | """ |
1757 | - |
1758 | - env = None |
1759 | - kwargs = {} |
1760 | + env = get_apt_dpkg_env() |
1761 | if cmd_env: |
1762 | - env = os.environ.copy() |
1763 | env.update(cmd_env) |
1764 | - kwargs['env'] = env |
1765 | |
1766 | if not retry_message: |
1767 | retry_message = "Failed executing '{}'".format(" ".join(cmd)) |
1768 | @@ -532,8 +753,7 @@ def _run_with_retries(cmd, max_retries=CMD_RETRY_COUNT, retry_exitcodes=(1,), |
1769 | retry_results = (None,) + retry_exitcodes |
1770 | while result in retry_results: |
1771 | try: |
1772 | - # result = subprocess.check_call(cmd, env=env) |
1773 | - result = subprocess.check_call(cmd, **kwargs) |
1774 | + result = subprocess.check_call(cmd, env=env) |
1775 | except subprocess.CalledProcessError as e: |
1776 | retry_count = retry_count + 1 |
1777 | if retry_count > max_retries: |
1778 | @@ -546,22 +766,18 @@ def _run_with_retries(cmd, max_retries=CMD_RETRY_COUNT, retry_exitcodes=(1,), |
1779 | def _run_apt_command(cmd, fatal=False): |
1780 | """Run an apt command with optional retries. |
1781 | |
1782 | - :param: cmd: str: The apt command to run. |
1783 | - :param: fatal: bool: Whether the command's output should be checked and |
1784 | - retried. |
1785 | + :param cmd: The apt command to run. |
1786 | + :type cmd: str |
1787 | + :param fatal: Whether the command's output should be checked and |
1788 | + retried. |
1789 | + :type fatal: bool |
1790 | """ |
1791 | - # Provide DEBIAN_FRONTEND=noninteractive if not present in the environment. |
1792 | - cmd_env = { |
1793 | - 'DEBIAN_FRONTEND': os.environ.get('DEBIAN_FRONTEND', 'noninteractive')} |
1794 | - |
1795 | if fatal: |
1796 | _run_with_retries( |
1797 | - cmd, cmd_env=cmd_env, retry_exitcodes=(1, APT_NO_LOCK,), |
1798 | + cmd, retry_exitcodes=(1, APT_NO_LOCK,), |
1799 | retry_message="Couldn't acquire DPKG lock") |
1800 | else: |
1801 | - env = os.environ.copy() |
1802 | - env.update(cmd_env) |
1803 | - subprocess.call(cmd, env=env) |
1804 | + subprocess.call(cmd, env=get_apt_dpkg_env()) |
1805 | |
1806 | |
1807 | def get_upstream_version(package): |
1808 | @@ -569,7 +785,6 @@ def get_upstream_version(package): |
1809 | |
1810 | @returns None (if not installed) or the upstream version |
1811 | """ |
1812 | - import apt_pkg |
1813 | cache = apt_cache() |
1814 | try: |
1815 | pkg = cache[package] |
1816 | @@ -581,4 +796,18 @@ def get_upstream_version(package): |
1817 | # package is known, but no version is currently installed. |
1818 | return None |
1819 | |
1820 | - return apt_pkg.upstream_version(pkg.current_ver.ver_str) |
1821 | + return ubuntu_apt_pkg.upstream_version(pkg.current_ver.ver_str) |
1822 | + |
1823 | + |
1824 | +def get_apt_dpkg_env(): |
1825 | + """Get environment suitable for execution of APT and DPKG tools. |
1826 | + |
1827 | + We keep this in a helper function instead of in a global constant to |
1828 | + avoid execution on import of the library. |
1829 | + :returns: Environment suitable for execution of APT and DPKG tools. |
1830 | + :rtype: Dict[str, str] |
1831 | + """ |
1832 | + # The fallback is used in the event of ``/etc/environment`` not containing |
1833 | + # a valid PATH variable. |
1834 | + return {'DEBIAN_FRONTEND': 'noninteractive', |
1835 | + 'PATH': get_system_env('PATH', '/usr/sbin:/usr/bin:/sbin:/bin')} |
1836 | diff --git a/lib/charmhelpers/fetch/ubuntu_apt_pkg.py b/lib/charmhelpers/fetch/ubuntu_apt_pkg.py |
1837 | new file mode 100644 |
1838 | index 0000000..929a75d |
1839 | --- /dev/null |
1840 | +++ b/lib/charmhelpers/fetch/ubuntu_apt_pkg.py |
1841 | @@ -0,0 +1,267 @@ |
1842 | +# Copyright 2019 Canonical Ltd |
1843 | +# |
1844 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
1845 | +# you may not use this file except in compliance with the License. |
1846 | +# You may obtain a copy of the License at |
1847 | +# |
1848 | +# http://www.apache.org/licenses/LICENSE-2.0 |
1849 | +# |
1850 | +# Unless required by applicable law or agreed to in writing, software |
1851 | +# distributed under the License is distributed on an "AS IS" BASIS, |
1852 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
1853 | +# See the License for the specific language governing permissions and |
1854 | +# limitations under the License. |
1855 | + |
1856 | +"""Provide a subset of the ``python-apt`` module API. |
1857 | + |
1858 | +Data collection is done through subprocess calls to ``apt-cache`` and |
1859 | +``dpkg-query`` commands. |
1860 | + |
1861 | +The main purpose for this module is to avoid dependency on the |
1862 | +``python-apt`` python module. |
1863 | + |
1864 | +The indicated python module is a wrapper around the ``apt`` C++ library |
1865 | +which is tightly connected to the version of the distribution it was |
1866 | +shipped on. It is not developed in a backward/forward compatible manner. |
1867 | + |
1868 | +This in turn makes it incredibly hard to distribute as a wheel for a piece |
1869 | +of python software that supports a span of distro releases [0][1]. |
1870 | + |
1871 | +Upstream feedback like [2] does not give confidence in this ever changing, |
1872 | +so with this we get rid of the dependency. |
1873 | + |
1874 | +0: https://github.com/juju-solutions/layer-basic/pull/135 |
1875 | +1: https://bugs.launchpad.net/charm-octavia/+bug/1824112 |
1876 | +2: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=845330#10 |
1877 | +""" |
1878 | + |
1879 | +import locale |
1880 | +import os |
1881 | +import subprocess |
1882 | +import sys |
1883 | + |
1884 | + |
1885 | +class _container(dict): |
1886 | + """Simple container for attributes.""" |
1887 | + __getattr__ = dict.__getitem__ |
1888 | + __setattr__ = dict.__setitem__ |
1889 | + |
1890 | + |
1891 | +class Package(_container): |
1892 | + """Simple container for package attributes.""" |
1893 | + |
1894 | + |
1895 | +class Version(_container): |
1896 | + """Simple container for version attributes.""" |
1897 | + |
1898 | + |
1899 | +class Cache(object): |
1900 | + """Simulation of ``apt_pkg`` Cache object.""" |
1901 | + def __init__(self, progress=None): |
1902 | + pass |
1903 | + |
1904 | + def __contains__(self, package): |
1905 | + try: |
1906 | + pkg = self.__getitem__(package) |
1907 | + return pkg is not None |
1908 | + except KeyError: |
1909 | + return False |
1910 | + |
1911 | + def __getitem__(self, package): |
1912 | + """Get information about a package from apt and dpkg databases. |
1913 | + |
1914 | + :param package: Name of package |
1915 | + :type package: str |
1916 | + :returns: Package object |
1917 | + :rtype: object |
1918 | + :raises: KeyError, subprocess.CalledProcessError |
1919 | + """ |
1920 | + apt_result = self._apt_cache_show([package])[package] |
1921 | + apt_result['name'] = apt_result.pop('package') |
1922 | + pkg = Package(apt_result) |
1923 | + dpkg_result = self._dpkg_list([package]).get(package, {}) |
1924 | + current_ver = None |
1925 | + installed_version = dpkg_result.get('version') |
1926 | + if installed_version: |
1927 | + current_ver = Version({'ver_str': installed_version}) |
1928 | + pkg.current_ver = current_ver |
1929 | + pkg.architecture = dpkg_result.get('architecture') |
1930 | + return pkg |
1931 | + |
1932 | + def _dpkg_list(self, packages): |
1933 | + """Get data from system dpkg database for package. |
1934 | + |
1935 | + :param packages: Packages to get data from |
1936 | + :type packages: List[str] |
1937 | + :returns: Structured data about installed packages, keys like |
1938 | + ``dpkg-query --list`` |
1939 | + :rtype: dict |
1940 | + :raises: subprocess.CalledProcessError |
1941 | + """ |
1942 | + pkgs = {} |
1943 | + cmd = ['dpkg-query', '--list'] |
1944 | + cmd.extend(packages) |
1945 | + if locale.getlocale() == (None, None): |
1946 | + # subprocess calls out to locale.getpreferredencoding(False) to |
1947 | + # determine encoding. Workaround for Trusty where the |
1948 | + # environment appears to not be set up correctly. |
1949 | + locale.setlocale(locale.LC_ALL, 'en_US.UTF-8') |
1950 | + try: |
1951 | + output = subprocess.check_output(cmd, |
1952 | + stderr=subprocess.STDOUT, |
1953 | + universal_newlines=True) |
1954 | + except subprocess.CalledProcessError as cp: |
1955 | + # ``dpkg-query`` may return error and at the same time have |
1956 | + # produced useful output, for example when asked for multiple |
1957 | + # packages where some are not installed |
1958 | + if cp.returncode != 1: |
1959 | + raise |
1960 | + output = cp.output |
1961 | + headings = [] |
1962 | + for line in output.splitlines(): |
1963 | + if line.startswith('||/'): |
1964 | + headings = line.split() |
1965 | + headings.pop(0) |
1966 | + continue |
1967 | + elif (line.startswith('|') or line.startswith('+') or |
1968 | + line.startswith('dpkg-query:')): |
1969 | + continue |
1970 | + else: |
1971 | + data = line.split(None, 4) |
1972 | + status = data.pop(0) |
1973 | + if status != 'ii': |
1974 | + continue |
1975 | + pkg = {} |
1976 | + pkg.update({k.lower(): v for k, v in zip(headings, data)}) |
1977 | + if 'name' in pkg: |
1978 | + pkgs.update({pkg['name']: pkg}) |
1979 | + return pkgs |
1980 | + |
1981 | + def _apt_cache_show(self, packages): |
1982 | + """Get data from system apt cache for package. |
1983 | + |
1984 | + :param packages: Packages to get data from |
1985 | + :type packages: List[str] |
1986 | + :returns: Structured data about package, keys like |
1987 | + ``apt-cache show`` |
1988 | + :rtype: dict |
1989 | + :raises: subprocess.CalledProcessError |
1990 | + """ |
1991 | + pkgs = {} |
1992 | + cmd = ['apt-cache', 'show', '--no-all-versions'] |
1993 | + cmd.extend(packages) |
1994 | + if locale.getlocale() == (None, None): |
1995 | + # subprocess calls out to locale.getpreferredencoding(False) to |
1996 | + # determine encoding. Workaround for Trusty where the |
1997 | + # environment appears to not be set up correctly. |
1998 | + locale.setlocale(locale.LC_ALL, 'en_US.UTF-8') |
1999 | + try: |
2000 | + output = subprocess.check_output(cmd, |
2001 | + stderr=subprocess.STDOUT, |
2002 | + universal_newlines=True) |
2003 | + previous = None |
2004 | + pkg = {} |
2005 | + for line in output.splitlines(): |
2006 | + if not line: |
2007 | + if 'package' in pkg: |
2008 | + pkgs.update({pkg['package']: pkg}) |
2009 | + pkg = {} |
2010 | + continue |
2011 | + if line.startswith(' '): |
2012 | + if previous and previous in pkg: |
2013 | + pkg[previous] += os.linesep + line.lstrip() |
2014 | + continue |
2015 | + if ':' in line: |
2016 | + kv = line.split(':', 1) |
2017 | + key = kv[0].lower() |
2018 | + if key == 'n': |
2019 | + continue |
2020 | + previous = key |
2021 | + pkg.update({key: kv[1].lstrip()}) |
2022 | + except subprocess.CalledProcessError as cp: |
2023 | + # ``apt-cache`` returns 100 if none of the packages asked for |
2024 | + # exist in the apt cache. |
2025 | + if cp.returncode != 100: |
2026 | + raise |
2027 | + return pkgs |
2028 | + |
2029 | + |
2030 | +class Config(_container): |
2031 | + def __init__(self): |
2032 | + super(Config, self).__init__(self._populate()) |
2033 | + |
2034 | + def _populate(self): |
2035 | + cfgs = {} |
2036 | + cmd = ['apt-config', 'dump'] |
2037 | + output = subprocess.check_output(cmd, |
2038 | + stderr=subprocess.STDOUT, |
2039 | + universal_newlines=True) |
2040 | + for line in output.splitlines(): |
2041 | + if not line.startswith("CommandLine"): |
2042 | + k, v = line.split(" ", 1) |
2043 | + cfgs[k] = v.strip(";").strip("\"") |
2044 | + |
2045 | + return cfgs |
2046 | + |
2047 | + |
2048 | +# Backwards compatibility with old apt_pkg module |
2049 | +sys.modules[__name__].config = Config() |
2050 | + |
2051 | + |
2052 | +def init(): |
2053 | + """Compatibility shim that does nothing.""" |
2054 | + pass |
2055 | + |
2056 | + |
2057 | +def upstream_version(version): |
2058 | + """Extracts upstream version from a version string. |
2059 | + |
2060 | + Upstream reference: https://salsa.debian.org/apt-team/apt/blob/master/ |
2061 | + apt-pkg/deb/debversion.cc#L259 |
2062 | + |
2063 | + :param version: Version string |
2064 | + :type version: str |
2065 | + :returns: Upstream version |
2066 | + :rtype: str |
2067 | + """ |
2068 | + if version: |
2069 | + version = version.split(':')[-1] |
2070 | + version = version.split('-')[0] |
2071 | + return version |
2072 | + |
2073 | + |
2074 | +def version_compare(a, b): |
2075 | + """Compare the given versions. |
2076 | + |
2077 | + Call out to ``dpkg`` to make sure the code doing the comparison is |
2078 | + compatible with what the ``apt`` library would do. Mimic the return |
2079 | + values. |
2080 | + |
2081 | + Upstream reference: |
2082 | + https://apt-team.pages.debian.net/python-apt/library/apt_pkg.html |
2083 | + ?highlight=version_compare#apt_pkg.version_compare |
2084 | + |
2085 | + :param a: version string |
2086 | + :type a: str |
2087 | + :param b: version string |
2088 | + :type b: str |
2089 | + :returns: >0 if ``a`` is greater than ``b``, 0 if a equals b, |
2090 | + <0 if ``a`` is smaller than ``b`` |
2091 | + :rtype: int |
2092 | + :raises: subprocess.CalledProcessError, RuntimeError |
2093 | + """ |
2094 | + for op in ('gt', 1), ('eq', 0), ('lt', -1): |
2095 | + try: |
2096 | + subprocess.check_call(['dpkg', '--compare-versions', |
2097 | + a, op[0], b], |
2098 | + stderr=subprocess.STDOUT, |
2099 | + universal_newlines=True) |
2100 | + return op[1] |
2101 | + except subprocess.CalledProcessError as cp: |
2102 | + if cp.returncode == 1: |
2103 | + continue |
2104 | + raise |
2105 | + else: |
2106 | + raise RuntimeError('Unable to compare "{}" and "{}", according to ' |
2107 | + 'our logic they are neither greater, equal nor ' |
2108 | + 'less than each other.') |
2109 | diff --git a/lib/charmhelpers/osplatform.py b/lib/charmhelpers/osplatform.py |
2110 | index d9a4d5c..78c81af 100644 |
2111 | --- a/lib/charmhelpers/osplatform.py |
2112 | +++ b/lib/charmhelpers/osplatform.py |
2113 | @@ -1,4 +1,5 @@ |
2114 | import platform |
2115 | +import os |
2116 | |
2117 | |
2118 | def get_platform(): |
2119 | @@ -9,9 +10,13 @@ def get_platform(): |
2120 | This string is used to decide which platform module should be imported. |
2121 | """ |
2122 | # linux_distribution is deprecated and will be removed in Python 3.7 |
2123 | - # Warings *not* disabled, as we certainly need to fix this. |
2124 | - tuple_platform = platform.linux_distribution() |
2125 | - current_platform = tuple_platform[0] |
2126 | + # Warnings *not* disabled, as we certainly need to fix this. |
2127 | + if hasattr(platform, 'linux_distribution'): |
2128 | + tuple_platform = platform.linux_distribution() |
2129 | + current_platform = tuple_platform[0] |
2130 | + else: |
2131 | + current_platform = _get_platform_from_fs() |
2132 | + |
2133 | if "Ubuntu" in current_platform: |
2134 | return "ubuntu" |
2135 | elif "CentOS" in current_platform: |
2136 | @@ -20,6 +25,22 @@ def get_platform(): |
2137 | # Stock Python does not detect Ubuntu and instead returns debian. |
2138 | # Or at least it does in some build environments like Travis CI |
2139 | return "ubuntu" |
2140 | + elif "elementary" in current_platform: |
2141 | + # ElementaryOS fails to run tests locally without this. |
2142 | + return "ubuntu" |
2143 | else: |
2144 | raise RuntimeError("This module is not supported on {}." |
2145 | .format(current_platform)) |
2146 | + |
2147 | + |
2148 | +def _get_platform_from_fs(): |
2149 | + """Get Platform from /etc/os-release.""" |
2150 | + with open(os.path.join(os.sep, 'etc', 'os-release')) as fin: |
2151 | + content = dict( |
2152 | + line.split('=', 1) |
2153 | + for line in fin.read().splitlines() |
2154 | + if '=' in line |
2155 | + ) |
2156 | + for k, v in content.items(): |
2157 | + content[k] = v.strip('"') |
2158 | + return content["NAME"] |
This merge proposal is being monitored by mergebot. Change the status to Approved to merge.