Merge ~mthaddon/charm-nrpe/+git/focal-1860441:master into ~nrpe-charmers/charm-nrpe:master
- Git
- lp:~mthaddon/charm-nrpe/+git/focal-1860441
- master
- Merge into master
Proposed by
Tom Haddon
Status: | Merged | ||||
---|---|---|---|---|---|
Approved by: | Tom Haddon | ||||
Approved revision: | d05d0fa33d08335cf0aa316a0597c5a6b67442ac | ||||
Merged at revision: | 2219dd742ebb4140abe9ee6122c00020cc22a340 | ||||
Proposed branch: | ~mthaddon/charm-nrpe/+git/focal-1860441:master | ||||
Merge into: | ~nrpe-charmers/charm-nrpe:master | ||||
Diff against target: |
2335 lines (+1436/-164) 23 files modified
hooks/charmhelpers/__init__.py (+4/-4) hooks/charmhelpers/core/hookenv.py (+343/-22) hooks/charmhelpers/core/host.py (+96/-13) hooks/charmhelpers/core/host_factory/ubuntu.py (+27/-1) hooks/charmhelpers/core/kernel.py (+2/-2) hooks/charmhelpers/core/services/base.py (+3/-1) hooks/charmhelpers/core/sysctl.py (+21/-10) hooks/charmhelpers/core/unitdata.py (+7/-2) hooks/charmhelpers/fetch/__init__.py (+4/-0) hooks/charmhelpers/fetch/archiveurl.py (+1/-1) hooks/charmhelpers/fetch/bzrurl.py (+2/-2) hooks/charmhelpers/fetch/giturl.py (+2/-2) hooks/charmhelpers/fetch/python/__init__.py (+13/-0) hooks/charmhelpers/fetch/python/debug.py (+54/-0) hooks/charmhelpers/fetch/python/packages.py (+154/-0) hooks/charmhelpers/fetch/python/rpdb.py (+56/-0) hooks/charmhelpers/fetch/python/version.py (+32/-0) hooks/charmhelpers/fetch/ubuntu.py (+304/-82) hooks/charmhelpers/fetch/ubuntu_apt_pkg.py (+267/-0) hooks/charmhelpers/osplatform.py (+24/-3) tests/charmhelpers/__init__.py (+4/-4) tests/charmhelpers/contrib/amulet/deployment.py (+4/-2) tests/charmhelpers/contrib/amulet/utils.py (+12/-13) |
||||
Related bugs: |
|
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Benjamin Allot | Approve | ||
Canonical IS Reviewers | Pending | ||
Review via email: mp+377872@code.launchpad.net |
Commit message
Update charmhelpers, to support focal
Description of the change
Update charmhelpers, to support focal
To post a comment you must log in.
Revision history for this message
Tom Haddon (mthaddon) wrote : | # |
Revision history for this message
🤖 Canonical IS Merge Bot (canonical-is-mergebot) wrote : | # |
This merge proposal is being monitored by mergebot. Change the status to Approved to merge.
Revision history for this message
Tom Haddon (mthaddon) wrote : | # |
And also tested to work in a charm upgrade and deploy from scratch on xenial
Revision history for this message
Benjamin Allot (ballot) wrote : | # |
Approve with a comment inline on the code change needed for focal.
review:
Approve
Revision history for this message
🤖 Canonical IS Merge Bot (canonical-is-mergebot) wrote : | # |
Change successfully merged at revision 2219dd742ebb414
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | diff --git a/hooks/charmhelpers/__init__.py b/hooks/charmhelpers/__init__.py |
2 | index e7aa471..61ef907 100644 |
3 | --- a/hooks/charmhelpers/__init__.py |
4 | +++ b/hooks/charmhelpers/__init__.py |
5 | @@ -23,22 +23,22 @@ import subprocess |
6 | import sys |
7 | |
8 | try: |
9 | - import six # flake8: noqa |
10 | + import six # NOQA:F401 |
11 | except ImportError: |
12 | if sys.version_info.major == 2: |
13 | subprocess.check_call(['apt-get', 'install', '-y', 'python-six']) |
14 | else: |
15 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-six']) |
16 | - import six # flake8: noqa |
17 | + import six # NOQA:F401 |
18 | |
19 | try: |
20 | - import yaml # flake8: noqa |
21 | + import yaml # NOQA:F401 |
22 | except ImportError: |
23 | if sys.version_info.major == 2: |
24 | subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml']) |
25 | else: |
26 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml']) |
27 | - import yaml # flake8: noqa |
28 | + import yaml # NOQA:F401 |
29 | |
30 | |
31 | # Holds a list of mapping of mangled function names that have been deprecated |
32 | diff --git a/hooks/charmhelpers/core/hookenv.py b/hooks/charmhelpers/core/hookenv.py |
33 | index 89f1024..647f6e4 100644 |
34 | --- a/hooks/charmhelpers/core/hookenv.py |
35 | +++ b/hooks/charmhelpers/core/hookenv.py |
36 | @@ -34,6 +34,8 @@ import errno |
37 | import tempfile |
38 | from subprocess import CalledProcessError |
39 | |
40 | +from charmhelpers import deprecate |
41 | + |
42 | import six |
43 | if not six.PY3: |
44 | from UserDict import UserDict |
45 | @@ -48,6 +50,12 @@ INFO = "INFO" |
46 | DEBUG = "DEBUG" |
47 | TRACE = "TRACE" |
48 | MARKER = object() |
49 | +SH_MAX_ARG = 131071 |
50 | + |
51 | + |
52 | +RANGE_WARNING = ('Passing NO_PROXY string that includes a cidr. ' |
53 | + 'This may not be compatible with software you are ' |
54 | + 'running in your shell.') |
55 | |
56 | cache = {} |
57 | |
58 | @@ -98,7 +106,7 @@ def log(message, level=None): |
59 | command += ['-l', level] |
60 | if not isinstance(message, six.string_types): |
61 | message = repr(message) |
62 | - command += [message] |
63 | + command += [message[:SH_MAX_ARG]] |
64 | # Missing juju-log should not cause failures in unit tests |
65 | # Send log output to stderr |
66 | try: |
67 | @@ -113,6 +121,24 @@ def log(message, level=None): |
68 | raise |
69 | |
70 | |
71 | +def function_log(message): |
72 | + """Write a function progress message""" |
73 | + command = ['function-log'] |
74 | + if not isinstance(message, six.string_types): |
75 | + message = repr(message) |
76 | + command += [message[:SH_MAX_ARG]] |
77 | + # Missing function-log should not cause failures in unit tests |
78 | + # Send function_log output to stderr |
79 | + try: |
80 | + subprocess.call(command) |
81 | + except OSError as e: |
82 | + if e.errno == errno.ENOENT: |
83 | + message = "function-log: {}".format(message) |
84 | + print(message, file=sys.stderr) |
85 | + else: |
86 | + raise |
87 | + |
88 | + |
89 | class Serializable(UserDict): |
90 | """Wrapper, an object that can be serialized to yaml or json""" |
91 | |
92 | @@ -201,11 +227,35 @@ def remote_unit(): |
93 | return os.environ.get('JUJU_REMOTE_UNIT', None) |
94 | |
95 | |
96 | -def service_name(): |
97 | - """The name service group this unit belongs to""" |
98 | +def application_name(): |
99 | + """ |
100 | + The name of the deployed application this unit belongs to. |
101 | + """ |
102 | return local_unit().split('/')[0] |
103 | |
104 | |
105 | +def service_name(): |
106 | + """ |
107 | + .. deprecated:: 0.19.1 |
108 | + Alias for :func:`application_name`. |
109 | + """ |
110 | + return application_name() |
111 | + |
112 | + |
113 | +def model_name(): |
114 | + """ |
115 | + Name of the model that this unit is deployed in. |
116 | + """ |
117 | + return os.environ['JUJU_MODEL_NAME'] |
118 | + |
119 | + |
120 | +def model_uuid(): |
121 | + """ |
122 | + UUID of the model that this unit is deployed in. |
123 | + """ |
124 | + return os.environ['JUJU_MODEL_UUID'] |
125 | + |
126 | + |
127 | def principal_unit(): |
128 | """Returns the principal unit of this unit, otherwise None""" |
129 | # Juju 2.2 and above provides JUJU_PRINCIPAL_UNIT |
130 | @@ -290,7 +340,7 @@ class Config(dict): |
131 | self.implicit_save = True |
132 | self._prev_dict = None |
133 | self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME) |
134 | - if os.path.exists(self.path): |
135 | + if os.path.exists(self.path) and os.stat(self.path).st_size: |
136 | self.load_previous() |
137 | atexit(self._implicit_save) |
138 | |
139 | @@ -310,7 +360,11 @@ class Config(dict): |
140 | """ |
141 | self.path = path or self.path |
142 | with open(self.path) as f: |
143 | - self._prev_dict = json.load(f) |
144 | + try: |
145 | + self._prev_dict = json.load(f) |
146 | + except ValueError as e: |
147 | + log('Unable to parse previous config data - {}'.format(str(e)), |
148 | + level=ERROR) |
149 | for k, v in copy.deepcopy(self._prev_dict).items(): |
150 | if k not in self: |
151 | self[k] = v |
152 | @@ -354,22 +408,40 @@ class Config(dict): |
153 | self.save() |
154 | |
155 | |
156 | -@cached |
157 | +_cache_config = None |
158 | + |
159 | + |
160 | def config(scope=None): |
161 | - """Juju charm configuration""" |
162 | - config_cmd_line = ['config-get'] |
163 | - if scope is not None: |
164 | - config_cmd_line.append(scope) |
165 | - else: |
166 | - config_cmd_line.append('--all') |
167 | - config_cmd_line.append('--format=json') |
168 | + """ |
169 | + Get the juju charm configuration (scope==None) or individual key, |
170 | + (scope=str). The returned value is a Python data structure loaded as |
171 | + JSON from the Juju config command. |
172 | + |
173 | + :param scope: If set, return the value for the specified key. |
174 | + :type scope: Optional[str] |
175 | + :returns: Either the whole config as a Config, or a key from it. |
176 | + :rtype: Any |
177 | + """ |
178 | + global _cache_config |
179 | + config_cmd_line = ['config-get', '--all', '--format=json'] |
180 | try: |
181 | - config_data = json.loads( |
182 | - subprocess.check_output(config_cmd_line).decode('UTF-8')) |
183 | + # JSON Decode Exception for Python3.5+ |
184 | + exc_json = json.decoder.JSONDecodeError |
185 | + except AttributeError: |
186 | + # JSON Decode Exception for Python2.7 through Python3.4 |
187 | + exc_json = ValueError |
188 | + try: |
189 | + if _cache_config is None: |
190 | + config_data = json.loads( |
191 | + subprocess.check_output(config_cmd_line).decode('UTF-8')) |
192 | + _cache_config = Config(config_data) |
193 | if scope is not None: |
194 | - return config_data |
195 | - return Config(config_data) |
196 | - except ValueError: |
197 | + return _cache_config.get(scope) |
198 | + return _cache_config |
199 | + except (exc_json, UnicodeDecodeError) as e: |
200 | + log('Unable to parse output from config-get: config_cmd_line="{}" ' |
201 | + 'message="{}"' |
202 | + .format(config_cmd_line, str(e)), level=ERROR) |
203 | return None |
204 | |
205 | |
206 | @@ -463,6 +535,67 @@ def related_units(relid=None): |
207 | subprocess.check_output(units_cmd_line).decode('UTF-8')) or [] |
208 | |
209 | |
210 | +def expected_peer_units(): |
211 | + """Get a generator for units we expect to join peer relation based on |
212 | + goal-state. |
213 | + |
214 | + The local unit is excluded from the result to make it easy to gauge |
215 | + completion of all peers joining the relation with existing hook tools. |
216 | + |
217 | + Example usage: |
218 | + log('peer {} of {} joined peer relation' |
219 | + .format(len(related_units()), |
220 | + len(list(expected_peer_units())))) |
221 | + |
222 | + This function will raise NotImplementedError if used with juju versions |
223 | + without goal-state support. |
224 | + |
225 | + :returns: iterator |
226 | + :rtype: types.GeneratorType |
227 | + :raises: NotImplementedError |
228 | + """ |
229 | + if not has_juju_version("2.4.0"): |
230 | + # goal-state first appeared in 2.4.0. |
231 | + raise NotImplementedError("goal-state") |
232 | + _goal_state = goal_state() |
233 | + return (key for key in _goal_state['units'] |
234 | + if '/' in key and key != local_unit()) |
235 | + |
236 | + |
237 | +def expected_related_units(reltype=None): |
238 | + """Get a generator for units we expect to join relation based on |
239 | + goal-state. |
240 | + |
241 | + Note that you can not use this function for the peer relation, take a look |
242 | + at expected_peer_units() for that. |
243 | + |
244 | + This function will raise KeyError if you request information for a |
245 | + relation type for which juju goal-state does not have information. It will |
246 | + raise NotImplementedError if used with juju versions without goal-state |
247 | + support. |
248 | + |
249 | + Example usage: |
250 | + log('participant {} of {} joined relation {}' |
251 | + .format(len(related_units()), |
252 | + len(list(expected_related_units())), |
253 | + relation_type())) |
254 | + |
255 | + :param reltype: Relation type to list data for, default is to list data for |
256 | + the relation type we are currently executing a hook for. |
257 | + :type reltype: str |
258 | + :returns: iterator |
259 | + :rtype: types.GeneratorType |
260 | + :raises: KeyError, NotImplementedError |
261 | + """ |
262 | + if not has_juju_version("2.4.4"): |
263 | + # goal-state existed in 2.4.0, but did not list individual units to |
264 | + # join a relation in 2.4.1 through 2.4.3. (LP: #1794739) |
265 | + raise NotImplementedError("goal-state relation unit count") |
266 | + reltype = reltype or relation_type() |
267 | + _goal_state = goal_state() |
268 | + return (key for key in _goal_state['relations'][reltype] if '/' in key) |
269 | + |
270 | + |
271 | @cached |
272 | def relation_for_unit(unit=None, rid=None): |
273 | """Get the json represenation of a unit's relation""" |
274 | @@ -833,9 +966,23 @@ def charm_dir(): |
275 | return os.environ.get('CHARM_DIR') |
276 | |
277 | |
278 | +def cmd_exists(cmd): |
279 | + """Return True if the specified cmd exists in the path""" |
280 | + return any( |
281 | + os.access(os.path.join(path, cmd), os.X_OK) |
282 | + for path in os.environ["PATH"].split(os.pathsep) |
283 | + ) |
284 | + |
285 | + |
286 | @cached |
287 | +@deprecate("moved to function_get()", log=log) |
288 | def action_get(key=None): |
289 | - """Gets the value of an action parameter, or all key/value param pairs""" |
290 | + """ |
291 | + .. deprecated:: 0.20.7 |
292 | + Alias for :func:`function_get`. |
293 | + |
294 | + Gets the value of an action parameter, or all key/value param pairs. |
295 | + """ |
296 | cmd = ['action-get'] |
297 | if key is not None: |
298 | cmd.append(key) |
299 | @@ -844,36 +991,103 @@ def action_get(key=None): |
300 | return action_data |
301 | |
302 | |
303 | +@cached |
304 | +def function_get(key=None): |
305 | + """Gets the value of an action parameter, or all key/value param pairs""" |
306 | + cmd = ['function-get'] |
307 | + # Fallback for older charms. |
308 | + if not cmd_exists('function-get'): |
309 | + cmd = ['action-get'] |
310 | + |
311 | + if key is not None: |
312 | + cmd.append(key) |
313 | + cmd.append('--format=json') |
314 | + function_data = json.loads(subprocess.check_output(cmd).decode('UTF-8')) |
315 | + return function_data |
316 | + |
317 | + |
318 | +@deprecate("moved to function_set()", log=log) |
319 | def action_set(values): |
320 | - """Sets the values to be returned after the action finishes""" |
321 | + """ |
322 | + .. deprecated:: 0.20.7 |
323 | + Alias for :func:`function_set`. |
324 | + |
325 | + Sets the values to be returned after the action finishes. |
326 | + """ |
327 | cmd = ['action-set'] |
328 | for k, v in list(values.items()): |
329 | cmd.append('{}={}'.format(k, v)) |
330 | subprocess.check_call(cmd) |
331 | |
332 | |
333 | +def function_set(values): |
334 | + """Sets the values to be returned after the function finishes""" |
335 | + cmd = ['function-set'] |
336 | + # Fallback for older charms. |
337 | + if not cmd_exists('function-get'): |
338 | + cmd = ['action-set'] |
339 | + |
340 | + for k, v in list(values.items()): |
341 | + cmd.append('{}={}'.format(k, v)) |
342 | + subprocess.check_call(cmd) |
343 | + |
344 | + |
345 | +@deprecate("moved to function_fail()", log=log) |
346 | def action_fail(message): |
347 | - """Sets the action status to failed and sets the error message. |
348 | + """ |
349 | + .. deprecated:: 0.20.7 |
350 | + Alias for :func:`function_fail`. |
351 | + |
352 | + Sets the action status to failed and sets the error message. |
353 | |
354 | - The results set by action_set are preserved.""" |
355 | + The results set by action_set are preserved. |
356 | + """ |
357 | subprocess.check_call(['action-fail', message]) |
358 | |
359 | |
360 | +def function_fail(message): |
361 | + """Sets the function status to failed and sets the error message. |
362 | + |
363 | + The results set by function_set are preserved.""" |
364 | + cmd = ['function-fail'] |
365 | + # Fallback for older charms. |
366 | + if not cmd_exists('function-fail'): |
367 | + cmd = ['action-fail'] |
368 | + cmd.append(message) |
369 | + |
370 | + subprocess.check_call(cmd) |
371 | + |
372 | + |
373 | def action_name(): |
374 | """Get the name of the currently executing action.""" |
375 | return os.environ.get('JUJU_ACTION_NAME') |
376 | |
377 | |
378 | +def function_name(): |
379 | + """Get the name of the currently executing function.""" |
380 | + return os.environ.get('JUJU_FUNCTION_NAME') or action_name() |
381 | + |
382 | + |
383 | def action_uuid(): |
384 | """Get the UUID of the currently executing action.""" |
385 | return os.environ.get('JUJU_ACTION_UUID') |
386 | |
387 | |
388 | +def function_id(): |
389 | + """Get the ID of the currently executing function.""" |
390 | + return os.environ.get('JUJU_FUNCTION_ID') or action_uuid() |
391 | + |
392 | + |
393 | def action_tag(): |
394 | """Get the tag for the currently executing action.""" |
395 | return os.environ.get('JUJU_ACTION_TAG') |
396 | |
397 | |
398 | +def function_tag(): |
399 | + """Get the tag for the currently executing function.""" |
400 | + return os.environ.get('JUJU_FUNCTION_TAG') or action_tag() |
401 | + |
402 | + |
403 | def status_set(workload_state, message): |
404 | """Set the workload state with a message |
405 | |
406 | @@ -951,6 +1165,14 @@ def application_version_set(version): |
407 | |
408 | |
409 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) |
410 | +@cached |
411 | +def goal_state(): |
412 | + """Juju goal state values""" |
413 | + cmd = ['goal-state', '--format=json'] |
414 | + return json.loads(subprocess.check_output(cmd).decode('UTF-8')) |
415 | + |
416 | + |
417 | +@translate_exc(from_exc=OSError, to_exc=NotImplementedError) |
418 | def is_leader(): |
419 | """Does the current unit hold the juju leadership |
420 | |
421 | @@ -1268,3 +1490,102 @@ def egress_subnets(rid=None, unit=None): |
422 | if 'private-address' in settings: |
423 | return [_to_range(settings['private-address'])] |
424 | return [] # Should never happen |
425 | + |
426 | + |
427 | +def unit_doomed(unit=None): |
428 | + """Determines if the unit is being removed from the model |
429 | + |
430 | + Requires Juju 2.4.1. |
431 | + |
432 | + :param unit: string unit name, defaults to local_unit |
433 | + :side effect: calls goal_state |
434 | + :side effect: calls local_unit |
435 | + :side effect: calls has_juju_version |
436 | + :return: True if the unit is being removed, already gone, or never existed |
437 | + """ |
438 | + if not has_juju_version("2.4.1"): |
439 | + # We cannot risk blindly returning False for 'we don't know', |
440 | + # because that could cause data loss; if call sites don't |
441 | + # need an accurate answer, they likely don't need this helper |
442 | + # at all. |
443 | + # goal-state existed in 2.4.0, but did not handle removals |
444 | + # correctly until 2.4.1. |
445 | + raise NotImplementedError("is_doomed") |
446 | + if unit is None: |
447 | + unit = local_unit() |
448 | + gs = goal_state() |
449 | + units = gs.get('units', {}) |
450 | + if unit not in units: |
451 | + return True |
452 | + # I don't think 'dead' units ever show up in the goal-state, but |
453 | + # check anyway in addition to 'dying'. |
454 | + return units[unit]['status'] in ('dying', 'dead') |
455 | + |
456 | + |
457 | +def env_proxy_settings(selected_settings=None): |
458 | + """Get proxy settings from process environment variables. |
459 | + |
460 | + Get charm proxy settings from environment variables that correspond to |
461 | + juju-http-proxy, juju-https-proxy and juju-no-proxy (available as of 2.4.2, |
462 | + see lp:1782236) in a format suitable for passing to an application that |
463 | + reacts to proxy settings passed as environment variables. Some applications |
464 | + support lowercase or uppercase notation (e.g. curl), some support only |
465 | + lowercase (e.g. wget), there are also subjectively rare cases of only |
466 | + uppercase notation support. no_proxy CIDR and wildcard support also varies |
467 | + between runtimes and applications as there is no enforced standard. |
468 | + |
469 | + Some applications may connect to multiple destinations and expose config |
470 | + options that would affect only proxy settings for a specific destination |
471 | + these should be handled in charms in an application-specific manner. |
472 | + |
473 | + :param selected_settings: format only a subset of possible settings |
474 | + :type selected_settings: list |
475 | + :rtype: Option(None, dict[str, str]) |
476 | + """ |
477 | + SUPPORTED_SETTINGS = { |
478 | + 'http': 'HTTP_PROXY', |
479 | + 'https': 'HTTPS_PROXY', |
480 | + 'no_proxy': 'NO_PROXY', |
481 | + 'ftp': 'FTP_PROXY' |
482 | + } |
483 | + if selected_settings is None: |
484 | + selected_settings = SUPPORTED_SETTINGS |
485 | + |
486 | + selected_vars = [v for k, v in SUPPORTED_SETTINGS.items() |
487 | + if k in selected_settings] |
488 | + proxy_settings = {} |
489 | + for var in selected_vars: |
490 | + var_val = os.getenv(var) |
491 | + if var_val: |
492 | + proxy_settings[var] = var_val |
493 | + proxy_settings[var.lower()] = var_val |
494 | + # Now handle juju-prefixed environment variables. The legacy vs new |
495 | + # environment variable usage is mutually exclusive |
496 | + charm_var_val = os.getenv('JUJU_CHARM_{}'.format(var)) |
497 | + if charm_var_val: |
498 | + proxy_settings[var] = charm_var_val |
499 | + proxy_settings[var.lower()] = charm_var_val |
500 | + if 'no_proxy' in proxy_settings: |
501 | + if _contains_range(proxy_settings['no_proxy']): |
502 | + log(RANGE_WARNING, level=WARNING) |
503 | + return proxy_settings if proxy_settings else None |
504 | + |
505 | + |
506 | +def _contains_range(addresses): |
507 | + """Check for cidr or wildcard domain in a string. |
508 | + |
509 | + Given a string comprising a comma separated list of ip addresses |
510 | + and domain names, determine whether the string contains IP ranges |
511 | + or wildcard domains. |
512 | + |
513 | + :param addresses: comma separated list of domains and ip addresses. |
514 | + :type addresses: str |
515 | + """ |
516 | + return ( |
517 | + # Test for cidr (e.g. 10.20.20.0/24) |
518 | + "/" in addresses or |
519 | + # Test for wildcard domains (*.foo.com or .foo.com) |
520 | + "*" in addresses or |
521 | + addresses.startswith(".") or |
522 | + ",." in addresses or |
523 | + " ." in addresses) |
524 | diff --git a/hooks/charmhelpers/core/host.py b/hooks/charmhelpers/core/host.py |
525 | index fd14d60..b33ac90 100644 |
526 | --- a/hooks/charmhelpers/core/host.py |
527 | +++ b/hooks/charmhelpers/core/host.py |
528 | @@ -34,21 +34,23 @@ import six |
529 | |
530 | from contextlib import contextmanager |
531 | from collections import OrderedDict |
532 | -from .hookenv import log, DEBUG, local_unit |
533 | +from .hookenv import log, INFO, DEBUG, local_unit, charm_name |
534 | from .fstab import Fstab |
535 | from charmhelpers.osplatform import get_platform |
536 | |
537 | __platform__ = get_platform() |
538 | if __platform__ == "ubuntu": |
539 | - from charmhelpers.core.host_factory.ubuntu import ( |
540 | + from charmhelpers.core.host_factory.ubuntu import ( # NOQA:F401 |
541 | service_available, |
542 | add_new_group, |
543 | lsb_release, |
544 | cmp_pkgrevno, |
545 | CompareHostReleases, |
546 | + get_distrib_codename, |
547 | + arch |
548 | ) # flake8: noqa -- ignore F401 for this import |
549 | elif __platform__ == "centos": |
550 | - from charmhelpers.core.host_factory.centos import ( |
551 | + from charmhelpers.core.host_factory.centos import ( # NOQA:F401 |
552 | service_available, |
553 | add_new_group, |
554 | lsb_release, |
555 | @@ -58,6 +60,7 @@ elif __platform__ == "centos": |
556 | |
557 | UPDATEDB_PATH = '/etc/updatedb.conf' |
558 | |
559 | + |
560 | def service_start(service_name, **kwargs): |
561 | """Start a system service. |
562 | |
563 | @@ -287,8 +290,8 @@ def service_running(service_name, **kwargs): |
564 | for key, value in six.iteritems(kwargs): |
565 | parameter = '%s=%s' % (key, value) |
566 | cmd.append(parameter) |
567 | - output = subprocess.check_output(cmd, |
568 | - stderr=subprocess.STDOUT).decode('UTF-8') |
569 | + output = subprocess.check_output( |
570 | + cmd, stderr=subprocess.STDOUT).decode('UTF-8') |
571 | except subprocess.CalledProcessError: |
572 | return False |
573 | else: |
574 | @@ -442,7 +445,7 @@ def add_user_to_group(username, group): |
575 | |
576 | |
577 | def chage(username, lastday=None, expiredate=None, inactive=None, |
578 | - mindays=None, maxdays=None, root=None, warndays=None): |
579 | + mindays=None, maxdays=None, root=None, warndays=None): |
580 | """Change user password expiry information |
581 | |
582 | :param str username: User to update |
583 | @@ -482,8 +485,10 @@ def chage(username, lastday=None, expiredate=None, inactive=None, |
584 | cmd.append(username) |
585 | subprocess.check_call(cmd) |
586 | |
587 | + |
588 | remove_password_expiry = functools.partial(chage, expiredate='-1', inactive='-1', mindays='0', maxdays='-1') |
589 | |
590 | + |
591 | def rsync(from_path, to_path, flags='-r', options=None, timeout=None): |
592 | """Replicate the contents of a path""" |
593 | options = options or ['--delete', '--executability'] |
594 | @@ -535,13 +540,15 @@ def write_file(path, content, owner='root', group='root', perms=0o444): |
595 | # lets see if we can grab the file and compare the context, to avoid doing |
596 | # a write. |
597 | existing_content = None |
598 | - existing_uid, existing_gid = None, None |
599 | + existing_uid, existing_gid, existing_perms = None, None, None |
600 | try: |
601 | with open(path, 'rb') as target: |
602 | existing_content = target.read() |
603 | stat = os.stat(path) |
604 | - existing_uid, existing_gid = stat.st_uid, stat.st_gid |
605 | - except: |
606 | + existing_uid, existing_gid, existing_perms = ( |
607 | + stat.st_uid, stat.st_gid, stat.st_mode |
608 | + ) |
609 | + except Exception: |
610 | pass |
611 | if content != existing_content: |
612 | log("Writing file {} {}:{} {:o}".format(path, owner, group, perms), |
613 | @@ -554,7 +561,7 @@ def write_file(path, content, owner='root', group='root', perms=0o444): |
614 | target.write(content) |
615 | return |
616 | # the contents were the same, but we might still need to change the |
617 | - # ownership. |
618 | + # ownership or permissions. |
619 | if existing_uid != uid: |
620 | log("Changing uid on already existing content: {} -> {}" |
621 | .format(existing_uid, uid), level=DEBUG) |
622 | @@ -563,6 +570,10 @@ def write_file(path, content, owner='root', group='root', perms=0o444): |
623 | log("Changing gid on already existing content: {} -> {}" |
624 | .format(existing_gid, gid), level=DEBUG) |
625 | os.chown(path, -1, gid) |
626 | + if existing_perms != perms: |
627 | + log("Changing permissions on existing content: {} -> {}" |
628 | + .format(existing_perms, perms), level=DEBUG) |
629 | + os.chmod(path, perms) |
630 | |
631 | |
632 | def fstab_remove(mp): |
633 | @@ -827,7 +838,7 @@ def list_nics(nic_type=None): |
634 | ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n') |
635 | ip_output = (line.strip() for line in ip_output if line) |
636 | |
637 | - key = re.compile('^[0-9]+:\s+(.+):') |
638 | + key = re.compile(r'^[0-9]+:\s+(.+):') |
639 | for line in ip_output: |
640 | matched = re.search(key, line) |
641 | if matched: |
642 | @@ -972,6 +983,20 @@ def is_container(): |
643 | |
644 | |
645 | def add_to_updatedb_prunepath(path, updatedb_path=UPDATEDB_PATH): |
646 | + """Adds the specified path to the mlocate's udpatedb.conf PRUNEPATH list. |
647 | + |
648 | + This method has no effect if the path specified by updatedb_path does not |
649 | + exist or is not a file. |
650 | + |
651 | + @param path: string the path to add to the updatedb.conf PRUNEPATHS value |
652 | + @param updatedb_path: the path to the updatedb.conf file |
653 | + """ |
654 | + if not os.path.exists(updatedb_path) or os.path.isdir(updatedb_path): |
655 | + # If the updatedb.conf file doesn't exist then don't attempt to update |
656 | + # the file as the package providing mlocate may not be installed on |
657 | + # the local system |
658 | + return |
659 | + |
660 | with open(updatedb_path, 'r+') as f_id: |
661 | updatedb_text = f_id.read() |
662 | output = updatedb(updatedb_text, path) |
663 | @@ -993,7 +1018,7 @@ def updatedb(updatedb_text, new_path): |
664 | return output |
665 | |
666 | |
667 | -def modulo_distribution(modulo=3, wait=30): |
668 | +def modulo_distribution(modulo=3, wait=30, non_zero_wait=False): |
669 | """ Modulo distribution |
670 | |
671 | This helper uses the unit number, a modulo value and a constant wait time |
672 | @@ -1015,7 +1040,65 @@ def modulo_distribution(modulo=3, wait=30): |
673 | |
674 | @param modulo: int The modulo number creates the group distribution |
675 | @param wait: int The constant time wait value |
676 | + @param non_zero_wait: boolean Override unit % modulo == 0, |
677 | + return modulo * wait. Used to avoid collisions with |
678 | + leader nodes which are often given priority. |
679 | @return: int Calculated time to wait for unit operation |
680 | """ |
681 | unit_number = int(local_unit().split('/')[1]) |
682 | - return (unit_number % modulo) * wait |
683 | + calculated_wait_time = (unit_number % modulo) * wait |
684 | + if non_zero_wait and calculated_wait_time == 0: |
685 | + return modulo * wait |
686 | + else: |
687 | + return calculated_wait_time |
688 | + |
689 | + |
690 | +def install_ca_cert(ca_cert, name=None): |
691 | + """ |
692 | + Install the given cert as a trusted CA. |
693 | + |
694 | + The ``name`` is the stem of the filename where the cert is written, and if |
695 | + not provided, it will default to ``juju-{charm_name}``. |
696 | + |
697 | + If the cert is empty or None, or is unchanged, nothing is done. |
698 | + """ |
699 | + if not ca_cert: |
700 | + return |
701 | + if not isinstance(ca_cert, bytes): |
702 | + ca_cert = ca_cert.encode('utf8') |
703 | + if not name: |
704 | + name = 'juju-{}'.format(charm_name()) |
705 | + cert_file = '/usr/local/share/ca-certificates/{}.crt'.format(name) |
706 | + new_hash = hashlib.md5(ca_cert).hexdigest() |
707 | + if file_hash(cert_file) == new_hash: |
708 | + return |
709 | + log("Installing new CA cert at: {}".format(cert_file), level=INFO) |
710 | + write_file(cert_file, ca_cert) |
711 | + subprocess.check_call(['update-ca-certificates', '--fresh']) |
712 | + |
713 | + |
714 | +def get_system_env(key, default=None): |
715 | + """Get data from system environment as represented in ``/etc/environment``. |
716 | + |
717 | + :param key: Key to look up |
718 | + :type key: str |
719 | + :param default: Value to return if key is not found |
720 | + :type default: any |
721 | + :returns: Value for key if found or contents of default parameter |
722 | + :rtype: any |
723 | + :raises: subprocess.CalledProcessError |
724 | + """ |
725 | + env_file = '/etc/environment' |
726 | + # use the shell and env(1) to parse the global environments file. This is |
727 | + # done to get the correct result even if the user has shell variable |
728 | + # substitutions or other shell logic in that file. |
729 | + output = subprocess.check_output( |
730 | + ['env', '-i', '/bin/bash', '-c', |
731 | + 'set -a && source {} && env'.format(env_file)], |
732 | + universal_newlines=True) |
733 | + for k, v in (line.split('=', 1) |
734 | + for line in output.splitlines() if '=' in line): |
735 | + if k == key: |
736 | + return v |
737 | + else: |
738 | + return default |
739 | diff --git a/hooks/charmhelpers/core/host_factory/ubuntu.py b/hooks/charmhelpers/core/host_factory/ubuntu.py |
740 | index 99451b5..3edc068 100644 |
741 | --- a/hooks/charmhelpers/core/host_factory/ubuntu.py |
742 | +++ b/hooks/charmhelpers/core/host_factory/ubuntu.py |
743 | @@ -1,5 +1,6 @@ |
744 | import subprocess |
745 | |
746 | +from charmhelpers.core.hookenv import cached |
747 | from charmhelpers.core.strutils import BasicStringComparator |
748 | |
749 | |
750 | @@ -21,6 +22,10 @@ UBUNTU_RELEASES = ( |
751 | 'zesty', |
752 | 'artful', |
753 | 'bionic', |
754 | + 'cosmic', |
755 | + 'disco', |
756 | + 'eoan', |
757 | + 'focal' |
758 | ) |
759 | |
760 | |
761 | @@ -71,6 +76,14 @@ def lsb_release(): |
762 | return d |
763 | |
764 | |
765 | +def get_distrib_codename(): |
766 | + """Return the codename of the distribution |
767 | + :returns: The codename |
768 | + :rtype: str |
769 | + """ |
770 | + return lsb_release()['DISTRIB_CODENAME'].lower() |
771 | + |
772 | + |
773 | def cmp_pkgrevno(package, revno, pkgcache=None): |
774 | """Compare supplied revno with the revno of the installed package. |
775 | |
776 | @@ -82,9 +95,22 @@ def cmp_pkgrevno(package, revno, pkgcache=None): |
777 | the pkgcache argument is None. Be sure to add charmhelpers.fetch if |
778 | you call this function, or pass an apt_pkg.Cache() instance. |
779 | """ |
780 | - import apt_pkg |
781 | + from charmhelpers.fetch import apt_pkg |
782 | if not pkgcache: |
783 | from charmhelpers.fetch import apt_cache |
784 | pkgcache = apt_cache() |
785 | pkg = pkgcache[package] |
786 | return apt_pkg.version_compare(pkg.current_ver.ver_str, revno) |
787 | + |
788 | + |
789 | +@cached |
790 | +def arch(): |
791 | + """Return the package architecture as a string. |
792 | + |
793 | + :returns: the architecture |
794 | + :rtype: str |
795 | + :raises: subprocess.CalledProcessError if dpkg command fails |
796 | + """ |
797 | + return subprocess.check_output( |
798 | + ['dpkg', '--print-architecture'] |
799 | + ).rstrip().decode('UTF-8') |
800 | diff --git a/hooks/charmhelpers/core/kernel.py b/hooks/charmhelpers/core/kernel.py |
801 | index 2d40452..e01f4f8 100644 |
802 | --- a/hooks/charmhelpers/core/kernel.py |
803 | +++ b/hooks/charmhelpers/core/kernel.py |
804 | @@ -26,12 +26,12 @@ from charmhelpers.core.hookenv import ( |
805 | |
806 | __platform__ = get_platform() |
807 | if __platform__ == "ubuntu": |
808 | - from charmhelpers.core.kernel_factory.ubuntu import ( |
809 | + from charmhelpers.core.kernel_factory.ubuntu import ( # NOQA:F401 |
810 | persistent_modprobe, |
811 | update_initramfs, |
812 | ) # flake8: noqa -- ignore F401 for this import |
813 | elif __platform__ == "centos": |
814 | - from charmhelpers.core.kernel_factory.centos import ( |
815 | + from charmhelpers.core.kernel_factory.centos import ( # NOQA:F401 |
816 | persistent_modprobe, |
817 | update_initramfs, |
818 | ) # flake8: noqa -- ignore F401 for this import |
819 | diff --git a/hooks/charmhelpers/core/services/base.py b/hooks/charmhelpers/core/services/base.py |
820 | index 345b60d..179ad4f 100644 |
821 | --- a/hooks/charmhelpers/core/services/base.py |
822 | +++ b/hooks/charmhelpers/core/services/base.py |
823 | @@ -307,7 +307,9 @@ class PortManagerCallback(ManagerCallback): |
824 | """ |
825 | def __call__(self, manager, service_name, event_name): |
826 | service = manager.get_service(service_name) |
827 | - new_ports = service.get('ports', []) |
828 | + # turn this generator into a list, |
829 | + # as we'll be going over it multiple times |
830 | + new_ports = list(service.get('ports', [])) |
831 | port_file = os.path.join(hookenv.charm_dir(), '.{}.ports'.format(service_name)) |
832 | if os.path.exists(port_file): |
833 | with open(port_file) as fp: |
834 | diff --git a/hooks/charmhelpers/core/sysctl.py b/hooks/charmhelpers/core/sysctl.py |
835 | index 6e413e3..f1f4a28 100644 |
836 | --- a/hooks/charmhelpers/core/sysctl.py |
837 | +++ b/hooks/charmhelpers/core/sysctl.py |
838 | @@ -28,27 +28,38 @@ from charmhelpers.core.hookenv import ( |
839 | __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>' |
840 | |
841 | |
842 | -def create(sysctl_dict, sysctl_file): |
843 | +def create(sysctl_dict, sysctl_file, ignore=False): |
844 | """Creates a sysctl.conf file from a YAML associative array |
845 | |
846 | - :param sysctl_dict: a YAML-formatted string of sysctl options eg "{ 'kernel.max_pid': 1337 }" |
847 | + :param sysctl_dict: a dict or YAML-formatted string of sysctl |
848 | + options eg "{ 'kernel.max_pid': 1337 }" |
849 | :type sysctl_dict: str |
850 | :param sysctl_file: path to the sysctl file to be saved |
851 | :type sysctl_file: str or unicode |
852 | + :param ignore: If True, ignore "unknown variable" errors. |
853 | + :type ignore: bool |
854 | :returns: None |
855 | """ |
856 | - try: |
857 | - sysctl_dict_parsed = yaml.safe_load(sysctl_dict) |
858 | - except yaml.YAMLError: |
859 | - log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict), |
860 | - level=ERROR) |
861 | - return |
862 | + if type(sysctl_dict) is not dict: |
863 | + try: |
864 | + sysctl_dict_parsed = yaml.safe_load(sysctl_dict) |
865 | + except yaml.YAMLError: |
866 | + log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict), |
867 | + level=ERROR) |
868 | + return |
869 | + else: |
870 | + sysctl_dict_parsed = sysctl_dict |
871 | |
872 | with open(sysctl_file, "w") as fd: |
873 | for key, value in sysctl_dict_parsed.items(): |
874 | fd.write("{}={}\n".format(key, value)) |
875 | |
876 | - log("Updating sysctl_file: %s values: %s" % (sysctl_file, sysctl_dict_parsed), |
877 | + log("Updating sysctl_file: {} values: {}".format(sysctl_file, |
878 | + sysctl_dict_parsed), |
879 | level=DEBUG) |
880 | |
881 | - check_call(["sysctl", "-p", sysctl_file]) |
882 | + call = ["sysctl", "-p", sysctl_file] |
883 | + if ignore: |
884 | + call.append("-e") |
885 | + |
886 | + check_call(call) |
887 | diff --git a/hooks/charmhelpers/core/unitdata.py b/hooks/charmhelpers/core/unitdata.py |
888 | index 6d7b494..ab55432 100644 |
889 | --- a/hooks/charmhelpers/core/unitdata.py |
890 | +++ b/hooks/charmhelpers/core/unitdata.py |
891 | @@ -166,6 +166,10 @@ class Storage(object): |
892 | |
893 | To support dicts, lists, integer, floats, and booleans values |
894 | are automatically json encoded/decoded. |
895 | + |
896 | + Note: to facilitate unit testing, ':memory:' can be passed as the |
897 | + path parameter which causes sqlite3 to only build the db in memory. |
898 | + This should only be used for testing purposes. |
899 | """ |
900 | def __init__(self, path=None): |
901 | self.db_path = path |
902 | @@ -175,8 +179,9 @@ class Storage(object): |
903 | else: |
904 | self.db_path = os.path.join( |
905 | os.environ.get('CHARM_DIR', ''), '.unit-state.db') |
906 | - with open(self.db_path, 'a') as f: |
907 | - os.fchmod(f.fileno(), 0o600) |
908 | + if self.db_path != ':memory:': |
909 | + with open(self.db_path, 'a') as f: |
910 | + os.fchmod(f.fileno(), 0o600) |
911 | self.conn = sqlite3.connect('%s' % self.db_path) |
912 | self.cursor = self.conn.cursor() |
913 | self.revision = None |
914 | diff --git a/hooks/charmhelpers/fetch/__init__.py b/hooks/charmhelpers/fetch/__init__.py |
915 | index 480a627..0cc7fc8 100644 |
916 | --- a/hooks/charmhelpers/fetch/__init__.py |
917 | +++ b/hooks/charmhelpers/fetch/__init__.py |
918 | @@ -84,6 +84,7 @@ module = "charmhelpers.fetch.%s" % __platform__ |
919 | fetch = importlib.import_module(module) |
920 | |
921 | filter_installed_packages = fetch.filter_installed_packages |
922 | +filter_missing_packages = fetch.filter_missing_packages |
923 | install = fetch.apt_install |
924 | upgrade = fetch.apt_upgrade |
925 | update = _fetch_update = fetch.apt_update |
926 | @@ -96,11 +97,14 @@ if __platform__ == "ubuntu": |
927 | apt_update = fetch.apt_update |
928 | apt_upgrade = fetch.apt_upgrade |
929 | apt_purge = fetch.apt_purge |
930 | + apt_autoremove = fetch.apt_autoremove |
931 | apt_mark = fetch.apt_mark |
932 | apt_hold = fetch.apt_hold |
933 | apt_unhold = fetch.apt_unhold |
934 | import_key = fetch.import_key |
935 | get_upstream_version = fetch.get_upstream_version |
936 | + apt_pkg = fetch.ubuntu_apt_pkg |
937 | + get_apt_dpkg_env = fetch.get_apt_dpkg_env |
938 | elif __platform__ == "centos": |
939 | yum_search = fetch.yum_search |
940 | |
941 | diff --git a/hooks/charmhelpers/fetch/archiveurl.py b/hooks/charmhelpers/fetch/archiveurl.py |
942 | index dd24f9e..d25587a 100644 |
943 | --- a/hooks/charmhelpers/fetch/archiveurl.py |
944 | +++ b/hooks/charmhelpers/fetch/archiveurl.py |
945 | @@ -89,7 +89,7 @@ class ArchiveUrlFetchHandler(BaseFetchHandler): |
946 | :param str source: URL pointing to an archive file. |
947 | :param str dest: Local path location to download archive file to. |
948 | """ |
949 | - # propogate all exceptions |
950 | + # propagate all exceptions |
951 | # URLError, OSError, etc |
952 | proto, netloc, path, params, query, fragment = urlparse(source) |
953 | if proto in ('http', 'https'): |
954 | diff --git a/hooks/charmhelpers/fetch/bzrurl.py b/hooks/charmhelpers/fetch/bzrurl.py |
955 | index 07cd029..c4ab3ff 100644 |
956 | --- a/hooks/charmhelpers/fetch/bzrurl.py |
957 | +++ b/hooks/charmhelpers/fetch/bzrurl.py |
958 | @@ -13,7 +13,7 @@ |
959 | # limitations under the License. |
960 | |
961 | import os |
962 | -from subprocess import check_call |
963 | +from subprocess import STDOUT, check_output |
964 | from charmhelpers.fetch import ( |
965 | BaseFetchHandler, |
966 | UnhandledSource, |
967 | @@ -55,7 +55,7 @@ class BzrUrlFetchHandler(BaseFetchHandler): |
968 | cmd = ['bzr', 'branch'] |
969 | cmd += cmd_opts |
970 | cmd += [source, dest] |
971 | - check_call(cmd) |
972 | + check_output(cmd, stderr=STDOUT) |
973 | |
974 | def install(self, source, dest=None, revno=None): |
975 | url_parts = self.parse_url(source) |
976 | diff --git a/hooks/charmhelpers/fetch/giturl.py b/hooks/charmhelpers/fetch/giturl.py |
977 | index 4cf21bc..070ca9b 100644 |
978 | --- a/hooks/charmhelpers/fetch/giturl.py |
979 | +++ b/hooks/charmhelpers/fetch/giturl.py |
980 | @@ -13,7 +13,7 @@ |
981 | # limitations under the License. |
982 | |
983 | import os |
984 | -from subprocess import check_call, CalledProcessError |
985 | +from subprocess import check_output, CalledProcessError, STDOUT |
986 | from charmhelpers.fetch import ( |
987 | BaseFetchHandler, |
988 | UnhandledSource, |
989 | @@ -50,7 +50,7 @@ class GitUrlFetchHandler(BaseFetchHandler): |
990 | cmd = ['git', 'clone', source, dest, '--branch', branch] |
991 | if depth: |
992 | cmd.extend(['--depth', depth]) |
993 | - check_call(cmd) |
994 | + check_output(cmd, stderr=STDOUT) |
995 | |
996 | def install(self, source, branch="master", dest=None, depth=None): |
997 | url_parts = self.parse_url(source) |
998 | diff --git a/hooks/charmhelpers/fetch/python/__init__.py b/hooks/charmhelpers/fetch/python/__init__.py |
999 | new file mode 100644 |
1000 | index 0000000..bff99dc |
1001 | --- /dev/null |
1002 | +++ b/hooks/charmhelpers/fetch/python/__init__.py |
1003 | @@ -0,0 +1,13 @@ |
1004 | +# Copyright 2014-2019 Canonical Limited. |
1005 | +# |
1006 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
1007 | +# you may not use this file except in compliance with the License. |
1008 | +# You may obtain a copy of the License at |
1009 | +# |
1010 | +# http://www.apache.org/licenses/LICENSE-2.0 |
1011 | +# |
1012 | +# Unless required by applicable law or agreed to in writing, software |
1013 | +# distributed under the License is distributed on an "AS IS" BASIS, |
1014 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
1015 | +# See the License for the specific language governing permissions and |
1016 | +# limitations under the License. |
1017 | diff --git a/hooks/charmhelpers/fetch/python/debug.py b/hooks/charmhelpers/fetch/python/debug.py |
1018 | new file mode 100644 |
1019 | index 0000000..757135e |
1020 | --- /dev/null |
1021 | +++ b/hooks/charmhelpers/fetch/python/debug.py |
1022 | @@ -0,0 +1,54 @@ |
1023 | +#!/usr/bin/env python |
1024 | +# coding: utf-8 |
1025 | + |
1026 | +# Copyright 2014-2015 Canonical Limited. |
1027 | +# |
1028 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
1029 | +# you may not use this file except in compliance with the License. |
1030 | +# You may obtain a copy of the License at |
1031 | +# |
1032 | +# http://www.apache.org/licenses/LICENSE-2.0 |
1033 | +# |
1034 | +# Unless required by applicable law or agreed to in writing, software |
1035 | +# distributed under the License is distributed on an "AS IS" BASIS, |
1036 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
1037 | +# See the License for the specific language governing permissions and |
1038 | +# limitations under the License. |
1039 | + |
1040 | +from __future__ import print_function |
1041 | + |
1042 | +import atexit |
1043 | +import sys |
1044 | + |
1045 | +from charmhelpers.fetch.python.rpdb import Rpdb |
1046 | +from charmhelpers.core.hookenv import ( |
1047 | + open_port, |
1048 | + close_port, |
1049 | + ERROR, |
1050 | + log |
1051 | +) |
1052 | + |
1053 | +__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" |
1054 | + |
1055 | +DEFAULT_ADDR = "0.0.0.0" |
1056 | +DEFAULT_PORT = 4444 |
1057 | + |
1058 | + |
1059 | +def _error(message): |
1060 | + log(message, level=ERROR) |
1061 | + |
1062 | + |
1063 | +def set_trace(addr=DEFAULT_ADDR, port=DEFAULT_PORT): |
1064 | + """ |
1065 | + Set a trace point using the remote debugger |
1066 | + """ |
1067 | + atexit.register(close_port, port) |
1068 | + try: |
1069 | + log("Starting a remote python debugger session on %s:%s" % (addr, |
1070 | + port)) |
1071 | + open_port(port) |
1072 | + debugger = Rpdb(addr=addr, port=port) |
1073 | + debugger.set_trace(sys._getframe().f_back) |
1074 | + except Exception: |
1075 | + _error("Cannot start a remote debug session on %s:%s" % (addr, |
1076 | + port)) |
1077 | diff --git a/hooks/charmhelpers/fetch/python/packages.py b/hooks/charmhelpers/fetch/python/packages.py |
1078 | new file mode 100644 |
1079 | index 0000000..6e95028 |
1080 | --- /dev/null |
1081 | +++ b/hooks/charmhelpers/fetch/python/packages.py |
1082 | @@ -0,0 +1,154 @@ |
1083 | +#!/usr/bin/env python |
1084 | +# coding: utf-8 |
1085 | + |
1086 | +# Copyright 2014-2015 Canonical Limited. |
1087 | +# |
1088 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
1089 | +# you may not use this file except in compliance with the License. |
1090 | +# You may obtain a copy of the License at |
1091 | +# |
1092 | +# http://www.apache.org/licenses/LICENSE-2.0 |
1093 | +# |
1094 | +# Unless required by applicable law or agreed to in writing, software |
1095 | +# distributed under the License is distributed on an "AS IS" BASIS, |
1096 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
1097 | +# See the License for the specific language governing permissions and |
1098 | +# limitations under the License. |
1099 | + |
1100 | +import os |
1101 | +import six |
1102 | +import subprocess |
1103 | +import sys |
1104 | + |
1105 | +from charmhelpers.fetch import apt_install, apt_update |
1106 | +from charmhelpers.core.hookenv import charm_dir, log |
1107 | + |
1108 | +__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" |
1109 | + |
1110 | + |
1111 | +def pip_execute(*args, **kwargs): |
1112 | + """Overridden pip_execute() to stop sys.path being changed. |
1113 | + |
1114 | + The act of importing main from the pip module seems to add wheels |
1115 | + from the /usr/share/python-wheels which are installed by various tools. |
1116 | + This function ensures that sys.path remains the same after the call is |
1117 | + executed. |
1118 | + """ |
1119 | + try: |
1120 | + _path = sys.path |
1121 | + try: |
1122 | + from pip import main as _pip_execute |
1123 | + except ImportError: |
1124 | + apt_update() |
1125 | + if six.PY2: |
1126 | + apt_install('python-pip') |
1127 | + else: |
1128 | + apt_install('python3-pip') |
1129 | + from pip import main as _pip_execute |
1130 | + _pip_execute(*args, **kwargs) |
1131 | + finally: |
1132 | + sys.path = _path |
1133 | + |
1134 | + |
1135 | +def parse_options(given, available): |
1136 | + """Given a set of options, check if available""" |
1137 | + for key, value in sorted(given.items()): |
1138 | + if not value: |
1139 | + continue |
1140 | + if key in available: |
1141 | + yield "--{0}={1}".format(key, value) |
1142 | + |
1143 | + |
1144 | +def pip_install_requirements(requirements, constraints=None, **options): |
1145 | + """Install a requirements file. |
1146 | + |
1147 | + :param constraints: Path to pip constraints file. |
1148 | + http://pip.readthedocs.org/en/stable/user_guide/#constraints-files |
1149 | + """ |
1150 | + command = ["install"] |
1151 | + |
1152 | + available_options = ('proxy', 'src', 'log', ) |
1153 | + for option in parse_options(options, available_options): |
1154 | + command.append(option) |
1155 | + |
1156 | + command.append("-r {0}".format(requirements)) |
1157 | + if constraints: |
1158 | + command.append("-c {0}".format(constraints)) |
1159 | + log("Installing from file: {} with constraints {} " |
1160 | + "and options: {}".format(requirements, constraints, command)) |
1161 | + else: |
1162 | + log("Installing from file: {} with options: {}".format(requirements, |
1163 | + command)) |
1164 | + pip_execute(command) |
1165 | + |
1166 | + |
1167 | +def pip_install(package, fatal=False, upgrade=False, venv=None, |
1168 | + constraints=None, **options): |
1169 | + """Install a python package""" |
1170 | + if venv: |
1171 | + venv_python = os.path.join(venv, 'bin/pip') |
1172 | + command = [venv_python, "install"] |
1173 | + else: |
1174 | + command = ["install"] |
1175 | + |
1176 | + available_options = ('proxy', 'src', 'log', 'index-url', ) |
1177 | + for option in parse_options(options, available_options): |
1178 | + command.append(option) |
1179 | + |
1180 | + if upgrade: |
1181 | + command.append('--upgrade') |
1182 | + |
1183 | + if constraints: |
1184 | + command.extend(['-c', constraints]) |
1185 | + |
1186 | + if isinstance(package, list): |
1187 | + command.extend(package) |
1188 | + else: |
1189 | + command.append(package) |
1190 | + |
1191 | + log("Installing {} package with options: {}".format(package, |
1192 | + command)) |
1193 | + if venv: |
1194 | + subprocess.check_call(command) |
1195 | + else: |
1196 | + pip_execute(command) |
1197 | + |
1198 | + |
1199 | +def pip_uninstall(package, **options): |
1200 | + """Uninstall a python package""" |
1201 | + command = ["uninstall", "-q", "-y"] |
1202 | + |
1203 | + available_options = ('proxy', 'log', ) |
1204 | + for option in parse_options(options, available_options): |
1205 | + command.append(option) |
1206 | + |
1207 | + if isinstance(package, list): |
1208 | + command.extend(package) |
1209 | + else: |
1210 | + command.append(package) |
1211 | + |
1212 | + log("Uninstalling {} package with options: {}".format(package, |
1213 | + command)) |
1214 | + pip_execute(command) |
1215 | + |
1216 | + |
1217 | +def pip_list(): |
1218 | + """Returns the list of current python installed packages |
1219 | + """ |
1220 | + return pip_execute(["list"]) |
1221 | + |
1222 | + |
1223 | +def pip_create_virtualenv(path=None): |
1224 | + """Create an isolated Python environment.""" |
1225 | + if six.PY2: |
1226 | + apt_install('python-virtualenv') |
1227 | + else: |
1228 | + apt_install('python3-virtualenv') |
1229 | + |
1230 | + if path: |
1231 | + venv_path = path |
1232 | + else: |
1233 | + venv_path = os.path.join(charm_dir(), 'venv') |
1234 | + |
1235 | + if not os.path.exists(venv_path): |
1236 | + subprocess.check_call(['virtualenv', venv_path]) |
1237 | diff --git a/hooks/charmhelpers/fetch/python/rpdb.py b/hooks/charmhelpers/fetch/python/rpdb.py |
1238 | new file mode 100644 |
1239 | index 0000000..9b31610 |
1240 | --- /dev/null |
1241 | +++ b/hooks/charmhelpers/fetch/python/rpdb.py |
1242 | @@ -0,0 +1,56 @@ |
1243 | +# Copyright 2014-2015 Canonical Limited. |
1244 | +# |
1245 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
1246 | +# you may not use this file except in compliance with the License. |
1247 | +# You may obtain a copy of the License at |
1248 | +# |
1249 | +# http://www.apache.org/licenses/LICENSE-2.0 |
1250 | +# |
1251 | +# Unless required by applicable law or agreed to in writing, software |
1252 | +# distributed under the License is distributed on an "AS IS" BASIS, |
1253 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
1254 | +# See the License for the specific language governing permissions and |
1255 | +# limitations under the License. |
1256 | + |
1257 | +"""Remote Python Debugger (pdb wrapper).""" |
1258 | + |
1259 | +import pdb |
1260 | +import socket |
1261 | +import sys |
1262 | + |
1263 | +__author__ = "Bertrand Janin <b@janin.com>" |
1264 | +__version__ = "0.1.3" |
1265 | + |
1266 | + |
1267 | +class Rpdb(pdb.Pdb): |
1268 | + |
1269 | + def __init__(self, addr="127.0.0.1", port=4444): |
1270 | + """Initialize the socket and initialize pdb.""" |
1271 | + |
1272 | + # Backup stdin and stdout before replacing them by the socket handle |
1273 | + self.old_stdout = sys.stdout |
1274 | + self.old_stdin = sys.stdin |
1275 | + |
1276 | + # Open a 'reusable' socket to let the webapp reload on the same port |
1277 | + self.skt = socket.socket(socket.AF_INET, socket.SOCK_STREAM) |
1278 | + self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True) |
1279 | + self.skt.bind((addr, port)) |
1280 | + self.skt.listen(1) |
1281 | + (clientsocket, address) = self.skt.accept() |
1282 | + handle = clientsocket.makefile('rw') |
1283 | + pdb.Pdb.__init__(self, completekey='tab', stdin=handle, stdout=handle) |
1284 | + sys.stdout = sys.stdin = handle |
1285 | + |
1286 | + def shutdown(self): |
1287 | + """Revert stdin and stdout, close the socket.""" |
1288 | + sys.stdout = self.old_stdout |
1289 | + sys.stdin = self.old_stdin |
1290 | + self.skt.close() |
1291 | + self.set_continue() |
1292 | + |
1293 | + def do_continue(self, arg): |
1294 | + """Stop all operation on ``continue``.""" |
1295 | + self.shutdown() |
1296 | + return 1 |
1297 | + |
1298 | + do_EOF = do_quit = do_exit = do_c = do_cont = do_continue |
1299 | diff --git a/hooks/charmhelpers/fetch/python/version.py b/hooks/charmhelpers/fetch/python/version.py |
1300 | new file mode 100644 |
1301 | index 0000000..3eb4210 |
1302 | --- /dev/null |
1303 | +++ b/hooks/charmhelpers/fetch/python/version.py |
1304 | @@ -0,0 +1,32 @@ |
1305 | +#!/usr/bin/env python |
1306 | +# coding: utf-8 |
1307 | + |
1308 | +# Copyright 2014-2015 Canonical Limited. |
1309 | +# |
1310 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
1311 | +# you may not use this file except in compliance with the License. |
1312 | +# You may obtain a copy of the License at |
1313 | +# |
1314 | +# http://www.apache.org/licenses/LICENSE-2.0 |
1315 | +# |
1316 | +# Unless required by applicable law or agreed to in writing, software |
1317 | +# distributed under the License is distributed on an "AS IS" BASIS, |
1318 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
1319 | +# See the License for the specific language governing permissions and |
1320 | +# limitations under the License. |
1321 | + |
1322 | +import sys |
1323 | + |
1324 | +__author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" |
1325 | + |
1326 | + |
1327 | +def current_version(): |
1328 | + """Current system python version""" |
1329 | + return sys.version_info |
1330 | + |
1331 | + |
1332 | +def current_version_string(): |
1333 | + """Current system python version as string major.minor.micro""" |
1334 | + return "{0}.{1}.{2}".format(sys.version_info.major, |
1335 | + sys.version_info.minor, |
1336 | + sys.version_info.micro) |
1337 | diff --git a/hooks/charmhelpers/fetch/ubuntu.py b/hooks/charmhelpers/fetch/ubuntu.py |
1338 | index 910e96a..3ddaf0d 100644 |
1339 | --- a/hooks/charmhelpers/fetch/ubuntu.py |
1340 | +++ b/hooks/charmhelpers/fetch/ubuntu.py |
1341 | @@ -13,23 +13,23 @@ |
1342 | # limitations under the License. |
1343 | |
1344 | from collections import OrderedDict |
1345 | -import os |
1346 | import platform |
1347 | import re |
1348 | import six |
1349 | -import time |
1350 | import subprocess |
1351 | -from tempfile import NamedTemporaryFile |
1352 | +import sys |
1353 | +import time |
1354 | + |
1355 | +from charmhelpers.core.host import get_distrib_codename, get_system_env |
1356 | |
1357 | -from charmhelpers.core.host import ( |
1358 | - lsb_release |
1359 | -) |
1360 | from charmhelpers.core.hookenv import ( |
1361 | log, |
1362 | DEBUG, |
1363 | WARNING, |
1364 | + env_proxy_settings, |
1365 | ) |
1366 | from charmhelpers.fetch import SourceConfigError, GPGKeyError |
1367 | +from charmhelpers.fetch import ubuntu_apt_pkg |
1368 | |
1369 | PROPOSED_POCKET = ( |
1370 | "# Proposed\n" |
1371 | @@ -44,6 +44,7 @@ ARCH_TO_PROPOSED_POCKET = { |
1372 | 'x86_64': PROPOSED_POCKET, |
1373 | 'ppc64le': PROPOSED_PORTS_POCKET, |
1374 | 'aarch64': PROPOSED_PORTS_POCKET, |
1375 | + 's390x': PROPOSED_PORTS_POCKET, |
1376 | } |
1377 | CLOUD_ARCHIVE_URL = "http://ubuntu-cloud.archive.canonical.com/ubuntu" |
1378 | CLOUD_ARCHIVE_KEY_ID = '5EDB1B62EC4926EA' |
1379 | @@ -157,6 +158,38 @@ CLOUD_ARCHIVE_POCKETS = { |
1380 | 'queens/proposed': 'xenial-proposed/queens', |
1381 | 'xenial-queens/proposed': 'xenial-proposed/queens', |
1382 | 'xenial-proposed/queens': 'xenial-proposed/queens', |
1383 | + # Rocky |
1384 | + 'rocky': 'bionic-updates/rocky', |
1385 | + 'bionic-rocky': 'bionic-updates/rocky', |
1386 | + 'bionic-rocky/updates': 'bionic-updates/rocky', |
1387 | + 'bionic-updates/rocky': 'bionic-updates/rocky', |
1388 | + 'rocky/proposed': 'bionic-proposed/rocky', |
1389 | + 'bionic-rocky/proposed': 'bionic-proposed/rocky', |
1390 | + 'bionic-proposed/rocky': 'bionic-proposed/rocky', |
1391 | + # Stein |
1392 | + 'stein': 'bionic-updates/stein', |
1393 | + 'bionic-stein': 'bionic-updates/stein', |
1394 | + 'bionic-stein/updates': 'bionic-updates/stein', |
1395 | + 'bionic-updates/stein': 'bionic-updates/stein', |
1396 | + 'stein/proposed': 'bionic-proposed/stein', |
1397 | + 'bionic-stein/proposed': 'bionic-proposed/stein', |
1398 | + 'bionic-proposed/stein': 'bionic-proposed/stein', |
1399 | + # Train |
1400 | + 'train': 'bionic-updates/train', |
1401 | + 'bionic-train': 'bionic-updates/train', |
1402 | + 'bionic-train/updates': 'bionic-updates/train', |
1403 | + 'bionic-updates/train': 'bionic-updates/train', |
1404 | + 'train/proposed': 'bionic-proposed/train', |
1405 | + 'bionic-train/proposed': 'bionic-proposed/train', |
1406 | + 'bionic-proposed/train': 'bionic-proposed/train', |
1407 | + # Ussuri |
1408 | + 'ussuri': 'bionic-updates/ussuri', |
1409 | + 'bionic-ussuri': 'bionic-updates/ussuri', |
1410 | + 'bionic-ussuri/updates': 'bionic-updates/ussuri', |
1411 | + 'bionic-updates/ussuri': 'bionic-updates/ussuri', |
1412 | + 'ussuri/proposed': 'bionic-proposed/ussuri', |
1413 | + 'bionic-ussuri/proposed': 'bionic-proposed/ussuri', |
1414 | + 'bionic-proposed/ussuri': 'bionic-proposed/ussuri', |
1415 | } |
1416 | |
1417 | |
1418 | @@ -180,18 +213,54 @@ def filter_installed_packages(packages): |
1419 | return _pkgs |
1420 | |
1421 | |
1422 | -def apt_cache(in_memory=True, progress=None): |
1423 | - """Build and return an apt cache.""" |
1424 | - from apt import apt_pkg |
1425 | - apt_pkg.init() |
1426 | - if in_memory: |
1427 | - apt_pkg.config.set("Dir::Cache::pkgcache", "") |
1428 | - apt_pkg.config.set("Dir::Cache::srcpkgcache", "") |
1429 | - return apt_pkg.Cache(progress) |
1430 | +def filter_missing_packages(packages): |
1431 | + """Return a list of packages that are installed. |
1432 | + |
1433 | + :param packages: list of packages to evaluate. |
1434 | + :returns list: Packages that are installed. |
1435 | + """ |
1436 | + return list( |
1437 | + set(packages) - |
1438 | + set(filter_installed_packages(packages)) |
1439 | + ) |
1440 | + |
1441 | + |
1442 | +def apt_cache(*_, **__): |
1443 | + """Shim returning an object simulating the apt_pkg Cache. |
1444 | + |
1445 | + :param _: Accept arguments for compatibility, not used. |
1446 | + :type _: any |
1447 | + :param __: Accept keyword arguments for compatibility, not used. |
1448 | + :type __: any |
1449 | + :returns:Object used to interrogate the system apt and dpkg databases. |
1450 | + :rtype:ubuntu_apt_pkg.Cache |
1451 | + """ |
1452 | + if 'apt_pkg' in sys.modules: |
1453 | + # NOTE(fnordahl): When our consumer use the upstream ``apt_pkg`` module |
1454 | + # in conjunction with the apt_cache helper function, they may expect us |
1455 | + # to call ``apt_pkg.init()`` for them. |
1456 | + # |
1457 | + # Detect this situation, log a warning and make the call to |
1458 | + # ``apt_pkg.init()`` to avoid the consumer Python interpreter from |
1459 | + # crashing with a segmentation fault. |
1460 | + log('Support for use of upstream ``apt_pkg`` module in conjunction' |
1461 | + 'with charm-helpers is deprecated since 2019-06-25', level=WARNING) |
1462 | + sys.modules['apt_pkg'].init() |
1463 | + return ubuntu_apt_pkg.Cache() |
1464 | |
1465 | |
1466 | def apt_install(packages, options=None, fatal=False): |
1467 | - """Install one or more packages.""" |
1468 | + """Install one or more packages. |
1469 | + |
1470 | + :param packages: Package(s) to install |
1471 | + :type packages: Option[str, List[str]] |
1472 | + :param options: Options to pass on to apt-get |
1473 | + :type options: Option[None, List[str]] |
1474 | + :param fatal: Whether the command's output should be checked and |
1475 | + retried. |
1476 | + :type fatal: bool |
1477 | + :raises: subprocess.CalledProcessError |
1478 | + """ |
1479 | if options is None: |
1480 | options = ['--option=Dpkg::Options::=--force-confold'] |
1481 | |
1482 | @@ -208,7 +277,17 @@ def apt_install(packages, options=None, fatal=False): |
1483 | |
1484 | |
1485 | def apt_upgrade(options=None, fatal=False, dist=False): |
1486 | - """Upgrade all packages.""" |
1487 | + """Upgrade all packages. |
1488 | + |
1489 | + :param options: Options to pass on to apt-get |
1490 | + :type options: Option[None, List[str]] |
1491 | + :param fatal: Whether the command's output should be checked and |
1492 | + retried. |
1493 | + :type fatal: bool |
1494 | + :param dist: Whether ``dist-upgrade`` should be used over ``upgrade`` |
1495 | + :type dist: bool |
1496 | + :raises: subprocess.CalledProcessError |
1497 | + """ |
1498 | if options is None: |
1499 | options = ['--option=Dpkg::Options::=--force-confold'] |
1500 | |
1501 | @@ -229,7 +308,15 @@ def apt_update(fatal=False): |
1502 | |
1503 | |
1504 | def apt_purge(packages, fatal=False): |
1505 | - """Purge one or more packages.""" |
1506 | + """Purge one or more packages. |
1507 | + |
1508 | + :param packages: Package(s) to purge |
1509 | + :type packages: Option[str, List[str]] |
1510 | + :param fatal: Whether the command's output should be checked and |
1511 | + retried. |
1512 | + :type fatal: bool |
1513 | + :raises: subprocess.CalledProcessError |
1514 | + """ |
1515 | cmd = ['apt-get', '--assume-yes', 'purge'] |
1516 | if isinstance(packages, six.string_types): |
1517 | cmd.append(packages) |
1518 | @@ -239,6 +326,21 @@ def apt_purge(packages, fatal=False): |
1519 | _run_apt_command(cmd, fatal) |
1520 | |
1521 | |
1522 | +def apt_autoremove(purge=True, fatal=False): |
1523 | + """Purge one or more packages. |
1524 | + :param purge: Whether the ``--purge`` option should be passed on or not. |
1525 | + :type purge: bool |
1526 | + :param fatal: Whether the command's output should be checked and |
1527 | + retried. |
1528 | + :type fatal: bool |
1529 | + :raises: subprocess.CalledProcessError |
1530 | + """ |
1531 | + cmd = ['apt-get', '--assume-yes', 'autoremove'] |
1532 | + if purge: |
1533 | + cmd.append('--purge') |
1534 | + _run_apt_command(cmd, fatal) |
1535 | + |
1536 | + |
1537 | def apt_mark(packages, mark, fatal=False): |
1538 | """Flag one or more packages using apt-mark.""" |
1539 | log("Marking {} as {}".format(packages, mark)) |
1540 | @@ -265,13 +367,18 @@ def apt_unhold(packages, fatal=False): |
1541 | def import_key(key): |
1542 | """Import an ASCII Armor key. |
1543 | |
1544 | - /!\ A Radix64 format keyid is also supported for backwards |
1545 | - compatibility, but should never be used; the key retrieval |
1546 | - mechanism is insecure and subject to man-in-the-middle attacks |
1547 | - voiding all signature checks using that key. |
1548 | - |
1549 | - :param keyid: The key in ASCII armor format, |
1550 | - including BEGIN and END markers. |
1551 | + A Radix64 format keyid is also supported for backwards |
1552 | + compatibility. In this case Ubuntu keyserver will be |
1553 | + queried for a key via HTTPS by its keyid. This method |
1602› 1554 | + is less preferable because HTTPS proxy servers may |
1555 | + require traffic decryption which is equivalent to a |
1556 | + man-in-the-middle attack (a proxy server impersonates |
1557 | + keyserver TLS certificates and has to be explicitly |
1558 | + trusted by the system). |
1559 | + |
1560 | + :param key: A GPG key in ASCII armor format, |
1561 | + including BEGIN and END markers or a keyid. |
1562 | + :type key: (bytes, str) |
1563 | :raises: GPGKeyError if the key could not be imported |
1564 | """ |
1565 | key = key.strip() |
1566 | @@ -282,35 +389,131 @@ def import_key(key): |
1567 | log("PGP key found (looks like ASCII Armor format)", level=DEBUG) |
1568 | if ('-----BEGIN PGP PUBLIC KEY BLOCK-----' in key and |
1569 | '-----END PGP PUBLIC KEY BLOCK-----' in key): |
1570 | - log("Importing ASCII Armor PGP key", level=DEBUG) |
1571 | - with NamedTemporaryFile() as keyfile: |
1572 | - with open(keyfile.name, 'w') as fd: |
1573 | - fd.write(key) |
1574 | - fd.write("\n") |
1575 | - cmd = ['apt-key', 'add', keyfile.name] |
1576 | - try: |
1577 | - subprocess.check_call(cmd) |
1578 | - except subprocess.CalledProcessError: |
1579 | - error = "Error importing PGP key '{}'".format(key) |
1580 | - log(error) |
1581 | - raise GPGKeyError(error) |
1582 | + log("Writing provided PGP key in the binary format", level=DEBUG) |
1583 | + if six.PY3: |
1584 | + key_bytes = key.encode('utf-8') |
1585 | + else: |
1586 | + key_bytes = key |
1587 | + key_name = _get_keyid_by_gpg_key(key_bytes) |
1588 | + key_gpg = _dearmor_gpg_key(key_bytes) |
1589 | + _write_apt_gpg_keyfile(key_name=key_name, key_material=key_gpg) |
1590 | else: |
1591 | raise GPGKeyError("ASCII armor markers missing from GPG key") |
1592 | else: |
1593 | - # We should only send things obviously not a keyid offsite |
1594 | - # via this unsecured protocol, as it may be a secret or part |
1595 | - # of one. |
1596 | log("PGP key found (looks like Radix64 format)", level=WARNING) |
1597 | - log("INSECURLY importing PGP key from keyserver; " |
1598 | + log("SECURELY importing PGP key from keyserver; " |
1599 | "full key not provided.", level=WARNING) |
1600 | - cmd = ['apt-key', 'adv', '--keyserver', |
1601 | - 'hkp://keyserver.ubuntu.com:80', '--recv-keys', key] |
1602 | - try: |
1603 | - subprocess.check_call(cmd) |
1604 | - except subprocess.CalledProcessError: |
1605 | - error = "Error importing PGP key '{}'".format(key) |
1606 | - log(error) |
1607 | - raise GPGKeyError(error) |
1608 | + # as of bionic add-apt-repository uses curl with an HTTPS keyserver URL |
1609 | + # to retrieve GPG keys. `apt-key adv` command is deprecated as is |
1610 | + # apt-key in general as noted in its manpage. See lp:1433761 for more |
1611 | + # history. Instead, /etc/apt/trusted.gpg.d is used directly to drop |
1612 | + # gpg |
1613 | + key_asc = _get_key_by_keyid(key) |
1614 | + # write the key in GPG format so that apt-key list shows it |
1615 | + key_gpg = _dearmor_gpg_key(key_asc) |
1616 | + _write_apt_gpg_keyfile(key_name=key, key_material=key_gpg) |
1617 | + |
1618 | + |
1619 | +def _get_keyid_by_gpg_key(key_material): |
1620 | + """Get a GPG key fingerprint by GPG key material. |
1621 | + Gets a GPG key fingerprint (40-digit, 160-bit) by the ASCII armor-encoded |
1622 | + or binary GPG key material. Can be used, for example, to generate file |
1623 | + names for keys passed via charm options. |
1624 | + |
1625 | + :param key_material: ASCII armor-encoded or binary GPG key material |
1626 | + :type key_material: bytes |
1627 | + :raises: GPGKeyError if invalid key material has been provided |
1628 | + :returns: A GPG key fingerprint |
1629 | + :rtype: str |
1630 | + """ |
1631 | + # Use the same gpg command for both Xenial and Bionic |
1632 | + cmd = 'gpg --with-colons --with-fingerprint' |
1633 | + ps = subprocess.Popen(cmd.split(), |
1634 | + stdout=subprocess.PIPE, |
1635 | + stderr=subprocess.PIPE, |
1636 | + stdin=subprocess.PIPE) |
1637 | + out, err = ps.communicate(input=key_material) |
1638 | + if six.PY3: |
1639 | + out = out.decode('utf-8') |
1640 | + err = err.decode('utf-8') |
1641 | + if 'gpg: no valid OpenPGP data found.' in err: |
1642 | + raise GPGKeyError('Invalid GPG key material provided') |
1643 | + # from gnupg2 docs: fpr :: Fingerprint (fingerprint is in field 10) |
1644 | + return re.search(r"^fpr:{9}([0-9A-F]{40}):$", out, re.MULTILINE).group(1) |
1645 | + |
1646 | + |
1647 | +def _get_key_by_keyid(keyid): |
1648 | + """Get a key via HTTPS from the Ubuntu keyserver. |
1649 | + Different key ID formats are supported by SKS keyservers (the longer ones |
1650 | + are more secure, see "dead beef attack" and https://evil32.com/). Since |
1651 | + HTTPS is used, if SSLBump-like HTTPS proxies are in place, they will |
1652 | + impersonate keyserver.ubuntu.com and generate a certificate with |
1653 | + keyserver.ubuntu.com in the CN field or in SubjAltName fields of a |
1654 | + certificate. If such proxy behavior is expected it is necessary to add the |
1655 | + CA certificate chain containing the intermediate CA of the SSLBump proxy to |
1656 | + every machine that this code runs on via ca-certs cloud-init directive (via |
1657 | + cloudinit-userdata model-config) or via other means (such as through a |
1658 | + custom charm option). Also note that DNS resolution for the hostname in a |
1659 | + URL is done at a proxy server - not at the client side. |
1660 | + |
1661 | + 8-digit (32 bit) key ID |
1662 | + https://keyserver.ubuntu.com/pks/lookup?search=0x4652B4E6 |
1663 | + 16-digit (64 bit) key ID |
1664 | + https://keyserver.ubuntu.com/pks/lookup?search=0x6E85A86E4652B4E6 |
1665 | + 40-digit key ID: |
1666 | + https://keyserver.ubuntu.com/pks/lookup?search=0x35F77D63B5CEC106C577ED856E85A86E4652B4E6 |
1667 | + |
1668 | + :param keyid: An 8, 16 or 40 hex digit keyid to find a key for |
1669 | + :type keyid: (bytes, str) |
1670 | + :returns: A key material for the specified GPG key id |
1671 | + :rtype: (str, bytes) |
1672 | + :raises: subprocess.CalledProcessError |
1673 | + """ |
1674 | + # options=mr - machine-readable output (disables html wrappers) |
1675 | + keyserver_url = ('https://keyserver.ubuntu.com' |
1676 | + '/pks/lookup?op=get&options=mr&exact=on&search=0x{}') |
1677 | + curl_cmd = ['curl', keyserver_url.format(keyid)] |
1678 | + # use proxy server settings in order to retrieve the key |
1679 | + return subprocess.check_output(curl_cmd, |
1680 | + env=env_proxy_settings(['https'])) |
1681 | + |
1682 | + |
1683 | +def _dearmor_gpg_key(key_asc): |
1684 | + """Converts a GPG key in the ASCII armor format to the binary format. |
1685 | + |
1686 | + :param key_asc: A GPG key in ASCII armor format. |
1687 | + :type key_asc: (str, bytes) |
1688 | + :returns: A GPG key in binary format |
1689 | + :rtype: (str, bytes) |
1690 | + :raises: GPGKeyError |
1691 | + """ |
1692 | + ps = subprocess.Popen(['gpg', '--dearmor'], |
1693 | + stdout=subprocess.PIPE, |
1694 | + stderr=subprocess.PIPE, |
1695 | + stdin=subprocess.PIPE) |
1696 | + out, err = ps.communicate(input=key_asc) |
1697 | + # no need to decode output as it is binary (invalid utf-8), only error |
1698 | + if six.PY3: |
1699 | + err = err.decode('utf-8') |
1700 | + if 'gpg: no valid OpenPGP data found.' in err: |
1701 | + raise GPGKeyError('Invalid GPG key material. Check your network setup' |
1702 | + ' (MTU, routing, DNS) and/or proxy server settings' |
1703 | + ' as well as destination keyserver status.') |
1704 | + else: |
1705 | + return out |
1706 | + |
1707 | + |
1708 | +def _write_apt_gpg_keyfile(key_name, key_material): |
1709 | + """Writes GPG key material into a file at a provided path. |
1710 | + |
1711 | + :param key_name: A key name to use for a key file (could be a fingerprint) |
1712 | + :type key_name: str |
1713 | + :param key_material: A GPG key material (binary) |
1714 | + :type key_material: (str, bytes) |
1715 | + """ |
1716 | + with open('/etc/apt/trusted.gpg.d/{}.gpg'.format(key_name), |
1717 | + 'wb') as keyf: |
1718 | + keyf.write(key_material) |
1719 | |
1720 | |
1721 | def add_source(source, key=None, fail_invalid=False): |
1722 | @@ -385,14 +588,16 @@ def add_source(source, key=None, fail_invalid=False): |
1723 | for r, fn in six.iteritems(_mapping): |
1724 | m = re.match(r, source) |
1725 | if m: |
1726 | - # call the assoicated function with the captured groups |
1727 | - # raises SourceConfigError on error. |
1728 | - fn(*m.groups()) |
1729 | if key: |
1730 | + # Import key before adding the source which depends on it, |
1731 | + # as refreshing packages could fail otherwise. |
1732 | try: |
1733 | import_key(key) |
1734 | except GPGKeyError as e: |
1735 | raise SourceConfigError(str(e)) |
1736 | + # call the associated function with the captured groups |
1737 | + # raises SourceConfigError on error. |
1738 | + fn(*m.groups()) |
1739 | break |
1740 | else: |
1741 | # nothing matched. log an error and maybe sys.exit |
1742 | @@ -405,13 +610,13 @@ def add_source(source, key=None, fail_invalid=False): |
1743 | def _add_proposed(): |
1744 | """Add the PROPOSED_POCKET as /etc/apt/source.list.d/proposed.list |
1745 | |
1746 | - Uses lsb_release()['DISTRIB_CODENAME'] to determine the correct staza for |
1747 | + Uses get_distrib_codename to determine the correct stanza for |
1748 | the deb line. |
1749 | |
1750 | For intel architecutres PROPOSED_POCKET is used for the release, but for |
1751 | other architectures PROPOSED_PORTS_POCKET is used for the release. |
1752 | """ |
1753 | - release = lsb_release()['DISTRIB_CODENAME'] |
1754 | + release = get_distrib_codename() |
1755 | arch = platform.machine() |
1756 | if arch not in six.iterkeys(ARCH_TO_PROPOSED_POCKET): |
1757 | raise SourceConfigError("Arch {} not supported for (distro-)proposed" |
1758 | @@ -424,8 +629,16 @@ def _add_apt_repository(spec): |
1759 | """Add the spec using add_apt_repository |
1760 | |
1761 | :param spec: the parameter to pass to add_apt_repository |
1762 | + :type spec: str |
1763 | """ |
1764 | - _run_with_retries(['add-apt-repository', '--yes', spec]) |
1765 | + if '{series}' in spec: |
1766 | + series = get_distrib_codename() |
1767 | + spec = spec.replace('{series}', series) |
1768 | + # software-properties package for bionic properly reacts to proxy settings |
1769 | + # passed as environment variables (See lp:1433761). This is not the case |
1818› 1770 | + # for LTS and non-LTS releases below bionic. |
1771 | + _run_with_retries(['add-apt-repository', '--yes', spec], |
1772 | + cmd_env=env_proxy_settings(['https'])) |
1773 | |
1774 | |
1775 | def _add_cloud_pocket(pocket): |
1776 | @@ -494,7 +707,7 @@ def _verify_is_ubuntu_rel(release, os_release): |
1777 | :raises: SourceConfigError if the release is not the same as the ubuntu |
1778 | release. |
1779 | """ |
1780 | - ubuntu_rel = lsb_release()['DISTRIB_CODENAME'] |
1781 | + ubuntu_rel = get_distrib_codename() |
1782 | if release != ubuntu_rel: |
1783 | raise SourceConfigError( |
1784 | 'Invalid Cloud Archive release specified: {}-{} on this Ubuntu' |
1785 | @@ -505,21 +718,22 @@ def _run_with_retries(cmd, max_retries=CMD_RETRY_COUNT, retry_exitcodes=(1,), |
1786 | retry_message="", cmd_env=None): |
1787 | """Run a command and retry until success or max_retries is reached. |
1788 | |
1789 | - :param: cmd: str: The apt command to run. |
1790 | - :param: max_retries: int: The number of retries to attempt on a fatal |
1791 | - command. Defaults to CMD_RETRY_COUNT. |
1792 | - :param: retry_exitcodes: tuple: Optional additional exit codes to retry. |
1793 | - Defaults to retry on exit code 1. |
1794 | - :param: retry_message: str: Optional log prefix emitted during retries. |
1795 | - :param: cmd_env: dict: Environment variables to add to the command run. |
1796 | + :param cmd: The apt command to run. |
1797 | + :type cmd: str |
1798 | + :param max_retries: The number of retries to attempt on a fatal |
1799 | + command. Defaults to CMD_RETRY_COUNT. |
1800 | + :type max_retries: int |
1801 | + :param retry_exitcodes: Optional additional exit codes to retry. |
1802 | + Defaults to retry on exit code 1. |
1803 | + :type retry_exitcodes: tuple |
1804 | + :param retry_message: Optional log prefix emitted during retries. |
1805 | + :type retry_message: str |
1854› 1806 | + :param cmd_env: Environment variables to add to the command run. |
1855› 1807 | + :type cmd_env: Optional[Dict[str, str]] |
1808 | """ |
1809 | - |
1810 | - env = None |
1811 | - kwargs = {} |
1812 | + env = get_apt_dpkg_env() |
1813 | if cmd_env: |
1814 | - env = os.environ.copy() |
1815 | env.update(cmd_env) |
1816 | - kwargs['env'] = env |
1817 | |
1818 | if not retry_message: |
1819 | retry_message = "Failed executing '{}'".format(" ".join(cmd)) |
1820 | @@ -531,8 +745,7 @@ def _run_with_retries(cmd, max_retries=CMD_RETRY_COUNT, retry_exitcodes=(1,), |
1821 | retry_results = (None,) + retry_exitcodes |
1822 | while result in retry_results: |
1823 | try: |
1824 | - # result = subprocess.check_call(cmd, env=env) |
1825 | - result = subprocess.check_call(cmd, **kwargs) |
1826 | + result = subprocess.check_call(cmd, env=env) |
1827 | except subprocess.CalledProcessError as e: |
1828 | retry_count = retry_count + 1 |
1829 | if retry_count > max_retries: |
1830 | @@ -545,22 +758,18 @@ def _run_with_retries(cmd, max_retries=CMD_RETRY_COUNT, retry_exitcodes=(1,), |
1831 | def _run_apt_command(cmd, fatal=False): |
1832 | """Run an apt command with optional retries. |
1833 | |
1834 | - :param: cmd: str: The apt command to run. |
1835 | - :param: fatal: bool: Whether the command's output should be checked and |
1836 | - retried. |
1837 | + :param cmd: The apt command to run. |
1838 | + :type cmd: str |
1839 | + :param fatal: Whether the command's output should be checked and |
1840 | + retried. |
1841 | + :type fatal: bool |
1842 | """ |
1843 | - # Provide DEBIAN_FRONTEND=noninteractive if not present in the environment. |
1844 | - cmd_env = { |
1845 | - 'DEBIAN_FRONTEND': os.environ.get('DEBIAN_FRONTEND', 'noninteractive')} |
1846 | - |
1847 | if fatal: |
1848 | _run_with_retries( |
1849 | - cmd, cmd_env=cmd_env, retry_exitcodes=(1, APT_NO_LOCK,), |
1850 | + cmd, retry_exitcodes=(1, APT_NO_LOCK,), |
1851 | retry_message="Couldn't acquire DPKG lock") |
1852 | else: |
1853 | - env = os.environ.copy() |
1854 | - env.update(cmd_env) |
1855 | - subprocess.call(cmd, env=env) |
1856 | + subprocess.call(cmd, env=get_apt_dpkg_env()) |
1857 | |
1858 | |
1859 | def get_upstream_version(package): |
1860 | @@ -568,7 +777,6 @@ def get_upstream_version(package): |
1861 | |
1862 | @returns None (if not installed) or the upstream version |
1863 | """ |
1864 | - import apt_pkg |
1865 | cache = apt_cache() |
1866 | try: |
1867 | pkg = cache[package] |
1868 | @@ -580,4 +788,18 @@ def get_upstream_version(package): |
1869 | # package is known, but no version is currently installed. |
1870 | return None |
1871 | |
1872 | - return apt_pkg.upstream_version(pkg.current_ver.ver_str) |
1873 | + return ubuntu_apt_pkg.upstream_version(pkg.current_ver.ver_str) |
1874 | + |
1875 | + |
1876 | +def get_apt_dpkg_env(): |
1877 | + """Get environment suitable for execution of APT and DPKG tools. |
1878 | + |
1879 | + We keep this in a helper function instead of in a global constant to |
1880 | + avoid execution on import of the library. |
1881 | + :returns: Environment suitable for execution of APT and DPKG tools. |
1882 | + :rtype: Dict[str, str] |
1883 | + """ |
1884 | + # The fallback is used in the event of ``/etc/environment`` not containing |
1933› 1885 | + # a valid PATH variable. |
1886 | + return {'DEBIAN_FRONTEND': 'noninteractive', |
1887 | + 'PATH': get_system_env('PATH', '/usr/sbin:/usr/bin:/sbin:/bin')} |
1888 | diff --git a/hooks/charmhelpers/fetch/ubuntu_apt_pkg.py b/hooks/charmhelpers/fetch/ubuntu_apt_pkg.py |
1889 | new file mode 100644 |
1890 | index 0000000..929a75d |
1891 | --- /dev/null |
1892 | +++ b/hooks/charmhelpers/fetch/ubuntu_apt_pkg.py |
1893 | @@ -0,0 +1,267 @@ |
1894 | +# Copyright 2019 Canonical Ltd |
1895 | +# |
1896 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
1897 | +# you may not use this file except in compliance with the License. |
1898 | +# You may obtain a copy of the License at |
1899 | +# |
1900 | +# http://www.apache.org/licenses/LICENSE-2.0 |
1901 | +# |
1902 | +# Unless required by applicable law or agreed to in writing, software |
1903 | +# distributed under the License is distributed on an "AS IS" BASIS, |
1904 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
1905 | +# See the License for the specific language governing permissions and |
1906 | +# limitations under the License. |
1907 | + |
1908 | +"""Provide a subset of the ``python-apt`` module API. |
1909 | + |
1910 | +Data collection is done through subprocess calls to ``apt-cache`` and |
1911 | +``dpkg-query`` commands. |
1912 | + |
1913 | +The main purpose for this module is to avoid dependency on the |
1914 | +``python-apt`` python module. |
1915 | + |
1916 | +The indicated python module is a wrapper around the ``apt`` C++ library |
1917 | +which is tightly connected to the version of the distribution it was |
1918 | +shipped on. It is not developed in a backward/forward compatible manner. |
1919 | + |
1920 | +This in turn makes it incredibly hard to distribute as a wheel for a piece |
1921 | +of python software that supports a span of distro releases [0][1]. |
1922 | + |
1923 | +Upstream feedback like [2] does not give confidence in this ever changing, |
1924 | +so with this we get rid of the dependency. |
1925 | + |
1926 | +0: https://github.com/juju-solutions/layer-basic/pull/135 |
1927 | +1: https://bugs.launchpad.net/charm-octavia/+bug/1824112 |
1928 | +2: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=845330#10 |
1929 | +""" |
1930 | + |
1931 | +import locale |
1932 | +import os |
1933 | +import subprocess |
1934 | +import sys |
1935 | + |
1936 | + |
1937 | +class _container(dict): |
1938 | + """Simple container for attributes.""" |
1939 | + __getattr__ = dict.__getitem__ |
1940 | + __setattr__ = dict.__setitem__ |
1941 | + |
1942 | + |
1943 | +class Package(_container): |
1944 | + """Simple container for package attributes.""" |
1945 | + |
1946 | + |
1947 | +class Version(_container): |
1948 | + """Simple container for version attributes.""" |
1949 | + |
1950 | + |
1951 | +class Cache(object): |
1952 | + """Simulation of ``apt_pkg`` Cache object.""" |
1953 | + def __init__(self, progress=None): |
1954 | + pass |
1955 | + |
1956 | + def __contains__(self, package): |
1957 | + try: |
1958 | + pkg = self.__getitem__(package) |
1959 | + return pkg is not None |
1960 | + except KeyError: |
1961 | + return False |
1962 | + |
1963 | + def __getitem__(self, package): |
1964 | + """Get information about a package from apt and dpkg databases. |
1965 | + |
1966 | + :param package: Name of package |
1967 | + :type package: str |
1968 | + :returns: Package object |
1969 | + :rtype: object |
1970 | + :raises: KeyError, subprocess.CalledProcessError |
1971 | + """ |
1972 | + apt_result = self._apt_cache_show([package])[package] |
1973 | + apt_result['name'] = apt_result.pop('package') |
1974 | + pkg = Package(apt_result) |
1975 | + dpkg_result = self._dpkg_list([package]).get(package, {}) |
1976 | + current_ver = None |
1977 | + installed_version = dpkg_result.get('version') |
1978 | + if installed_version: |
1979 | + current_ver = Version({'ver_str': installed_version}) |
1980 | + pkg.current_ver = current_ver |
1981 | + pkg.architecture = dpkg_result.get('architecture') |
1982 | + return pkg |
1983 | + |
1984 | + def _dpkg_list(self, packages): |
1985 | + """Get data from system dpkg database for package. |
1986 | + |
1987 | + :param packages: Packages to get data from |
1988 | + :type packages: List[str] |
1989 | + :returns: Structured data about installed packages, keys like |
1990 | + ``dpkg-query --list`` |
1991 | + :rtype: dict |
1992 | + :raises: subprocess.CalledProcessError |
1993 | + """ |
1994 | + pkgs = {} |
1995 | + cmd = ['dpkg-query', '--list'] |
1996 | + cmd.extend(packages) |
1997 | + if locale.getlocale() == (None, None): |
1998 | + # subprocess calls out to locale.getpreferredencoding(False) to |
1999 | + # determine encoding. Workaround for Trusty where the |
2000 | + # environment appears to not be set up correctly. |
2001 | + locale.setlocale(locale.LC_ALL, 'en_US.UTF-8') |
2002 | + try: |
2003 | + output = subprocess.check_output(cmd, |
2004 | + stderr=subprocess.STDOUT, |
2005 | + universal_newlines=True) |
2006 | + except subprocess.CalledProcessError as cp: |
2007 | + # ``dpkg-query`` may return error and at the same time have |
2008 | + # produced useful output, for example when asked for multiple |
2009 | + # packages where some are not installed |
2010 | + if cp.returncode != 1: |
2011 | + raise |
2012 | + output = cp.output |
2013 | + headings = [] |
2014 | + for line in output.splitlines(): |
2015 | + if line.startswith('||/'): |
2016 | + headings = line.split() |
2017 | + headings.pop(0) |
2018 | + continue |
2019 | + elif (line.startswith('|') or line.startswith('+') or |
2020 | + line.startswith('dpkg-query:')): |
2021 | + continue |
2022 | + else: |
2023 | + data = line.split(None, 4) |
2024 | + status = data.pop(0) |
2025 | + if status != 'ii': |
2026 | + continue |
2027 | + pkg = {} |
2028 | + pkg.update({k.lower(): v for k, v in zip(headings, data)}) |
2029 | + if 'name' in pkg: |
2030 | + pkgs.update({pkg['name']: pkg}) |
2031 | + return pkgs |
2032 | + |
2033 | + def _apt_cache_show(self, packages): |
2034 | + """Get data from system apt cache for package. |
2035 | + |
2036 | + :param packages: Packages to get data from |
2037 | + :type packages: List[str] |
2038 | + :returns: Structured data about package, keys like |
2039 | + ``apt-cache show`` |
2040 | + :rtype: dict |
2041 | + :raises: subprocess.CalledProcessError |
2042 | + """ |
2043 | + pkgs = {} |
2044 | + cmd = ['apt-cache', 'show', '--no-all-versions'] |
2045 | + cmd.extend(packages) |
2046 | + if locale.getlocale() == (None, None): |
2047 | + # subprocess calls out to locale.getpreferredencoding(False) to |
2048 | + # determine encoding. Workaround for Trusty where the |
2049 | + # environment appears to not be set up correctly. |
2050 | + locale.setlocale(locale.LC_ALL, 'en_US.UTF-8') |
2051 | + try: |
2052 | + output = subprocess.check_output(cmd, |
2053 | + stderr=subprocess.STDOUT, |
2054 | + universal_newlines=True) |
2055 | + previous = None |
2056 | + pkg = {} |
2057 | + for line in output.splitlines(): |
2058 | + if not line: |
2059 | + if 'package' in pkg: |
2060 | + pkgs.update({pkg['package']: pkg}) |
2061 | + pkg = {} |
2062 | + continue |
2063 | + if line.startswith(' '): |
2064 | + if previous and previous in pkg: |
2065 | + pkg[previous] += os.linesep + line.lstrip() |
2066 | + continue |
2067 | + if ':' in line: |
2068 | + kv = line.split(':', 1) |
2069 | + key = kv[0].lower() |
2070 | + if key == 'n': |
2071 | + continue |
2072 | + previous = key |
2073 | + pkg.update({key: kv[1].lstrip()}) |
2074 | + except subprocess.CalledProcessError as cp: |
2075 | + # ``apt-cache`` returns 100 if none of the packages asked for |
2076 | + # exist in the apt cache. |
2077 | + if cp.returncode != 100: |
2078 | + raise |
2079 | + return pkgs |
2080 | + |
2081 | + |
2082 | +class Config(_container): |
2083 | + def __init__(self): |
2084 | + super(Config, self).__init__(self._populate()) |
2085 | + |
2086 | + def _populate(self): |
2087 | + cfgs = {} |
2088 | + cmd = ['apt-config', 'dump'] |
2089 | + output = subprocess.check_output(cmd, |
2090 | + stderr=subprocess.STDOUT, |
2091 | + universal_newlines=True) |
2092 | + for line in output.splitlines(): |
2093 | + if not line.startswith("CommandLine"): |
2094 | + k, v = line.split(" ", 1) |
2095 | + cfgs[k] = v.strip(";").strip("\"") |
2096 | + |
2097 | + return cfgs |
2098 | + |
2099 | + |
2100 | +# Backwards compatibility with old apt_pkg module |
2101 | +sys.modules[__name__].config = Config() |
2102 | + |
2103 | + |
2104 | +def init(): |
2153› 2105 | + """Compatibility shim that does nothing.""" |
2106 | + pass |
2107 | + |
2108 | + |
2109 | +def upstream_version(version): |
2110 | + """Extracts upstream version from a version string. |
2111 | + |
2112 | + Upstream reference: https://salsa.debian.org/apt-team/apt/blob/master/ |
2113 | + apt-pkg/deb/debversion.cc#L259 |
2114 | + |
2115 | + :param version: Version string |
2116 | + :type version: str |
2117 | + :returns: Upstream version |
2118 | + :rtype: str |
2119 | + """ |
2120 | + if version: |
2121 | + version = version.split(':')[-1] |
2122 | + version = version.split('-')[0] |
2123 | + return version |
2124 | + |
2125 | + |
2126 | +def version_compare(a, b): |
2127 | + """Compare the given versions. |
2128 | + |
2129 | + Call out to ``dpkg`` to make sure the code doing the comparison is |
2130 | + compatible with what the ``apt`` library would do. Mimic the return |
2131 | + values. |
2132 | + |
2133 | + Upstream reference: |
2134 | + https://apt-team.pages.debian.net/python-apt/library/apt_pkg.html |
2135 | + ?highlight=version_compare#apt_pkg.version_compare |
2136 | + |
2137 | + :param a: version string |
2138 | + :type a: str |
2139 | + :param b: version string |
2140 | + :type b: str |
2141 | + :returns: >0 if ``a`` is greater than ``b``, 0 if a equals b, |
2142 | + <0 if ``a`` is smaller than ``b`` |
2143 | + :rtype: int |
2144 | + :raises: subprocess.CalledProcessError, RuntimeError |
2145 | + """ |
2146 | + for op in ('gt', 1), ('eq', 0), ('lt', -1): |
2147 | + try: |
2148 | + subprocess.check_call(['dpkg', '--compare-versions', |
2149 | + a, op[0], b], |
2150 | + stderr=subprocess.STDOUT, |
2151 | + universal_newlines=True) |
2152 | + return op[1] |
2153 | + except subprocess.CalledProcessError as cp: |
2154 | + if cp.returncode == 1: |
2155 | + continue |
2156 | + raise |
2157 | + else: |
2158 | + raise RuntimeError('Unable to compare "{}" and "{}", according to ' |
2159 | + 'our logic they are neither greater, equal nor ' |
2208› 2160 | + 'less than each other.'.format(a, b)) |
2161 | diff --git a/hooks/charmhelpers/osplatform.py b/hooks/charmhelpers/osplatform.py |
2162 | index d9a4d5c..78c81af 100644 |
2163 | --- a/hooks/charmhelpers/osplatform.py |
2164 | +++ b/hooks/charmhelpers/osplatform.py |
2165 | @@ -1,4 +1,5 @@ |
2166 | import platform |
2167 | +import os |
2168 | |
2169 | |
2170 | def get_platform(): |
2171 | @@ -9,9 +10,13 @@ def get_platform(): |
2172 | This string is used to decide which platform module should be imported. |
2173 | """ |
2174 | # linux_distribution is deprecated and will be removed in Python 3.7 |
2175 | - # Warings *not* disabled, as we certainly need to fix this. |
2176 | - tuple_platform = platform.linux_distribution() |
2177 | - current_platform = tuple_platform[0] |
2178 | + # Warnings *not* disabled, as we certainly need to fix this. |
2179 | + if hasattr(platform, 'linux_distribution'): |
2180 | + tuple_platform = platform.linux_distribution() |
2181 | + current_platform = tuple_platform[0] |
2182 | + else: |
2183 | + current_platform = _get_platform_from_fs() |
2184 | + |
2185 | if "Ubuntu" in current_platform: |
2186 | return "ubuntu" |
2187 | elif "CentOS" in current_platform: |
2188 | @@ -20,6 +25,22 @@ def get_platform(): |
2189 | # Stock Python does not detect Ubuntu and instead returns debian. |
2190 | # Or at least it does in some build environments like Travis CI |
2191 | return "ubuntu" |
2192 | + elif "elementary" in current_platform: |
2193 | + # ElementaryOS fails to run tests locally without this. |
2194 | + return "ubuntu" |
2195 | else: |
2196 | raise RuntimeError("This module is not supported on {}." |
2197 | .format(current_platform)) |
2198 | + |
2199 | + |
2200 | +def _get_platform_from_fs(): |
2201 | + """Get Platform from /etc/os-release.""" |
2202 | + with open(os.path.join(os.sep, 'etc', 'os-release')) as fin: |
2203 | + content = dict( |
2204 | + line.split('=', 1) |
2205 | + for line in fin.read().splitlines() |
2206 | + if '=' in line |
2207 | + ) |
2208 | + for k, v in content.items(): |
2209 | + content[k] = v.strip('"') |
2210 | + return content["NAME"] |
2211 | diff --git a/tests/charmhelpers/__init__.py b/tests/charmhelpers/__init__.py |
2212 | index e7aa471..61ef907 100644 |
2213 | --- a/tests/charmhelpers/__init__.py |
2214 | +++ b/tests/charmhelpers/__init__.py |
2215 | @@ -23,22 +23,22 @@ import subprocess |
2216 | import sys |
2217 | |
2218 | try: |
2219 | - import six # flake8: noqa |
2220 | + import six # NOQA:F401 |
2221 | except ImportError: |
2222 | if sys.version_info.major == 2: |
2223 | subprocess.check_call(['apt-get', 'install', '-y', 'python-six']) |
2224 | else: |
2225 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-six']) |
2226 | - import six # flake8: noqa |
2227 | + import six # NOQA:F401 |
2228 | |
2229 | try: |
2230 | - import yaml # flake8: noqa |
2231 | + import yaml # NOQA:F401 |
2232 | except ImportError: |
2233 | if sys.version_info.major == 2: |
2234 | subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml']) |
2235 | else: |
2236 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml']) |
2237 | - import yaml # flake8: noqa |
2238 | + import yaml # NOQA:F401 |
2239 | |
2240 | |
2241 | # Holds a list of mapping of mangled function names that have been deprecated |
2242 | diff --git a/tests/charmhelpers/contrib/amulet/deployment.py b/tests/charmhelpers/contrib/amulet/deployment.py |
2243 | index 9c65518..d21d01d 100644 |
2244 | --- a/tests/charmhelpers/contrib/amulet/deployment.py |
2245 | +++ b/tests/charmhelpers/contrib/amulet/deployment.py |
2246 | @@ -50,7 +50,8 @@ class AmuletDeployment(object): |
2247 | this_service['units'] = 1 |
2248 | |
2249 | self.d.add(this_service['name'], units=this_service['units'], |
2250 | - constraints=this_service.get('constraints')) |
2251 | + constraints=this_service.get('constraints'), |
2252 | + storage=this_service.get('storage')) |
2253 | |
2254 | for svc in other_services: |
2255 | if 'location' in svc: |
2256 | @@ -64,7 +65,8 @@ class AmuletDeployment(object): |
2257 | svc['units'] = 1 |
2258 | |
2259 | self.d.add(svc['name'], charm=branch_location, units=svc['units'], |
2260 | - constraints=svc.get('constraints')) |
2261 | + constraints=svc.get('constraints'), |
2262 | + storage=svc.get('storage')) |
2263 | |
2264 | def _add_relations(self, relations): |
2265 | """Add all of the relations for the services.""" |
2266 | diff --git a/tests/charmhelpers/contrib/amulet/utils.py b/tests/charmhelpers/contrib/amulet/utils.py |
2267 | index 8a6b764..5428308 100644 |
2268 | --- a/tests/charmhelpers/contrib/amulet/utils.py |
2269 | +++ b/tests/charmhelpers/contrib/amulet/utils.py |
2270 | @@ -88,7 +88,7 @@ class AmuletUtils(object): |
2271 | """ |
2272 | msg = None |
2273 | cmd = 'lsb_release -cs' |
2274 | - release, code = sentry_unit.run(cmd) |
2275 | + release, code = sentry_unit.ssh(cmd) |
2276 | if code == 0: |
2277 | self.log.debug('{} lsb_release: {}'.format( |
2278 | sentry_unit.info['unit_name'], release)) |
2279 | @@ -282,14 +282,8 @@ class AmuletUtils(object): |
2280 | :param sentry_units: list of sentry unit pointers |
2281 | :returns: None if successful; Failure message otherwise |
2282 | """ |
2283 | - if pgrep_full is not None: |
2284 | - # /!\ DEPRECATION WARNING (beisner): |
2285 | - # No longer implemented, as pidof is now used instead of pgrep. |
2286 | - # https://bugs.launchpad.net/charm-helpers/+bug/1474030 |
2287 | - self.log.warn('DEPRECATION WARNING: pgrep_full bool is no ' |
2288 | - 'longer implemented re: lp 1474030.') |
2289 | - |
2290 | - pid_list = self.get_process_id_list(sentry_unit, service) |
2291 | + pid_list = self.get_process_id_list( |
2292 | + sentry_unit, service, pgrep_full=pgrep_full) |
2293 | pid = pid_list[0] |
2294 | proc_dir = '/proc/{}'.format(pid) |
2295 | self.log.debug('Pid for {} on {}: {}'.format( |
2296 | @@ -537,7 +531,7 @@ class AmuletUtils(object): |
2297 | return None |
2298 | |
2299 | def get_process_id_list(self, sentry_unit, process_name, |
2300 | - expect_success=True): |
2301 | + expect_success=True, pgrep_full=False): |
2302 | """Get a list of process ID(s) from a single sentry juju unit |
2303 | for a single process name. |
2304 | |
2305 | @@ -547,7 +541,10 @@ class AmuletUtils(object): |
2306 | raise if it is present. |
2307 | :returns: List of process IDs |
2308 | """ |
2309 | - cmd = 'pidof -x "{}"'.format(process_name) |
2310 | + if pgrep_full: |
2311 | + cmd = 'pgrep -f "{}"'.format(process_name) |
2312 | + else: |
2313 | + cmd = 'pidof -x "{}"'.format(process_name) |
2314 | if not expect_success: |
2315 | cmd += " || exit 0 && exit 1" |
2316 | output, code = sentry_unit.run(cmd) |
2317 | @@ -558,7 +555,8 @@ class AmuletUtils(object): |
2318 | amulet.raise_status(amulet.FAIL, msg=msg) |
2319 | return str(output).split() |
2320 | |
2321 | - def get_unit_process_ids(self, unit_processes, expect_success=True): |
2322 | + def get_unit_process_ids( |
2323 | + self, unit_processes, expect_success=True, pgrep_full=False): |
2324 | """Construct a dict containing unit sentries, process names, and |
2325 | process IDs. |
2326 | |
2327 | @@ -574,7 +572,8 @@ class AmuletUtils(object): |
2328 | pid_dict[sentry_unit] = {} |
2329 | for process in process_list: |
2330 | pids = self.get_process_id_list( |
2331 | - sentry_unit, process, expect_success=expect_success) |
2332 | + sentry_unit, process, expect_success=expect_success, |
2333 | + pgrep_full=pgrep_full) |
2334 | pid_dict[sentry_unit].update({process: pids}) |
2335 | return pid_dict |
2336 |
This has been tested to work on focal (Ubuntu 20.04).