Merge lp:~simpoir/landscape-charm/charmhelpers-and-keys into lp:~landscape/landscape-charm/trunk
- charmhelpers-and-keys
- Merge into trunk
Status: | Merged |
---|---|
Approved by: | Simon Poirier |
Approved revision: | 397 |
Merged at revision: | 397 |
Proposed branch: | lp:~simpoir/landscape-charm/charmhelpers-and-keys |
Merge into: | lp:~landscape/landscape-charm/trunk |
Diff against target: |
2745 lines (+1661/-199) 26 files modified
Makefile (+1/-2) charmhelpers/__init__.py (+65/-4) charmhelpers/contrib/hahelpers/apache.py (+5/-14) charmhelpers/contrib/hahelpers/cluster.py (+43/-0) charmhelpers/core/hookenv.py (+450/-28) charmhelpers/core/host.py (+166/-11) charmhelpers/core/host_factory/ubuntu.py (+26/-0) charmhelpers/core/kernel.py (+2/-2) charmhelpers/core/services/base.py (+18/-7) charmhelpers/core/strutils.py (+11/-5) charmhelpers/core/sysctl.py (+21/-10) charmhelpers/core/templating.py (+18/-9) charmhelpers/core/unitdata.py (+8/-1) charmhelpers/fetch/__init__.py (+19/-9) charmhelpers/fetch/archiveurl.py (+1/-1) charmhelpers/fetch/bzrurl.py (+2/-2) charmhelpers/fetch/centos.py (+1/-1) charmhelpers/fetch/giturl.py (+2/-2) charmhelpers/fetch/python/__init__.py (+13/-0) charmhelpers/fetch/python/debug.py (+54/-0) charmhelpers/fetch/python/packages.py (+154/-0) charmhelpers/fetch/python/rpdb.py (+56/-0) charmhelpers/fetch/python/version.py (+32/-0) charmhelpers/fetch/snap.py (+33/-5) charmhelpers/fetch/ubuntu.py (+428/-62) dev/charm_helpers_sync.py (+32/-24) |
To merge this branch: | bzr merge lp:~simpoir/landscape-charm/charmhelpers-and-keys |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
🤖 Landscape Builder | test results | Approve | |
Adam Collard (community) | Approve | ||
Review via email: mp+367881@code.launchpad.net |
Commit message
This branch updates charm helpers, and adds the fix proposed at
https:/
This should fix apt failures when specifying a deb source and key.
Description of the change
This branch updates charm helpers, and adds the fix proposed at
https:/
This should fix apt failures when specifying a deb source and key.
Testing instructions:
juju deploy . --config install_
juju debug-log
check for apt refresh failures (or lack thereof). Packages will install and charm will block on relations. Previous behaviour was a series of failed update/install, eventually succeeding after many retries.
🤖 Landscape Builder (landscape-builder) wrote : | # |
Voting does not meet specified criteria. Required: Approve >= 2, Disapprove == 0. Got: 1 Approve.
🤖 Landscape Builder (landscape-builder) wrote : | # |
No approved revision specified.
🤖 Landscape Builder (landscape-builder) : | # |
🤖 Landscape Builder (landscape-builder) wrote : | # |
Command: make ci-test
Result: Success
Revno: 397
Branch: lp:~simpoir/landscape-charm/charmhelpers-and-keys
Jenkins: https:/
Preview Diff
1 | === modified file 'Makefile' | |||
2 | --- Makefile 2019-01-17 14:23:55 +0000 | |||
3 | +++ Makefile 2019-05-24 12:43:31 +0000 | |||
4 | @@ -88,8 +88,7 @@ | |||
5 | 88 | 88 | ||
6 | 89 | dev/charm_helpers_sync.py: | 89 | dev/charm_helpers_sync.py: |
7 | 90 | @mkdir -p dev | 90 | @mkdir -p dev |
10 | 91 | @bzr cat lp:charm-helpers/tools/charm_helpers_sync/charm_helpers_sync.py \ | 91 | @curl https://git.launchpad.net/charm-helpers/plain/tools/charm_helpers_sync/charm_helpers_sync.py > dev/charm_helpers_sync.py |
9 | 92 | > dev/charm_helpers_sync.py | ||
11 | 93 | 92 | ||
12 | 94 | sync: dev/charm_helpers_sync.py | 93 | sync: dev/charm_helpers_sync.py |
13 | 95 | $(PYTHON) dev/charm_helpers_sync.py -c charm-helpers.yaml | 94 | $(PYTHON) dev/charm_helpers_sync.py -c charm-helpers.yaml |
14 | 96 | 95 | ||
15 | === modified file 'charmhelpers/__init__.py' | |||
16 | --- charmhelpers/__init__.py 2017-03-03 21:03:14 +0000 | |||
17 | +++ charmhelpers/__init__.py 2019-05-24 12:43:31 +0000 | |||
18 | @@ -14,23 +14,84 @@ | |||
19 | 14 | 14 | ||
20 | 15 | # Bootstrap charm-helpers, installing its dependencies if necessary using | 15 | # Bootstrap charm-helpers, installing its dependencies if necessary using |
21 | 16 | # only standard libraries. | 16 | # only standard libraries. |
22 | 17 | from __future__ import print_function | ||
23 | 18 | from __future__ import absolute_import | ||
24 | 19 | |||
25 | 20 | import functools | ||
26 | 21 | import inspect | ||
27 | 17 | import subprocess | 22 | import subprocess |
28 | 18 | import sys | 23 | import sys |
29 | 19 | 24 | ||
30 | 20 | try: | 25 | try: |
32 | 21 | import six # flake8: noqa | 26 | import six # NOQA:F401 |
33 | 22 | except ImportError: | 27 | except ImportError: |
34 | 23 | if sys.version_info.major == 2: | 28 | if sys.version_info.major == 2: |
35 | 24 | subprocess.check_call(['apt-get', 'install', '-y', 'python-six']) | 29 | subprocess.check_call(['apt-get', 'install', '-y', 'python-six']) |
36 | 25 | else: | 30 | else: |
37 | 26 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-six']) | 31 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-six']) |
39 | 27 | import six # flake8: noqa | 32 | import six # NOQA:F401 |
40 | 28 | 33 | ||
41 | 29 | try: | 34 | try: |
43 | 30 | import yaml # flake8: noqa | 35 | import yaml # NOQA:F401 |
44 | 31 | except ImportError: | 36 | except ImportError: |
45 | 32 | if sys.version_info.major == 2: | 37 | if sys.version_info.major == 2: |
46 | 33 | subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml']) | 38 | subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml']) |
47 | 34 | else: | 39 | else: |
48 | 35 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml']) | 40 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml']) |
50 | 36 | import yaml # flake8: noqa | 41 | import yaml # NOQA:F401 |
51 | 42 | |||
52 | 43 | |||
53 | 44 | # Holds a list of mapping of mangled function names that have been deprecated | ||
54 | 45 | # using the @deprecate decorator below. This is so that the warning is only | ||
55 | 46 | # printed once for each usage of the function. | ||
56 | 47 | __deprecated_functions = {} | ||
57 | 48 | |||
58 | 49 | |||
59 | 50 | def deprecate(warning, date=None, log=None): | ||
60 | 51 | """Add a deprecation warning the first time the function is used. | ||
61 | 52 | The date, which is a string in semi-ISO8660 format indicate the year-month | ||
62 | 53 | that the function is officially going to be removed. | ||
63 | 54 | |||
64 | 55 | usage: | ||
65 | 56 | |||
66 | 57 | @deprecate('use core/fetch/add_source() instead', '2017-04') | ||
67 | 58 | def contributed_add_source_thing(...): | ||
68 | 59 | ... | ||
69 | 60 | |||
70 | 61 | And it then prints to the log ONCE that the function is deprecated. | ||
71 | 62 | The reason for passing the logging function (log) is so that hookenv.log | ||
72 | 63 | can be used for a charm if needed. | ||
73 | 64 | |||
74 | 65 | :param warning: String to indicat where it has moved ot. | ||
75 | 66 | :param date: optional sting, in YYYY-MM format to indicate when the | ||
76 | 67 | function will definitely (probably) be removed. | ||
77 | 68 | :param log: The log function to call to log. If not, logs to stdout | ||
78 | 69 | """ | ||
79 | 70 | def wrap(f): | ||
80 | 71 | |||
81 | 72 | @functools.wraps(f) | ||
82 | 73 | def wrapped_f(*args, **kwargs): | ||
83 | 74 | try: | ||
84 | 75 | module = inspect.getmodule(f) | ||
85 | 76 | file = inspect.getsourcefile(f) | ||
86 | 77 | lines = inspect.getsourcelines(f) | ||
87 | 78 | f_name = "{}-{}-{}..{}-{}".format( | ||
88 | 79 | module.__name__, file, lines[0], lines[-1], f.__name__) | ||
89 | 80 | except (IOError, TypeError): | ||
90 | 81 | # assume it was local, so just use the name of the function | ||
91 | 82 | f_name = f.__name__ | ||
92 | 83 | if f_name not in __deprecated_functions: | ||
93 | 84 | __deprecated_functions[f_name] = True | ||
94 | 85 | s = "DEPRECATION WARNING: Function {} is being removed".format( | ||
95 | 86 | f.__name__) | ||
96 | 87 | if date: | ||
97 | 88 | s = "{} on/around {}".format(s, date) | ||
98 | 89 | if warning: | ||
99 | 90 | s = "{} : {}".format(s, warning) | ||
100 | 91 | if log: | ||
101 | 92 | log(s) | ||
102 | 93 | else: | ||
103 | 94 | print(s) | ||
104 | 95 | return f(*args, **kwargs) | ||
105 | 96 | return wrapped_f | ||
106 | 97 | return wrap | ||
107 | 37 | 98 | ||
108 | === modified file 'charmhelpers/contrib/hahelpers/apache.py' | |||
109 | --- charmhelpers/contrib/hahelpers/apache.py 2017-03-03 21:03:14 +0000 | |||
110 | +++ charmhelpers/contrib/hahelpers/apache.py 2019-05-24 12:43:31 +0000 | |||
111 | @@ -23,8 +23,8 @@ | |||
112 | 23 | # | 23 | # |
113 | 24 | 24 | ||
114 | 25 | import os | 25 | import os |
115 | 26 | import subprocess | ||
116 | 27 | 26 | ||
117 | 27 | from charmhelpers.core import host | ||
118 | 28 | from charmhelpers.core.hookenv import ( | 28 | from charmhelpers.core.hookenv import ( |
119 | 29 | config as config_get, | 29 | config as config_get, |
120 | 30 | relation_get, | 30 | relation_get, |
121 | @@ -65,7 +65,8 @@ | |||
122 | 65 | if ca_cert is None: | 65 | if ca_cert is None: |
123 | 66 | log("Inspecting identity-service relations for CA SSL certificate.", | 66 | log("Inspecting identity-service relations for CA SSL certificate.", |
124 | 67 | level=INFO) | 67 | level=INFO) |
126 | 68 | for r_id in relation_ids('identity-service'): | 68 | for r_id in (relation_ids('identity-service') + |
127 | 69 | relation_ids('identity-credentials')): | ||
128 | 69 | for unit in relation_list(r_id): | 70 | for unit in relation_list(r_id): |
129 | 70 | if ca_cert is None: | 71 | if ca_cert is None: |
130 | 71 | ca_cert = relation_get('ca_cert', | 72 | ca_cert = relation_get('ca_cert', |
131 | @@ -76,20 +77,10 @@ | |||
132 | 76 | def retrieve_ca_cert(cert_file): | 77 | def retrieve_ca_cert(cert_file): |
133 | 77 | cert = None | 78 | cert = None |
134 | 78 | if os.path.isfile(cert_file): | 79 | if os.path.isfile(cert_file): |
136 | 79 | with open(cert_file, 'r') as crt: | 80 | with open(cert_file, 'rb') as crt: |
137 | 80 | cert = crt.read() | 81 | cert = crt.read() |
138 | 81 | return cert | 82 | return cert |
139 | 82 | 83 | ||
140 | 83 | 84 | ||
141 | 84 | def install_ca_cert(ca_cert): | 85 | def install_ca_cert(ca_cert): |
153 | 85 | if ca_cert: | 86 | host.install_ca_cert(ca_cert, 'keystone_juju_ca_cert') |
143 | 86 | cert_file = ('/usr/local/share/ca-certificates/' | ||
144 | 87 | 'keystone_juju_ca_cert.crt') | ||
145 | 88 | old_cert = retrieve_ca_cert(cert_file) | ||
146 | 89 | if old_cert and old_cert == ca_cert: | ||
147 | 90 | log("CA cert is the same as installed version", level=INFO) | ||
148 | 91 | else: | ||
149 | 92 | log("Installing new CA cert", level=INFO) | ||
150 | 93 | with open(cert_file, 'w') as crt: | ||
151 | 94 | crt.write(ca_cert) | ||
152 | 95 | subprocess.check_call(['update-ca-certificates', '--fresh']) | ||
154 | 96 | 87 | ||
155 | === modified file 'charmhelpers/contrib/hahelpers/cluster.py' | |||
156 | --- charmhelpers/contrib/hahelpers/cluster.py 2017-03-03 21:03:14 +0000 | |||
157 | +++ charmhelpers/contrib/hahelpers/cluster.py 2019-05-24 12:43:31 +0000 | |||
158 | @@ -27,6 +27,7 @@ | |||
159 | 27 | 27 | ||
160 | 28 | import subprocess | 28 | import subprocess |
161 | 29 | import os | 29 | import os |
162 | 30 | import time | ||
163 | 30 | 31 | ||
164 | 31 | from socket import gethostname as get_unit_hostname | 32 | from socket import gethostname as get_unit_hostname |
165 | 32 | 33 | ||
166 | @@ -45,6 +46,9 @@ | |||
167 | 45 | is_leader as juju_is_leader, | 46 | is_leader as juju_is_leader, |
168 | 46 | status_set, | 47 | status_set, |
169 | 47 | ) | 48 | ) |
170 | 49 | from charmhelpers.core.host import ( | ||
171 | 50 | modulo_distribution, | ||
172 | 51 | ) | ||
173 | 48 | from charmhelpers.core.decorators import ( | 52 | from charmhelpers.core.decorators import ( |
174 | 49 | retry_on_exception, | 53 | retry_on_exception, |
175 | 50 | ) | 54 | ) |
176 | @@ -219,6 +223,11 @@ | |||
177 | 219 | return True | 223 | return True |
178 | 220 | if config_get('ssl_cert') and config_get('ssl_key'): | 224 | if config_get('ssl_cert') and config_get('ssl_key'): |
179 | 221 | return True | 225 | return True |
180 | 226 | for r_id in relation_ids('certificates'): | ||
181 | 227 | for unit in relation_list(r_id): | ||
182 | 228 | ca = relation_get('ca', rid=r_id, unit=unit) | ||
183 | 229 | if ca: | ||
184 | 230 | return True | ||
185 | 222 | for r_id in relation_ids('identity-service'): | 231 | for r_id in relation_ids('identity-service'): |
186 | 223 | for unit in relation_list(r_id): | 232 | for unit in relation_list(r_id): |
187 | 224 | # TODO - needs fixing for new helper as ssl_cert/key suffixes with CN | 233 | # TODO - needs fixing for new helper as ssl_cert/key suffixes with CN |
188 | @@ -361,3 +370,37 @@ | |||
189 | 361 | else: | 370 | else: |
190 | 362 | addr = unit_get('private-address') | 371 | addr = unit_get('private-address') |
191 | 363 | return '%s://%s' % (scheme, addr) | 372 | return '%s://%s' % (scheme, addr) |
192 | 373 | |||
193 | 374 | |||
194 | 375 | def distributed_wait(modulo=None, wait=None, operation_name='operation'): | ||
195 | 376 | ''' Distribute operations by waiting based on modulo_distribution | ||
196 | 377 | |||
197 | 378 | If modulo and or wait are not set, check config_get for those values. | ||
198 | 379 | If config values are not set, default to modulo=3 and wait=30. | ||
199 | 380 | |||
200 | 381 | :param modulo: int The modulo number creates the group distribution | ||
201 | 382 | :param wait: int The constant time wait value | ||
202 | 383 | :param operation_name: string Operation name for status message | ||
203 | 384 | i.e. 'restart' | ||
204 | 385 | :side effect: Calls config_get() | ||
205 | 386 | :side effect: Calls log() | ||
206 | 387 | :side effect: Calls status_set() | ||
207 | 388 | :side effect: Calls time.sleep() | ||
208 | 389 | ''' | ||
209 | 390 | if modulo is None: | ||
210 | 391 | modulo = config_get('modulo-nodes') or 3 | ||
211 | 392 | if wait is None: | ||
212 | 393 | wait = config_get('known-wait') or 30 | ||
213 | 394 | if juju_is_leader(): | ||
214 | 395 | # The leader should never wait | ||
215 | 396 | calculated_wait = 0 | ||
216 | 397 | else: | ||
217 | 398 | # non_zero_wait=True guarantees the non-leader who gets modulo 0 | ||
218 | 399 | # will still wait | ||
219 | 400 | calculated_wait = modulo_distribution(modulo=modulo, wait=wait, | ||
220 | 401 | non_zero_wait=True) | ||
221 | 402 | msg = "Waiting {} seconds for {} ...".format(calculated_wait, | ||
222 | 403 | operation_name) | ||
223 | 404 | log(msg, DEBUG) | ||
224 | 405 | status_set('maintenance', msg) | ||
225 | 406 | time.sleep(calculated_wait) | ||
226 | 364 | 407 | ||
227 | === modified file 'charmhelpers/core/hookenv.py' | |||
228 | --- charmhelpers/core/hookenv.py 2017-03-03 21:03:14 +0000 | |||
229 | +++ charmhelpers/core/hookenv.py 2019-05-24 12:43:31 +0000 | |||
230 | @@ -22,10 +22,12 @@ | |||
231 | 22 | import copy | 22 | import copy |
232 | 23 | from distutils.version import LooseVersion | 23 | from distutils.version import LooseVersion |
233 | 24 | from functools import wraps | 24 | from functools import wraps |
234 | 25 | from collections import namedtuple | ||
235 | 25 | import glob | 26 | import glob |
236 | 26 | import os | 27 | import os |
237 | 27 | import json | 28 | import json |
238 | 28 | import yaml | 29 | import yaml |
239 | 30 | import re | ||
240 | 29 | import subprocess | 31 | import subprocess |
241 | 30 | import sys | 32 | import sys |
242 | 31 | import errno | 33 | import errno |
243 | @@ -38,12 +40,20 @@ | |||
244 | 38 | else: | 40 | else: |
245 | 39 | from collections import UserDict | 41 | from collections import UserDict |
246 | 40 | 42 | ||
247 | 43 | |||
248 | 41 | CRITICAL = "CRITICAL" | 44 | CRITICAL = "CRITICAL" |
249 | 42 | ERROR = "ERROR" | 45 | ERROR = "ERROR" |
250 | 43 | WARNING = "WARNING" | 46 | WARNING = "WARNING" |
251 | 44 | INFO = "INFO" | 47 | INFO = "INFO" |
252 | 45 | DEBUG = "DEBUG" | 48 | DEBUG = "DEBUG" |
253 | 49 | TRACE = "TRACE" | ||
254 | 46 | MARKER = object() | 50 | MARKER = object() |
255 | 51 | SH_MAX_ARG = 131071 | ||
256 | 52 | |||
257 | 53 | |||
258 | 54 | RANGE_WARNING = ('Passing NO_PROXY string that includes a cidr. ' | ||
259 | 55 | 'This may not be compatible with software you are ' | ||
260 | 56 | 'running in your shell.') | ||
261 | 47 | 57 | ||
262 | 48 | cache = {} | 58 | cache = {} |
263 | 49 | 59 | ||
264 | @@ -64,7 +74,7 @@ | |||
265 | 64 | @wraps(func) | 74 | @wraps(func) |
266 | 65 | def wrapper(*args, **kwargs): | 75 | def wrapper(*args, **kwargs): |
267 | 66 | global cache | 76 | global cache |
269 | 67 | key = str((func, args, kwargs)) | 77 | key = json.dumps((func, args, kwargs), sort_keys=True, default=str) |
270 | 68 | try: | 78 | try: |
271 | 69 | return cache[key] | 79 | return cache[key] |
272 | 70 | except KeyError: | 80 | except KeyError: |
273 | @@ -94,7 +104,7 @@ | |||
274 | 94 | command += ['-l', level] | 104 | command += ['-l', level] |
275 | 95 | if not isinstance(message, six.string_types): | 105 | if not isinstance(message, six.string_types): |
276 | 96 | message = repr(message) | 106 | message = repr(message) |
278 | 97 | command += [message] | 107 | command += [message[:SH_MAX_ARG]] |
279 | 98 | # Missing juju-log should not cause failures in unit tests | 108 | # Missing juju-log should not cause failures in unit tests |
280 | 99 | # Send log output to stderr | 109 | # Send log output to stderr |
281 | 100 | try: | 110 | try: |
282 | @@ -197,9 +207,56 @@ | |||
283 | 197 | return os.environ.get('JUJU_REMOTE_UNIT', None) | 207 | return os.environ.get('JUJU_REMOTE_UNIT', None) |
284 | 198 | 208 | ||
285 | 199 | 209 | ||
286 | 210 | def application_name(): | ||
287 | 211 | """ | ||
288 | 212 | The name of the deployed application this unit belongs to. | ||
289 | 213 | """ | ||
290 | 214 | return local_unit().split('/')[0] | ||
291 | 215 | |||
292 | 216 | |||
293 | 200 | def service_name(): | 217 | def service_name(): |
296 | 201 | """The name service group this unit belongs to""" | 218 | """ |
297 | 202 | return local_unit().split('/')[0] | 219 | .. deprecated:: 0.19.1 |
298 | 220 | Alias for :func:`application_name`. | ||
299 | 221 | """ | ||
300 | 222 | return application_name() | ||
301 | 223 | |||
302 | 224 | |||
303 | 225 | def model_name(): | ||
304 | 226 | """ | ||
305 | 227 | Name of the model that this unit is deployed in. | ||
306 | 228 | """ | ||
307 | 229 | return os.environ['JUJU_MODEL_NAME'] | ||
308 | 230 | |||
309 | 231 | |||
310 | 232 | def model_uuid(): | ||
311 | 233 | """ | ||
312 | 234 | UUID of the model that this unit is deployed in. | ||
313 | 235 | """ | ||
314 | 236 | return os.environ['JUJU_MODEL_UUID'] | ||
315 | 237 | |||
316 | 238 | |||
317 | 239 | def principal_unit(): | ||
318 | 240 | """Returns the principal unit of this unit, otherwise None""" | ||
319 | 241 | # Juju 2.2 and above provides JUJU_PRINCIPAL_UNIT | ||
320 | 242 | principal_unit = os.environ.get('JUJU_PRINCIPAL_UNIT', None) | ||
321 | 243 | # If it's empty, then this unit is the principal | ||
322 | 244 | if principal_unit == '': | ||
323 | 245 | return os.environ['JUJU_UNIT_NAME'] | ||
324 | 246 | elif principal_unit is not None: | ||
325 | 247 | return principal_unit | ||
326 | 248 | # For Juju 2.1 and below, let's try work out the principle unit by | ||
327 | 249 | # the various charms' metadata.yaml. | ||
328 | 250 | for reltype in relation_types(): | ||
329 | 251 | for rid in relation_ids(reltype): | ||
330 | 252 | for unit in related_units(rid): | ||
331 | 253 | md = _metadata_unit(unit) | ||
332 | 254 | if not md: | ||
333 | 255 | continue | ||
334 | 256 | subordinate = md.pop('subordinate', None) | ||
335 | 257 | if not subordinate: | ||
336 | 258 | return unit | ||
337 | 259 | return None | ||
338 | 203 | 260 | ||
339 | 204 | 261 | ||
340 | 205 | @cached | 262 | @cached |
341 | @@ -263,7 +320,7 @@ | |||
342 | 263 | self.implicit_save = True | 320 | self.implicit_save = True |
343 | 264 | self._prev_dict = None | 321 | self._prev_dict = None |
344 | 265 | self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME) | 322 | self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME) |
346 | 266 | if os.path.exists(self.path): | 323 | if os.path.exists(self.path) and os.stat(self.path).st_size: |
347 | 267 | self.load_previous() | 324 | self.load_previous() |
348 | 268 | atexit(self._implicit_save) | 325 | atexit(self._implicit_save) |
349 | 269 | 326 | ||
350 | @@ -283,7 +340,11 @@ | |||
351 | 283 | """ | 340 | """ |
352 | 284 | self.path = path or self.path | 341 | self.path = path or self.path |
353 | 285 | with open(self.path) as f: | 342 | with open(self.path) as f: |
355 | 286 | self._prev_dict = json.load(f) | 343 | try: |
356 | 344 | self._prev_dict = json.load(f) | ||
357 | 345 | except ValueError as e: | ||
358 | 346 | log('Unable to parse previous config data - {}'.format(str(e)), | ||
359 | 347 | level=ERROR) | ||
360 | 287 | for k, v in copy.deepcopy(self._prev_dict).items(): | 348 | for k, v in copy.deepcopy(self._prev_dict).items(): |
361 | 288 | if k not in self: | 349 | if k not in self: |
362 | 289 | self[k] = v | 350 | self[k] = v |
363 | @@ -319,6 +380,7 @@ | |||
364 | 319 | 380 | ||
365 | 320 | """ | 381 | """ |
366 | 321 | with open(self.path, 'w') as f: | 382 | with open(self.path, 'w') as f: |
367 | 383 | os.fchmod(f.fileno(), 0o600) | ||
368 | 322 | json.dump(self, f) | 384 | json.dump(self, f) |
369 | 323 | 385 | ||
370 | 324 | def _implicit_save(self): | 386 | def _implicit_save(self): |
371 | @@ -326,22 +388,40 @@ | |||
372 | 326 | self.save() | 388 | self.save() |
373 | 327 | 389 | ||
374 | 328 | 390 | ||
376 | 329 | @cached | 391 | _cache_config = None |
377 | 392 | |||
378 | 393 | |||
379 | 330 | def config(scope=None): | 394 | def config(scope=None): |
390 | 331 | """Juju charm configuration""" | 395 | """ |
391 | 332 | config_cmd_line = ['config-get'] | 396 | Get the juju charm configuration (scope==None) or individual key, |
392 | 333 | if scope is not None: | 397 | (scope=str). The returned value is a Python data structure loaded as |
393 | 334 | config_cmd_line.append(scope) | 398 | JSON from the Juju config command. |
394 | 335 | else: | 399 | |
395 | 336 | config_cmd_line.append('--all') | 400 | :param scope: If set, return the value for the specified key. |
396 | 337 | config_cmd_line.append('--format=json') | 401 | :type scope: Optional[str] |
397 | 338 | try: | 402 | :returns: Either the whole config as a Config, or a key from it. |
398 | 339 | config_data = json.loads( | 403 | :rtype: Any |
399 | 340 | subprocess.check_output(config_cmd_line).decode('UTF-8')) | 404 | """ |
400 | 405 | global _cache_config | ||
401 | 406 | config_cmd_line = ['config-get', '--all', '--format=json'] | ||
402 | 407 | try: | ||
403 | 408 | # JSON Decode Exception for Python3.5+ | ||
404 | 409 | exc_json = json.decoder.JSONDecodeError | ||
405 | 410 | except AttributeError: | ||
406 | 411 | # JSON Decode Exception for Python2.7 through Python3.4 | ||
407 | 412 | exc_json = ValueError | ||
408 | 413 | try: | ||
409 | 414 | if _cache_config is None: | ||
410 | 415 | config_data = json.loads( | ||
411 | 416 | subprocess.check_output(config_cmd_line).decode('UTF-8')) | ||
412 | 417 | _cache_config = Config(config_data) | ||
413 | 341 | if scope is not None: | 418 | if scope is not None: |
417 | 342 | return config_data | 419 | return _cache_config.get(scope) |
418 | 343 | return Config(config_data) | 420 | return _cache_config |
419 | 344 | except ValueError: | 421 | except (exc_json, UnicodeDecodeError) as e: |
420 | 422 | log('Unable to parse output from config-get: config_cmd_line="{}" ' | ||
421 | 423 | 'message="{}"' | ||
422 | 424 | .format(config_cmd_line, str(e)), level=ERROR) | ||
423 | 345 | return None | 425 | return None |
424 | 346 | 426 | ||
425 | 347 | 427 | ||
426 | @@ -435,6 +515,67 @@ | |||
427 | 435 | subprocess.check_output(units_cmd_line).decode('UTF-8')) or [] | 515 | subprocess.check_output(units_cmd_line).decode('UTF-8')) or [] |
428 | 436 | 516 | ||
429 | 437 | 517 | ||
430 | 518 | def expected_peer_units(): | ||
431 | 519 | """Get a generator for units we expect to join peer relation based on | ||
432 | 520 | goal-state. | ||
433 | 521 | |||
434 | 522 | The local unit is excluded from the result to make it easy to gauge | ||
435 | 523 | completion of all peers joining the relation with existing hook tools. | ||
436 | 524 | |||
437 | 525 | Example usage: | ||
438 | 526 | log('peer {} of {} joined peer relation' | ||
439 | 527 | .format(len(related_units()), | ||
440 | 528 | len(list(expected_peer_units())))) | ||
441 | 529 | |||
442 | 530 | This function will raise NotImplementedError if used with juju versions | ||
443 | 531 | without goal-state support. | ||
444 | 532 | |||
445 | 533 | :returns: iterator | ||
446 | 534 | :rtype: types.GeneratorType | ||
447 | 535 | :raises: NotImplementedError | ||
448 | 536 | """ | ||
449 | 537 | if not has_juju_version("2.4.0"): | ||
450 | 538 | # goal-state first appeared in 2.4.0. | ||
451 | 539 | raise NotImplementedError("goal-state") | ||
452 | 540 | _goal_state = goal_state() | ||
453 | 541 | return (key for key in _goal_state['units'] | ||
454 | 542 | if '/' in key and key != local_unit()) | ||
455 | 543 | |||
456 | 544 | |||
457 | 545 | def expected_related_units(reltype=None): | ||
458 | 546 | """Get a generator for units we expect to join relation based on | ||
459 | 547 | goal-state. | ||
460 | 548 | |||
461 | 549 | Note that you can not use this function for the peer relation, take a look | ||
462 | 550 | at expected_peer_units() for that. | ||
463 | 551 | |||
464 | 552 | This function will raise KeyError if you request information for a | ||
465 | 553 | relation type for which juju goal-state does not have information. It will | ||
466 | 554 | raise NotImplementedError if used with juju versions without goal-state | ||
467 | 555 | support. | ||
468 | 556 | |||
469 | 557 | Example usage: | ||
470 | 558 | log('participant {} of {} joined relation {}' | ||
471 | 559 | .format(len(related_units()), | ||
472 | 560 | len(list(expected_related_units())), | ||
473 | 561 | relation_type())) | ||
474 | 562 | |||
475 | 563 | :param reltype: Relation type to list data for, default is to list data for | ||
476 | 564 | the realtion type we are currently executing a hook for. | ||
477 | 565 | :type reltype: str | ||
478 | 566 | :returns: iterator | ||
479 | 567 | :rtype: types.GeneratorType | ||
480 | 568 | :raises: KeyError, NotImplementedError | ||
481 | 569 | """ | ||
482 | 570 | if not has_juju_version("2.4.4"): | ||
483 | 571 | # goal-state existed in 2.4.0, but did not list individual units to | ||
484 | 572 | # join a relation in 2.4.1 through 2.4.3. (LP: #1794739) | ||
485 | 573 | raise NotImplementedError("goal-state relation unit count") | ||
486 | 574 | reltype = reltype or relation_type() | ||
487 | 575 | _goal_state = goal_state() | ||
488 | 576 | return (key for key in _goal_state['relations'][reltype] if '/' in key) | ||
489 | 577 | |||
490 | 578 | |||
491 | 438 | @cached | 579 | @cached |
492 | 439 | def relation_for_unit(unit=None, rid=None): | 580 | def relation_for_unit(unit=None, rid=None): |
493 | 440 | """Get the json represenation of a unit's relation""" | 581 | """Get the json represenation of a unit's relation""" |
494 | @@ -478,6 +619,24 @@ | |||
495 | 478 | return yaml.safe_load(md) | 619 | return yaml.safe_load(md) |
496 | 479 | 620 | ||
497 | 480 | 621 | ||
498 | 622 | def _metadata_unit(unit): | ||
499 | 623 | """Given the name of a unit (e.g. apache2/0), get the unit charm's | ||
500 | 624 | metadata.yaml. Very similar to metadata() but allows us to inspect | ||
501 | 625 | other units. Unit needs to be co-located, such as a subordinate or | ||
502 | 626 | principal/primary. | ||
503 | 627 | |||
504 | 628 | :returns: metadata.yaml as a python object. | ||
505 | 629 | |||
506 | 630 | """ | ||
507 | 631 | basedir = os.sep.join(charm_dir().split(os.sep)[:-2]) | ||
508 | 632 | unitdir = 'unit-{}'.format(unit.replace(os.sep, '-')) | ||
509 | 633 | joineddir = os.path.join(basedir, unitdir, 'charm', 'metadata.yaml') | ||
510 | 634 | if not os.path.exists(joineddir): | ||
511 | 635 | return None | ||
512 | 636 | with open(joineddir) as md: | ||
513 | 637 | return yaml.safe_load(md) | ||
514 | 638 | |||
515 | 639 | |||
516 | 481 | @cached | 640 | @cached |
517 | 482 | def relation_types(): | 641 | def relation_types(): |
518 | 483 | """Get a list of relation types supported by this charm""" | 642 | """Get a list of relation types supported by this charm""" |
519 | @@ -602,18 +761,31 @@ | |||
520 | 602 | return False | 761 | return False |
521 | 603 | 762 | ||
522 | 604 | 763 | ||
523 | 764 | def _port_op(op_name, port, protocol="TCP"): | ||
524 | 765 | """Open or close a service network port""" | ||
525 | 766 | _args = [op_name] | ||
526 | 767 | icmp = protocol.upper() == "ICMP" | ||
527 | 768 | if icmp: | ||
528 | 769 | _args.append(protocol) | ||
529 | 770 | else: | ||
530 | 771 | _args.append('{}/{}'.format(port, protocol)) | ||
531 | 772 | try: | ||
532 | 773 | subprocess.check_call(_args) | ||
533 | 774 | except subprocess.CalledProcessError: | ||
534 | 775 | # Older Juju pre 2.3 doesn't support ICMP | ||
535 | 776 | # so treat it as a no-op if it fails. | ||
536 | 777 | if not icmp: | ||
537 | 778 | raise | ||
538 | 779 | |||
539 | 780 | |||
540 | 605 | def open_port(port, protocol="TCP"): | 781 | def open_port(port, protocol="TCP"): |
541 | 606 | """Open a service network port""" | 782 | """Open a service network port""" |
545 | 607 | _args = ['open-port'] | 783 | _port_op('open-port', port, protocol) |
543 | 608 | _args.append('{}/{}'.format(port, protocol)) | ||
544 | 609 | subprocess.check_call(_args) | ||
546 | 610 | 784 | ||
547 | 611 | 785 | ||
548 | 612 | def close_port(port, protocol="TCP"): | 786 | def close_port(port, protocol="TCP"): |
549 | 613 | """Close a service network port""" | 787 | """Close a service network port""" |
553 | 614 | _args = ['close-port'] | 788 | _port_op('close-port', port, protocol) |
551 | 615 | _args.append('{}/{}'.format(port, protocol)) | ||
552 | 616 | subprocess.check_call(_args) | ||
554 | 617 | 789 | ||
555 | 618 | 790 | ||
556 | 619 | def open_ports(start, end, protocol="TCP"): | 791 | def open_ports(start, end, protocol="TCP"): |
557 | @@ -630,6 +802,17 @@ | |||
558 | 630 | subprocess.check_call(_args) | 802 | subprocess.check_call(_args) |
559 | 631 | 803 | ||
560 | 632 | 804 | ||
561 | 805 | def opened_ports(): | ||
562 | 806 | """Get the opened ports | ||
563 | 807 | |||
564 | 808 | *Note that this will only show ports opened in a previous hook* | ||
565 | 809 | |||
566 | 810 | :returns: Opened ports as a list of strings: ``['8080/tcp', '8081-8083/tcp']`` | ||
567 | 811 | """ | ||
568 | 812 | _args = ['opened-ports', '--format=json'] | ||
569 | 813 | return json.loads(subprocess.check_output(_args).decode('UTF-8')) | ||
570 | 814 | |||
571 | 815 | |||
572 | 633 | @cached | 816 | @cached |
573 | 634 | def unit_get(attribute): | 817 | def unit_get(attribute): |
574 | 635 | """Get the unit ID for the remote unit""" | 818 | """Get the unit ID for the remote unit""" |
575 | @@ -751,8 +934,15 @@ | |||
576 | 751 | return wrapper | 934 | return wrapper |
577 | 752 | 935 | ||
578 | 753 | 936 | ||
579 | 937 | class NoNetworkBinding(Exception): | ||
580 | 938 | pass | ||
581 | 939 | |||
582 | 940 | |||
583 | 754 | def charm_dir(): | 941 | def charm_dir(): |
584 | 755 | """Return the root directory of the current charm""" | 942 | """Return the root directory of the current charm""" |
585 | 943 | d = os.environ.get('JUJU_CHARM_DIR') | ||
586 | 944 | if d is not None: | ||
587 | 945 | return d | ||
588 | 756 | return os.environ.get('CHARM_DIR') | 946 | return os.environ.get('CHARM_DIR') |
589 | 757 | 947 | ||
590 | 758 | 948 | ||
591 | @@ -874,6 +1064,14 @@ | |||
592 | 874 | 1064 | ||
593 | 875 | 1065 | ||
594 | 876 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | 1066 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) |
595 | 1067 | @cached | ||
596 | 1068 | def goal_state(): | ||
597 | 1069 | """Juju goal state values""" | ||
598 | 1070 | cmd = ['goal-state', '--format=json'] | ||
599 | 1071 | return json.loads(subprocess.check_output(cmd).decode('UTF-8')) | ||
600 | 1072 | |||
601 | 1073 | |||
602 | 1074 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | ||
603 | 877 | def is_leader(): | 1075 | def is_leader(): |
604 | 878 | """Does the current unit hold the juju leadership | 1076 | """Does the current unit hold the juju leadership |
605 | 879 | 1077 | ||
606 | @@ -967,7 +1165,6 @@ | |||
607 | 967 | universal_newlines=True).strip() | 1165 | universal_newlines=True).strip() |
608 | 968 | 1166 | ||
609 | 969 | 1167 | ||
610 | 970 | @cached | ||
611 | 971 | def has_juju_version(minimum_version): | 1168 | def has_juju_version(minimum_version): |
612 | 972 | """Return True if the Juju version is at least the provided version""" | 1169 | """Return True if the Juju version is at least the provided version""" |
613 | 973 | return LooseVersion(juju_version()) >= LooseVersion(minimum_version) | 1170 | return LooseVersion(juju_version()) >= LooseVersion(minimum_version) |
614 | @@ -1027,6 +1224,8 @@ | |||
615 | 1027 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | 1224 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) |
616 | 1028 | def network_get_primary_address(binding): | 1225 | def network_get_primary_address(binding): |
617 | 1029 | ''' | 1226 | ''' |
618 | 1227 | Deprecated since Juju 2.3; use network_get() | ||
619 | 1228 | |||
620 | 1030 | Retrieve the primary network address for a named binding | 1229 | Retrieve the primary network address for a named binding |
621 | 1031 | 1230 | ||
622 | 1032 | :param binding: string. The name of a relation or extra-binding | 1231 |
623 | @@ -1034,7 +1233,41 @@ | |||
624 | 1034 | :raise: NotImplementedError if run on Juju < 2.0 | 1233 | :raise: NotImplementedError if run on Juju < 2.0 |
625 | 1035 | ''' | 1234 | ''' |
626 | 1036 | cmd = ['network-get', '--primary-address', binding] | 1235 | cmd = ['network-get', '--primary-address', binding] |
628 | 1037 | return subprocess.check_output(cmd).decode('UTF-8').strip() | 1236 | try: |
629 | 1237 | response = subprocess.check_output( | ||
630 | 1238 | cmd, | ||
631 | 1239 | stderr=subprocess.STDOUT).decode('UTF-8').strip() | ||
632 | 1240 | except CalledProcessError as e: | ||
633 | 1241 | if 'no network config found for binding' in e.output.decode('UTF-8'): | ||
634 | 1242 | raise NoNetworkBinding("No network binding for {}" | ||
635 | 1243 | .format(binding)) | ||
636 | 1244 | else: | ||
637 | 1245 | raise | ||
638 | 1246 | return response | ||
639 | 1247 | |||
640 | 1248 | |||
641 | 1249 | def network_get(endpoint, relation_id=None): | ||
642 | 1250 | """ | ||
643 | 1251 | Retrieve the network details for a relation endpoint | ||
644 | 1252 | |||
645 | 1253 | :param endpoint: string. The name of a relation endpoint | ||
646 | 1254 | :param relation_id: int. The ID of the relation for the current context. | ||
647 | 1255 | :return: dict. The loaded YAML output of the network-get query. | ||
648 | 1256 | :raise: NotImplementedError if request not supported by the Juju version. | ||
649 | 1257 | """ | ||
650 | 1258 | if not has_juju_version('2.2'): | ||
651 | 1259 | raise NotImplementedError(juju_version()) # earlier versions require --primary-address | ||
652 | 1260 | if relation_id and not has_juju_version('2.3'): | ||
653 | 1261 | raise NotImplementedError # 2.3 added the -r option | ||
654 | 1262 | |||
655 | 1263 | cmd = ['network-get', endpoint, '--format', 'yaml'] | ||
656 | 1264 | if relation_id: | ||
657 | 1265 | cmd.append('-r') | ||
658 | 1266 | cmd.append(relation_id) | ||
659 | 1267 | response = subprocess.check_output( | ||
660 | 1268 | cmd, | ||
661 | 1269 | stderr=subprocess.STDOUT).decode('UTF-8').strip() | ||
662 | 1270 | return yaml.safe_load(response) | ||
663 | 1038 | 1271 | ||
664 | 1039 | 1272 | ||
665 | 1040 | def add_metric(*args, **kwargs): | 1273 | def add_metric(*args, **kwargs): |
666 | @@ -1066,3 +1299,192 @@ | |||
667 | 1066 | """Get the meter status information, if running in the meter-status-changed | 1299 | """Get the meter status information, if running in the meter-status-changed |
668 | 1067 | hook.""" | 1300 | hook.""" |
669 | 1068 | return os.environ.get('JUJU_METER_INFO') | 1301 | return os.environ.get('JUJU_METER_INFO') |
670 | 1302 | |||
671 | 1303 | |||
672 | 1304 | def iter_units_for_relation_name(relation_name): | ||
673 | 1305 | """Iterate through all units in a relation | ||
674 | 1306 | |||
675 | 1307 | Generator that iterates through all the units in a relation and yields | ||
676 | 1308 | a named tuple with rid and unit field names. | ||
677 | 1309 | |||
678 | 1310 | Usage: | ||
679 | 1311 | data = [(u.rid, u.unit) | ||
680 | 1312 | for u in iter_units_for_relation_name(relation_name)] | ||
681 | 1313 | |||
682 | 1314 | :param relation_name: string relation name | ||
683 | 1315 | :yield: Named Tuple with rid and unit field names | ||
684 | 1316 | """ | ||
685 | 1317 | RelatedUnit = namedtuple('RelatedUnit', 'rid, unit') | ||
686 | 1318 | for rid in relation_ids(relation_name): | ||
687 | 1319 | for unit in related_units(rid): | ||
688 | 1320 | yield RelatedUnit(rid, unit) | ||
689 | 1321 | |||
690 | 1322 | |||
691 | 1323 | def ingress_address(rid=None, unit=None): | ||
692 | 1324 | """ | ||
693 | 1325 | Retrieve the ingress-address from a relation when available. | ||
694 | 1326 | Otherwise, return the private-address. | ||
695 | 1327 | |||
696 | 1328 | When used on the consuming side of the relation (unit is a remote | ||
697 | 1329 | unit), the ingress-address is the IP address that this unit needs | ||
698 | 1330 | to use to reach the provided service on the remote unit. | ||
699 | 1331 | |||
700 | 1332 | When used on the providing side of the relation (unit == local_unit()), | ||
701 | 1333 | the ingress-address is the IP address that is advertised to remote | ||
702 | 1334 | units on this relation. Remote units need to use this address to | ||
703 | 1335 | reach the local provided service on this unit. | ||
704 | 1336 | |||
705 | 1337 | Note that charms may document some other method to use in | ||
706 | 1338 | preference to the ingress_address(), such as an address provided | ||
707 | 1339 | on a different relation attribute or a service discovery mechanism. | ||
708 | 1340 | This allows charms to redirect inbound connections to their peers | ||
709 | 1341 | or different applications such as load balancers. | ||
710 | 1342 | |||
711 | 1343 | Usage: | ||
712 | 1344 | addresses = [ingress_address(rid=u.rid, unit=u.unit) | ||
713 | 1345 | for u in iter_units_for_relation_name(relation_name)] | ||
714 | 1346 | |||
715 | 1347 | :param rid: string relation id | ||
716 | 1348 | :param unit: string unit name | ||
717 | 1349 | :side effect: calls relation_get | ||
718 | 1350 | :return: string IP address | ||
719 | 1351 | """ | ||
720 | 1352 | settings = relation_get(rid=rid, unit=unit) | ||
721 | 1353 | return (settings.get('ingress-address') or | ||
722 | 1354 | settings.get('private-address')) | ||
723 | 1355 | |||
724 | 1356 | |||
725 | 1357 | def egress_subnets(rid=None, unit=None): | ||
726 | 1358 | """ | ||
727 | 1359 | Retrieve the egress-subnets from a relation. | ||
728 | 1360 | |||
729 | 1361 | This function is to be used on the providing side of the | ||
730 | 1362 | relation, and provides the ranges of addresses that client | ||
731 | 1363 | connections may come from. The result is uninteresting on | ||
732 | 1364 | the consuming side of a relation (unit == local_unit()). | ||
733 | 1365 | |||
734 | 1366 | Returns a stable list of subnets in CIDR format. | ||
735 | 1367 | eg. ['192.168.1.0/24', '2001::F00F/128'] | ||
736 | 1368 | |||
737 | 1369 | If egress-subnets is not available, falls back to using the published | ||
738 | 1370 | ingress-address, or finally private-address. | ||
739 | 1371 | |||
740 | 1372 | :param rid: string relation id | ||
741 | 1373 | :param unit: string unit name | ||
742 | 1374 | :side effect: calls relation_get | ||
743 | 1375 | :return: list of subnets in CIDR format. eg. ['192.168.1.0/24', '2001::F00F/128'] | ||
744 | 1376 | """ | ||
745 | 1377 | def _to_range(addr): | ||
746 | 1378 | if re.search(r'^(?:\d{1,3}\.){3}\d{1,3}$', addr) is not None: | ||
747 | 1379 | addr += '/32' | ||
748 | 1380 | elif ':' in addr and '/' not in addr: # IPv6 | ||
749 | 1381 | addr += '/128' | ||
750 | 1382 | return addr | ||
751 | 1383 | |||
752 | 1384 | settings = relation_get(rid=rid, unit=unit) | ||
753 | 1385 | if 'egress-subnets' in settings: | ||
754 | 1386 | return [n.strip() for n in settings['egress-subnets'].split(',') if n.strip()] | ||
755 | 1387 | if 'ingress-address' in settings: | ||
756 | 1388 | return [_to_range(settings['ingress-address'])] | ||
757 | 1389 | if 'private-address' in settings: | ||
758 | 1390 | return [_to_range(settings['private-address'])] | ||
759 | 1391 | return [] # Should never happen | ||
760 | 1392 | |||
761 | 1393 | |||
762 | 1394 | def unit_doomed(unit=None): | ||
763 | 1395 | """Determines if the unit is being removed from the model | ||
764 | 1396 | |||
765 | 1397 | Requires Juju 2.4.1. | ||
766 | 1398 | |||
767 | 1399 | :param unit: string unit name, defaults to local_unit | ||
768 | 1400 | :side effect: calls goal_state | ||
769 | 1401 | :side effect: calls local_unit | ||
770 | 1402 | :side effect: calls has_juju_version | ||
771 | 1403 | :return: True if the unit is being removed, already gone, or never existed | ||
772 | 1404 | """ | ||
773 | 1405 | if not has_juju_version("2.4.1"): | ||
774 | 1406 | # We cannot risk blindly returning False for 'we don't know', | ||
775 | 1407 | # because that could cause data loss; if call sites don't | ||
776 | 1408 | # need an accurate answer, they likely don't need this helper | ||
777 | 1409 | # at all. | ||
778 | 1410 | # goal-state existed in 2.4.0, but did not handle removals | ||
779 | 1411 | # correctly until 2.4.1. | ||
780 | 1412 | raise NotImplementedError("is_doomed") | ||
781 | 1413 | if unit is None: | ||
782 | 1414 | unit = local_unit() | ||
783 | 1415 | gs = goal_state() | ||
784 | 1416 | units = gs.get('units', {}) | ||
785 | 1417 | if unit not in units: | ||
786 | 1418 | return True | ||
787 | 1419 | # I don't think 'dead' units ever show up in the goal-state, but | ||
788 | 1420 | # check anyway in addition to 'dying'. | ||
789 | 1421 | return units[unit]['status'] in ('dying', 'dead') | ||
790 | 1422 | |||
791 | 1423 | |||
792 | 1424 | def env_proxy_settings(selected_settings=None): | ||
793 | 1425 | """Get proxy settings from process environment variables. | ||
794 | 1426 | |||
795 | 1427 | Get charm proxy settings from environment variables that correspond to | ||
796 | 1428 | juju-http-proxy, juju-https-proxy and juju-no-proxy (available as of 2.4.2, | ||
797 | 1429 | see lp:1782236) in a format suitable for passing to an application that | ||
798 | 1430 | reacts to proxy settings passed as environment variables. Some applications | ||
799 | 1431 | support lowercase or uppercase notation (e.g. curl), some support only | ||
800 | 1432 | lowercase (e.g. wget), there are also subjectively rare cases of only | ||
801 | 1433 | uppercase notation support. no_proxy CIDR and wildcard support also varies | ||
802 | 1434 | between runtimes and applications as there is no enforced standard. | ||
803 | 1435 | |||
804 | 1436 | Some applications may connect to multiple destinations and expose config | ||
805 | 1437 | options that would affect only proxy settings for a specific destination | ||
806 | 1438 | these should be handled in charms in an application-specific manner. | ||
807 | 1439 | |||
808 | 1440 | :param selected_settings: format only a subset of possible settings | ||
809 | 1441 | :type selected_settings: list | ||
810 | 1442 | :rtype: Option(None, dict[str, str]) | ||
811 | 1443 | """ | ||
812 | 1444 | SUPPORTED_SETTINGS = { | ||
813 | 1445 | 'http': 'HTTP_PROXY', | ||
814 | 1446 | 'https': 'HTTPS_PROXY', | ||
815 | 1447 | 'no_proxy': 'NO_PROXY', | ||
816 | 1448 | 'ftp': 'FTP_PROXY' | ||
817 | 1449 | } | ||
818 | 1450 | if selected_settings is None: | ||
819 | 1451 | selected_settings = SUPPORTED_SETTINGS | ||
820 | 1452 | |||
821 | 1453 | selected_vars = [v for k, v in SUPPORTED_SETTINGS.items() | ||
822 | 1454 | if k in selected_settings] | ||
823 | 1455 | proxy_settings = {} | ||
824 | 1456 | for var in selected_vars: | ||
825 | 1457 | var_val = os.getenv(var) | ||
826 | 1458 | if var_val: | ||
827 | 1459 | proxy_settings[var] = var_val | ||
828 | 1460 | proxy_settings[var.lower()] = var_val | ||
829 | 1461 | # Now handle juju-prefixed environment variables. The legacy vs new | ||
830 | 1462 | # environment variable usage is mutually exclusive | ||
831 | 1463 | charm_var_val = os.getenv('JUJU_CHARM_{}'.format(var)) | ||
832 | 1464 | if charm_var_val: | ||
833 | 1465 | proxy_settings[var] = charm_var_val | ||
834 | 1466 | proxy_settings[var.lower()] = charm_var_val | ||
835 | 1467 | if 'no_proxy' in proxy_settings: | ||
836 | 1468 | if _contains_range(proxy_settings['no_proxy']): | ||
837 | 1469 | log(RANGE_WARNING, level=WARNING) | ||
838 | 1470 | return proxy_settings if proxy_settings else None | ||
839 | 1471 | |||
840 | 1472 | |||
841 | 1473 | def _contains_range(addresses): | ||
842 | 1474 | """Check for cidr or wildcard domain in a string. | ||
843 | 1475 | |||
844 | 1476 | Given a string comprising a comma separated list of ip addresses | ||
845 | 1477 | and domain names, determine whether the string contains IP ranges | ||
846 | 1478 | or wildcard domains. | ||
847 | 1479 | |||
848 | 1481 | :param addresses: comma separated list of domains and ip addresses. | ||
849 | 1481 | :type addresses: str | ||
850 | 1482 | """ | ||
851 | 1483 | return ( | ||
852 | 1484 | # Test for cidr (e.g. 10.20.20.0/24) | ||
853 | 1485 | "/" in addresses or | ||
854 | 1486 | # Test for wildcard domains (*.foo.com or .foo.com) | ||
855 | 1487 | "*" in addresses or | ||
856 | 1488 | addresses.startswith(".") or | ||
857 | 1489 | ",." in addresses or | ||
858 | 1490 | " ." in addresses) | ||
859 | 1069 | 1491 | ||
860 | === modified file 'charmhelpers/core/host.py' | |||
861 | --- charmhelpers/core/host.py 2017-04-11 18:01:45 +0000 | |||
862 | +++ charmhelpers/core/host.py 2019-05-24 12:43:31 +0000 | |||
863 | @@ -34,21 +34,23 @@ | |||
864 | 34 | 34 | ||
865 | 35 | from contextlib import contextmanager | 35 | from contextlib import contextmanager |
866 | 36 | from collections import OrderedDict | 36 | from collections import OrderedDict |
868 | 37 | from .hookenv import log | 37 | from .hookenv import log, INFO, DEBUG, local_unit, charm_name |
869 | 38 | from .fstab import Fstab | 38 | from .fstab import Fstab |
870 | 39 | from charmhelpers.osplatform import get_platform | 39 | from charmhelpers.osplatform import get_platform |
871 | 40 | 40 | ||
872 | 41 | __platform__ = get_platform() | 41 | __platform__ = get_platform() |
873 | 42 | if __platform__ == "ubuntu": | 42 | if __platform__ == "ubuntu": |
875 | 43 | from charmhelpers.core.host_factory.ubuntu import ( | 43 | from charmhelpers.core.host_factory.ubuntu import ( # NOQA:F401 |
876 | 44 | service_available, | 44 | service_available, |
877 | 45 | add_new_group, | 45 | add_new_group, |
878 | 46 | lsb_release, | 46 | lsb_release, |
879 | 47 | cmp_pkgrevno, | 47 | cmp_pkgrevno, |
880 | 48 | CompareHostReleases, | 48 | CompareHostReleases, |
881 | 49 | get_distrib_codename, | ||
882 | 50 | arch | ||
883 | 49 | ) # flake8: noqa -- ignore F401 for this import | 51 | ) # flake8: noqa -- ignore F401 for this import |
884 | 50 | elif __platform__ == "centos": | 52 | elif __platform__ == "centos": |
886 | 51 | from charmhelpers.core.host_factory.centos import ( | 53 | from charmhelpers.core.host_factory.centos import ( # NOQA:F401 |
887 | 52 | service_available, | 54 | service_available, |
888 | 53 | add_new_group, | 55 | add_new_group, |
889 | 54 | lsb_release, | 56 | lsb_release, |
890 | @@ -58,6 +60,7 @@ | |||
891 | 58 | 60 | ||
892 | 59 | UPDATEDB_PATH = '/etc/updatedb.conf' | 61 | UPDATEDB_PATH = '/etc/updatedb.conf' |
893 | 60 | 62 | ||
894 | 63 | |||
895 | 61 | def service_start(service_name, **kwargs): | 64 | def service_start(service_name, **kwargs): |
896 | 62 | """Start a system service. | 65 | """Start a system service. |
897 | 63 | 66 | ||
898 | @@ -191,6 +194,7 @@ | |||
899 | 191 | upstart_file = os.path.join(init_dir, "{}.conf".format(service_name)) | 194 | upstart_file = os.path.join(init_dir, "{}.conf".format(service_name)) |
900 | 192 | sysv_file = os.path.join(initd_dir, service_name) | 195 | sysv_file = os.path.join(initd_dir, service_name) |
901 | 193 | if init_is_systemd(): | 196 | if init_is_systemd(): |
902 | 197 | service('disable', service_name) | ||
903 | 194 | service('mask', service_name) | 198 | service('mask', service_name) |
904 | 195 | elif os.path.exists(upstart_file): | 199 | elif os.path.exists(upstart_file): |
905 | 196 | override_path = os.path.join( | 200 | override_path = os.path.join( |
906 | @@ -225,6 +229,7 @@ | |||
907 | 225 | sysv_file = os.path.join(initd_dir, service_name) | 229 | sysv_file = os.path.join(initd_dir, service_name) |
908 | 226 | if init_is_systemd(): | 230 | if init_is_systemd(): |
909 | 227 | service('unmask', service_name) | 231 | service('unmask', service_name) |
910 | 232 | service('enable', service_name) | ||
911 | 228 | elif os.path.exists(upstart_file): | 233 | elif os.path.exists(upstart_file): |
912 | 229 | override_path = os.path.join( | 234 | override_path = os.path.join( |
913 | 230 | init_dir, '{}.override'.format(service_name)) | 235 | init_dir, '{}.override'.format(service_name)) |
914 | @@ -285,8 +290,8 @@ | |||
915 | 285 | for key, value in six.iteritems(kwargs): | 290 | for key, value in six.iteritems(kwargs): |
916 | 286 | parameter = '%s=%s' % (key, value) | 291 | parameter = '%s=%s' % (key, value) |
917 | 287 | cmd.append(parameter) | 292 | cmd.append(parameter) |
920 | 288 | output = subprocess.check_output(cmd, | 293 | output = subprocess.check_output( |
921 | 289 | stderr=subprocess.STDOUT).decode('UTF-8') | 294 | cmd, stderr=subprocess.STDOUT).decode('UTF-8') |
922 | 290 | except subprocess.CalledProcessError: | 295 | except subprocess.CalledProcessError: |
923 | 291 | return False | 296 | return False |
924 | 292 | else: | 297 | else: |
925 | @@ -439,6 +444,51 @@ | |||
926 | 439 | subprocess.check_call(cmd) | 444 | subprocess.check_call(cmd) |
927 | 440 | 445 | ||
928 | 441 | 446 | ||
929 | 447 | def chage(username, lastday=None, expiredate=None, inactive=None, | ||
930 | 448 | mindays=None, maxdays=None, root=None, warndays=None): | ||
931 | 449 | """Change user password expiry information | ||
932 | 450 | |||
933 | 451 | :param str username: User to update | ||
934 | 452 | :param str lastday: Set when password was changed in YYYY-MM-DD format | ||
935 | 453 | :param str expiredate: Set when user's account will no longer be | ||
936 | 454 | accessible in YYYY-MM-DD format. | ||
937 | 455 | -1 will remove an account expiration date. | ||
938 | 456 | :param str inactive: Set the number of days of inactivity after a password | ||
939 | 457 | has expired before the account is locked. | ||
940 | 458 | -1 will remove an account's inactivity. | ||
941 | 459 | :param str mindays: Set the minimum number of days between password | ||
942 | 460 | changes to MIN_DAYS. | ||
943 | 461 | 0 indicates the password can be changed anytime. | ||
944 | 462 | :param str maxdays: Set the maximum number of days during which a | ||
945 | 463 | password is valid. | ||
946 | 464 | -1 as MAX_DAYS will remove checking maxdays | ||
947 | 465 | :param str root: Apply changes in the CHROOT_DIR directory | ||
948 | 466 | :param str warndays: Set the number of days of warning before a password | ||
949 | 467 | change is required | ||
950 | 468 | :raises subprocess.CalledProcessError: if call to chage fails | ||
951 | 469 | """ | ||
952 | 470 | cmd = ['chage'] | ||
953 | 471 | if root: | ||
954 | 472 | cmd.extend(['--root', root]) | ||
955 | 473 | if lastday: | ||
956 | 474 | cmd.extend(['--lastday', lastday]) | ||
957 | 475 | if expiredate: | ||
958 | 476 | cmd.extend(['--expiredate', expiredate]) | ||
959 | 477 | if inactive: | ||
960 | 478 | cmd.extend(['--inactive', inactive]) | ||
961 | 479 | if mindays: | ||
962 | 480 | cmd.extend(['--mindays', mindays]) | ||
963 | 481 | if maxdays: | ||
964 | 482 | cmd.extend(['--maxdays', maxdays]) | ||
965 | 483 | if warndays: | ||
966 | 484 | cmd.extend(['--warndays', warndays]) | ||
967 | 485 | cmd.append(username) | ||
968 | 486 | subprocess.check_call(cmd) | ||
969 | 487 | |||
970 | 488 | |||
971 | 489 | remove_password_expiry = functools.partial(chage, expiredate='-1', inactive='-1', mindays='0', maxdays='-1') | ||
972 | 490 | |||
973 | 491 | |||
974 | 442 | def rsync(from_path, to_path, flags='-r', options=None, timeout=None): | 492 | def rsync(from_path, to_path, flags='-r', options=None, timeout=None): |
975 | 443 | """Replicate the contents of a path""" | 493 | """Replicate the contents of a path""" |
976 | 444 | options = options or ['--delete', '--executability'] | 494 | options = options or ['--delete', '--executability'] |
977 | @@ -485,13 +535,45 @@ | |||
978 | 485 | 535 | ||
979 | 486 | def write_file(path, content, owner='root', group='root', perms=0o444): | 536 | def write_file(path, content, owner='root', group='root', perms=0o444): |
980 | 487 | """Create or overwrite a file with the contents of a byte string.""" | 537 | """Create or overwrite a file with the contents of a byte string.""" |
981 | 488 | log("Writing file {} {}:{} {:o}".format(path, owner, group, perms)) | ||
982 | 489 | uid = pwd.getpwnam(owner).pw_uid | 538 | uid = pwd.getpwnam(owner).pw_uid |
983 | 490 | gid = grp.getgrnam(group).gr_gid | 539 | gid = grp.getgrnam(group).gr_gid |
988 | 491 | with open(path, 'wb') as target: | 540 | # lets see if we can grab the file and compare the context, to avoid doing |
989 | 492 | os.fchown(target.fileno(), uid, gid) | 541 | # a write. |
990 | 493 | os.fchmod(target.fileno(), perms) | 542 | existing_content = None |
991 | 494 | target.write(content) | 543 | existing_uid, existing_gid, existing_perms = None, None, None |
992 | 544 | try: | ||
993 | 545 | with open(path, 'rb') as target: | ||
994 | 546 | existing_content = target.read() | ||
995 | 547 | stat = os.stat(path) | ||
996 | 548 | existing_uid, existing_gid, existing_perms = ( | ||
997 | 549 | stat.st_uid, stat.st_gid, stat.st_mode | ||
998 | 550 | ) | ||
999 | 551 | except Exception: | ||
1000 | 552 | pass | ||
1001 | 553 | if content != existing_content: | ||
1002 | 554 | log("Writing file {} {}:{} {:o}".format(path, owner, group, perms), | ||
1003 | 555 | level=DEBUG) | ||
1004 | 556 | with open(path, 'wb') as target: | ||
1005 | 557 | os.fchown(target.fileno(), uid, gid) | ||
1006 | 558 | os.fchmod(target.fileno(), perms) | ||
1007 | 559 | if six.PY3 and isinstance(content, six.string_types): | ||
1008 | 560 | content = content.encode('UTF-8') | ||
1009 | 561 | target.write(content) | ||
1010 | 562 | return | ||
1011 | 563 | # the contents were the same, but we might still need to change the | ||
1012 | 564 | # ownership or permissions. | ||
1013 | 565 | if existing_uid != uid: | ||
1014 | 566 | log("Changing uid on already existing content: {} -> {}" | ||
1015 | 567 | .format(existing_uid, uid), level=DEBUG) | ||
1016 | 568 | os.chown(path, uid, -1) | ||
1017 | 569 | if existing_gid != gid: | ||
1018 | 570 | log("Changing gid on already existing content: {} -> {}" | ||
1019 | 571 | .format(existing_gid, gid), level=DEBUG) | ||
1020 | 572 | os.chown(path, -1, gid) | ||
1021 | 573 | if existing_perms != perms: | ||
1022 | 574 | log("Changing permissions on existing content: {} -> {}" | ||
1023 | 575 | .format(existing_perms, perms), level=DEBUG) | ||
1024 | 576 | os.chmod(path, perms) | ||
1025 | 495 | 577 | ||
1026 | 496 | 578 | ||
1027 | 497 | def fstab_remove(mp): | 579 | def fstab_remove(mp): |
1028 | @@ -756,7 +838,7 @@ | |||
1029 | 756 | ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n') | 838 | ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n') |
1030 | 757 | ip_output = (line.strip() for line in ip_output if line) | 839 | ip_output = (line.strip() for line in ip_output if line) |
1031 | 758 | 840 | ||
1033 | 759 | key = re.compile('^[0-9]+:\s+(.+):') | 841 | key = re.compile(r'^[0-9]+:\s+(.+):') |
1034 | 760 | for line in ip_output: | 842 | for line in ip_output: |
1035 | 761 | matched = re.search(key, line) | 843 | matched = re.search(key, line) |
1036 | 762 | if matched: | 844 | if matched: |
1037 | @@ -901,6 +983,20 @@ | |||
1038 | 901 | 983 | ||
1039 | 902 | 984 | ||
1040 | 903 | def add_to_updatedb_prunepath(path, updatedb_path=UPDATEDB_PATH): | 985 | def add_to_updatedb_prunepath(path, updatedb_path=UPDATEDB_PATH): |
1041 | 986 | """Adds the specified path to the mlocate's updatedb.conf PRUNEPATH list. | ||
1042 | 987 | |||
1043 | 988 | This method has no effect if the path specified by updatedb_path does not | ||
1044 | 989 | exist or is not a file. | ||
1045 | 990 | |||
1046 | 991 | @param path: string the path to add to the updatedb.conf PRUNEPATHS value | ||
1047 | 992 | @param updatedb_path: the path to the updatedb.conf file | ||
1048 | 993 | """ | ||
1049 | 994 | if not os.path.exists(updatedb_path) or os.path.isdir(updatedb_path): | ||
1050 | 995 | # If the updatedb.conf file doesn't exist then don't attempt to update | ||
1051 | 996 | # the file as the package providing mlocate may not be installed on | ||
1052 | 997 | # the local system | ||
1053 | 998 | return | ||
1054 | 999 | |||
1055 | 904 | with open(updatedb_path, 'r+') as f_id: | 1000 | with open(updatedb_path, 'r+') as f_id: |
1056 | 905 | updatedb_text = f_id.read() | 1001 | updatedb_text = f_id.read() |
1057 | 906 | output = updatedb(updatedb_text, path) | 1002 | output = updatedb(updatedb_text, path) |
1058 | @@ -920,3 +1016,62 @@ | |||
1059 | 920 | lines[i] = 'PRUNEPATHS="{}"'.format(' '.join(paths)) | 1016 | lines[i] = 'PRUNEPATHS="{}"'.format(' '.join(paths)) |
1060 | 921 | output = "\n".join(lines) | 1017 | output = "\n".join(lines) |
1061 | 922 | return output | 1018 | return output |
1062 | 1019 | |||
1063 | 1020 | |||
1064 | 1021 | def modulo_distribution(modulo=3, wait=30, non_zero_wait=False): | ||
1065 | 1022 | """ Modulo distribution | ||
1066 | 1023 | |||
1067 | 1024 | This helper uses the unit number, a modulo value and a constant wait time | ||
1068 | 1025 | to produce a calculated wait time distribution. This is useful in large | ||
1069 | 1026 | scale deployments to distribute load during an expensive operation such as | ||
1070 | 1027 | service restarts. | ||
1071 | 1028 | |||
1072 | 1029 | If you have 1000 nodes that need to restart 100 at a time 1 minute at a | ||
1073 | 1030 | time: | ||
1074 | 1031 | |||
1075 | 1032 | time.wait(modulo_distribution(modulo=100, wait=60)) | ||
1076 | 1033 | restart() | ||
1077 | 1034 | |||
1078 | 1035 | If you need restarts to happen serially set modulo to the exact number of | ||
1079 | 1036 | nodes and set a high constant wait time: | ||
1080 | 1037 | |||
1081 | 1038 | time.wait(modulo_distribution(modulo=10, wait=120)) | ||
1082 | 1039 | restart() | ||
1083 | 1040 | |||
1084 | 1041 | @param modulo: int The modulo number creates the group distribution | ||
1085 | 1042 | @param wait: int The constant time wait value | ||
1086 | 1043 | @param non_zero_wait: boolean Override unit % modulo == 0, | ||
1087 | 1044 | return modulo * wait. Used to avoid collisions with | ||
1088 | 1045 | leader nodes which are often given priority. | ||
1089 | 1046 | @return: int Calculated time to wait for unit operation | ||
1090 | 1047 | """ | ||
1091 | 1048 | unit_number = int(local_unit().split('/')[1]) | ||
1092 | 1049 | calculated_wait_time = (unit_number % modulo) * wait | ||
1093 | 1050 | if non_zero_wait and calculated_wait_time == 0: | ||
1094 | 1051 | return modulo * wait | ||
1095 | 1052 | else: | ||
1096 | 1053 | return calculated_wait_time | ||
1097 | 1054 | |||
1098 | 1055 | |||
1099 | 1056 | def install_ca_cert(ca_cert, name=None): | ||
1100 | 1057 | """ | ||
1101 | 1058 | Install the given cert as a trusted CA. | ||
1102 | 1059 | |||
1103 | 1060 | The ``name`` is the stem of the filename where the cert is written, and if | ||
1104 | 1061 | not provided, it will default to ``juju-{charm_name}``. | ||
1105 | 1062 | |||
1106 | 1063 | If the cert is empty or None, or is unchanged, nothing is done. | ||
1107 | 1064 | """ | ||
1108 | 1065 | if not ca_cert: | ||
1109 | 1066 | return | ||
1110 | 1067 | if not isinstance(ca_cert, bytes): | ||
1111 | 1068 | ca_cert = ca_cert.encode('utf8') | ||
1112 | 1069 | if not name: | ||
1113 | 1070 | name = 'juju-{}'.format(charm_name()) | ||
1114 | 1071 | cert_file = '/usr/local/share/ca-certificates/{}.crt'.format(name) | ||
1115 | 1072 | new_hash = hashlib.md5(ca_cert).hexdigest() | ||
1116 | 1073 | if file_hash(cert_file) == new_hash: | ||
1117 | 1074 | return | ||
1118 | 1075 | log("Installing new CA cert at: {}".format(cert_file), level=INFO) | ||
1119 | 1076 | write_file(cert_file, ca_cert) | ||
1120 | 1077 | subprocess.check_call(['update-ca-certificates', '--fresh']) | ||
1121 | 923 | 1078 | ||
1122 | === modified file 'charmhelpers/core/host_factory/ubuntu.py' | |||
1123 | --- charmhelpers/core/host_factory/ubuntu.py 2017-04-11 18:01:45 +0000 | |||
1124 | +++ charmhelpers/core/host_factory/ubuntu.py 2019-05-24 12:43:31 +0000 | |||
1125 | @@ -1,5 +1,6 @@ | |||
1126 | 1 | import subprocess | 1 | import subprocess |
1127 | 2 | 2 | ||
1128 | 3 | from charmhelpers.core.hookenv import cached | ||
1129 | 3 | from charmhelpers.core.strutils import BasicStringComparator | 4 | from charmhelpers.core.strutils import BasicStringComparator |
1130 | 4 | 5 | ||
1131 | 5 | 6 | ||
1132 | @@ -19,6 +20,10 @@ | |||
1133 | 19 | 'xenial', | 20 | 'xenial', |
1134 | 20 | 'yakkety', | 21 | 'yakkety', |
1135 | 21 | 'zesty', | 22 | 'zesty', |
1136 | 23 | 'artful', | ||
1137 | 24 | 'bionic', | ||
1138 | 25 | 'cosmic', | ||
1139 | 26 | 'disco', | ||
1140 | 22 | ) | 27 | ) |
1141 | 23 | 28 | ||
1142 | 24 | 29 | ||
1143 | @@ -69,6 +74,14 @@ | |||
1144 | 69 | return d | 74 | return d |
1145 | 70 | 75 | ||
1146 | 71 | 76 | ||
1147 | 77 | def get_distrib_codename(): | ||
1148 | 78 | """Return the codename of the distribution | ||
1149 | 79 | :returns: The codename | ||
1150 | 80 | :rtype: str | ||
1151 | 81 | """ | ||
1152 | 82 | return lsb_release()['DISTRIB_CODENAME'].lower() | ||
1153 | 83 | |||
1154 | 84 | |||
1155 | 72 | def cmp_pkgrevno(package, revno, pkgcache=None): | 85 | def cmp_pkgrevno(package, revno, pkgcache=None): |
1156 | 73 | """Compare supplied revno with the revno of the installed package. | 86 | """Compare supplied revno with the revno of the installed package. |
1157 | 74 | 87 | ||
1158 | @@ -86,3 +99,16 @@ | |||
1159 | 86 | pkgcache = apt_cache() | 99 | pkgcache = apt_cache() |
1160 | 87 | pkg = pkgcache[package] | 100 | pkg = pkgcache[package] |
1161 | 88 | return apt_pkg.version_compare(pkg.current_ver.ver_str, revno) | 101 | return apt_pkg.version_compare(pkg.current_ver.ver_str, revno) |
1162 | 102 | |||
1163 | 103 | |||
1164 | 104 | @cached | ||
1165 | 105 | def arch(): | ||
1166 | 106 | """Return the package architecture as a string. | ||
1167 | 107 | |||
1168 | 108 | :returns: the architecture | ||
1169 | 109 | :rtype: str | ||
1170 | 110 | :raises: subprocess.CalledProcessError if dpkg command fails | ||
1171 | 111 | """ | ||
1172 | 112 | return subprocess.check_output( | ||
1173 | 113 | ['dpkg', '--print-architecture'] | ||
1174 | 114 | ).rstrip().decode('UTF-8') | ||
1175 | 89 | 115 | ||
1176 | === modified file 'charmhelpers/core/kernel.py' | |||
1177 | --- charmhelpers/core/kernel.py 2017-03-03 21:03:14 +0000 | |||
1178 | +++ charmhelpers/core/kernel.py 2019-05-24 12:43:31 +0000 | |||
1179 | @@ -26,12 +26,12 @@ | |||
1180 | 26 | 26 | ||
1181 | 27 | __platform__ = get_platform() | 27 | __platform__ = get_platform() |
1182 | 28 | if __platform__ == "ubuntu": | 28 | if __platform__ == "ubuntu": |
1184 | 29 | from charmhelpers.core.kernel_factory.ubuntu import ( | 29 | from charmhelpers.core.kernel_factory.ubuntu import ( # NOQA:F401 |
1185 | 30 | persistent_modprobe, | 30 | persistent_modprobe, |
1186 | 31 | update_initramfs, | 31 | update_initramfs, |
1187 | 32 | ) # flake8: noqa -- ignore F401 for this import | 32 | ) # flake8: noqa -- ignore F401 for this import |
1188 | 33 | elif __platform__ == "centos": | 33 | elif __platform__ == "centos": |
1190 | 34 | from charmhelpers.core.kernel_factory.centos import ( | 34 | from charmhelpers.core.kernel_factory.centos import ( # NOQA:F401 |
1191 | 35 | persistent_modprobe, | 35 | persistent_modprobe, |
1192 | 36 | update_initramfs, | 36 | update_initramfs, |
1193 | 37 | ) # flake8: noqa -- ignore F401 for this import | 37 | ) # flake8: noqa -- ignore F401 for this import |
1194 | 38 | 38 | ||
1195 | === modified file 'charmhelpers/core/services/base.py' | |||
1196 | --- charmhelpers/core/services/base.py 2017-03-03 21:03:14 +0000 | |||
1197 | +++ charmhelpers/core/services/base.py 2019-05-24 12:43:31 +0000 | |||
1198 | @@ -307,23 +307,34 @@ | |||
1199 | 307 | """ | 307 | """ |
1200 | 308 | def __call__(self, manager, service_name, event_name): | 308 | def __call__(self, manager, service_name, event_name): |
1201 | 309 | service = manager.get_service(service_name) | 309 | service = manager.get_service(service_name) |
1203 | 310 | new_ports = service.get('ports', []) | 310 | # turn this generator into a list, |
1204 | 311 | # as we'll be going over it multiple times | ||
1205 | 312 | new_ports = list(service.get('ports', [])) | ||
1206 | 311 | port_file = os.path.join(hookenv.charm_dir(), '.{}.ports'.format(service_name)) | 313 | port_file = os.path.join(hookenv.charm_dir(), '.{}.ports'.format(service_name)) |
1207 | 312 | if os.path.exists(port_file): | 314 | if os.path.exists(port_file): |
1208 | 313 | with open(port_file) as fp: | 315 | with open(port_file) as fp: |
1209 | 314 | old_ports = fp.read().split(',') | 316 | old_ports = fp.read().split(',') |
1210 | 315 | for old_port in old_ports: | 317 | for old_port in old_ports: |
1215 | 316 | if bool(old_port): | 318 | if bool(old_port) and not self.ports_contains(old_port, new_ports): |
1216 | 317 | old_port = int(old_port) | 319 | hookenv.close_port(old_port) |
1213 | 318 | if old_port not in new_ports: | ||
1214 | 319 | hookenv.close_port(old_port) | ||
1217 | 320 | with open(port_file, 'w') as fp: | 320 | with open(port_file, 'w') as fp: |
1218 | 321 | fp.write(','.join(str(port) for port in new_ports)) | 321 | fp.write(','.join(str(port) for port in new_ports)) |
1219 | 322 | for port in new_ports: | 322 | for port in new_ports: |
1220 | 323 | # A port is either a number or 'ICMP' | ||
1221 | 324 | protocol = 'TCP' | ||
1222 | 325 | if str(port).upper() == 'ICMP': | ||
1223 | 326 | protocol = 'ICMP' | ||
1224 | 323 | if event_name == 'start': | 327 | if event_name == 'start': |
1226 | 324 | hookenv.open_port(port) | 328 | hookenv.open_port(port, protocol) |
1227 | 325 | elif event_name == 'stop': | 329 | elif event_name == 'stop': |
1229 | 326 | hookenv.close_port(port) | 330 | hookenv.close_port(port, protocol) |
1230 | 331 | |||
1231 | 332 | def ports_contains(self, port, ports): | ||
1232 | 333 | if not bool(port): | ||
1233 | 334 | return False | ||
1234 | 335 | if str(port).upper() != 'ICMP': | ||
1235 | 336 | port = int(port) | ||
1236 | 337 | return port in ports | ||
1237 | 327 | 338 | ||
1238 | 328 | 339 | ||
1239 | 329 | def service_stop(service_name): | 340 | def service_stop(service_name): |
1240 | 330 | 341 | ||
1241 | === modified file 'charmhelpers/core/strutils.py' | |||
1242 | --- charmhelpers/core/strutils.py 2017-04-11 18:01:45 +0000 | |||
1243 | +++ charmhelpers/core/strutils.py 2019-05-24 12:43:31 +0000 | |||
1244 | @@ -61,13 +61,19 @@ | |||
1245 | 61 | if isinstance(value, six.string_types): | 61 | if isinstance(value, six.string_types): |
1246 | 62 | value = six.text_type(value) | 62 | value = six.text_type(value) |
1247 | 63 | else: | 63 | else: |
1249 | 64 | msg = "Unable to interpret non-string value '%s' as boolean" % (value) | 64 | msg = "Unable to interpret non-string value '%s' as bytes" % (value) |
1250 | 65 | raise ValueError(msg) | 65 | raise ValueError(msg) |
1251 | 66 | matches = re.match("([0-9]+)([a-zA-Z]+)", value) | 66 | matches = re.match("([0-9]+)([a-zA-Z]+)", value) |
1256 | 67 | if not matches: | 67 | if matches: |
1257 | 68 | msg = "Unable to interpret string value '%s' as bytes" % (value) | 68 | size = int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)]) |
1258 | 69 | raise ValueError(msg) | 69 | else: |
1259 | 70 | return int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)]) | 70 | # Assume that value passed in is bytes |
1260 | 71 | try: | ||
1261 | 72 | size = int(value) | ||
1262 | 73 | except ValueError: | ||
1263 | 74 | msg = "Unable to interpret string value '%s' as bytes" % (value) | ||
1264 | 75 | raise ValueError(msg) | ||
1265 | 76 | return size | ||
1266 | 71 | 77 | ||
1267 | 72 | 78 | ||
1268 | 73 | class BasicStringComparator(object): | 79 | class BasicStringComparator(object): |
1269 | 74 | 80 | ||
1270 | === modified file 'charmhelpers/core/sysctl.py' | |||
1271 | --- charmhelpers/core/sysctl.py 2017-03-03 21:03:14 +0000 | |||
1272 | +++ charmhelpers/core/sysctl.py 2019-05-24 12:43:31 +0000 | |||
1273 | @@ -28,27 +28,38 @@ | |||
1274 | 28 | __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>' | 28 | __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>' |
1275 | 29 | 29 | ||
1276 | 30 | 30 | ||
1278 | 31 | def create(sysctl_dict, sysctl_file): | 31 | def create(sysctl_dict, sysctl_file, ignore=False): |
1279 | 32 | """Creates a sysctl.conf file from a YAML associative array | 32 | """Creates a sysctl.conf file from a YAML associative array |
1280 | 33 | 33 | ||
1282 | 34 | :param sysctl_dict: a YAML-formatted string of sysctl options eg "{ 'kernel.max_pid': 1337 }" | 34 | :param sysctl_dict: a dict or YAML-formatted string of sysctl |
1283 | 35 | options eg "{ 'kernel.max_pid': 1337 }" | ||
1284 | 35 | :type sysctl_dict: str | 36 | :type sysctl_dict: str |
1285 | 36 | :param sysctl_file: path to the sysctl file to be saved | 37 | :param sysctl_file: path to the sysctl file to be saved |
1286 | 37 | :type sysctl_file: str or unicode | 38 | :type sysctl_file: str or unicode |
1287 | 39 | :param ignore: If True, ignore "unknown variable" errors. | ||
1288 | 40 | :type ignore: bool | ||
1289 | 38 | :returns: None | 41 | :returns: None |
1290 | 39 | """ | 42 | """ |
1297 | 40 | try: | 43 | if type(sysctl_dict) is not dict: |
1298 | 41 | sysctl_dict_parsed = yaml.safe_load(sysctl_dict) | 44 | try: |
1299 | 42 | except yaml.YAMLError: | 45 | sysctl_dict_parsed = yaml.safe_load(sysctl_dict) |
1300 | 43 | log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict), | 46 | except yaml.YAMLError: |
1301 | 44 | level=ERROR) | 47 | log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict), |
1302 | 45 | return | 48 | level=ERROR) |
1303 | 49 | return | ||
1304 | 50 | else: | ||
1305 | 51 | sysctl_dict_parsed = sysctl_dict | ||
1306 | 46 | 52 | ||
1307 | 47 | with open(sysctl_file, "w") as fd: | 53 | with open(sysctl_file, "w") as fd: |
1308 | 48 | for key, value in sysctl_dict_parsed.items(): | 54 | for key, value in sysctl_dict_parsed.items(): |
1309 | 49 | fd.write("{}={}\n".format(key, value)) | 55 | fd.write("{}={}\n".format(key, value)) |
1310 | 50 | 56 | ||
1312 | 51 | log("Updating sysctl_file: %s values: %s" % (sysctl_file, sysctl_dict_parsed), | 57 | log("Updating sysctl_file: {} values: {}".format(sysctl_file, |
1313 | 58 | sysctl_dict_parsed), | ||
1314 | 52 | level=DEBUG) | 59 | level=DEBUG) |
1315 | 53 | 60 | ||
1317 | 54 | check_call(["sysctl", "-p", sysctl_file]) | 61 | call = ["sysctl", "-p", sysctl_file] |
1318 | 62 | if ignore: | ||
1319 | 63 | call.append("-e") | ||
1320 | 64 | |||
1321 | 65 | check_call(call) | ||
1322 | 55 | 66 | ||
1323 | === modified file 'charmhelpers/core/templating.py' | |||
1324 | --- charmhelpers/core/templating.py 2017-03-03 21:03:14 +0000 | |||
1325 | +++ charmhelpers/core/templating.py 2019-05-24 12:43:31 +0000 | |||
1326 | @@ -20,7 +20,8 @@ | |||
1327 | 20 | 20 | ||
1328 | 21 | 21 | ||
1329 | 22 | def render(source, target, context, owner='root', group='root', | 22 | def render(source, target, context, owner='root', group='root', |
1331 | 23 | perms=0o444, templates_dir=None, encoding='UTF-8', template_loader=None): | 23 | perms=0o444, templates_dir=None, encoding='UTF-8', |
1332 | 24 | template_loader=None, config_template=None): | ||
1333 | 24 | """ | 25 | """ |
1334 | 25 | Render a template. | 26 | Render a template. |
1335 | 26 | 27 | ||
1336 | @@ -32,6 +33,9 @@ | |||
1337 | 32 | The context should be a dict containing the values to be replaced in the | 33 | The context should be a dict containing the values to be replaced in the |
1338 | 33 | template. | 34 | template. |
1339 | 34 | 35 | ||
1340 | 36 | config_template may be provided to render from a provided template instead | ||
1341 | 37 | of loading from a file. | ||
1342 | 38 | |||
1343 | 35 | The `owner`, `group`, and `perms` options will be passed to `write_file`. | 39 | The `owner`, `group`, and `perms` options will be passed to `write_file`. |
1344 | 36 | 40 | ||
1345 | 37 | If omitted, `templates_dir` defaults to the `templates` folder in the charm. | 41 | If omitted, `templates_dir` defaults to the `templates` folder in the charm. |
1346 | @@ -65,14 +69,19 @@ | |||
1347 | 65 | if templates_dir is None: | 69 | if templates_dir is None: |
1348 | 66 | templates_dir = os.path.join(hookenv.charm_dir(), 'templates') | 70 | templates_dir = os.path.join(hookenv.charm_dir(), 'templates') |
1349 | 67 | template_env = Environment(loader=FileSystemLoader(templates_dir)) | 71 | template_env = Environment(loader=FileSystemLoader(templates_dir)) |
1358 | 68 | try: | 72 | |
1359 | 69 | source = source | 73 | # load from a string if provided explicitly |
1360 | 70 | template = template_env.get_template(source) | 74 | if config_template is not None: |
1361 | 71 | except exceptions.TemplateNotFound as e: | 75 | template = template_env.from_string(config_template) |
1362 | 72 | hookenv.log('Could not load template %s from %s.' % | 76 | else: |
1363 | 73 | (source, templates_dir), | 77 | try: |
1364 | 74 | level=hookenv.ERROR) | 78 | source = source |
1365 | 75 | raise e | 79 | template = template_env.get_template(source) |
1366 | 80 | except exceptions.TemplateNotFound as e: | ||
1367 | 81 | hookenv.log('Could not load template %s from %s.' % | ||
1368 | 82 | (source, templates_dir), | ||
1369 | 83 | level=hookenv.ERROR) | ||
1370 | 84 | raise e | ||
1371 | 76 | content = template.render(context) | 85 | content = template.render(context) |
1372 | 77 | if target is not None: | 86 | if target is not None: |
1373 | 78 | target_dir = os.path.dirname(target) | 87 | target_dir = os.path.dirname(target) |
1374 | 79 | 88 | ||
1375 | === modified file 'charmhelpers/core/unitdata.py' | |||
1376 | --- charmhelpers/core/unitdata.py 2017-03-03 21:03:14 +0000 | |||
1377 | +++ charmhelpers/core/unitdata.py 2019-05-24 12:43:31 +0000 | |||
1378 | @@ -166,6 +166,10 @@ | |||
1379 | 166 | 166 | ||
1380 | 167 | To support dicts, lists, integer, floats, and booleans values | 167 | To support dicts, lists, integer, floats, and booleans values |
1381 | 168 | are automatically json encoded/decoded. | 168 | are automatically json encoded/decoded. |
1382 | 169 | |||
1383 | 170 | Note: to facilitate unit testing, ':memory:' can be passed as the | ||
1384 | 171 | path parameter which causes sqlite3 to only build the db in memory. | ||
1385 | 172 | This should only be used for testing purposes. | ||
1386 | 169 | """ | 173 | """ |
1387 | 170 | def __init__(self, path=None): | 174 | def __init__(self, path=None): |
1388 | 171 | self.db_path = path | 175 | self.db_path = path |
1389 | @@ -175,6 +179,9 @@ | |||
1390 | 175 | else: | 179 | else: |
1391 | 176 | self.db_path = os.path.join( | 180 | self.db_path = os.path.join( |
1392 | 177 | os.environ.get('CHARM_DIR', ''), '.unit-state.db') | 181 | os.environ.get('CHARM_DIR', ''), '.unit-state.db') |
1393 | 182 | if self.db_path != ':memory:': | ||
1394 | 183 | with open(self.db_path, 'a') as f: | ||
1395 | 184 | os.fchmod(f.fileno(), 0o600) | ||
1396 | 178 | self.conn = sqlite3.connect('%s' % self.db_path) | 185 | self.conn = sqlite3.connect('%s' % self.db_path) |
1397 | 179 | self.cursor = self.conn.cursor() | 186 | self.cursor = self.conn.cursor() |
1398 | 180 | self.revision = None | 187 | self.revision = None |
1399 | @@ -358,7 +365,7 @@ | |||
1400 | 358 | try: | 365 | try: |
1401 | 359 | yield self.revision | 366 | yield self.revision |
1402 | 360 | self.revision = None | 367 | self.revision = None |
1404 | 361 | except: | 368 | except Exception: |
1405 | 362 | self.flush(False) | 369 | self.flush(False) |
1406 | 363 | self.revision = None | 370 | self.revision = None |
1407 | 364 | raise | 371 | raise |
1408 | 365 | 372 | ||
1409 | === modified file 'charmhelpers/fetch/__init__.py' | |||
1410 | --- charmhelpers/fetch/__init__.py 2017-03-03 21:03:14 +0000 | |||
1411 | +++ charmhelpers/fetch/__init__.py 2019-05-24 12:43:31 +0000 | |||
1412 | @@ -48,6 +48,13 @@ | |||
1413 | 48 | pass | 48 | pass |
1414 | 49 | 49 | ||
1415 | 50 | 50 | ||
1416 | 51 | class GPGKeyError(Exception): | ||
1417 | 52 | """Exception occurs when a GPG key cannot be fetched or used. The message | ||
1418 | 53 | indicates what the problem is. | ||
1419 | 54 | """ | ||
1420 | 55 | pass | ||
1421 | 56 | |||
1422 | 57 | |||
1423 | 51 | class BaseFetchHandler(object): | 58 | class BaseFetchHandler(object): |
1424 | 52 | 59 | ||
1425 | 53 | """Base class for FetchHandler implementations in fetch plugins""" | 60 | """Base class for FetchHandler implementations in fetch plugins""" |
1426 | @@ -77,21 +84,24 @@ | |||
1427 | 77 | fetch = importlib.import_module(module) | 84 | fetch = importlib.import_module(module) |
1428 | 78 | 85 | ||
1429 | 79 | filter_installed_packages = fetch.filter_installed_packages | 86 | filter_installed_packages = fetch.filter_installed_packages |
1434 | 80 | install = fetch.install | 87 | filter_missing_packages = fetch.filter_missing_packages |
1435 | 81 | upgrade = fetch.upgrade | 88 | install = fetch.apt_install |
1436 | 82 | update = fetch.update | 89 | upgrade = fetch.apt_upgrade |
1437 | 83 | purge = fetch.purge | 90 | update = _fetch_update = fetch.apt_update |
1438 | 91 | purge = fetch.apt_purge | ||
1439 | 84 | add_source = fetch.add_source | 92 | add_source = fetch.add_source |
1440 | 85 | 93 | ||
1441 | 86 | if __platform__ == "ubuntu": | 94 | if __platform__ == "ubuntu": |
1442 | 87 | apt_cache = fetch.apt_cache | 95 | apt_cache = fetch.apt_cache |
1447 | 88 | apt_install = fetch.install | 96 | apt_install = fetch.apt_install |
1448 | 89 | apt_update = fetch.update | 97 | apt_update = fetch.apt_update |
1449 | 90 | apt_upgrade = fetch.upgrade | 98 | apt_upgrade = fetch.apt_upgrade |
1450 | 91 | apt_purge = fetch.purge | 99 | apt_purge = fetch.apt_purge |
1451 | 100 | apt_autoremove = fetch.apt_autoremove | ||
1452 | 92 | apt_mark = fetch.apt_mark | 101 | apt_mark = fetch.apt_mark |
1453 | 93 | apt_hold = fetch.apt_hold | 102 | apt_hold = fetch.apt_hold |
1454 | 94 | apt_unhold = fetch.apt_unhold | 103 | apt_unhold = fetch.apt_unhold |
1455 | 104 | import_key = fetch.import_key | ||
1456 | 95 | get_upstream_version = fetch.get_upstream_version | 105 | get_upstream_version = fetch.get_upstream_version |
1457 | 96 | elif __platform__ == "centos": | 106 | elif __platform__ == "centos": |
1458 | 97 | yum_search = fetch.yum_search | 107 | yum_search = fetch.yum_search |
1459 | @@ -135,7 +145,7 @@ | |||
1460 | 135 | for source, key in zip(sources, keys): | 145 | for source, key in zip(sources, keys): |
1461 | 136 | add_source(source, key) | 146 | add_source(source, key) |
1462 | 137 | if update: | 147 | if update: |
1464 | 138 | fetch.update(fatal=True) | 148 | _fetch_update(fatal=True) |
1465 | 139 | 149 | ||
1466 | 140 | 150 | ||
1467 | 141 | def install_remote(source, *args, **kwargs): | 151 | def install_remote(source, *args, **kwargs): |
1468 | 142 | 152 | ||
1469 | === modified file 'charmhelpers/fetch/archiveurl.py' | |||
1470 | --- charmhelpers/fetch/archiveurl.py 2017-03-03 21:03:14 +0000 | |||
1471 | +++ charmhelpers/fetch/archiveurl.py 2019-05-24 12:43:31 +0000 | |||
1472 | @@ -89,7 +89,7 @@ | |||
1473 | 89 | :param str source: URL pointing to an archive file. | 89 | :param str source: URL pointing to an archive file. |
1474 | 90 | :param str dest: Local path location to download archive file to. | 90 | :param str dest: Local path location to download archive file to. |
1475 | 91 | """ | 91 | """ |
1477 | 92 | # propogate all exceptions | 92 | # propagate all exceptions |
1478 | 93 | # URLError, OSError, etc | 93 | # URLError, OSError, etc |
1479 | 94 | proto, netloc, path, params, query, fragment = urlparse(source) | 94 | proto, netloc, path, params, query, fragment = urlparse(source) |
1480 | 95 | if proto in ('http', 'https'): | 95 | if proto in ('http', 'https'): |
1481 | 96 | 96 | ||
1482 | === modified file 'charmhelpers/fetch/bzrurl.py' | |||
1483 | --- charmhelpers/fetch/bzrurl.py 2017-03-03 21:03:14 +0000 | |||
1484 | +++ charmhelpers/fetch/bzrurl.py 2019-05-24 12:43:31 +0000 | |||
1485 | @@ -13,7 +13,7 @@ | |||
1486 | 13 | # limitations under the License. | 13 | # limitations under the License. |
1487 | 14 | 14 | ||
1488 | 15 | import os | 15 | import os |
1490 | 16 | from subprocess import check_call | 16 | from subprocess import STDOUT, check_output |
1491 | 17 | from charmhelpers.fetch import ( | 17 | from charmhelpers.fetch import ( |
1492 | 18 | BaseFetchHandler, | 18 | BaseFetchHandler, |
1493 | 19 | UnhandledSource, | 19 | UnhandledSource, |
1494 | @@ -55,7 +55,7 @@ | |||
1495 | 55 | cmd = ['bzr', 'branch'] | 55 | cmd = ['bzr', 'branch'] |
1496 | 56 | cmd += cmd_opts | 56 | cmd += cmd_opts |
1497 | 57 | cmd += [source, dest] | 57 | cmd += [source, dest] |
1499 | 58 | check_call(cmd) | 58 | check_output(cmd, stderr=STDOUT) |
1500 | 59 | 59 | ||
1501 | 60 | def install(self, source, dest=None, revno=None): | 60 | def install(self, source, dest=None, revno=None): |
1502 | 61 | url_parts = self.parse_url(source) | 61 | url_parts = self.parse_url(source) |
1503 | 62 | 62 | ||
1504 | === modified file 'charmhelpers/fetch/centos.py' | |||
1505 | --- charmhelpers/fetch/centos.py 2017-03-03 21:03:14 +0000 | |||
1506 | +++ charmhelpers/fetch/centos.py 2019-05-24 12:43:31 +0000 | |||
1507 | @@ -132,7 +132,7 @@ | |||
1508 | 132 | key_file.write(key) | 132 | key_file.write(key) |
1509 | 133 | key_file.flush() | 133 | key_file.flush() |
1510 | 134 | key_file.seek(0) | 134 | key_file.seek(0) |
1512 | 135 | subprocess.check_call(['rpm', '--import', key_file]) | 135 | subprocess.check_call(['rpm', '--import', key_file.name]) |
1513 | 136 | else: | 136 | else: |
1514 | 137 | subprocess.check_call(['rpm', '--import', key]) | 137 | subprocess.check_call(['rpm', '--import', key]) |
1515 | 138 | 138 | ||
1516 | 139 | 139 | ||
1517 | === modified file 'charmhelpers/fetch/giturl.py' | |||
1518 | --- charmhelpers/fetch/giturl.py 2017-03-03 21:03:14 +0000 | |||
1519 | +++ charmhelpers/fetch/giturl.py 2019-05-24 12:43:31 +0000 | |||
1520 | @@ -13,7 +13,7 @@ | |||
1521 | 13 | # limitations under the License. | 13 | # limitations under the License. |
1522 | 14 | 14 | ||
1523 | 15 | import os | 15 | import os |
1525 | 16 | from subprocess import check_call, CalledProcessError | 16 | from subprocess import check_output, CalledProcessError, STDOUT |
1526 | 17 | from charmhelpers.fetch import ( | 17 | from charmhelpers.fetch import ( |
1527 | 18 | BaseFetchHandler, | 18 | BaseFetchHandler, |
1528 | 19 | UnhandledSource, | 19 | UnhandledSource, |
1529 | @@ -50,7 +50,7 @@ | |||
1530 | 50 | cmd = ['git', 'clone', source, dest, '--branch', branch] | 50 | cmd = ['git', 'clone', source, dest, '--branch', branch] |
1531 | 51 | if depth: | 51 | if depth: |
1532 | 52 | cmd.extend(['--depth', depth]) | 52 | cmd.extend(['--depth', depth]) |
1534 | 53 | check_call(cmd) | 53 | check_output(cmd, stderr=STDOUT) |
1535 | 54 | 54 | ||
1536 | 55 | def install(self, source, branch="master", dest=None, depth=None): | 55 | def install(self, source, branch="master", dest=None, depth=None): |
1537 | 56 | url_parts = self.parse_url(source) | 56 | url_parts = self.parse_url(source) |
1538 | 57 | 57 | ||
1539 | === added directory 'charmhelpers/fetch/python' | |||
1540 | === added file 'charmhelpers/fetch/python/__init__.py' | |||
1541 | --- charmhelpers/fetch/python/__init__.py 1970-01-01 00:00:00 +0000 | |||
1542 | +++ charmhelpers/fetch/python/__init__.py 2019-05-24 12:43:31 +0000 | |||
1543 | @@ -0,0 +1,13 @@ | |||
1544 | 1 | # Copyright 2014-2019 Canonical Limited. | ||
1545 | 2 | # | ||
1546 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
1547 | 4 | # you may not use this file except in compliance with the License. | ||
1548 | 5 | # You may obtain a copy of the License at | ||
1549 | 6 | # | ||
1550 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
1551 | 8 | # | ||
1552 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
1553 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
1554 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
1555 | 12 | # See the License for the specific language governing permissions and | ||
1556 | 13 | # limitations under the License. | ||
1557 | 0 | 14 | ||
1558 | === added file 'charmhelpers/fetch/python/debug.py' | |||
1559 | --- charmhelpers/fetch/python/debug.py 1970-01-01 00:00:00 +0000 | |||
1560 | +++ charmhelpers/fetch/python/debug.py 2019-05-24 12:43:31 +0000 | |||
1561 | @@ -0,0 +1,54 @@ | |||
1562 | 1 | #!/usr/bin/env python | ||
1563 | 2 | # coding: utf-8 | ||
1564 | 3 | |||
1565 | 4 | # Copyright 2014-2015 Canonical Limited. | ||
1566 | 5 | # | ||
1567 | 6 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
1568 | 7 | # you may not use this file except in compliance with the License. | ||
1569 | 8 | # You may obtain a copy of the License at | ||
1570 | 9 | # | ||
1571 | 10 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
1572 | 11 | # | ||
1573 | 12 | # Unless required by applicable law or agreed to in writing, software | ||
1574 | 13 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
1575 | 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
1576 | 15 | # See the License for the specific language governing permissions and | ||
1577 | 16 | # limitations under the License. | ||
1578 | 17 | |||
1579 | 18 | from __future__ import print_function | ||
1580 | 19 | |||
1581 | 20 | import atexit | ||
1582 | 21 | import sys | ||
1583 | 22 | |||
1584 | 23 | from charmhelpers.fetch.python.rpdb import Rpdb | ||
1585 | 24 | from charmhelpers.core.hookenv import ( | ||
1586 | 25 | open_port, | ||
1587 | 26 | close_port, | ||
1588 | 27 | ERROR, | ||
1589 | 28 | log | ||
1590 | 29 | ) | ||
1591 | 30 | |||
1592 | 31 | __author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" | ||
1593 | 32 | |||
1594 | 33 | DEFAULT_ADDR = "0.0.0.0" | ||
1595 | 34 | DEFAULT_PORT = 4444 | ||
1596 | 35 | |||
1597 | 36 | |||
1598 | 37 | def _error(message): | ||
1599 | 38 | log(message, level=ERROR) | ||
1600 | 39 | |||
1601 | 40 | |||
1602 | 41 | def set_trace(addr=DEFAULT_ADDR, port=DEFAULT_PORT): | ||
1603 | 42 | """ | ||
1604 | 43 | Set a trace point using the remote debugger | ||
1605 | 44 | """ | ||
1606 | 45 | atexit.register(close_port, port) | ||
1607 | 46 | try: | ||
1608 | 47 | log("Starting a remote python debugger session on %s:%s" % (addr, | ||
1609 | 48 | port)) | ||
1610 | 49 | open_port(port) | ||
1611 | 50 | debugger = Rpdb(addr=addr, port=port) | ||
1612 | 51 | debugger.set_trace(sys._getframe().f_back) | ||
1613 | 52 | except Exception: | ||
1614 | 53 | _error("Cannot start a remote debug session on %s:%s" % (addr, | ||
1615 | 54 | port)) | ||
1616 | 0 | 55 | ||
1617 | === added file 'charmhelpers/fetch/python/packages.py' | |||
1618 | --- charmhelpers/fetch/python/packages.py 1970-01-01 00:00:00 +0000 | |||
1619 | +++ charmhelpers/fetch/python/packages.py 2019-05-24 12:43:31 +0000 | |||
1620 | @@ -0,0 +1,154 @@ | |||
1621 | 1 | #!/usr/bin/env python | ||
1622 | 2 | # coding: utf-8 | ||
1623 | 3 | |||
1624 | 4 | # Copyright 2014-2015 Canonical Limited. | ||
1625 | 5 | # | ||
1626 | 6 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
1627 | 7 | # you may not use this file except in compliance with the License. | ||
1628 | 8 | # You may obtain a copy of the License at | ||
1629 | 9 | # | ||
1630 | 10 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
1631 | 11 | # | ||
1632 | 12 | # Unless required by applicable law or agreed to in writing, software | ||
1633 | 13 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
1634 | 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
1635 | 15 | # See the License for the specific language governing permissions and | ||
1636 | 16 | # limitations under the License. | ||
1637 | 17 | |||
1638 | 18 | import os | ||
1639 | 19 | import six | ||
1640 | 20 | import subprocess | ||
1641 | 21 | import sys | ||
1642 | 22 | |||
1643 | 23 | from charmhelpers.fetch import apt_install, apt_update | ||
1644 | 24 | from charmhelpers.core.hookenv import charm_dir, log | ||
1645 | 25 | |||
1646 | 26 | __author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" | ||
1647 | 27 | |||
1648 | 28 | |||
1649 | 29 | def pip_execute(*args, **kwargs): | ||
1650 | 30 | """Overridden pip_execute() to stop sys.path being changed. | ||
1651 | 31 | |||
1652 | 32 | The act of importing main from the pip module seems to add wheels | ||
1653 | 33 | from the /usr/share/python-wheels which are installed by various tools. | ||
1654 | 34 | This function ensures that sys.path remains the same after the call is | ||
1655 | 35 | executed. | ||
1656 | 36 | """ | ||
1657 | 37 | try: | ||
1658 | 38 | _path = sys.path | ||
1659 | 39 | try: | ||
1660 | 40 | from pip import main as _pip_execute | ||
1661 | 41 | except ImportError: | ||
1662 | 42 | apt_update() | ||
1663 | 43 | if six.PY2: | ||
1664 | 44 | apt_install('python-pip') | ||
1665 | 45 | else: | ||
1666 | 46 | apt_install('python3-pip') | ||
1667 | 47 | from pip import main as _pip_execute | ||
1668 | 48 | _pip_execute(*args, **kwargs) | ||
1669 | 49 | finally: | ||
1670 | 50 | sys.path = _path | ||
1671 | 51 | |||
1672 | 52 | |||
1673 | 53 | def parse_options(given, available): | ||
1674 | 54 | """Given a set of options, check if available""" | ||
1675 | 55 | for key, value in sorted(given.items()): | ||
1676 | 56 | if not value: | ||
1677 | 57 | continue | ||
1678 | 58 | if key in available: | ||
1679 | 59 | yield "--{0}={1}".format(key, value) | ||
1680 | 60 | |||
1681 | 61 | |||
1682 | 62 | def pip_install_requirements(requirements, constraints=None, **options): | ||
1683 | 63 | """Install a requirements file. | ||
1684 | 64 | |||
1685 | 65 | :param constraints: Path to pip constraints file. | ||
1686 | 66 | http://pip.readthedocs.org/en/stable/user_guide/#constraints-files | ||
1687 | 67 | """ | ||
1688 | 68 | command = ["install"] | ||
1689 | 69 | |||
1690 | 70 | available_options = ('proxy', 'src', 'log', ) | ||
1691 | 71 | for option in parse_options(options, available_options): | ||
1692 | 72 | command.append(option) | ||
1693 | 73 | |||
1694 | 74 | command.append("-r {0}".format(requirements)) | ||
1695 | 75 | if constraints: | ||
1696 | 76 | command.append("-c {0}".format(constraints)) | ||
1697 | 77 | log("Installing from file: {} with constraints {} " | ||
1698 | 78 | "and options: {}".format(requirements, constraints, command)) | ||
1699 | 79 | else: | ||
1700 | 80 | log("Installing from file: {} with options: {}".format(requirements, | ||
1701 | 81 | command)) | ||
1702 | 82 | pip_execute(command) | ||
1703 | 83 | |||
1704 | 84 | |||
1705 | 85 | def pip_install(package, fatal=False, upgrade=False, venv=None, | ||
1706 | 86 | constraints=None, **options): | ||
1707 | 87 | """Install a python package""" | ||
1708 | 88 | if venv: | ||
1709 | 89 | venv_python = os.path.join(venv, 'bin/pip') | ||
1710 | 90 | command = [venv_python, "install"] | ||
1711 | 91 | else: | ||
1712 | 92 | command = ["install"] | ||
1713 | 93 | |||
1714 | 94 | available_options = ('proxy', 'src', 'log', 'index-url', ) | ||
1715 | 95 | for option in parse_options(options, available_options): | ||
1716 | 96 | command.append(option) | ||
1717 | 97 | |||
1718 | 98 | if upgrade: | ||
1719 | 99 | command.append('--upgrade') | ||
1720 | 100 | |||
1721 | 101 | if constraints: | ||
1722 | 102 | command.extend(['-c', constraints]) | ||
1723 | 103 | |||
1724 | 104 | if isinstance(package, list): | ||
1725 | 105 | command.extend(package) | ||
1726 | 106 | else: | ||
1727 | 107 | command.append(package) | ||
1728 | 108 | |||
1729 | 109 | log("Installing {} package with options: {}".format(package, | ||
1730 | 110 | command)) | ||
1731 | 111 | if venv: | ||
1732 | 112 | subprocess.check_call(command) | ||
1733 | 113 | else: | ||
1734 | 114 | pip_execute(command) | ||
1735 | 115 | |||
1736 | 116 | |||
1737 | 117 | def pip_uninstall(package, **options): | ||
1738 | 118 | """Uninstall a python package""" | ||
1739 | 119 | command = ["uninstall", "-q", "-y"] | ||
1740 | 120 | |||
1741 | 121 | available_options = ('proxy', 'log', ) | ||
1742 | 122 | for option in parse_options(options, available_options): | ||
1743 | 123 | command.append(option) | ||
1744 | 124 | |||
1745 | 125 | if isinstance(package, list): | ||
1746 | 126 | command.extend(package) | ||
1747 | 127 | else: | ||
1748 | 128 | command.append(package) | ||
1749 | 129 | |||
1750 | 130 | log("Uninstalling {} package with options: {}".format(package, | ||
1751 | 131 | command)) | ||
1752 | 132 | pip_execute(command) | ||
1753 | 133 | |||
1754 | 134 | |||
1755 | 135 | def pip_list(): | ||
1756 | 136 | """Returns the list of current python installed packages | ||
1757 | 137 | """ | ||
1758 | 138 | return pip_execute(["list"]) | ||
1759 | 139 | |||
1760 | 140 | |||
1761 | 141 | def pip_create_virtualenv(path=None): | ||
1762 | 142 | """Create an isolated Python environment.""" | ||
1763 | 143 | if six.PY2: | ||
1764 | 144 | apt_install('python-virtualenv') | ||
1765 | 145 | else: | ||
1766 | 146 | apt_install('python3-virtualenv') | ||
1767 | 147 | |||
1768 | 148 | if path: | ||
1769 | 149 | venv_path = path | ||
1770 | 150 | else: | ||
1771 | 151 | venv_path = os.path.join(charm_dir(), 'venv') | ||
1772 | 152 | |||
1773 | 153 | if not os.path.exists(venv_path): | ||
1774 | 154 | subprocess.check_call(['virtualenv', venv_path]) | ||
1775 | 0 | 155 | ||
1776 | === added file 'charmhelpers/fetch/python/rpdb.py' | |||
1777 | --- charmhelpers/fetch/python/rpdb.py 1970-01-01 00:00:00 +0000 | |||
1778 | +++ charmhelpers/fetch/python/rpdb.py 2019-05-24 12:43:31 +0000 | |||
1779 | @@ -0,0 +1,56 @@ | |||
1780 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
1781 | 2 | # | ||
1782 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
1783 | 4 | # you may not use this file except in compliance with the License. | ||
1784 | 5 | # You may obtain a copy of the License at | ||
1785 | 6 | # | ||
1786 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
1787 | 8 | # | ||
1788 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
1789 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
1790 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
1791 | 12 | # See the License for the specific language governing permissions and | ||
1792 | 13 | # limitations under the License. | ||
1793 | 14 | |||
1794 | 15 | """Remote Python Debugger (pdb wrapper).""" | ||
1795 | 16 | |||
1796 | 17 | import pdb | ||
1797 | 18 | import socket | ||
1798 | 19 | import sys | ||
1799 | 20 | |||
1800 | 21 | __author__ = "Bertrand Janin <b@janin.com>" | ||
1801 | 22 | __version__ = "0.1.3" | ||
1802 | 23 | |||
1803 | 24 | |||
class Rpdb(pdb.Pdb):
    """A pdb subclass that serves the debugger session over a TCP socket.

    The constructor blocks until a client connects; the client's socket
    then replaces stdin/stdout for the duration of the session.
    """

    def __init__(self, addr="127.0.0.1", port=4444):
        """Initialize the socket and initialize pdb."""

        # Backup stdin and stdout before replacing them by the socket handle
        self.old_stdout = sys.stdout
        self.old_stdin = sys.stdin

        # Open a 'reusable' socket to let the webapp reload on the same port
        self.skt = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True)
        self.skt.bind((addr, port))
        self.skt.listen(1)
        # NOTE: accept() blocks here until a debugger client connects.
        (clientsocket, address) = self.skt.accept()
        handle = clientsocket.makefile('rw')
        pdb.Pdb.__init__(self, completekey='tab', stdin=handle, stdout=handle)
        # Redirect the process's own stdio into the client connection so
        # prints from the debugged code reach the remote session too.
        sys.stdout = sys.stdin = handle

    def shutdown(self):
        """Revert stdin and stdout, close the socket."""
        sys.stdout = self.old_stdout
        sys.stdin = self.old_stdin
        self.skt.close()
        # Resume normal execution after the debugging session ends.
        self.set_continue()

    def do_continue(self, arg):
        """Stop all operation on ``continue``."""
        self.shutdown()
        # Non-zero return tells pdb's command loop to exit.
        return 1

    # All session-ending commands tear down the remote connection.
    do_EOF = do_quit = do_exit = do_c = do_cont = do_continue
1836 | 0 | 57 | ||
1837 | === added file 'charmhelpers/fetch/python/version.py' | |||
1838 | --- charmhelpers/fetch/python/version.py 1970-01-01 00:00:00 +0000 | |||
1839 | +++ charmhelpers/fetch/python/version.py 2019-05-24 12:43:31 +0000 | |||
1840 | @@ -0,0 +1,32 @@ | |||
1841 | 1 | #!/usr/bin/env python | ||
1842 | 2 | # coding: utf-8 | ||
1843 | 3 | |||
1844 | 4 | # Copyright 2014-2015 Canonical Limited. | ||
1845 | 5 | # | ||
1846 | 6 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
1847 | 7 | # you may not use this file except in compliance with the License. | ||
1848 | 8 | # You may obtain a copy of the License at | ||
1849 | 9 | # | ||
1850 | 10 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
1851 | 11 | # | ||
1852 | 12 | # Unless required by applicable law or agreed to in writing, software | ||
1853 | 13 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
1854 | 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
1855 | 15 | # See the License for the specific language governing permissions and | ||
1856 | 16 | # limitations under the License. | ||
1857 | 17 | |||
1858 | 18 | import sys | ||
1859 | 19 | |||
1860 | 20 | __author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" | ||
1861 | 21 | |||
1862 | 22 | |||
def current_version():
    """Return the running interpreter's version as ``sys.version_info``."""
    return sys.version_info
1866 | 26 | |||
1867 | 27 | |||
def current_version_string():
    """Return the running python version formatted as 'major.minor.micro'."""
    info = sys.version_info
    return "{}.{}.{}".format(info.major, info.minor, info.micro)
1873 | 0 | 33 | ||
1874 | === modified file 'charmhelpers/fetch/snap.py' | |||
1875 | --- charmhelpers/fetch/snap.py 2017-03-03 21:03:14 +0000 | |||
1876 | +++ charmhelpers/fetch/snap.py 2019-05-24 12:43:31 +0000 | |||
1877 | @@ -18,21 +18,33 @@ | |||
1878 | 18 | https://lists.ubuntu.com/archives/snapcraft/2016-September/001114.html | 18 | https://lists.ubuntu.com/archives/snapcraft/2016-September/001114.html |
1879 | 19 | """ | 19 | """ |
1880 | 20 | import subprocess | 20 | import subprocess |
1882 | 21 | from os import environ | 21 | import os |
1883 | 22 | from time import sleep | 22 | from time import sleep |
1884 | 23 | from charmhelpers.core.hookenv import log | 23 | from charmhelpers.core.hookenv import log |
1885 | 24 | 24 | ||
1886 | 25 | __author__ = 'Joseph Borg <joseph.borg@canonical.com>' | 25 | __author__ = 'Joseph Borg <joseph.borg@canonical.com>' |
1887 | 26 | 26 | ||
1889 | 27 | SNAP_NO_LOCK = 1 # The return code for "couldn't acquire lock" in Snap (hopefully this will be improved). | 27 | # The return code for "couldn't acquire lock" in Snap |
1890 | 28 | # (hopefully this will be improved). | ||
1891 | 29 | SNAP_NO_LOCK = 1 | ||
1892 | 28 | SNAP_NO_LOCK_RETRY_DELAY = 10 # Wait X seconds between Snap lock checks. | 30 | SNAP_NO_LOCK_RETRY_DELAY = 10 # Wait X seconds between Snap lock checks. |
1893 | 29 | SNAP_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times. | 31 | SNAP_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times. |
1894 | 32 | SNAP_CHANNELS = [ | ||
1895 | 33 | 'edge', | ||
1896 | 34 | 'beta', | ||
1897 | 35 | 'candidate', | ||
1898 | 36 | 'stable', | ||
1899 | 37 | ] | ||
1900 | 30 | 38 | ||
1901 | 31 | 39 | ||
1902 | 32 | class CouldNotAcquireLockException(Exception): | 40 | class CouldNotAcquireLockException(Exception): |
1903 | 33 | pass | 41 | pass |
1904 | 34 | 42 | ||
1905 | 35 | 43 | ||
1906 | 44 | class InvalidSnapChannel(Exception): | ||
1907 | 45 | pass | ||
1908 | 46 | |||
1909 | 47 | |||
1910 | 36 | def _snap_exec(commands): | 48 | def _snap_exec(commands): |
1911 | 37 | """ | 49 | """ |
1912 | 38 | Execute snap commands. | 50 | Execute snap commands. |
1913 | @@ -47,13 +59,17 @@ | |||
1914 | 47 | 59 | ||
1915 | 48 | while return_code is None or return_code == SNAP_NO_LOCK: | 60 | while return_code is None or return_code == SNAP_NO_LOCK: |
1916 | 49 | try: | 61 | try: |
1918 | 50 | return_code = subprocess.check_call(['snap'] + commands, env=environ) | 62 | return_code = subprocess.check_call(['snap'] + commands, |
1919 | 63 | env=os.environ) | ||
1920 | 51 | except subprocess.CalledProcessError as e: | 64 | except subprocess.CalledProcessError as e: |
1921 | 52 | retry_count += + 1 | 65 | retry_count += + 1 |
1922 | 53 | if retry_count > SNAP_NO_LOCK_RETRY_COUNT: | 66 | if retry_count > SNAP_NO_LOCK_RETRY_COUNT: |
1924 | 54 | raise CouldNotAcquireLockException('Could not aquire lock after %s attempts' % SNAP_NO_LOCK_RETRY_COUNT) | 67 | raise CouldNotAcquireLockException( |
1925 | 68 | 'Could not aquire lock after {} attempts' | ||
1926 | 69 | .format(SNAP_NO_LOCK_RETRY_COUNT)) | ||
1927 | 55 | return_code = e.returncode | 70 | return_code = e.returncode |
1929 | 56 | log('Snap failed to acquire lock, trying again in %s seconds.' % SNAP_NO_LOCK_RETRY_DELAY, level='WARN') | 71 | log('Snap failed to acquire lock, trying again in {} seconds.' |
1930 | 72 | .format(SNAP_NO_LOCK_RETRY_DELAY, level='WARN')) | ||
1931 | 57 | sleep(SNAP_NO_LOCK_RETRY_DELAY) | 73 | sleep(SNAP_NO_LOCK_RETRY_DELAY) |
1932 | 58 | 74 | ||
1933 | 59 | return return_code | 75 | return return_code |
1934 | @@ -120,3 +136,15 @@ | |||
1935 | 120 | 136 | ||
1936 | 121 | log(message, level='INFO') | 137 | log(message, level='INFO') |
1937 | 122 | return _snap_exec(['refresh'] + flags + packages) | 138 | return _snap_exec(['refresh'] + flags + packages) |
1938 | 139 | |||
1939 | 140 | |||
def valid_snap_channel(channel):
    """Check that ``channel`` names a known snap channel.

    Comparison is case-insensitive against ``SNAP_CHANNELS``.

    :raises InvalidSnapChannel: When channel does not exist
    :return: True when the channel is recognised
    """
    if channel.lower() not in SNAP_CHANNELS:
        raise InvalidSnapChannel("Invalid Snap Channel: {}".format(channel))
    return True
1950 | 123 | 151 | ||
1951 | === modified file 'charmhelpers/fetch/ubuntu.py' | |||
1952 | --- charmhelpers/fetch/ubuntu.py 2017-03-03 21:03:14 +0000 | |||
1953 | +++ charmhelpers/fetch/ubuntu.py 2019-05-24 12:43:31 +0000 | |||
1954 | @@ -12,29 +12,48 @@ | |||
1955 | 12 | # See the License for the specific language governing permissions and | 12 | # See the License for the specific language governing permissions and |
1956 | 13 | # limitations under the License. | 13 | # limitations under the License. |
1957 | 14 | 14 | ||
1958 | 15 | from collections import OrderedDict | ||
1959 | 15 | import os | 16 | import os |
1960 | 17 | import platform | ||
1961 | 18 | import re | ||
1962 | 16 | import six | 19 | import six |
1963 | 17 | import time | 20 | import time |
1964 | 18 | import subprocess | 21 | import subprocess |
1965 | 19 | 22 | ||
1969 | 20 | from tempfile import NamedTemporaryFile | 23 | from charmhelpers.core.host import get_distrib_codename |
1970 | 21 | from charmhelpers.core.host import ( | 24 | |
1971 | 22 | lsb_release | 25 | from charmhelpers.core.hookenv import ( |
1972 | 26 | log, | ||
1973 | 27 | DEBUG, | ||
1974 | 28 | WARNING, | ||
1975 | 29 | env_proxy_settings, | ||
1976 | 23 | ) | 30 | ) |
1979 | 24 | from charmhelpers.core.hookenv import log | 31 | from charmhelpers.fetch import SourceConfigError, GPGKeyError |
1978 | 25 | from charmhelpers.fetch import SourceConfigError | ||
1980 | 26 | 32 | ||
1981 | 33 | PROPOSED_POCKET = ( | ||
1982 | 34 | "# Proposed\n" | ||
1983 | 35 | "deb http://archive.ubuntu.com/ubuntu {}-proposed main universe " | ||
1984 | 36 | "multiverse restricted\n") | ||
1985 | 37 | PROPOSED_PORTS_POCKET = ( | ||
1986 | 38 | "# Proposed\n" | ||
1987 | 39 | "deb http://ports.ubuntu.com/ubuntu-ports {}-proposed main universe " | ||
1988 | 40 | "multiverse restricted\n") | ||
1989 | 41 | # Only supports 64bit and ppc64 at the moment. | ||
1990 | 42 | ARCH_TO_PROPOSED_POCKET = { | ||
1991 | 43 | 'x86_64': PROPOSED_POCKET, | ||
1992 | 44 | 'ppc64le': PROPOSED_PORTS_POCKET, | ||
1993 | 45 | 'aarch64': PROPOSED_PORTS_POCKET, | ||
1994 | 46 | 's390x': PROPOSED_PORTS_POCKET, | ||
1995 | 47 | } | ||
1996 | 48 | CLOUD_ARCHIVE_URL = "http://ubuntu-cloud.archive.canonical.com/ubuntu" | ||
1997 | 49 | CLOUD_ARCHIVE_KEY_ID = '5EDB1B62EC4926EA' | ||
1998 | 27 | CLOUD_ARCHIVE = """# Ubuntu Cloud Archive | 50 | CLOUD_ARCHIVE = """# Ubuntu Cloud Archive |
1999 | 28 | deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main | 51 | deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main |
2000 | 29 | """ | 52 | """ |
2001 | 30 | |||
2002 | 31 | PROPOSED_POCKET = """# Proposed | ||
2003 | 32 | deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted | ||
2004 | 33 | """ | ||
2005 | 34 | |||
2006 | 35 | CLOUD_ARCHIVE_POCKETS = { | 53 | CLOUD_ARCHIVE_POCKETS = { |
2007 | 36 | # Folsom | 54 | # Folsom |
2008 | 37 | 'folsom': 'precise-updates/folsom', | 55 | 'folsom': 'precise-updates/folsom', |
2009 | 56 | 'folsom/updates': 'precise-updates/folsom', | ||
2010 | 38 | 'precise-folsom': 'precise-updates/folsom', | 57 | 'precise-folsom': 'precise-updates/folsom', |
2011 | 39 | 'precise-folsom/updates': 'precise-updates/folsom', | 58 | 'precise-folsom/updates': 'precise-updates/folsom', |
2012 | 40 | 'precise-updates/folsom': 'precise-updates/folsom', | 59 | 'precise-updates/folsom': 'precise-updates/folsom', |
2013 | @@ -43,6 +62,7 @@ | |||
2014 | 43 | 'precise-proposed/folsom': 'precise-proposed/folsom', | 62 | 'precise-proposed/folsom': 'precise-proposed/folsom', |
2015 | 44 | # Grizzly | 63 | # Grizzly |
2016 | 45 | 'grizzly': 'precise-updates/grizzly', | 64 | 'grizzly': 'precise-updates/grizzly', |
2017 | 65 | 'grizzly/updates': 'precise-updates/grizzly', | ||
2018 | 46 | 'precise-grizzly': 'precise-updates/grizzly', | 66 | 'precise-grizzly': 'precise-updates/grizzly', |
2019 | 47 | 'precise-grizzly/updates': 'precise-updates/grizzly', | 67 | 'precise-grizzly/updates': 'precise-updates/grizzly', |
2020 | 48 | 'precise-updates/grizzly': 'precise-updates/grizzly', | 68 | 'precise-updates/grizzly': 'precise-updates/grizzly', |
2021 | @@ -51,6 +71,7 @@ | |||
2022 | 51 | 'precise-proposed/grizzly': 'precise-proposed/grizzly', | 71 | 'precise-proposed/grizzly': 'precise-proposed/grizzly', |
2023 | 52 | # Havana | 72 | # Havana |
2024 | 53 | 'havana': 'precise-updates/havana', | 73 | 'havana': 'precise-updates/havana', |
2025 | 74 | 'havana/updates': 'precise-updates/havana', | ||
2026 | 54 | 'precise-havana': 'precise-updates/havana', | 75 | 'precise-havana': 'precise-updates/havana', |
2027 | 55 | 'precise-havana/updates': 'precise-updates/havana', | 76 | 'precise-havana/updates': 'precise-updates/havana', |
2028 | 56 | 'precise-updates/havana': 'precise-updates/havana', | 77 | 'precise-updates/havana': 'precise-updates/havana', |
2029 | @@ -59,6 +80,7 @@ | |||
2030 | 59 | 'precise-proposed/havana': 'precise-proposed/havana', | 80 | 'precise-proposed/havana': 'precise-proposed/havana', |
2031 | 60 | # Icehouse | 81 | # Icehouse |
2032 | 61 | 'icehouse': 'precise-updates/icehouse', | 82 | 'icehouse': 'precise-updates/icehouse', |
2033 | 83 | 'icehouse/updates': 'precise-updates/icehouse', | ||
2034 | 62 | 'precise-icehouse': 'precise-updates/icehouse', | 84 | 'precise-icehouse': 'precise-updates/icehouse', |
2035 | 63 | 'precise-icehouse/updates': 'precise-updates/icehouse', | 85 | 'precise-icehouse/updates': 'precise-updates/icehouse', |
2036 | 64 | 'precise-updates/icehouse': 'precise-updates/icehouse', | 86 | 'precise-updates/icehouse': 'precise-updates/icehouse', |
2037 | @@ -67,6 +89,7 @@ | |||
2038 | 67 | 'precise-proposed/icehouse': 'precise-proposed/icehouse', | 89 | 'precise-proposed/icehouse': 'precise-proposed/icehouse', |
2039 | 68 | # Juno | 90 | # Juno |
2040 | 69 | 'juno': 'trusty-updates/juno', | 91 | 'juno': 'trusty-updates/juno', |
2041 | 92 | 'juno/updates': 'trusty-updates/juno', | ||
2042 | 70 | 'trusty-juno': 'trusty-updates/juno', | 93 | 'trusty-juno': 'trusty-updates/juno', |
2043 | 71 | 'trusty-juno/updates': 'trusty-updates/juno', | 94 | 'trusty-juno/updates': 'trusty-updates/juno', |
2044 | 72 | 'trusty-updates/juno': 'trusty-updates/juno', | 95 | 'trusty-updates/juno': 'trusty-updates/juno', |
2045 | @@ -75,6 +98,7 @@ | |||
2046 | 75 | 'trusty-proposed/juno': 'trusty-proposed/juno', | 98 | 'trusty-proposed/juno': 'trusty-proposed/juno', |
2047 | 76 | # Kilo | 99 | # Kilo |
2048 | 77 | 'kilo': 'trusty-updates/kilo', | 100 | 'kilo': 'trusty-updates/kilo', |
2049 | 101 | 'kilo/updates': 'trusty-updates/kilo', | ||
2050 | 78 | 'trusty-kilo': 'trusty-updates/kilo', | 102 | 'trusty-kilo': 'trusty-updates/kilo', |
2051 | 79 | 'trusty-kilo/updates': 'trusty-updates/kilo', | 103 | 'trusty-kilo/updates': 'trusty-updates/kilo', |
2052 | 80 | 'trusty-updates/kilo': 'trusty-updates/kilo', | 104 | 'trusty-updates/kilo': 'trusty-updates/kilo', |
2053 | @@ -83,6 +107,7 @@ | |||
2054 | 83 | 'trusty-proposed/kilo': 'trusty-proposed/kilo', | 107 | 'trusty-proposed/kilo': 'trusty-proposed/kilo', |
2055 | 84 | # Liberty | 108 | # Liberty |
2056 | 85 | 'liberty': 'trusty-updates/liberty', | 109 | 'liberty': 'trusty-updates/liberty', |
2057 | 110 | 'liberty/updates': 'trusty-updates/liberty', | ||
2058 | 86 | 'trusty-liberty': 'trusty-updates/liberty', | 111 | 'trusty-liberty': 'trusty-updates/liberty', |
2059 | 87 | 'trusty-liberty/updates': 'trusty-updates/liberty', | 112 | 'trusty-liberty/updates': 'trusty-updates/liberty', |
2060 | 88 | 'trusty-updates/liberty': 'trusty-updates/liberty', | 113 | 'trusty-updates/liberty': 'trusty-updates/liberty', |
2061 | @@ -91,6 +116,7 @@ | |||
2062 | 91 | 'trusty-proposed/liberty': 'trusty-proposed/liberty', | 116 | 'trusty-proposed/liberty': 'trusty-proposed/liberty', |
2063 | 92 | # Mitaka | 117 | # Mitaka |
2064 | 93 | 'mitaka': 'trusty-updates/mitaka', | 118 | 'mitaka': 'trusty-updates/mitaka', |
2065 | 119 | 'mitaka/updates': 'trusty-updates/mitaka', | ||
2066 | 94 | 'trusty-mitaka': 'trusty-updates/mitaka', | 120 | 'trusty-mitaka': 'trusty-updates/mitaka', |
2067 | 95 | 'trusty-mitaka/updates': 'trusty-updates/mitaka', | 121 | 'trusty-mitaka/updates': 'trusty-updates/mitaka', |
2068 | 96 | 'trusty-updates/mitaka': 'trusty-updates/mitaka', | 122 | 'trusty-updates/mitaka': 'trusty-updates/mitaka', |
2069 | @@ -99,6 +125,7 @@ | |||
2070 | 99 | 'trusty-proposed/mitaka': 'trusty-proposed/mitaka', | 125 | 'trusty-proposed/mitaka': 'trusty-proposed/mitaka', |
2071 | 100 | # Newton | 126 | # Newton |
2072 | 101 | 'newton': 'xenial-updates/newton', | 127 | 'newton': 'xenial-updates/newton', |
2073 | 128 | 'newton/updates': 'xenial-updates/newton', | ||
2074 | 102 | 'xenial-newton': 'xenial-updates/newton', | 129 | 'xenial-newton': 'xenial-updates/newton', |
2075 | 103 | 'xenial-newton/updates': 'xenial-updates/newton', | 130 | 'xenial-newton/updates': 'xenial-updates/newton', |
2076 | 104 | 'xenial-updates/newton': 'xenial-updates/newton', | 131 | 'xenial-updates/newton': 'xenial-updates/newton', |
2077 | @@ -107,17 +134,51 @@ | |||
2078 | 107 | 'xenial-proposed/newton': 'xenial-proposed/newton', | 134 | 'xenial-proposed/newton': 'xenial-proposed/newton', |
2079 | 108 | # Ocata | 135 | # Ocata |
2080 | 109 | 'ocata': 'xenial-updates/ocata', | 136 | 'ocata': 'xenial-updates/ocata', |
2081 | 137 | 'ocata/updates': 'xenial-updates/ocata', | ||
2082 | 110 | 'xenial-ocata': 'xenial-updates/ocata', | 138 | 'xenial-ocata': 'xenial-updates/ocata', |
2083 | 111 | 'xenial-ocata/updates': 'xenial-updates/ocata', | 139 | 'xenial-ocata/updates': 'xenial-updates/ocata', |
2084 | 112 | 'xenial-updates/ocata': 'xenial-updates/ocata', | 140 | 'xenial-updates/ocata': 'xenial-updates/ocata', |
2085 | 113 | 'ocata/proposed': 'xenial-proposed/ocata', | 141 | 'ocata/proposed': 'xenial-proposed/ocata', |
2086 | 114 | 'xenial-ocata/proposed': 'xenial-proposed/ocata', | 142 | 'xenial-ocata/proposed': 'xenial-proposed/ocata', |
2088 | 115 | 'xenial-ocata/newton': 'xenial-proposed/ocata', | 143 | 'xenial-proposed/ocata': 'xenial-proposed/ocata', |
2089 | 144 | # Pike | ||
2090 | 145 | 'pike': 'xenial-updates/pike', | ||
2091 | 146 | 'xenial-pike': 'xenial-updates/pike', | ||
2092 | 147 | 'xenial-pike/updates': 'xenial-updates/pike', | ||
2093 | 148 | 'xenial-updates/pike': 'xenial-updates/pike', | ||
2094 | 149 | 'pike/proposed': 'xenial-proposed/pike', | ||
2095 | 150 | 'xenial-pike/proposed': 'xenial-proposed/pike', | ||
2096 | 151 | 'xenial-proposed/pike': 'xenial-proposed/pike', | ||
2097 | 152 | # Queens | ||
2098 | 153 | 'queens': 'xenial-updates/queens', | ||
2099 | 154 | 'xenial-queens': 'xenial-updates/queens', | ||
2100 | 155 | 'xenial-queens/updates': 'xenial-updates/queens', | ||
2101 | 156 | 'xenial-updates/queens': 'xenial-updates/queens', | ||
2102 | 157 | 'queens/proposed': 'xenial-proposed/queens', | ||
2103 | 158 | 'xenial-queens/proposed': 'xenial-proposed/queens', | ||
2104 | 159 | 'xenial-proposed/queens': 'xenial-proposed/queens', | ||
2105 | 160 | # Rocky | ||
2106 | 161 | 'rocky': 'bionic-updates/rocky', | ||
2107 | 162 | 'bionic-rocky': 'bionic-updates/rocky', | ||
2108 | 163 | 'bionic-rocky/updates': 'bionic-updates/rocky', | ||
2109 | 164 | 'bionic-updates/rocky': 'bionic-updates/rocky', | ||
2110 | 165 | 'rocky/proposed': 'bionic-proposed/rocky', | ||
2111 | 166 | 'bionic-rocky/proposed': 'bionic-proposed/rocky', | ||
2112 | 167 | 'bionic-proposed/rocky': 'bionic-proposed/rocky', | ||
2113 | 168 | # Stein | ||
2114 | 169 | 'stein': 'bionic-updates/stein', | ||
2115 | 170 | 'bionic-stein': 'bionic-updates/stein', | ||
2116 | 171 | 'bionic-stein/updates': 'bionic-updates/stein', | ||
2117 | 172 | 'bionic-updates/stein': 'bionic-updates/stein', | ||
2118 | 173 | 'stein/proposed': 'bionic-proposed/stein', | ||
2119 | 174 | 'bionic-stein/proposed': 'bionic-proposed/stein', | ||
2120 | 175 | 'bionic-proposed/stein': 'bionic-proposed/stein', | ||
2121 | 116 | } | 176 | } |
2122 | 117 | 177 | ||
2123 | 178 | |||
2124 | 118 | APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT. | 179 | APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT. |
2125 | 119 | CMD_RETRY_DELAY = 10 # Wait 10 seconds between command retries. | 180 | CMD_RETRY_DELAY = 10 # Wait 10 seconds between command retries. |
2127 | 120 | CMD_RETRY_COUNT = 30 # Retry a failing fatal command X times. | 181 | CMD_RETRY_COUNT = 3 # Retry a failing fatal command X times. |
2128 | 121 | 182 | ||
2129 | 122 | 183 | ||
2130 | 123 | def filter_installed_packages(packages): | 184 | def filter_installed_packages(packages): |
2131 | @@ -135,6 +196,18 @@ | |||
2132 | 135 | return _pkgs | 196 | return _pkgs |
2133 | 136 | 197 | ||
2134 | 137 | 198 | ||
def filter_missing_packages(packages):
    """Return a list of packages that are installed.

    Complement of :func:`filter_installed_packages`, which returns the
    packages still needing installation.

    :param packages: list of packages to evaluate.
    :returns list: Packages that are installed.
    """
    not_installed = set(filter_installed_packages(packages))
    return list(set(packages) - not_installed)
2145 | 209 | |||
2146 | 210 | |||
2147 | 138 | def apt_cache(in_memory=True, progress=None): | 211 | def apt_cache(in_memory=True, progress=None): |
2148 | 139 | """Build and return an apt cache.""" | 212 | """Build and return an apt cache.""" |
2149 | 140 | from apt import apt_pkg | 213 | from apt import apt_pkg |
2150 | @@ -145,7 +218,7 @@ | |||
2151 | 145 | return apt_pkg.Cache(progress) | 218 | return apt_pkg.Cache(progress) |
2152 | 146 | 219 | ||
2153 | 147 | 220 | ||
2155 | 148 | def install(packages, options=None, fatal=False): | 221 | def apt_install(packages, options=None, fatal=False): |
2156 | 149 | """Install one or more packages.""" | 222 | """Install one or more packages.""" |
2157 | 150 | if options is None: | 223 | if options is None: |
2158 | 151 | options = ['--option=Dpkg::Options::=--force-confold'] | 224 | options = ['--option=Dpkg::Options::=--force-confold'] |
2159 | @@ -162,7 +235,7 @@ | |||
2160 | 162 | _run_apt_command(cmd, fatal) | 235 | _run_apt_command(cmd, fatal) |
2161 | 163 | 236 | ||
2162 | 164 | 237 | ||
2164 | 165 | def upgrade(options=None, fatal=False, dist=False): | 238 | def apt_upgrade(options=None, fatal=False, dist=False): |
2165 | 166 | """Upgrade all packages.""" | 239 | """Upgrade all packages.""" |
2166 | 167 | if options is None: | 240 | if options is None: |
2167 | 168 | options = ['--option=Dpkg::Options::=--force-confold'] | 241 | options = ['--option=Dpkg::Options::=--force-confold'] |
2168 | @@ -177,13 +250,13 @@ | |||
2169 | 177 | _run_apt_command(cmd, fatal) | 250 | _run_apt_command(cmd, fatal) |
2170 | 178 | 251 | ||
2171 | 179 | 252 | ||
def apt_update(fatal=False):
    """Refresh the local apt package cache (``apt-get update``).

    :param fatal: when True, failures raise instead of being logged.
    """
    _run_apt_command(['apt-get', 'update'], fatal)
2177 | 184 | 257 | ||
2178 | 185 | 258 | ||
2180 | 186 | def purge(packages, fatal=False): | 259 | def apt_purge(packages, fatal=False): |
2181 | 187 | """Purge one or more packages.""" | 260 | """Purge one or more packages.""" |
2182 | 188 | cmd = ['apt-get', '--assume-yes', 'purge'] | 261 | cmd = ['apt-get', '--assume-yes', 'purge'] |
2183 | 189 | if isinstance(packages, six.string_types): | 262 | if isinstance(packages, six.string_types): |
2184 | @@ -194,6 +267,14 @@ | |||
2185 | 194 | _run_apt_command(cmd, fatal) | 267 | _run_apt_command(cmd, fatal) |
2186 | 195 | 268 | ||
2187 | 196 | 269 | ||
def apt_autoremove(purge=True, fatal=False):
    """Remove packages that were automatically installed and are no
    longer required (``apt-get autoremove``).

    :param purge: also remove the configuration files of the
                  autoremoved packages (``--purge``).
    :param fatal: when True, failures raise instead of being logged.
    """
    cmd = ['apt-get', '--assume-yes', 'autoremove']
    if purge:
        cmd.append('--purge')
    _run_apt_command(cmd, fatal)
2194 | 276 | |||
2195 | 277 | |||
2196 | 197 | def apt_mark(packages, mark, fatal=False): | 278 | def apt_mark(packages, mark, fatal=False): |
2197 | 198 | """Flag one or more packages using apt-mark.""" | 279 | """Flag one or more packages using apt-mark.""" |
2198 | 199 | log("Marking {} as {}".format(packages, mark)) | 280 | log("Marking {} as {}".format(packages, mark)) |
2199 | @@ -217,7 +298,159 @@ | |||
2200 | 217 | return apt_mark(packages, 'unhold', fatal=fatal) | 298 | return apt_mark(packages, 'unhold', fatal=fatal) |
2201 | 218 | 299 | ||
2202 | 219 | 300 | ||
def import_key(key):
    """Import an ASCII Armor key.

    A Radix64 format keyid is also supported for backwards
    compatibility. In this case Ubuntu keyserver will be
    queried for a key via HTTPS by its keyid. This method
    is less preferrable because https proxy servers may
    require traffic decryption which is equivalent to a
    man-in-the-middle attack (a proxy server impersonates
    keyserver TLS certificates and has to be explicitly
    trusted by the system).

    :param key: A GPG key in ASCII armor format,
                including BEGIN and END markers or a keyid.
    :type key: (bytes, str)
    :raises: GPGKeyError if the key could not be imported
    """
    key = key.strip()
    # Heuristic: a bare keyid is hex digits only, so a dash or newline
    # indicates full (possibly commented) armored key material.
    if '-' in key or '\n' in key:
        # Send everything not obviously a keyid to GPG to import, as
        # we trust its validation better than our own. eg. handling
        # comments before the key.
        log("PGP key found (looks like ASCII Armor format)", level=DEBUG)
        if ('-----BEGIN PGP PUBLIC KEY BLOCK-----' in key and
                '-----END PGP PUBLIC KEY BLOCK-----' in key):
            log("Writing provided PGP key in the binary format", level=DEBUG)
            if six.PY3:
                key_bytes = key.encode('utf-8')
            else:
                key_bytes = key
            # The key's fingerprint doubles as the trusted.gpg.d file name.
            key_name = _get_keyid_by_gpg_key(key_bytes)
            key_gpg = _dearmor_gpg_key(key_bytes)
            _write_apt_gpg_keyfile(key_name=key_name, key_material=key_gpg)
        else:
            raise GPGKeyError("ASCII armor markers missing from GPG key")
    else:
        log("PGP key found (looks like Radix64 format)", level=WARNING)
        log("SECURELY importing PGP key from keyserver; "
            "full key not provided.", level=WARNING)
        # as of bionic add-apt-repository uses curl with an HTTPS keyserver URL
        # to retrieve GPG keys. `apt-key adv` command is deprecated as is
        # apt-key in general as noted in its manpage. See lp:1433761 for more
        # history. Instead, /etc/apt/trusted.gpg.d is used directly to drop
        # gpg
        key_asc = _get_key_by_keyid(key)
        # write the key in GPG format so that apt-key list shows it
        key_gpg = _dearmor_gpg_key(key_asc)
        _write_apt_gpg_keyfile(key_name=key, key_material=key_gpg)
2252 | 349 | |||
2253 | 350 | |||
def _get_keyid_by_gpg_key(key_material):
    """Derive a GPG key fingerprint from key material.

    Gets a GPG key fingerprint (40-digit, 160-bit) by the ASCII armor-encoded
    or binary GPG key material. Can be used, for example, to generate file
    names for keys passed via charm options.

    :param key_material: ASCII armor-encoded or binary GPG key material
    :type key_material: bytes
    :raises: GPGKeyError if invalid key material has been provided
    :returns: A GPG key fingerprint
    :rtype: str
    """
    # Use the same gpg command for both Xenial and Bionic
    proc = subprocess.Popen(['gpg', '--with-colons', '--with-fingerprint'],
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    out, err = proc.communicate(input=key_material)
    if six.PY3:
        out = out.decode('utf-8')
        err = err.decode('utf-8')
    if 'gpg: no valid OpenPGP data found.' in err:
        raise GPGKeyError('Invalid GPG key material provided')
    # from gnupg2 docs: fpr :: Fingerprint (fingerprint is in field 10)
    return re.search(r"^fpr:{9}([0-9A-F]{40}):$", out, re.MULTILINE).group(1)
2280 | 377 | |||
2281 | 378 | |||
def _get_key_by_keyid(keyid):
    """Get a key via HTTPS from the Ubuntu keyserver.

    Different key ID formats are supported by SKS keyservers (the longer ones
    are more secure, see "dead beef attack" and https://evil32.com/). Since
    HTTPS is used, if SSLBump-like HTTPS proxies are in place, they will
    impersonate keyserver.ubuntu.com and generate a certificate with
    keyserver.ubuntu.com in the CN field or in SubjAltName fields of a
    certificate. If such proxy behavior is expected it is necessary to add the
    CA certificate chain containing the intermediate CA of the SSLBump proxy to
    every machine that this code runs on via ca-certs cloud-init directive (via
    cloudinit-userdata model-config) or via other means (such as through a
    custom charm option). Also note that DNS resolution for the hostname in a
    URL is done at a proxy server - not at the client side.

    8, 16 and 40 hex digit keyids are all accepted by the lookup endpoint.

    :param keyid: An 8, 16 or 40 hex digit keyid to find a key for
    :type keyid: (bytes, str)
    :returns: A key material for the specified GPG key id
    :rtype: (str, bytes)
    :raises: subprocess.CalledProcessError
    """
    # options=mr - machine-readable output (disables html wrappers)
    lookup = ('https://keyserver.ubuntu.com'
              '/pks/lookup?op=get&options=mr&exact=on&search=0x{}')
    # use proxy server settings in order to retrieve the key
    return subprocess.check_output(['curl', lookup.format(keyid)],
                                   env=env_proxy_settings(['https']))
2316 | 413 | |||
2317 | 414 | |||
def _dearmor_gpg_key(key_asc):
    """Convert an ASCII-armored GPG key into the binary format.

    :param key_asc: A GPG key in ASCII armor format.
    :type key_asc: (str, bytes)
    :returns: A GPG key in binary format
    :rtype: (str, bytes)
    :raises: GPGKeyError
    """
    proc = subprocess.Popen(['gpg', '--dearmor'],
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    out, err = proc.communicate(input=key_asc)
    # stdout holds binary key data (not valid utf-8); only decode stderr
    if six.PY3:
        err = err.decode('utf-8')
    if 'gpg: no valid OpenPGP data found.' in err:
        raise GPGKeyError('Invalid GPG key material. Check your network setup'
                          ' (MTU, routing, DNS) and/or proxy server settings'
                          ' as well as destination keyserver status.')
    return out
2341 | 438 | |||
2342 | 439 | |||
2343 | 440 | def _write_apt_gpg_keyfile(key_name, key_material): | ||
2344 | 441 | """Writes GPG key material into a file at a provided path. | ||
2345 | 442 | |||
2346 | 443 | :param key_name: A key name to use for a key file (could be a fingerprint) | ||
2347 | 444 | :type key_name: str | ||
2348 | 445 | :param key_material: A GPG key material (binary) | ||
2349 | 446 | :type key_material: (str, bytes) | ||
2350 | 447 | """ | ||
2351 | 448 | with open('/etc/apt/trusted.gpg.d/{}.gpg'.format(key_name), | ||
2352 | 449 | 'wb') as keyf: | ||
2353 | 450 | keyf.write(key_material) | ||
2354 | 451 | |||
2355 | 452 | |||
2356 | 453 | def add_source(source, key=None, fail_invalid=False): | ||
2357 | 221 | """Add a package source to this system. | 454 | """Add a package source to this system. |
2358 | 222 | 455 | ||
2359 | 223 | @param source: a URL or sources.list entry, as supported by | 456 | @param source: a URL or sources.list entry, as supported by |
2360 | @@ -233,6 +466,33 @@ | |||
2361 | 233 | such as 'cloud:icehouse' | 466 | such as 'cloud:icehouse' |
2362 | 234 | 'distro' may be used as a noop | 467 | 'distro' may be used as a noop |
2363 | 235 | 468 | ||
2364 | 469 | Full list of source specifications supported by the function are: | ||
2365 | 470 | |||
2366 | 471 | 'distro': A NOP; i.e. it has no effect. | ||
2367 | 472 | 'proposed': the proposed deb spec [2] is wrtten to | ||
2368 | 473 | /etc/apt/sources.list/proposed | ||
2369 | 474 | 'distro-proposed': adds <version>-proposed to the debs [2] | ||
2370 | 475 | 'ppa:<ppa-name>': add-apt-repository --yes <ppa_name> | ||
2371 | 476 | 'deb <deb-spec>': add-apt-repository --yes deb <deb-spec> | ||
2372 | 477 | 'http://....': add-apt-repository --yes http://... | ||
2373 | 478 | 'cloud-archive:<spec>': add-apt-repository -yes cloud-archive:<spec> | ||
2374 | 479 | 'cloud:<release>[-staging]': specify a Cloud Archive pocket <release> with | ||
2375 | 480 | optional staging version. If staging is used then the staging PPA [2] | ||
2376 | 481 | with be used. If staging is NOT used then the cloud archive [3] will be | ||
2377 | 482 | added, and the 'ubuntu-cloud-keyring' package will be added for the | ||
2378 | 483 | current distro. | ||
2379 | 484 | |||
2380 | 485 | Otherwise the source is not recognised and this is logged to the juju log. | ||
2381 | 486 | However, no error is raised, unless sys_error_on_exit is True. | ||
2382 | 487 | |||
2383 | 488 | [1] deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main | ||
2384 | 489 | where {} is replaced with the derived pocket name. | ||
2385 | 490 | [2] deb http://archive.ubuntu.com/ubuntu {}-proposed \ | ||
2386 | 491 | main universe multiverse restricted | ||
2387 | 492 | where {} is replaced with the lsb_release codename (e.g. xenial) | ||
2388 | 493 | [3] deb http://ubuntu-cloud.archive.canonical.com/ubuntu <pocket> | ||
2389 | 494 | to /etc/apt/sources.list.d/cloud-archive-list | ||
2390 | 495 | |||
2391 | 236 | @param key: A key to be added to the system's APT keyring and used | 496 | @param key: A key to be added to the system's APT keyring and used |
2392 | 237 | to verify the signatures on packages. Ideally, this should be an | 497 | to verify the signatures on packages. Ideally, this should be an |
2393 | 238 | ASCII format GPG public key including the block headers. A GPG key | 498 | ASCII format GPG public key including the block headers. A GPG key |
2394 | @@ -240,51 +500,152 @@ | |||
2395 | 240 | available to retrieve the actual public key from a public keyserver | 500 | available to retrieve the actual public key from a public keyserver |
2396 | 241 | placing your Juju environment at risk. ppa and cloud archive keys | 501 | placing your Juju environment at risk. ppa and cloud archive keys |
2397 | 242 | are securely added automtically, so sould not be provided. | 502 | are securely added automtically, so sould not be provided. |
2398 | 503 | |||
2399 | 504 | @param fail_invalid: (boolean) if True, then the function raises a | ||
2400 | 505 | SourceConfigError is there is no matching installation source. | ||
2401 | 506 | |||
2402 | 507 | @raises SourceConfigError() if for cloud:<pocket>, the <pocket> is not a | ||
2403 | 508 | valid pocket in CLOUD_ARCHIVE_POCKETS | ||
2404 | 243 | """ | 509 | """ |
2405 | 510 | _mapping = OrderedDict([ | ||
2406 | 511 | (r"^distro$", lambda: None), # This is a NOP | ||
2407 | 512 | (r"^(?:proposed|distro-proposed)$", _add_proposed), | ||
2408 | 513 | (r"^cloud-archive:(.*)$", _add_apt_repository), | ||
2409 | 514 | (r"^((?:deb |http:|https:|ppa:).*)$", _add_apt_repository), | ||
2410 | 515 | (r"^cloud:(.*)-(.*)\/staging$", _add_cloud_staging), | ||
2411 | 516 | (r"^cloud:(.*)-(.*)$", _add_cloud_distro_check), | ||
2412 | 517 | (r"^cloud:(.*)$", _add_cloud_pocket), | ||
2413 | 518 | (r"^snap:.*-(.*)-(.*)$", _add_cloud_distro_check), | ||
2414 | 519 | ]) | ||
2415 | 244 | if source is None: | 520 | if source is None: |
2427 | 245 | log('Source is not present. Skipping') | 521 | source = '' |
2428 | 246 | return | 522 | for r, fn in six.iteritems(_mapping): |
2429 | 247 | 523 | m = re.match(r, source) | |
2430 | 248 | if (source.startswith('ppa:') or | 524 | if m: |
2431 | 249 | source.startswith('http') or | 525 | if key: |
2432 | 250 | source.startswith('deb ') or | 526 | # Import key before adding the source which depends on it, |
2433 | 251 | source.startswith('cloud-archive:')): | 527 | # as refreshing packages could fail otherwise. |
2434 | 252 | cmd = ['add-apt-repository', '--yes', source] | 528 | try: |
2435 | 253 | _run_with_retries(cmd) | 529 | import_key(key) |
2436 | 254 | elif source.startswith('cloud:'): | 530 | except GPGKeyError as e: |
2437 | 255 | install(filter_installed_packages(['ubuntu-cloud-keyring']), | 531 | raise SourceConfigError(str(e)) |
2438 | 532 | # call the associated function with the captured groups | ||
2439 | 533 | # raises SourceConfigError on error. | ||
2440 | 534 | fn(*m.groups()) | ||
2441 | 535 | break | ||
2442 | 536 | else: | ||
2443 | 537 | # nothing matched. log an error and maybe sys.exit | ||
2444 | 538 | err = "Unknown source: {!r}".format(source) | ||
2445 | 539 | log(err) | ||
2446 | 540 | if fail_invalid: | ||
2447 | 541 | raise SourceConfigError(err) | ||
2448 | 542 | |||
2449 | 543 | |||
2450 | 544 | def _add_proposed(): | ||
2451 | 545 | """Add the PROPOSED_POCKET as /etc/apt/source.list.d/proposed.list | ||
2452 | 546 | |||
2453 | 547 | Uses get_distrib_codename to determine the correct stanza for | ||
2454 | 548 | the deb line. | ||
2455 | 549 | |||
2456 | 550 | For intel architecutres PROPOSED_POCKET is used for the release, but for | ||
2457 | 551 | other architectures PROPOSED_PORTS_POCKET is used for the release. | ||
2458 | 552 | """ | ||
2459 | 553 | release = get_distrib_codename() | ||
2460 | 554 | arch = platform.machine() | ||
2461 | 555 | if arch not in six.iterkeys(ARCH_TO_PROPOSED_POCKET): | ||
2462 | 556 | raise SourceConfigError("Arch {} not supported for (distro-)proposed" | ||
2463 | 557 | .format(arch)) | ||
2464 | 558 | with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt: | ||
2465 | 559 | apt.write(ARCH_TO_PROPOSED_POCKET[arch].format(release)) | ||
2466 | 560 | |||
2467 | 561 | |||
2468 | 562 | def _add_apt_repository(spec): | ||
2469 | 563 | """Add the spec using add_apt_repository | ||
2470 | 564 | |||
2471 | 565 | :param spec: the parameter to pass to add_apt_repository | ||
2472 | 566 | :type spec: str | ||
2473 | 567 | """ | ||
2474 | 568 | if '{series}' in spec: | ||
2475 | 569 | series = get_distrib_codename() | ||
2476 | 570 | spec = spec.replace('{series}', series) | ||
2477 | 571 | # software-properties package for bionic properly reacts to proxy settings | ||
2478 | 572 | # passed as environment variables (See lp:1433761). This is not the case | ||
2479 | 573 | # LTS and non-LTS releases below bionic. | ||
2480 | 574 | _run_with_retries(['add-apt-repository', '--yes', spec], | ||
2481 | 575 | cmd_env=env_proxy_settings(['https'])) | ||
2482 | 576 | |||
2483 | 577 | |||
2484 | 578 | def _add_cloud_pocket(pocket): | ||
2485 | 579 | """Add a cloud pocket as /etc/apt/sources.d/cloud-archive.list | ||
2486 | 580 | |||
2487 | 581 | Note that this overwrites the existing file if there is one. | ||
2488 | 582 | |||
2489 | 583 | This function also converts the simple pocket in to the actual pocket using | ||
2490 | 584 | the CLOUD_ARCHIVE_POCKETS mapping. | ||
2491 | 585 | |||
2492 | 586 | :param pocket: string representing the pocket to add a deb spec for. | ||
2493 | 587 | :raises: SourceConfigError if the cloud pocket doesn't exist or the | ||
2494 | 588 | requested release doesn't match the current distro version. | ||
2495 | 589 | """ | ||
2496 | 590 | apt_install(filter_installed_packages(['ubuntu-cloud-keyring']), | ||
2497 | 256 | fatal=True) | 591 | fatal=True) |
2529 | 257 | pocket = source.split(':')[-1] | 592 | if pocket not in CLOUD_ARCHIVE_POCKETS: |
2530 | 258 | if pocket not in CLOUD_ARCHIVE_POCKETS: | 593 | raise SourceConfigError( |
2531 | 259 | raise SourceConfigError( | 594 | 'Unsupported cloud: source option %s' % |
2532 | 260 | 'Unsupported cloud: source option %s' % | 595 | pocket) |
2533 | 261 | pocket) | 596 | actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket] |
2534 | 262 | actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket] | 597 | with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt: |
2535 | 263 | with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt: | 598 | apt.write(CLOUD_ARCHIVE.format(actual_pocket)) |
2536 | 264 | apt.write(CLOUD_ARCHIVE.format(actual_pocket)) | 599 | |
2537 | 265 | elif source == 'proposed': | 600 | |
2538 | 266 | release = lsb_release()['DISTRIB_CODENAME'] | 601 | def _add_cloud_staging(cloud_archive_release, openstack_release): |
2539 | 267 | with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt: | 602 | """Add the cloud staging repository which is in |
2540 | 268 | apt.write(PROPOSED_POCKET.format(release)) | 603 | ppa:ubuntu-cloud-archive/<openstack_release>-staging |
2541 | 269 | elif source == 'distro': | 604 | |
2542 | 270 | pass | 605 | This function checks that the cloud_archive_release matches the current |
2543 | 271 | else: | 606 | codename for the distro that charm is being installed on. |
2544 | 272 | log("Unknown source: {!r}".format(source)) | 607 | |
2545 | 273 | 608 | :param cloud_archive_release: string, codename for the release. | |
2546 | 274 | if key: | 609 | :param openstack_release: String, codename for the openstack release. |
2547 | 275 | if '-----BEGIN PGP PUBLIC KEY BLOCK-----' in key: | 610 | :raises: SourceConfigError if the cloud_archive_release doesn't match the |
2548 | 276 | with NamedTemporaryFile('w+') as key_file: | 611 | current version of the os. |
2549 | 277 | key_file.write(key) | 612 | """ |
2550 | 278 | key_file.flush() | 613 | _verify_is_ubuntu_rel(cloud_archive_release, openstack_release) |
2551 | 279 | key_file.seek(0) | 614 | ppa = 'ppa:ubuntu-cloud-archive/{}-staging'.format(openstack_release) |
2552 | 280 | subprocess.check_call(['apt-key', 'add', '-'], stdin=key_file) | 615 | cmd = 'add-apt-repository -y {}'.format(ppa) |
2553 | 281 | else: | 616 | _run_with_retries(cmd.split(' ')) |
2554 | 282 | # Note that hkp: is in no way a secure protocol. Using a | 617 | |
2555 | 283 | # GPG key id is pointless from a security POV unless you | 618 | |
2556 | 284 | # absolutely trust your network and DNS. | 619 | def _add_cloud_distro_check(cloud_archive_release, openstack_release): |
2557 | 285 | subprocess.check_call(['apt-key', 'adv', '--keyserver', | 620 | """Add the cloud pocket, but also check the cloud_archive_release against |
2558 | 286 | 'hkp://keyserver.ubuntu.com:80', '--recv', | 621 | the current distro, and use the openstack_release as the full lookup. |
2559 | 287 | key]) | 622 | |
2560 | 623 | This just calls _add_cloud_pocket() with the openstack_release as pocket | ||
2561 | 624 | to get the correct cloud-archive.list for dpkg to work with. | ||
2562 | 625 | |||
2563 | 626 | :param cloud_archive_release:String, codename for the distro release. | ||
2564 | 627 | :param openstack_release: String, spec for the release to look up in the | ||
2565 | 628 | CLOUD_ARCHIVE_POCKETS | ||
2566 | 629 | :raises: SourceConfigError if this is the wrong distro, or the pocket spec | ||
2567 | 630 | doesn't exist. | ||
2568 | 631 | """ | ||
2569 | 632 | _verify_is_ubuntu_rel(cloud_archive_release, openstack_release) | ||
2570 | 633 | _add_cloud_pocket("{}-{}".format(cloud_archive_release, openstack_release)) | ||
2571 | 634 | |||
2572 | 635 | |||
2573 | 636 | def _verify_is_ubuntu_rel(release, os_release): | ||
2574 | 637 | """Verify that the release is in the same as the current ubuntu release. | ||
2575 | 638 | |||
2576 | 639 | :param release: String, lowercase for the release. | ||
2577 | 640 | :param os_release: String, the os_release being asked for | ||
2578 | 641 | :raises: SourceConfigError if the release is not the same as the ubuntu | ||
2579 | 642 | release. | ||
2580 | 643 | """ | ||
2581 | 644 | ubuntu_rel = get_distrib_codename() | ||
2582 | 645 | if release != ubuntu_rel: | ||
2583 | 646 | raise SourceConfigError( | ||
2584 | 647 | 'Invalid Cloud Archive release specified: {}-{} on this Ubuntu' | ||
2585 | 648 | 'version ({})'.format(release, os_release, ubuntu_rel)) | ||
2586 | 288 | 649 | ||
2587 | 289 | 650 | ||
2588 | 290 | def _run_with_retries(cmd, max_retries=CMD_RETRY_COUNT, retry_exitcodes=(1,), | 651 | def _run_with_retries(cmd, max_retries=CMD_RETRY_COUNT, retry_exitcodes=(1,), |
2589 | @@ -300,9 +661,12 @@ | |||
2590 | 300 | :param: cmd_env: dict: Environment variables to add to the command run. | 661 | :param: cmd_env: dict: Environment variables to add to the command run. |
2591 | 301 | """ | 662 | """ |
2592 | 302 | 663 | ||
2594 | 303 | env = os.environ.copy() | 664 | env = None |
2595 | 665 | kwargs = {} | ||
2596 | 304 | if cmd_env: | 666 | if cmd_env: |
2597 | 667 | env = os.environ.copy() | ||
2598 | 305 | env.update(cmd_env) | 668 | env.update(cmd_env) |
2599 | 669 | kwargs['env'] = env | ||
2600 | 306 | 670 | ||
2601 | 307 | if not retry_message: | 671 | if not retry_message: |
2602 | 308 | retry_message = "Failed executing '{}'".format(" ".join(cmd)) | 672 | retry_message = "Failed executing '{}'".format(" ".join(cmd)) |
2603 | @@ -314,7 +678,8 @@ | |||
2604 | 314 | retry_results = (None,) + retry_exitcodes | 678 | retry_results = (None,) + retry_exitcodes |
2605 | 315 | while result in retry_results: | 679 | while result in retry_results: |
2606 | 316 | try: | 680 | try: |
2608 | 317 | result = subprocess.check_call(cmd, env=env) | 681 | # result = subprocess.check_call(cmd, env=env) |
2609 | 682 | result = subprocess.check_call(cmd, **kwargs) | ||
2610 | 318 | except subprocess.CalledProcessError as e: | 683 | except subprocess.CalledProcessError as e: |
2611 | 319 | retry_count = retry_count + 1 | 684 | retry_count = retry_count + 1 |
2612 | 320 | if retry_count > max_retries: | 685 | if retry_count > max_retries: |
2613 | @@ -327,6 +692,7 @@ | |||
2614 | 327 | def _run_apt_command(cmd, fatal=False): | 692 | def _run_apt_command(cmd, fatal=False): |
2615 | 328 | """Run an apt command with optional retries. | 693 | """Run an apt command with optional retries. |
2616 | 329 | 694 | ||
2617 | 695 | :param: cmd: str: The apt command to run. | ||
2618 | 330 | :param: fatal: bool: Whether the command's output should be checked and | 696 | :param: fatal: bool: Whether the command's output should be checked and |
2619 | 331 | retried. | 697 | retried. |
2620 | 332 | """ | 698 | """ |
2621 | @@ -353,7 +719,7 @@ | |||
2622 | 353 | cache = apt_cache() | 719 | cache = apt_cache() |
2623 | 354 | try: | 720 | try: |
2624 | 355 | pkg = cache[package] | 721 | pkg = cache[package] |
2626 | 356 | except: | 722 | except Exception: |
2627 | 357 | # the package is unknown to the current apt cache. | 723 | # the package is unknown to the current apt cache. |
2628 | 358 | return None | 724 | return None |
2629 | 359 | 725 | ||
2630 | 360 | 726 | ||
2631 | === modified file 'dev/charm_helpers_sync.py' | |||
2632 | --- dev/charm_helpers_sync.py 2015-01-28 08:59:02 +0000 | |||
2633 | +++ dev/charm_helpers_sync.py 2019-05-24 12:43:31 +0000 | |||
2634 | @@ -2,19 +2,17 @@ | |||
2635 | 2 | 2 | ||
2636 | 3 | # Copyright 2014-2015 Canonical Limited. | 3 | # Copyright 2014-2015 Canonical Limited. |
2637 | 4 | # | 4 | # |
2651 | 5 | # This file is part of charm-helpers. | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
2652 | 6 | # | 6 | # you may not use this file except in compliance with the License. |
2653 | 7 | # charm-helpers is free software: you can redistribute it and/or modify | 7 | # You may obtain a copy of the License at |
2654 | 8 | # it under the terms of the GNU Lesser General Public License version 3 as | 8 | # |
2655 | 9 | # published by the Free Software Foundation. | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
2656 | 10 | # | 10 | # |
2657 | 11 | # charm-helpers is distributed in the hope that it will be useful, | 11 | # Unless required by applicable law or agreed to in writing, software |
2658 | 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
2659 | 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
2660 | 14 | # GNU Lesser General Public License for more details. | 14 | # See the License for the specific language governing permissions and |
2661 | 15 | # | 15 | # limitations under the License. |
2649 | 16 | # You should have received a copy of the GNU Lesser General Public License | ||
2650 | 17 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | ||
2662 | 18 | 16 | ||
2663 | 19 | # Authors: | 17 | # Authors: |
2664 | 20 | # Adam Gandelman <adamg@ubuntu.com> | 18 | # Adam Gandelman <adamg@ubuntu.com> |
2665 | @@ -31,7 +29,7 @@ | |||
2666 | 31 | 29 | ||
2667 | 32 | import six | 30 | import six |
2668 | 33 | 31 | ||
2670 | 34 | CHARM_HELPERS_BRANCH = 'lp:charm-helpers' | 32 | CHARM_HELPERS_REPO = 'https://github.com/juju/charm-helpers' |
2671 | 35 | 33 | ||
2672 | 36 | 34 | ||
2673 | 37 | def parse_config(conf_file): | 35 | def parse_config(conf_file): |
2674 | @@ -41,10 +39,16 @@ | |||
2675 | 41 | return yaml.load(open(conf_file).read()) | 39 | return yaml.load(open(conf_file).read()) |
2676 | 42 | 40 | ||
2677 | 43 | 41 | ||
2679 | 44 | def clone_helpers(work_dir, branch): | 42 | def clone_helpers(work_dir, repo): |
2680 | 45 | dest = os.path.join(work_dir, 'charm-helpers') | 43 | dest = os.path.join(work_dir, 'charm-helpers') |
2683 | 46 | logging.info('Checking out %s to %s.' % (branch, dest)) | 44 | logging.info('Cloning out %s to %s.' % (repo, dest)) |
2684 | 47 | cmd = ['bzr', 'checkout', '--lightweight', branch, dest] | 45 | branch = None |
2685 | 46 | if '@' in repo: | ||
2686 | 47 | repo, branch = repo.split('@', 1) | ||
2687 | 48 | cmd = ['git', 'clone', '--depth=1'] | ||
2688 | 49 | if branch is not None: | ||
2689 | 50 | cmd += ['--branch', branch] | ||
2690 | 51 | cmd += [repo, dest] | ||
2691 | 48 | subprocess.check_call(cmd) | 52 | subprocess.check_call(cmd) |
2692 | 49 | return dest | 53 | return dest |
2693 | 50 | 54 | ||
2694 | @@ -176,6 +180,9 @@ | |||
2695 | 176 | 180 | ||
2696 | 177 | 181 | ||
2697 | 178 | def sync_helpers(include, src, dest, options=None): | 182 | def sync_helpers(include, src, dest, options=None): |
2698 | 183 | if os.path.exists(dest): | ||
2699 | 184 | logging.debug('Removing existing directory: %s' % dest) | ||
2700 | 185 | shutil.rmtree(dest) | ||
2701 | 179 | if not os.path.isdir(dest): | 186 | if not os.path.isdir(dest): |
2702 | 180 | os.makedirs(dest) | 187 | os.makedirs(dest) |
2703 | 181 | 188 | ||
2704 | @@ -193,14 +200,15 @@ | |||
2705 | 193 | inc, opts = extract_options(m, global_options) | 200 | inc, opts = extract_options(m, global_options) |
2706 | 194 | sync(src, dest, '%s.%s' % (k, inc), opts) | 201 | sync(src, dest, '%s.%s' % (k, inc), opts) |
2707 | 195 | 202 | ||
2708 | 203 | |||
2709 | 196 | if __name__ == '__main__': | 204 | if __name__ == '__main__': |
2710 | 197 | parser = optparse.OptionParser() | 205 | parser = optparse.OptionParser() |
2711 | 198 | parser.add_option('-c', '--config', action='store', dest='config', | 206 | parser.add_option('-c', '--config', action='store', dest='config', |
2712 | 199 | default=None, help='helper config file') | 207 | default=None, help='helper config file') |
2713 | 200 | parser.add_option('-D', '--debug', action='store_true', dest='debug', | 208 | parser.add_option('-D', '--debug', action='store_true', dest='debug', |
2714 | 201 | default=False, help='debug') | 209 | default=False, help='debug') |
2717 | 202 | parser.add_option('-b', '--branch', action='store', dest='branch', | 210 | parser.add_option('-r', '--repository', action='store', dest='repo', |
2718 | 203 | help='charm-helpers bzr branch (overrides config)') | 211 | help='charm-helpers git repository (overrides config)') |
2719 | 204 | parser.add_option('-d', '--destination', action='store', dest='dest_dir', | 212 | parser.add_option('-d', '--destination', action='store', dest='dest_dir', |
2720 | 205 | help='sync destination dir (overrides config)') | 213 | help='sync destination dir (overrides config)') |
2721 | 206 | (opts, args) = parser.parse_args() | 214 | (opts, args) = parser.parse_args() |
2722 | @@ -219,10 +227,10 @@ | |||
2723 | 219 | else: | 227 | else: |
2724 | 220 | config = {} | 228 | config = {} |
2725 | 221 | 229 | ||
2730 | 222 | if 'branch' not in config: | 230 | if 'repo' not in config: |
2731 | 223 | config['branch'] = CHARM_HELPERS_BRANCH | 231 | config['repo'] = CHARM_HELPERS_REPO |
2732 | 224 | if opts.branch: | 232 | if opts.repo: |
2733 | 225 | config['branch'] = opts.branch | 233 | config['repo'] = opts.repo |
2734 | 226 | if opts.dest_dir: | 234 | if opts.dest_dir: |
2735 | 227 | config['destination'] = opts.dest_dir | 235 | config['destination'] = opts.dest_dir |
2736 | 228 | 236 | ||
2737 | @@ -242,7 +250,7 @@ | |||
2738 | 242 | sync_options = config['options'] | 250 | sync_options = config['options'] |
2739 | 243 | tmpd = tempfile.mkdtemp() | 251 | tmpd = tempfile.mkdtemp() |
2740 | 244 | try: | 252 | try: |
2742 | 245 | checkout = clone_helpers(tmpd, config['branch']) | 253 | checkout = clone_helpers(tmpd, config['repo']) |
2743 | 246 | sync_helpers(config['include'], checkout, config['destination'], | 254 | sync_helpers(config['include'], checkout, config['destination'], |
2744 | 247 | options=sync_options) | 255 | options=sync_options) |
2745 | 248 | except Exception as e: | 256 | except Exception as e: |
+1