Merge ~xavpaice/charm-thruk-agent:update_charmhelpers into ~nagios-charmers/charm-thruk-agent:master
- Git
- lp:~xavpaice/charm-thruk-agent
- update_charmhelpers
- Merge into master
Proposed by
Xav Paice
Status: | Merged |
---|---|
Approved by: | Wouter van Bommel |
Approved revision: | 66c19905c8c565dfb495a14e83a809681496ac46 |
Merged at revision: | 3a9512547f97be9b44dacbc79f31818b441be4ec |
Proposed branch: | ~xavpaice/charm-thruk-agent:update_charmhelpers |
Merge into: | ~nagios-charmers/charm-thruk-agent:master |
Diff against target: |
2206 lines (+1317/-155) 23 files modified
bin/charm_helpers_sync.py (+30/-22) hooks/actions.py (+2/-1) hooks/charmhelpers/__init__.py (+4/-4) hooks/charmhelpers/core/hookenv.py (+449/-27) hooks/charmhelpers/core/host.py (+164/-11) hooks/charmhelpers/core/host_factory/ubuntu.py (+25/-0) hooks/charmhelpers/core/kernel.py (+2/-2) hooks/charmhelpers/core/services/base.py (+18/-7) hooks/charmhelpers/core/strutils.py (+11/-5) hooks/charmhelpers/core/sysctl.py (+21/-10) hooks/charmhelpers/core/templating.py (+18/-9) hooks/charmhelpers/core/unitdata.py (+8/-1) hooks/charmhelpers/fetch/__init__.py (+2/-0) hooks/charmhelpers/fetch/archiveurl.py (+1/-1) hooks/charmhelpers/fetch/bzrurl.py (+2/-2) hooks/charmhelpers/fetch/giturl.py (+2/-2) hooks/charmhelpers/fetch/python/__init__.py (+13/-0) hooks/charmhelpers/fetch/python/debug.py (+54/-0) hooks/charmhelpers/fetch/python/packages.py (+154/-0) hooks/charmhelpers/fetch/python/rpdb.py (+56/-0) hooks/charmhelpers/fetch/python/version.py (+32/-0) hooks/charmhelpers/fetch/snap.py (+33/-5) hooks/charmhelpers/fetch/ubuntu.py (+216/-46) |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Wouter van Bommel (community) | Approve | ||
Canonical IS Reviewers | Pending | ||
Review via email: mp+368892@code.launchpad.net |
Commit message
Update to allow the use of the newer juju proxy settings for apt installations.
Description of the change
To post a comment you must log in.
Revision history for this message
🤖 Canonical IS Merge Bot (canonical-is-mergebot) wrote: | # |
Revision history for this message
Wouter van Bommel (woutervb) wrote: | # |
Upgrade of charmhelpers
review:
Approve
Revision history for this message
🤖 Canonical IS Merge Bot (canonical-is-mergebot) wrote: | # |
Change successfully merged at revision 3a9512547f97be9b44dacbc79f31818b441be4ec
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | diff --git a/bin/charm_helpers_sync.py b/bin/charm_helpers_sync.py | |||
2 | index f67fdb9..7c0c194 100644 | |||
3 | --- a/bin/charm_helpers_sync.py | |||
4 | +++ b/bin/charm_helpers_sync.py | |||
5 | @@ -2,19 +2,17 @@ | |||
6 | 2 | 2 | ||
7 | 3 | # Copyright 2014-2015 Canonical Limited. | 3 | # Copyright 2014-2015 Canonical Limited. |
8 | 4 | # | 4 | # |
10 | 5 | # This file is part of charm-helpers. | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); |
11 | 6 | # you may not use this file except in compliance with the License. | ||
12 | 7 | # You may obtain a copy of the License at | ||
13 | 6 | # | 8 | # |
17 | 7 | # charm-helpers is free software: you can redistribute it and/or modify | 9 | # http://www.apache.org/licenses/LICENSE-2.0 |
15 | 8 | # it under the terms of the GNU Lesser General Public License version 3 as | ||
16 | 9 | # published by the Free Software Foundation. | ||
18 | 10 | # | 10 | # |
26 | 11 | # charm-helpers is distributed in the hope that it will be useful, | 11 | # Unless required by applicable law or agreed to in writing, software |
27 | 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | 12 | # distributed under the License is distributed on an "AS IS" BASIS, |
28 | 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
29 | 14 | # GNU Lesser General Public License for more details. | 14 | # See the License for the specific language governing permissions and |
30 | 15 | # | 15 | # limitations under the License. |
24 | 16 | # You should have received a copy of the GNU Lesser General Public License | ||
25 | 17 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | ||
31 | 18 | 16 | ||
32 | 19 | # Authors: | 17 | # Authors: |
33 | 20 | # Adam Gandelman <adamg@ubuntu.com> | 18 | # Adam Gandelman <adamg@ubuntu.com> |
34 | @@ -31,7 +29,7 @@ from fnmatch import fnmatch | |||
35 | 31 | 29 | ||
36 | 32 | import six | 30 | import six |
37 | 33 | 31 | ||
39 | 34 | CHARM_HELPERS_BRANCH = 'lp:charm-helpers' | 32 | CHARM_HELPERS_REPO = 'https://github.com/juju/charm-helpers' |
40 | 35 | 33 | ||
41 | 36 | 34 | ||
42 | 37 | def parse_config(conf_file): | 35 | def parse_config(conf_file): |
43 | @@ -41,10 +39,16 @@ def parse_config(conf_file): | |||
44 | 41 | return yaml.load(open(conf_file).read()) | 39 | return yaml.load(open(conf_file).read()) |
45 | 42 | 40 | ||
46 | 43 | 41 | ||
48 | 44 | def clone_helpers(work_dir, branch): | 42 | def clone_helpers(work_dir, repo): |
49 | 45 | dest = os.path.join(work_dir, 'charm-helpers') | 43 | dest = os.path.join(work_dir, 'charm-helpers') |
52 | 46 | logging.info('Checking out %s to %s.' % (branch, dest)) | 44 | logging.info('Cloning out %s to %s.' % (repo, dest)) |
53 | 47 | cmd = ['bzr', 'checkout', '--lightweight', branch, dest] | 45 | branch = None |
54 | 46 | if '@' in repo: | ||
55 | 47 | repo, branch = repo.split('@', 1) | ||
56 | 48 | cmd = ['git', 'clone', '--depth=1'] | ||
57 | 49 | if branch is not None: | ||
58 | 50 | cmd += ['--branch', branch] | ||
59 | 51 | cmd += [repo, dest] | ||
60 | 48 | subprocess.check_call(cmd) | 52 | subprocess.check_call(cmd) |
61 | 49 | return dest | 53 | return dest |
62 | 50 | 54 | ||
63 | @@ -176,6 +180,9 @@ def extract_options(inc, global_options=None): | |||
64 | 176 | 180 | ||
65 | 177 | 181 | ||
66 | 178 | def sync_helpers(include, src, dest, options=None): | 182 | def sync_helpers(include, src, dest, options=None): |
67 | 183 | if os.path.exists(dest): | ||
68 | 184 | logging.debug('Removing existing directory: %s' % dest) | ||
69 | 185 | shutil.rmtree(dest) | ||
70 | 179 | if not os.path.isdir(dest): | 186 | if not os.path.isdir(dest): |
71 | 180 | os.makedirs(dest) | 187 | os.makedirs(dest) |
72 | 181 | 188 | ||
73 | @@ -193,14 +200,15 @@ def sync_helpers(include, src, dest, options=None): | |||
74 | 193 | inc, opts = extract_options(m, global_options) | 200 | inc, opts = extract_options(m, global_options) |
75 | 194 | sync(src, dest, '%s.%s' % (k, inc), opts) | 201 | sync(src, dest, '%s.%s' % (k, inc), opts) |
76 | 195 | 202 | ||
77 | 203 | |||
78 | 196 | if __name__ == '__main__': | 204 | if __name__ == '__main__': |
79 | 197 | parser = optparse.OptionParser() | 205 | parser = optparse.OptionParser() |
80 | 198 | parser.add_option('-c', '--config', action='store', dest='config', | 206 | parser.add_option('-c', '--config', action='store', dest='config', |
81 | 199 | default=None, help='helper config file') | 207 | default=None, help='helper config file') |
82 | 200 | parser.add_option('-D', '--debug', action='store_true', dest='debug', | 208 | parser.add_option('-D', '--debug', action='store_true', dest='debug', |
83 | 201 | default=False, help='debug') | 209 | default=False, help='debug') |
86 | 202 | parser.add_option('-b', '--branch', action='store', dest='branch', | 210 | parser.add_option('-r', '--repository', action='store', dest='repo', |
87 | 203 | help='charm-helpers bzr branch (overrides config)') | 211 | help='charm-helpers git repository (overrides config)') |
88 | 204 | parser.add_option('-d', '--destination', action='store', dest='dest_dir', | 212 | parser.add_option('-d', '--destination', action='store', dest='dest_dir', |
89 | 205 | help='sync destination dir (overrides config)') | 213 | help='sync destination dir (overrides config)') |
90 | 206 | (opts, args) = parser.parse_args() | 214 | (opts, args) = parser.parse_args() |
91 | @@ -219,10 +227,10 @@ if __name__ == '__main__': | |||
92 | 219 | else: | 227 | else: |
93 | 220 | config = {} | 228 | config = {} |
94 | 221 | 229 | ||
99 | 222 | if 'branch' not in config: | 230 | if 'repo' not in config: |
100 | 223 | config['branch'] = CHARM_HELPERS_BRANCH | 231 | config['repo'] = CHARM_HELPERS_REPO |
101 | 224 | if opts.branch: | 232 | if opts.repo: |
102 | 225 | config['branch'] = opts.branch | 233 | config['repo'] = opts.repo |
103 | 226 | if opts.dest_dir: | 234 | if opts.dest_dir: |
104 | 227 | config['destination'] = opts.dest_dir | 235 | config['destination'] = opts.dest_dir |
105 | 228 | 236 | ||
106 | @@ -242,7 +250,7 @@ if __name__ == '__main__': | |||
107 | 242 | sync_options = config['options'] | 250 | sync_options = config['options'] |
108 | 243 | tmpd = tempfile.mkdtemp() | 251 | tmpd = tempfile.mkdtemp() |
109 | 244 | try: | 252 | try: |
111 | 245 | checkout = clone_helpers(tmpd, config['branch']) | 253 | checkout = clone_helpers(tmpd, config['repo']) |
112 | 246 | sync_helpers(config['include'], checkout, config['destination'], | 254 | sync_helpers(config['include'], checkout, config['destination'], |
113 | 247 | options=sync_options) | 255 | options=sync_options) |
114 | 248 | except Exception as e: | 256 | except Exception as e: |
115 | diff --git a/hooks/actions.py b/hooks/actions.py | |||
116 | index a72e9a6..1593b51 100644 | |||
117 | --- a/hooks/actions.py | |||
118 | +++ b/hooks/actions.py | |||
119 | @@ -93,7 +93,8 @@ def update_ppa(service_name): | |||
120 | 93 | prev_source = config.previous('source') | 93 | prev_source = config.previous('source') |
121 | 94 | if prev_source is not None and prev_source != new_source: | 94 | if prev_source is not None and prev_source != new_source: |
122 | 95 | subprocess.check_call(['add-apt-repository', | 95 | subprocess.check_call(['add-apt-repository', |
124 | 96 | '--yes', '--remove', prev_source]) | 96 | '--yes', '--remove', prev_source], |
125 | 97 | env=hookenv.env_proxy_settings(['https'])) | ||
126 | 97 | add_source(config.get('source'), config.get('key', None)) | 98 | add_source(config.get('source'), config.get('key', None)) |
127 | 98 | apt_update(fatal=True) | 99 | apt_update(fatal=True) |
128 | 99 | package_list = ["thruk", "pwgen", "apache2-utils"] | 100 | package_list = ["thruk", "pwgen", "apache2-utils"] |
129 | diff --git a/hooks/charmhelpers/__init__.py b/hooks/charmhelpers/__init__.py | |||
130 | index e7aa471..61ef907 100644 | |||
131 | --- a/hooks/charmhelpers/__init__.py | |||
132 | +++ b/hooks/charmhelpers/__init__.py | |||
133 | @@ -23,22 +23,22 @@ import subprocess | |||
134 | 23 | import sys | 23 | import sys |
135 | 24 | 24 | ||
136 | 25 | try: | 25 | try: |
138 | 26 | import six # flake8: noqa | 26 | import six # NOQA:F401 |
139 | 27 | except ImportError: | 27 | except ImportError: |
140 | 28 | if sys.version_info.major == 2: | 28 | if sys.version_info.major == 2: |
141 | 29 | subprocess.check_call(['apt-get', 'install', '-y', 'python-six']) | 29 | subprocess.check_call(['apt-get', 'install', '-y', 'python-six']) |
142 | 30 | else: | 30 | else: |
143 | 31 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-six']) | 31 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-six']) |
145 | 32 | import six # flake8: noqa | 32 | import six # NOQA:F401 |
146 | 33 | 33 | ||
147 | 34 | try: | 34 | try: |
149 | 35 | import yaml # flake8: noqa | 35 | import yaml # NOQA:F401 |
150 | 36 | except ImportError: | 36 | except ImportError: |
151 | 37 | if sys.version_info.major == 2: | 37 | if sys.version_info.major == 2: |
152 | 38 | subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml']) | 38 | subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml']) |
153 | 39 | else: | 39 | else: |
154 | 40 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml']) | 40 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml']) |
156 | 41 | import yaml # flake8: noqa | 41 | import yaml # NOQA:F401 |
157 | 42 | 42 | ||
158 | 43 | 43 | ||
159 | 44 | # Holds a list of mapping of mangled function names that have been deprecated | 44 | # Holds a list of mapping of mangled function names that have been deprecated |
160 | diff --git a/hooks/charmhelpers/core/hookenv.py b/hooks/charmhelpers/core/hookenv.py | |||
161 | index e44e22b..4744eb4 100644 | |||
162 | --- a/hooks/charmhelpers/core/hookenv.py | |||
163 | +++ b/hooks/charmhelpers/core/hookenv.py | |||
164 | @@ -22,10 +22,12 @@ from __future__ import print_function | |||
165 | 22 | import copy | 22 | import copy |
166 | 23 | from distutils.version import LooseVersion | 23 | from distutils.version import LooseVersion |
167 | 24 | from functools import wraps | 24 | from functools import wraps |
168 | 25 | from collections import namedtuple | ||
169 | 25 | import glob | 26 | import glob |
170 | 26 | import os | 27 | import os |
171 | 27 | import json | 28 | import json |
172 | 28 | import yaml | 29 | import yaml |
173 | 30 | import re | ||
174 | 29 | import subprocess | 31 | import subprocess |
175 | 30 | import sys | 32 | import sys |
176 | 31 | import errno | 33 | import errno |
177 | @@ -38,12 +40,20 @@ if not six.PY3: | |||
178 | 38 | else: | 40 | else: |
179 | 39 | from collections import UserDict | 41 | from collections import UserDict |
180 | 40 | 42 | ||
181 | 43 | |||
182 | 41 | CRITICAL = "CRITICAL" | 44 | CRITICAL = "CRITICAL" |
183 | 42 | ERROR = "ERROR" | 45 | ERROR = "ERROR" |
184 | 43 | WARNING = "WARNING" | 46 | WARNING = "WARNING" |
185 | 44 | INFO = "INFO" | 47 | INFO = "INFO" |
186 | 45 | DEBUG = "DEBUG" | 48 | DEBUG = "DEBUG" |
187 | 49 | TRACE = "TRACE" | ||
188 | 46 | MARKER = object() | 50 | MARKER = object() |
189 | 51 | SH_MAX_ARG = 131071 | ||
190 | 52 | |||
191 | 53 | |||
192 | 54 | RANGE_WARNING = ('Passing NO_PROXY string that includes a cidr. ' | ||
193 | 55 | 'This may not be compatible with software you are ' | ||
194 | 56 | 'running in your shell.') | ||
195 | 47 | 57 | ||
196 | 48 | cache = {} | 58 | cache = {} |
197 | 49 | 59 | ||
198 | @@ -64,7 +74,7 @@ def cached(func): | |||
199 | 64 | @wraps(func) | 74 | @wraps(func) |
200 | 65 | def wrapper(*args, **kwargs): | 75 | def wrapper(*args, **kwargs): |
201 | 66 | global cache | 76 | global cache |
203 | 67 | key = str((func, args, kwargs)) | 77 | key = json.dumps((func, args, kwargs), sort_keys=True, default=str) |
204 | 68 | try: | 78 | try: |
205 | 69 | return cache[key] | 79 | return cache[key] |
206 | 70 | except KeyError: | 80 | except KeyError: |
207 | @@ -94,7 +104,7 @@ def log(message, level=None): | |||
208 | 94 | command += ['-l', level] | 104 | command += ['-l', level] |
209 | 95 | if not isinstance(message, six.string_types): | 105 | if not isinstance(message, six.string_types): |
210 | 96 | message = repr(message) | 106 | message = repr(message) |
212 | 97 | command += [message] | 107 | command += [message[:SH_MAX_ARG]] |
213 | 98 | # Missing juju-log should not cause failures in unit tests | 108 | # Missing juju-log should not cause failures in unit tests |
214 | 99 | # Send log output to stderr | 109 | # Send log output to stderr |
215 | 100 | try: | 110 | try: |
216 | @@ -197,11 +207,58 @@ def remote_unit(): | |||
217 | 197 | return os.environ.get('JUJU_REMOTE_UNIT', None) | 207 | return os.environ.get('JUJU_REMOTE_UNIT', None) |
218 | 198 | 208 | ||
219 | 199 | 209 | ||
222 | 200 | def service_name(): | 210 | def application_name(): |
223 | 201 | """The name service group this unit belongs to""" | 211 | """ |
224 | 212 | The name of the deployed application this unit belongs to. | ||
225 | 213 | """ | ||
226 | 202 | return local_unit().split('/')[0] | 214 | return local_unit().split('/')[0] |
227 | 203 | 215 | ||
228 | 204 | 216 | ||
229 | 217 | def service_name(): | ||
230 | 218 | """ | ||
231 | 219 | .. deprecated:: 0.19.1 | ||
232 | 220 | Alias for :func:`application_name`. | ||
233 | 221 | """ | ||
234 | 222 | return application_name() | ||
235 | 223 | |||
236 | 224 | |||
237 | 225 | def model_name(): | ||
238 | 226 | """ | ||
239 | 227 | Name of the model that this unit is deployed in. | ||
240 | 228 | """ | ||
241 | 229 | return os.environ['JUJU_MODEL_NAME'] | ||
242 | 230 | |||
243 | 231 | |||
244 | 232 | def model_uuid(): | ||
245 | 233 | """ | ||
246 | 234 | UUID of the model that this unit is deployed in. | ||
247 | 235 | """ | ||
248 | 236 | return os.environ['JUJU_MODEL_UUID'] | ||
249 | 237 | |||
250 | 238 | |||
251 | 239 | def principal_unit(): | ||
252 | 240 | """Returns the principal unit of this unit, otherwise None""" | ||
253 | 241 | # Juju 2.2 and above provides JUJU_PRINCIPAL_UNIT | ||
254 | 242 | principal_unit = os.environ.get('JUJU_PRINCIPAL_UNIT', None) | ||
255 | 243 | # If it's empty, then this unit is the principal | ||
256 | 244 | if principal_unit == '': | ||
257 | 245 | return os.environ['JUJU_UNIT_NAME'] | ||
258 | 246 | elif principal_unit is not None: | ||
259 | 247 | return principal_unit | ||
260 | 248 | # For Juju 2.1 and below, let's try work out the principle unit by | ||
261 | 249 | # the various charms' metadata.yaml. | ||
262 | 250 | for reltype in relation_types(): | ||
263 | 251 | for rid in relation_ids(reltype): | ||
264 | 252 | for unit in related_units(rid): | ||
265 | 253 | md = _metadata_unit(unit) | ||
266 | 254 | if not md: | ||
267 | 255 | continue | ||
268 | 256 | subordinate = md.pop('subordinate', None) | ||
269 | 257 | if not subordinate: | ||
270 | 258 | return unit | ||
271 | 259 | return None | ||
272 | 260 | |||
273 | 261 | |||
274 | 205 | @cached | 262 | @cached |
275 | 206 | def remote_service_name(relid=None): | 263 | def remote_service_name(relid=None): |
276 | 207 | """The remote service name for a given relation-id (or the current relation)""" | 264 | """The remote service name for a given relation-id (or the current relation)""" |
277 | @@ -263,7 +320,7 @@ class Config(dict): | |||
278 | 263 | self.implicit_save = True | 320 | self.implicit_save = True |
279 | 264 | self._prev_dict = None | 321 | self._prev_dict = None |
280 | 265 | self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME) | 322 | self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME) |
282 | 266 | if os.path.exists(self.path): | 323 | if os.path.exists(self.path) and os.stat(self.path).st_size: |
283 | 267 | self.load_previous() | 324 | self.load_previous() |
284 | 268 | atexit(self._implicit_save) | 325 | atexit(self._implicit_save) |
285 | 269 | 326 | ||
286 | @@ -283,7 +340,11 @@ class Config(dict): | |||
287 | 283 | """ | 340 | """ |
288 | 284 | self.path = path or self.path | 341 | self.path = path or self.path |
289 | 285 | with open(self.path) as f: | 342 | with open(self.path) as f: |
291 | 286 | self._prev_dict = json.load(f) | 343 | try: |
292 | 344 | self._prev_dict = json.load(f) | ||
293 | 345 | except ValueError as e: | ||
294 | 346 | log('Unable to parse previous config data - {}'.format(str(e)), | ||
295 | 347 | level=ERROR) | ||
296 | 287 | for k, v in copy.deepcopy(self._prev_dict).items(): | 348 | for k, v in copy.deepcopy(self._prev_dict).items(): |
297 | 288 | if k not in self: | 349 | if k not in self: |
298 | 289 | self[k] = v | 350 | self[k] = v |
299 | @@ -319,6 +380,7 @@ class Config(dict): | |||
300 | 319 | 380 | ||
301 | 320 | """ | 381 | """ |
302 | 321 | with open(self.path, 'w') as f: | 382 | with open(self.path, 'w') as f: |
303 | 383 | os.fchmod(f.fileno(), 0o600) | ||
304 | 322 | json.dump(self, f) | 384 | json.dump(self, f) |
305 | 323 | 385 | ||
306 | 324 | def _implicit_save(self): | 386 | def _implicit_save(self): |
307 | @@ -326,22 +388,40 @@ class Config(dict): | |||
308 | 326 | self.save() | 388 | self.save() |
309 | 327 | 389 | ||
310 | 328 | 390 | ||
312 | 329 | @cached | 391 | _cache_config = None |
313 | 392 | |||
314 | 393 | |||
315 | 330 | def config(scope=None): | 394 | def config(scope=None): |
323 | 331 | """Juju charm configuration""" | 395 | """ |
324 | 332 | config_cmd_line = ['config-get'] | 396 | Get the juju charm configuration (scope==None) or individual key, |
325 | 333 | if scope is not None: | 397 | (scope=str). The returned value is a Python data structure loaded as |
326 | 334 | config_cmd_line.append(scope) | 398 | JSON from the Juju config command. |
327 | 335 | else: | 399 | |
328 | 336 | config_cmd_line.append('--all') | 400 | :param scope: If set, return the value for the specified key. |
329 | 337 | config_cmd_line.append('--format=json') | 401 | :type scope: Optional[str] |
330 | 402 | :returns: Either the whole config as a Config, or a key from it. | ||
331 | 403 | :rtype: Any | ||
332 | 404 | """ | ||
333 | 405 | global _cache_config | ||
334 | 406 | config_cmd_line = ['config-get', '--all', '--format=json'] | ||
335 | 338 | try: | 407 | try: |
338 | 339 | config_data = json.loads( | 408 | # JSON Decode Exception for Python3.5+ |
339 | 340 | subprocess.check_output(config_cmd_line).decode('UTF-8')) | 409 | exc_json = json.decoder.JSONDecodeError |
340 | 410 | except AttributeError: | ||
341 | 411 | # JSON Decode Exception for Python2.7 through Python3.4 | ||
342 | 412 | exc_json = ValueError | ||
343 | 413 | try: | ||
344 | 414 | if _cache_config is None: | ||
345 | 415 | config_data = json.loads( | ||
346 | 416 | subprocess.check_output(config_cmd_line).decode('UTF-8')) | ||
347 | 417 | _cache_config = Config(config_data) | ||
348 | 341 | if scope is not None: | 418 | if scope is not None: |
352 | 342 | return config_data | 419 | return _cache_config.get(scope) |
353 | 343 | return Config(config_data) | 420 | return _cache_config |
354 | 344 | except ValueError: | 421 | except (exc_json, UnicodeDecodeError) as e: |
355 | 422 | log('Unable to parse output from config-get: config_cmd_line="{}" ' | ||
356 | 423 | 'message="{}"' | ||
357 | 424 | .format(config_cmd_line, str(e)), level=ERROR) | ||
358 | 345 | return None | 425 | return None |
359 | 346 | 426 | ||
360 | 347 | 427 | ||
361 | @@ -435,6 +515,67 @@ def related_units(relid=None): | |||
362 | 435 | subprocess.check_output(units_cmd_line).decode('UTF-8')) or [] | 515 | subprocess.check_output(units_cmd_line).decode('UTF-8')) or [] |
363 | 436 | 516 | ||
364 | 437 | 517 | ||
365 | 518 | def expected_peer_units(): | ||
366 | 519 | """Get a generator for units we expect to join peer relation based on | ||
367 | 520 | goal-state. | ||
368 | 521 | |||
369 | 522 | The local unit is excluded from the result to make it easy to gauge | ||
370 | 523 | completion of all peers joining the relation with existing hook tools. | ||
371 | 524 | |||
372 | 525 | Example usage: | ||
373 | 526 | log('peer {} of {} joined peer relation' | ||
374 | 527 | .format(len(related_units()), | ||
375 | 528 | len(list(expected_peer_units())))) | ||
376 | 529 | |||
377 | 530 | This function will raise NotImplementedError if used with juju versions | ||
378 | 531 | without goal-state support. | ||
379 | 532 | |||
380 | 533 | :returns: iterator | ||
381 | 534 | :rtype: types.GeneratorType | ||
382 | 535 | :raises: NotImplementedError | ||
383 | 536 | """ | ||
384 | 537 | if not has_juju_version("2.4.0"): | ||
385 | 538 | # goal-state first appeared in 2.4.0. | ||
386 | 539 | raise NotImplementedError("goal-state") | ||
387 | 540 | _goal_state = goal_state() | ||
388 | 541 | return (key for key in _goal_state['units'] | ||
389 | 542 | if '/' in key and key != local_unit()) | ||
390 | 543 | |||
391 | 544 | |||
392 | 545 | def expected_related_units(reltype=None): | ||
393 | 546 | """Get a generator for units we expect to join relation based on | ||
394 | 547 | goal-state. | ||
395 | 548 | |||
396 | 549 | Note that you can not use this function for the peer relation, take a look | ||
397 | 550 | at expected_peer_units() for that. | ||
398 | 551 | |||
399 | 552 | This function will raise KeyError if you request information for a | ||
400 | 553 | relation type for which juju goal-state does not have information. It will | ||
401 | 554 | raise NotImplementedError if used with juju versions without goal-state | ||
402 | 555 | support. | ||
403 | 556 | |||
404 | 557 | Example usage: | ||
405 | 558 | log('participant {} of {} joined relation {}' | ||
406 | 559 | .format(len(related_units()), | ||
407 | 560 | len(list(expected_related_units())), | ||
408 | 561 | relation_type())) | ||
409 | 562 | |||
410 | 563 | :param reltype: Relation type to list data for, default is to list data for | ||
411 | 564 | the realtion type we are currently executing a hook for. | ||
412 | 565 | :type reltype: str | ||
413 | 566 | :returns: iterator | ||
414 | 567 | :rtype: types.GeneratorType | ||
415 | 568 | :raises: KeyError, NotImplementedError | ||
416 | 569 | """ | ||
417 | 570 | if not has_juju_version("2.4.4"): | ||
418 | 571 | # goal-state existed in 2.4.0, but did not list individual units to | ||
419 | 572 | # join a relation in 2.4.1 through 2.4.3. (LP: #1794739) | ||
420 | 573 | raise NotImplementedError("goal-state relation unit count") | ||
421 | 574 | reltype = reltype or relation_type() | ||
422 | 575 | _goal_state = goal_state() | ||
423 | 576 | return (key for key in _goal_state['relations'][reltype] if '/' in key) | ||
424 | 577 | |||
425 | 578 | |||
426 | 438 | @cached | 579 | @cached |
427 | 439 | def relation_for_unit(unit=None, rid=None): | 580 | def relation_for_unit(unit=None, rid=None): |
428 | 440 | """Get the json represenation of a unit's relation""" | 581 | """Get the json represenation of a unit's relation""" |
429 | @@ -478,6 +619,24 @@ def metadata(): | |||
430 | 478 | return yaml.safe_load(md) | 619 | return yaml.safe_load(md) |
431 | 479 | 620 | ||
432 | 480 | 621 | ||
433 | 622 | def _metadata_unit(unit): | ||
434 | 623 | """Given the name of a unit (e.g. apache2/0), get the unit charm's | ||
435 | 624 | metadata.yaml. Very similar to metadata() but allows us to inspect | ||
436 | 625 | other units. Unit needs to be co-located, such as a subordinate or | ||
437 | 626 | principal/primary. | ||
438 | 627 | |||
439 | 628 | :returns: metadata.yaml as a python object. | ||
440 | 629 | |||
441 | 630 | """ | ||
442 | 631 | basedir = os.sep.join(charm_dir().split(os.sep)[:-2]) | ||
443 | 632 | unitdir = 'unit-{}'.format(unit.replace(os.sep, '-')) | ||
444 | 633 | joineddir = os.path.join(basedir, unitdir, 'charm', 'metadata.yaml') | ||
445 | 634 | if not os.path.exists(joineddir): | ||
446 | 635 | return None | ||
447 | 636 | with open(joineddir) as md: | ||
448 | 637 | return yaml.safe_load(md) | ||
449 | 638 | |||
450 | 639 | |||
451 | 481 | @cached | 640 | @cached |
452 | 482 | def relation_types(): | 641 | def relation_types(): |
453 | 483 | """Get a list of relation types supported by this charm""" | 642 | """Get a list of relation types supported by this charm""" |
454 | @@ -602,18 +761,31 @@ def is_relation_made(relation, keys='private-address'): | |||
455 | 602 | return False | 761 | return False |
456 | 603 | 762 | ||
457 | 604 | 763 | ||
458 | 764 | def _port_op(op_name, port, protocol="TCP"): | ||
459 | 765 | """Open or close a service network port""" | ||
460 | 766 | _args = [op_name] | ||
461 | 767 | icmp = protocol.upper() == "ICMP" | ||
462 | 768 | if icmp: | ||
463 | 769 | _args.append(protocol) | ||
464 | 770 | else: | ||
465 | 771 | _args.append('{}/{}'.format(port, protocol)) | ||
466 | 772 | try: | ||
467 | 773 | subprocess.check_call(_args) | ||
468 | 774 | except subprocess.CalledProcessError: | ||
469 | 775 | # Older Juju pre 2.3 doesn't support ICMP | ||
470 | 776 | # so treat it as a no-op if it fails. | ||
471 | 777 | if not icmp: | ||
472 | 778 | raise | ||
473 | 779 | |||
474 | 780 | |||
475 | 605 | def open_port(port, protocol="TCP"): | 781 | def open_port(port, protocol="TCP"): |
476 | 606 | """Open a service network port""" | 782 | """Open a service network port""" |
480 | 607 | _args = ['open-port'] | 783 | _port_op('open-port', port, protocol) |
478 | 608 | _args.append('{}/{}'.format(port, protocol)) | ||
479 | 609 | subprocess.check_call(_args) | ||
481 | 610 | 784 | ||
482 | 611 | 785 | ||
483 | 612 | def close_port(port, protocol="TCP"): | 786 | def close_port(port, protocol="TCP"): |
484 | 613 | """Close a service network port""" | 787 | """Close a service network port""" |
488 | 614 | _args = ['close-port'] | 788 | _port_op('close-port', port, protocol) |
486 | 615 | _args.append('{}/{}'.format(port, protocol)) | ||
487 | 616 | subprocess.check_call(_args) | ||
489 | 617 | 789 | ||
490 | 618 | 790 | ||
491 | 619 | def open_ports(start, end, protocol="TCP"): | 791 | def open_ports(start, end, protocol="TCP"): |
492 | @@ -630,6 +802,17 @@ def close_ports(start, end, protocol="TCP"): | |||
493 | 630 | subprocess.check_call(_args) | 802 | subprocess.check_call(_args) |
494 | 631 | 803 | ||
495 | 632 | 804 | ||
496 | 805 | def opened_ports(): | ||
497 | 806 | """Get the opened ports | ||
498 | 807 | |||
499 | 808 | *Note that this will only show ports opened in a previous hook* | ||
500 | 809 | |||
501 | 810 | :returns: Opened ports as a list of strings: ``['8080/tcp', '8081-8083/tcp']`` | ||
502 | 811 | """ | ||
503 | 812 | _args = ['opened-ports', '--format=json'] | ||
504 | 813 | return json.loads(subprocess.check_output(_args).decode('UTF-8')) | ||
505 | 814 | |||
506 | 815 | |||
507 | 633 | @cached | 816 | @cached |
508 | 634 | def unit_get(attribute): | 817 | def unit_get(attribute): |
509 | 635 | """Get the unit ID for the remote unit""" | 818 | """Get the unit ID for the remote unit""" |
510 | @@ -751,8 +934,15 @@ class Hooks(object): | |||
511 | 751 | return wrapper | 934 | return wrapper |
512 | 752 | 935 | ||
513 | 753 | 936 | ||
514 | 937 | class NoNetworkBinding(Exception): | ||
515 | 938 | pass | ||
516 | 939 | |||
517 | 940 | |||
518 | 754 | def charm_dir(): | 941 | def charm_dir(): |
519 | 755 | """Return the root directory of the current charm""" | 942 | """Return the root directory of the current charm""" |
520 | 943 | d = os.environ.get('JUJU_CHARM_DIR') | ||
521 | 944 | if d is not None: | ||
522 | 945 | return d | ||
523 | 756 | return os.environ.get('CHARM_DIR') | 946 | return os.environ.get('CHARM_DIR') |
524 | 757 | 947 | ||
525 | 758 | 948 | ||
526 | @@ -874,6 +1064,14 @@ def application_version_set(version): | |||
527 | 874 | 1064 | ||
528 | 875 | 1065 | ||
529 | 876 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | 1066 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) |
530 | 1067 | @cached | ||
531 | 1068 | def goal_state(): | ||
532 | 1069 | """Juju goal state values""" | ||
533 | 1070 | cmd = ['goal-state', '--format=json'] | ||
534 | 1071 | return json.loads(subprocess.check_output(cmd).decode('UTF-8')) | ||
535 | 1072 | |||
536 | 1073 | |||
537 | 1074 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | ||
538 | 877 | def is_leader(): | 1075 | def is_leader(): |
539 | 878 | """Does the current unit hold the juju leadership | 1076 | """Does the current unit hold the juju leadership |
540 | 879 | 1077 | ||
541 | @@ -967,7 +1165,6 @@ def juju_version(): | |||
542 | 967 | universal_newlines=True).strip() | 1165 | universal_newlines=True).strip() |
543 | 968 | 1166 | ||
544 | 969 | 1167 | ||
545 | 970 | @cached | ||
546 | 971 | def has_juju_version(minimum_version): | 1168 | def has_juju_version(minimum_version): |
547 | 972 | """Return True if the Juju version is at least the provided version""" | 1169 | """Return True if the Juju version is at least the provided version""" |
548 | 973 | return LooseVersion(juju_version()) >= LooseVersion(minimum_version) | 1170 | return LooseVersion(juju_version()) >= LooseVersion(minimum_version) |
549 | @@ -1027,6 +1224,8 @@ def _run_atexit(): | |||
550 | 1027 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | 1224 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) |
551 | 1028 | def network_get_primary_address(binding): | 1225 | def network_get_primary_address(binding): |
552 | 1029 | ''' | 1226 | ''' |
553 | 1227 | Deprecated since Juju 2.3; use network_get() | ||
554 | 1228 | |||
555 | 1030 | Retrieve the primary network address for a named binding | 1229 | Retrieve the primary network address for a named binding |
556 | 1031 | 1230 | ||
557 | 1032 | :param binding: string. The name of a relation of extra-binding | 1231 | :param binding: string. The name of a relation of extra-binding |
558 | @@ -1034,7 +1233,41 @@ def network_get_primary_address(binding): | |||
559 | 1034 | :raise: NotImplementedError if run on Juju < 2.0 | 1233 | :raise: NotImplementedError if run on Juju < 2.0 |
560 | 1035 | ''' | 1234 | ''' |
561 | 1036 | cmd = ['network-get', '--primary-address', binding] | 1235 | cmd = ['network-get', '--primary-address', binding] |
563 | 1037 | return subprocess.check_output(cmd).decode('UTF-8').strip() | 1236 | try: |
564 | 1237 | response = subprocess.check_output( | ||
565 | 1238 | cmd, | ||
566 | 1239 | stderr=subprocess.STDOUT).decode('UTF-8').strip() | ||
567 | 1240 | except CalledProcessError as e: | ||
568 | 1241 | if 'no network config found for binding' in e.output.decode('UTF-8'): | ||
569 | 1242 | raise NoNetworkBinding("No network binding for {}" | ||
570 | 1243 | .format(binding)) | ||
571 | 1244 | else: | ||
572 | 1245 | raise | ||
573 | 1246 | return response | ||
574 | 1247 | |||
575 | 1248 | |||
576 | 1249 | def network_get(endpoint, relation_id=None): | ||
577 | 1250 | """ | ||
578 | 1251 | Retrieve the network details for a relation endpoint | ||
579 | 1252 | |||
580 | 1253 | :param endpoint: string. The name of a relation endpoint | ||
581 | 1254 | :param relation_id: int. The ID of the relation for the current context. | ||
582 | 1255 | :return: dict. The loaded YAML output of the network-get query. | ||
583 | 1256 | :raise: NotImplementedError if request not supported by the Juju version. | ||
584 | 1257 | """ | ||
585 | 1258 | if not has_juju_version('2.2'): | ||
586 | 1259 | raise NotImplementedError(juju_version()) # earlier versions require --primary-address | ||
587 | 1260 | if relation_id and not has_juju_version('2.3'): | ||
588 | 1261 | raise NotImplementedError # 2.3 added the -r option | ||
589 | 1262 | |||
590 | 1263 | cmd = ['network-get', endpoint, '--format', 'yaml'] | ||
591 | 1264 | if relation_id: | ||
592 | 1265 | cmd.append('-r') | ||
593 | 1266 | cmd.append(relation_id) | ||
594 | 1267 | response = subprocess.check_output( | ||
595 | 1268 | cmd, | ||
596 | 1269 | stderr=subprocess.STDOUT).decode('UTF-8').strip() | ||
597 | 1270 | return yaml.safe_load(response) | ||
598 | 1038 | 1271 | ||
599 | 1039 | 1272 | ||
600 | 1040 | def add_metric(*args, **kwargs): | 1273 | def add_metric(*args, **kwargs): |
601 | @@ -1066,3 +1299,192 @@ def meter_info(): | |||
602 | 1066 | """Get the meter status information, if running in the meter-status-changed | 1299 | """Get the meter status information, if running in the meter-status-changed |
603 | 1067 | hook.""" | 1300 | hook.""" |
604 | 1068 | return os.environ.get('JUJU_METER_INFO') | 1301 | return os.environ.get('JUJU_METER_INFO') |
605 | 1302 | |||
606 | 1303 | |||
607 | 1304 | def iter_units_for_relation_name(relation_name): | ||
608 | 1305 | """Iterate through all units in a relation | ||
609 | 1306 | |||
610 | 1307 | Generator that iterates through all the units in a relation and yields | ||
611 | 1308 | a named tuple with rid and unit field names. | ||
612 | 1309 | |||
613 | 1310 | Usage: | ||
614 | 1311 | data = [(u.rid, u.unit) | ||
615 | 1312 | for u in iter_units_for_relation_name(relation_name)] | ||
616 | 1313 | |||
617 | 1314 | :param relation_name: string relation name | ||
618 | 1315 | :yield: Named Tuple with rid and unit field names | ||
619 | 1316 | """ | ||
620 | 1317 | RelatedUnit = namedtuple('RelatedUnit', 'rid, unit') | ||
621 | 1318 | for rid in relation_ids(relation_name): | ||
622 | 1319 | for unit in related_units(rid): | ||
623 | 1320 | yield RelatedUnit(rid, unit) | ||
624 | 1321 | |||
625 | 1322 | |||
626 | 1323 | def ingress_address(rid=None, unit=None): | ||
627 | 1324 | """ | ||
628 | 1325 | Retrieve the ingress-address from a relation when available. | ||
629 | 1326 | Otherwise, return the private-address. | ||
630 | 1327 | |||
631 | 1328 | When used on the consuming side of the relation (unit is a remote | ||
632 | 1329 | unit), the ingress-address is the IP address that this unit needs | ||
633 | 1330 | to use to reach the provided service on the remote unit. | ||
634 | 1331 | |||
635 | 1332 | When used on the providing side of the relation (unit == local_unit()), | ||
636 | 1333 | the ingress-address is the IP address that is advertised to remote | ||
637 | 1334 | units on this relation. Remote units need to use this address to | ||
638 | 1335 | reach the local provided service on this unit. | ||
639 | 1336 | |||
640 | 1337 | Note that charms may document some other method to use in | ||
641 | 1338 | preference to the ingress_address(), such as an address provided | ||
642 | 1339 | on a different relation attribute or a service discovery mechanism. | ||
643 | 1340 | This allows charms to redirect inbound connections to their peers | ||
644 | 1341 | or different applications such as load balancers. | ||
645 | 1342 | |||
646 | 1343 | Usage: | ||
647 | 1344 | addresses = [ingress_address(rid=u.rid, unit=u.unit) | ||
648 | 1345 | for u in iter_units_for_relation_name(relation_name)] | ||
649 | 1346 | |||
650 | 1347 | :param rid: string relation id | ||
651 | 1348 | :param unit: string unit name | ||
652 | 1349 | :side effect: calls relation_get | ||
653 | 1350 | :return: string IP address | ||
654 | 1351 | """ | ||
655 | 1352 | settings = relation_get(rid=rid, unit=unit) | ||
656 | 1353 | return (settings.get('ingress-address') or | ||
657 | 1354 | settings.get('private-address')) | ||
658 | 1355 | |||
659 | 1356 | |||
660 | 1357 | def egress_subnets(rid=None, unit=None): | ||
661 | 1358 | """ | ||
662 | 1359 | Retrieve the egress-subnets from a relation. | ||
663 | 1360 | |||
664 | 1361 | This function is to be used on the providing side of the | ||
665 | 1362 | relation, and provides the ranges of addresses that client | ||
666 | 1363 | connections may come from. The result is uninteresting on | ||
667 | 1364 | the consuming side of a relation (unit == local_unit()). | ||
668 | 1365 | |||
669 | 1366 | Returns a stable list of subnets in CIDR format. | ||
670 | 1367 | eg. ['192.168.1.0/24', '2001::F00F/128'] | ||
671 | 1368 | |||
672 | 1369 | If egress-subnets is not available, falls back to using the published | ||
673 | 1370 | ingress-address, or finally private-address. | ||
674 | 1371 | |||
675 | 1372 | :param rid: string relation id | ||
676 | 1373 | :param unit: string unit name | ||
677 | 1374 | :side effect: calls relation_get | ||
678 | 1375 | :return: list of subnets in CIDR format. eg. ['192.168.1.0/24', '2001::F00F/128'] | ||
679 | 1376 | """ | ||
680 | 1377 | def _to_range(addr): | ||
681 | 1378 | if re.search(r'^(?:\d{1,3}\.){3}\d{1,3}$', addr) is not None: | ||
682 | 1379 | addr += '/32' | ||
683 | 1380 | elif ':' in addr and '/' not in addr: # IPv6 | ||
684 | 1381 | addr += '/128' | ||
685 | 1382 | return addr | ||
686 | 1383 | |||
687 | 1384 | settings = relation_get(rid=rid, unit=unit) | ||
688 | 1385 | if 'egress-subnets' in settings: | ||
689 | 1386 | return [n.strip() for n in settings['egress-subnets'].split(',') if n.strip()] | ||
690 | 1387 | if 'ingress-address' in settings: | ||
691 | 1388 | return [_to_range(settings['ingress-address'])] | ||
692 | 1389 | if 'private-address' in settings: | ||
693 | 1390 | return [_to_range(settings['private-address'])] | ||
694 | 1391 | return [] # Should never happen | ||
695 | 1392 | |||
696 | 1393 | |||
697 | 1394 | def unit_doomed(unit=None): | ||
698 | 1395 | """Determines if the unit is being removed from the model | ||
699 | 1396 | |||
700 | 1397 | Requires Juju 2.4.1. | ||
701 | 1398 | |||
702 | 1399 | :param unit: string unit name, defaults to local_unit | ||
703 | 1400 | :side effect: calls goal_state | ||
704 | 1401 | :side effect: calls local_unit | ||
705 | 1402 | :side effect: calls has_juju_version | ||
706 | 1403 | :return: True if the unit is being removed, already gone, or never existed | ||
707 | 1404 | """ | ||
708 | 1405 | if not has_juju_version("2.4.1"): | ||
709 | 1406 | # We cannot risk blindly returning False for 'we don't know', | ||
710 | 1407 | # because that could cause data loss; if call sites don't | ||
711 | 1408 | # need an accurate answer, they likely don't need this helper | ||
712 | 1409 | # at all. | ||
713 | 1410 | # goal-state existed in 2.4.0, but did not handle removals | ||
714 | 1411 | # correctly until 2.4.1. | ||
715 | 1412 | raise NotImplementedError("is_doomed") | ||
716 | 1413 | if unit is None: | ||
717 | 1414 | unit = local_unit() | ||
718 | 1415 | gs = goal_state() | ||
719 | 1416 | units = gs.get('units', {}) | ||
720 | 1417 | if unit not in units: | ||
721 | 1418 | return True | ||
722 | 1419 | # I don't think 'dead' units ever show up in the goal-state, but | ||
723 | 1420 | # check anyway in addition to 'dying'. | ||
724 | 1421 | return units[unit]['status'] in ('dying', 'dead') | ||
725 | 1422 | |||
726 | 1423 | |||
727 | 1424 | def env_proxy_settings(selected_settings=None): | ||
728 | 1425 | """Get proxy settings from process environment variables. | ||
729 | 1426 | |||
730 | 1427 | Get charm proxy settings from environment variables that correspond to | ||
731 | 1428 | juju-http-proxy, juju-https-proxy and juju-no-proxy (available as of 2.4.2, | ||
732 | 1429 | see lp:1782236) in a format suitable for passing to an application that | ||
733 | 1430 | reacts to proxy settings passed as environment variables. Some applications | ||
734 | 1431 | support lowercase or uppercase notation (e.g. curl), some support only | ||
735 | 1432 | lowercase (e.g. wget), there are also subjectively rare cases of only | ||
736 | 1433 | uppercase notation support. no_proxy CIDR and wildcard support also varies | ||
737 | 1434 | between runtimes and applications as there is no enforced standard. | ||
738 | 1435 | |||
739 | 1436 | Some applications may connect to multiple destinations and expose config | ||
740 | 1437 | options that would affect only proxy settings for a specific destination | ||
741 | 1438 | these should be handled in charms in an application-specific manner. | ||
742 | 1439 | |||
743 | 1440 | :param selected_settings: format only a subset of possible settings | ||
744 | 1441 | :type selected_settings: list | ||
745 | 1442 | :rtype: Option(None, dict[str, str]) | ||
746 | 1443 | """ | ||
747 | 1444 | SUPPORTED_SETTINGS = { | ||
748 | 1445 | 'http': 'HTTP_PROXY', | ||
749 | 1446 | 'https': 'HTTPS_PROXY', | ||
750 | 1447 | 'no_proxy': 'NO_PROXY', | ||
751 | 1448 | 'ftp': 'FTP_PROXY' | ||
752 | 1449 | } | ||
753 | 1450 | if selected_settings is None: | ||
754 | 1451 | selected_settings = SUPPORTED_SETTINGS | ||
755 | 1452 | |||
756 | 1453 | selected_vars = [v for k, v in SUPPORTED_SETTINGS.items() | ||
757 | 1454 | if k in selected_settings] | ||
758 | 1455 | proxy_settings = {} | ||
759 | 1456 | for var in selected_vars: | ||
760 | 1457 | var_val = os.getenv(var) | ||
761 | 1458 | if var_val: | ||
762 | 1459 | proxy_settings[var] = var_val | ||
763 | 1460 | proxy_settings[var.lower()] = var_val | ||
764 | 1461 | # Now handle juju-prefixed environment variables. The legacy vs new | ||
765 | 1462 | # environment variable usage is mutually exclusive | ||
766 | 1463 | charm_var_val = os.getenv('JUJU_CHARM_{}'.format(var)) | ||
767 | 1464 | if charm_var_val: | ||
768 | 1465 | proxy_settings[var] = charm_var_val | ||
769 | 1466 | proxy_settings[var.lower()] = charm_var_val | ||
770 | 1467 | if 'no_proxy' in proxy_settings: | ||
771 | 1468 | if _contains_range(proxy_settings['no_proxy']): | ||
772 | 1469 | log(RANGE_WARNING, level=WARNING) | ||
773 | 1470 | return proxy_settings if proxy_settings else None | ||
774 | 1471 | |||
775 | 1472 | |||
776 | 1473 | def _contains_range(addresses): | ||
777 | 1474 | """Check for cidr or wildcard domain in a string. | ||
778 | 1475 | |||
779 | 1476 | Given a string comprising a comma separated list of ip addresses | ||
780 | 1477 | and domain names, determine whether the string contains IP ranges | ||
781 | 1478 | or wildcard domains. | ||
782 | 1479 | |||
783 | 1480 | :param addresses: comma separated list of domains and ip addresses. | ||
784 | 1481 | :type addresses: str | ||
785 | 1482 | """ | ||
786 | 1483 | return ( | ||
787 | 1484 | # Test for cidr (e.g. 10.20.20.0/24) | ||
788 | 1485 | "/" in addresses or | ||
789 | 1486 | # Test for wildcard domains (*.foo.com or .foo.com) | ||
790 | 1487 | "*" in addresses or | ||
791 | 1488 | addresses.startswith(".") or | ||
792 | 1489 | ",." in addresses or | ||
793 | 1490 | " ." in addresses) | ||
794 | diff --git a/hooks/charmhelpers/core/host.py b/hooks/charmhelpers/core/host.py | |||
795 | index b0043cb..32754ff 100644 | |||
796 | --- a/hooks/charmhelpers/core/host.py | |||
797 | +++ b/hooks/charmhelpers/core/host.py | |||
798 | @@ -34,21 +34,23 @@ import six | |||
799 | 34 | 34 | ||
800 | 35 | from contextlib import contextmanager | 35 | from contextlib import contextmanager |
801 | 36 | from collections import OrderedDict | 36 | from collections import OrderedDict |
803 | 37 | from .hookenv import log | 37 | from .hookenv import log, INFO, DEBUG, local_unit, charm_name |
804 | 38 | from .fstab import Fstab | 38 | from .fstab import Fstab |
805 | 39 | from charmhelpers.osplatform import get_platform | 39 | from charmhelpers.osplatform import get_platform |
806 | 40 | 40 | ||
807 | 41 | __platform__ = get_platform() | 41 | __platform__ = get_platform() |
808 | 42 | if __platform__ == "ubuntu": | 42 | if __platform__ == "ubuntu": |
810 | 43 | from charmhelpers.core.host_factory.ubuntu import ( | 43 | from charmhelpers.core.host_factory.ubuntu import ( # NOQA:F401 |
811 | 44 | service_available, | 44 | service_available, |
812 | 45 | add_new_group, | 45 | add_new_group, |
813 | 46 | lsb_release, | 46 | lsb_release, |
814 | 47 | cmp_pkgrevno, | 47 | cmp_pkgrevno, |
815 | 48 | CompareHostReleases, | 48 | CompareHostReleases, |
816 | 49 | get_distrib_codename, | ||
817 | 50 | arch | ||
818 | 49 | ) # flake8: noqa -- ignore F401 for this import | 51 | ) # flake8: noqa -- ignore F401 for this import |
819 | 50 | elif __platform__ == "centos": | 52 | elif __platform__ == "centos": |
821 | 51 | from charmhelpers.core.host_factory.centos import ( | 53 | from charmhelpers.core.host_factory.centos import ( # NOQA:F401 |
822 | 52 | service_available, | 54 | service_available, |
823 | 53 | add_new_group, | 55 | add_new_group, |
824 | 54 | lsb_release, | 56 | lsb_release, |
825 | @@ -58,6 +60,7 @@ elif __platform__ == "centos": | |||
826 | 58 | 60 | ||
827 | 59 | UPDATEDB_PATH = '/etc/updatedb.conf' | 61 | UPDATEDB_PATH = '/etc/updatedb.conf' |
828 | 60 | 62 | ||
829 | 63 | |||
830 | 61 | def service_start(service_name, **kwargs): | 64 | def service_start(service_name, **kwargs): |
831 | 62 | """Start a system service. | 65 | """Start a system service. |
832 | 63 | 66 | ||
833 | @@ -287,8 +290,8 @@ def service_running(service_name, **kwargs): | |||
834 | 287 | for key, value in six.iteritems(kwargs): | 290 | for key, value in six.iteritems(kwargs): |
835 | 288 | parameter = '%s=%s' % (key, value) | 291 | parameter = '%s=%s' % (key, value) |
836 | 289 | cmd.append(parameter) | 292 | cmd.append(parameter) |
839 | 290 | output = subprocess.check_output(cmd, | 293 | output = subprocess.check_output( |
840 | 291 | stderr=subprocess.STDOUT).decode('UTF-8') | 294 | cmd, stderr=subprocess.STDOUT).decode('UTF-8') |
841 | 292 | except subprocess.CalledProcessError: | 295 | except subprocess.CalledProcessError: |
842 | 293 | return False | 296 | return False |
843 | 294 | else: | 297 | else: |
844 | @@ -441,6 +444,51 @@ def add_user_to_group(username, group): | |||
845 | 441 | subprocess.check_call(cmd) | 444 | subprocess.check_call(cmd) |
846 | 442 | 445 | ||
847 | 443 | 446 | ||
848 | 447 | def chage(username, lastday=None, expiredate=None, inactive=None, | ||
849 | 448 | mindays=None, maxdays=None, root=None, warndays=None): | ||
850 | 449 | """Change user password expiry information | ||
851 | 450 | |||
852 | 451 | :param str username: User to update | ||
853 | 452 | :param str lastday: Set when password was changed in YYYY-MM-DD format | ||
854 | 453 | :param str expiredate: Set when user's account will no longer be | ||
855 | 454 | accessible in YYYY-MM-DD format. | ||
856 | 455 | -1 will remove an account expiration date. | ||
857 | 456 | :param str inactive: Set the number of days of inactivity after a password | ||
858 | 457 | has expired before the account is locked. | ||
859 | 458 | -1 will remove an account's inactivity. | ||
860 | 459 | :param str mindays: Set the minimum number of days between password | ||
861 | 460 | changes to MIN_DAYS. | ||
862 | 461 | 0 indicates the password can be changed anytime. | ||
863 | 462 | :param str maxdays: Set the maximum number of days during which a | ||
864 | 463 | password is valid. | ||
865 | 464 | -1 as MAX_DAYS will remove checking maxdays | ||
866 | 465 | :param str root: Apply changes in the CHROOT_DIR directory | ||
867 | 466 | :param str warndays: Set the number of days of warning before a password | ||
868 | 467 | change is required | ||
869 | 468 | :raises subprocess.CalledProcessError: if call to chage fails | ||
870 | 469 | """ | ||
871 | 470 | cmd = ['chage'] | ||
872 | 471 | if root: | ||
873 | 472 | cmd.extend(['--root', root]) | ||
874 | 473 | if lastday: | ||
875 | 474 | cmd.extend(['--lastday', lastday]) | ||
876 | 475 | if expiredate: | ||
877 | 476 | cmd.extend(['--expiredate', expiredate]) | ||
878 | 477 | if inactive: | ||
879 | 478 | cmd.extend(['--inactive', inactive]) | ||
880 | 479 | if mindays: | ||
881 | 480 | cmd.extend(['--mindays', mindays]) | ||
882 | 481 | if maxdays: | ||
883 | 482 | cmd.extend(['--maxdays', maxdays]) | ||
884 | 483 | if warndays: | ||
885 | 484 | cmd.extend(['--warndays', warndays]) | ||
886 | 485 | cmd.append(username) | ||
887 | 486 | subprocess.check_call(cmd) | ||
888 | 487 | |||
889 | 488 | |||
890 | 489 | remove_password_expiry = functools.partial(chage, expiredate='-1', inactive='-1', mindays='0', maxdays='-1') | ||
891 | 490 | |||
892 | 491 | |||
893 | 444 | def rsync(from_path, to_path, flags='-r', options=None, timeout=None): | 492 | def rsync(from_path, to_path, flags='-r', options=None, timeout=None): |
894 | 445 | """Replicate the contents of a path""" | 493 | """Replicate the contents of a path""" |
895 | 446 | options = options or ['--delete', '--executability'] | 494 | options = options or ['--delete', '--executability'] |
896 | @@ -487,13 +535,45 @@ def mkdir(path, owner='root', group='root', perms=0o555, force=False): | |||
897 | 487 | 535 | ||
898 | 488 | def write_file(path, content, owner='root', group='root', perms=0o444): | 536 | def write_file(path, content, owner='root', group='root', perms=0o444): |
899 | 489 | """Create or overwrite a file with the contents of a byte string.""" | 537 | """Create or overwrite a file with the contents of a byte string.""" |
900 | 490 | log("Writing file {} {}:{} {:o}".format(path, owner, group, perms)) | ||
901 | 491 | uid = pwd.getpwnam(owner).pw_uid | 538 | uid = pwd.getpwnam(owner).pw_uid |
902 | 492 | gid = grp.getgrnam(group).gr_gid | 539 | gid = grp.getgrnam(group).gr_gid |
907 | 493 | with open(path, 'wb') as target: | 540 | # lets see if we can grab the file and compare the contents, to avoid doing |
908 | 494 | os.fchown(target.fileno(), uid, gid) | 541 | # a write. |
909 | 495 | os.fchmod(target.fileno(), perms) | 542 | existing_content = None |
910 | 496 | target.write(content) | 543 | existing_uid, existing_gid, existing_perms = None, None, None |
911 | 544 | try: | ||
912 | 545 | with open(path, 'rb') as target: | ||
913 | 546 | existing_content = target.read() | ||
914 | 547 | stat = os.stat(path) | ||
915 | 548 | existing_uid, existing_gid, existing_perms = ( | ||
916 | 549 | stat.st_uid, stat.st_gid, stat.st_mode | ||
917 | 550 | ) | ||
918 | 551 | except Exception: | ||
919 | 552 | pass | ||
920 | 553 | if content != existing_content: | ||
921 | 554 | log("Writing file {} {}:{} {:o}".format(path, owner, group, perms), | ||
922 | 555 | level=DEBUG) | ||
923 | 556 | with open(path, 'wb') as target: | ||
924 | 557 | os.fchown(target.fileno(), uid, gid) | ||
925 | 558 | os.fchmod(target.fileno(), perms) | ||
926 | 559 | if six.PY3 and isinstance(content, six.string_types): | ||
927 | 560 | content = content.encode('UTF-8') | ||
928 | 561 | target.write(content) | ||
929 | 562 | return | ||
930 | 563 | # the contents were the same, but we might still need to change the | ||
931 | 564 | # ownership or permissions. | ||
932 | 565 | if existing_uid != uid: | ||
933 | 566 | log("Changing uid on already existing content: {} -> {}" | ||
934 | 567 | .format(existing_uid, uid), level=DEBUG) | ||
935 | 568 | os.chown(path, uid, -1) | ||
936 | 569 | if existing_gid != gid: | ||
937 | 570 | log("Changing gid on already existing content: {} -> {}" | ||
938 | 571 | .format(existing_gid, gid), level=DEBUG) | ||
939 | 572 | os.chown(path, -1, gid) | ||
940 | 573 | if existing_perms != perms: | ||
941 | 574 | log("Changing permissions on existing content: {} -> {}" | ||
942 | 575 | .format(existing_perms, perms), level=DEBUG) | ||
943 | 576 | os.chmod(path, perms) | ||
944 | 497 | 577 | ||
945 | 498 | 578 | ||
946 | 499 | def fstab_remove(mp): | 579 | def fstab_remove(mp): |
947 | @@ -758,7 +838,7 @@ def list_nics(nic_type=None): | |||
948 | 758 | ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n') | 838 | ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n') |
949 | 759 | ip_output = (line.strip() for line in ip_output if line) | 839 | ip_output = (line.strip() for line in ip_output if line) |
950 | 760 | 840 | ||
952 | 761 | key = re.compile('^[0-9]+:\s+(.+):') | 841 | key = re.compile(r'^[0-9]+:\s+(.+):') |
953 | 762 | for line in ip_output: | 842 | for line in ip_output: |
954 | 763 | matched = re.search(key, line) | 843 | matched = re.search(key, line) |
955 | 764 | if matched: | 844 | if matched: |
956 | @@ -903,6 +983,20 @@ def is_container(): | |||
957 | 903 | 983 | ||
958 | 904 | 984 | ||
959 | 905 | def add_to_updatedb_prunepath(path, updatedb_path=UPDATEDB_PATH): | 985 | def add_to_updatedb_prunepath(path, updatedb_path=UPDATEDB_PATH): |
960 | 986 | """Adds the specified path to mlocate's updatedb.conf PRUNEPATHS list. | ||
961 | 987 | |||
962 | 988 | This method has no effect if the path specified by updatedb_path does not | ||
963 | 989 | exist or is not a file. | ||
964 | 990 | |||
965 | 991 | @param path: string the path to add to the updatedb.conf PRUNEPATHS value | ||
966 | 992 | @param updatedb_path: the path to the updatedb.conf file | ||
967 | 993 | """ | ||
968 | 994 | if not os.path.exists(updatedb_path) or os.path.isdir(updatedb_path): | ||
969 | 995 | # If the updatedb.conf file doesn't exist then don't attempt to update | ||
970 | 996 | # the file as the package providing mlocate may not be installed on | ||
971 | 997 | # the local system | ||
972 | 998 | return | ||
973 | 999 | |||
974 | 906 | with open(updatedb_path, 'r+') as f_id: | 1000 | with open(updatedb_path, 'r+') as f_id: |
975 | 907 | updatedb_text = f_id.read() | 1001 | updatedb_text = f_id.read() |
976 | 908 | output = updatedb(updatedb_text, path) | 1002 | output = updatedb(updatedb_text, path) |
977 | @@ -922,3 +1016,62 @@ def updatedb(updatedb_text, new_path): | |||
978 | 922 | lines[i] = 'PRUNEPATHS="{}"'.format(' '.join(paths)) | 1016 | lines[i] = 'PRUNEPATHS="{}"'.format(' '.join(paths)) |
979 | 923 | output = "\n".join(lines) | 1017 | output = "\n".join(lines) |
980 | 924 | return output | 1018 | return output |
981 | 1019 | |||
982 | 1020 | |||
983 | 1021 | def modulo_distribution(modulo=3, wait=30, non_zero_wait=False): | ||
984 | 1022 | """ Modulo distribution | ||
985 | 1023 | |||
986 | 1024 | This helper uses the unit number, a modulo value and a constant wait time | ||
987 | 1025 | to produce a calculated wait time distribution. This is useful in large | ||
988 | 1026 | scale deployments to distribute load during an expensive operation such as | ||
989 | 1027 | service restarts. | ||
990 | 1028 | |||
991 | 1029 | If you have 1000 nodes that need to restart 100 at a time 1 minute at a | ||
992 | 1030 | time: | ||
993 | 1031 | |||
994 | 1032 | time.wait(modulo_distribution(modulo=100, wait=60)) | ||
995 | 1033 | restart() | ||
996 | 1034 | |||
997 | 1035 | If you need restarts to happen serially set modulo to the exact number of | ||
998 | 1036 | nodes and set a high constant wait time: | ||
999 | 1037 | |||
1000 | 1038 | time.wait(modulo_distribution(modulo=10, wait=120)) | ||
1001 | 1039 | restart() | ||
1002 | 1040 | |||
1003 | 1041 | @param modulo: int The modulo number creates the group distribution | ||
1004 | 1042 | @param wait: int The constant time wait value | ||
1005 | 1043 | @param non_zero_wait: boolean Override unit % modulo == 0, | ||
1006 | 1044 | return modulo * wait. Used to avoid collisions with | ||
1007 | 1045 | leader nodes which are often given priority. | ||
1008 | 1046 | @return: int Calculated time to wait for unit operation | ||
1009 | 1047 | """ | ||
1010 | 1048 | unit_number = int(local_unit().split('/')[1]) | ||
1011 | 1049 | calculated_wait_time = (unit_number % modulo) * wait | ||
1012 | 1050 | if non_zero_wait and calculated_wait_time == 0: | ||
1013 | 1051 | return modulo * wait | ||
1014 | 1052 | else: | ||
1015 | 1053 | return calculated_wait_time | ||
1016 | 1054 | |||
1017 | 1055 | |||
1018 | 1056 | def install_ca_cert(ca_cert, name=None): | ||
1019 | 1057 | """ | ||
1020 | 1058 | Install the given cert as a trusted CA. | ||
1021 | 1059 | |||
1022 | 1060 | The ``name`` is the stem of the filename where the cert is written, and if | ||
1023 | 1061 | not provided, it will default to ``juju-{charm_name}``. | ||
1024 | 1062 | |||
1025 | 1063 | If the cert is empty or None, or is unchanged, nothing is done. | ||
1026 | 1064 | """ | ||
1027 | 1065 | if not ca_cert: | ||
1028 | 1066 | return | ||
1029 | 1067 | if not isinstance(ca_cert, bytes): | ||
1030 | 1068 | ca_cert = ca_cert.encode('utf8') | ||
1031 | 1069 | if not name: | ||
1032 | 1070 | name = 'juju-{}'.format(charm_name()) | ||
1033 | 1071 | cert_file = '/usr/local/share/ca-certificates/{}.crt'.format(name) | ||
1034 | 1072 | new_hash = hashlib.md5(ca_cert).hexdigest() | ||
1035 | 1073 | if file_hash(cert_file) == new_hash: | ||
1036 | 1074 | return | ||
1037 | 1075 | log("Installing new CA cert at: {}".format(cert_file), level=INFO) | ||
1038 | 1076 | write_file(cert_file, ca_cert) | ||
1039 | 1077 | subprocess.check_call(['update-ca-certificates', '--fresh']) | ||
1040 | diff --git a/hooks/charmhelpers/core/host_factory/ubuntu.py b/hooks/charmhelpers/core/host_factory/ubuntu.py | |||
1041 | index d8dc378..0ee2b66 100644 | |||
1042 | --- a/hooks/charmhelpers/core/host_factory/ubuntu.py | |||
1043 | +++ b/hooks/charmhelpers/core/host_factory/ubuntu.py | |||
1044 | @@ -1,5 +1,6 @@ | |||
1045 | 1 | import subprocess | 1 | import subprocess |
1046 | 2 | 2 | ||
1047 | 3 | from charmhelpers.core.hookenv import cached | ||
1048 | 3 | from charmhelpers.core.strutils import BasicStringComparator | 4 | from charmhelpers.core.strutils import BasicStringComparator |
1049 | 4 | 5 | ||
1050 | 5 | 6 | ||
1051 | @@ -20,6 +21,9 @@ UBUNTU_RELEASES = ( | |||
1052 | 20 | 'yakkety', | 21 | 'yakkety', |
1053 | 21 | 'zesty', | 22 | 'zesty', |
1054 | 22 | 'artful', | 23 | 'artful', |
1055 | 24 | 'bionic', | ||
1056 | 25 | 'cosmic', | ||
1057 | 26 | 'disco', | ||
1058 | 23 | ) | 27 | ) |
1059 | 24 | 28 | ||
1060 | 25 | 29 | ||
1061 | @@ -70,6 +74,14 @@ def lsb_release(): | |||
1062 | 70 | return d | 74 | return d |
1063 | 71 | 75 | ||
1064 | 72 | 76 | ||
1065 | 77 | def get_distrib_codename(): | ||
1066 | 78 | """Return the codename of the distribution | ||
1067 | 79 | :returns: The codename | ||
1068 | 80 | :rtype: str | ||
1069 | 81 | """ | ||
1070 | 82 | return lsb_release()['DISTRIB_CODENAME'].lower() | ||
1071 | 83 | |||
1072 | 84 | |||
1073 | 73 | def cmp_pkgrevno(package, revno, pkgcache=None): | 85 | def cmp_pkgrevno(package, revno, pkgcache=None): |
1074 | 74 | """Compare supplied revno with the revno of the installed package. | 86 | """Compare supplied revno with the revno of the installed package. |
1075 | 75 | 87 | ||
1076 | @@ -87,3 +99,16 @@ def cmp_pkgrevno(package, revno, pkgcache=None): | |||
1077 | 87 | pkgcache = apt_cache() | 99 | pkgcache = apt_cache() |
1078 | 88 | pkg = pkgcache[package] | 100 | pkg = pkgcache[package] |
1079 | 89 | return apt_pkg.version_compare(pkg.current_ver.ver_str, revno) | 101 | return apt_pkg.version_compare(pkg.current_ver.ver_str, revno) |
1080 | 102 | |||
1081 | 103 | |||
1082 | 104 | @cached | ||
1083 | 105 | def arch(): | ||
1084 | 106 | """Return the package architecture as a string. | ||
1085 | 107 | |||
1086 | 108 | :returns: the architecture | ||
1087 | 109 | :rtype: str | ||
1088 | 110 | :raises: subprocess.CalledProcessError if dpkg command fails | ||
1089 | 111 | """ | ||
1090 | 112 | return subprocess.check_output( | ||
1091 | 113 | ['dpkg', '--print-architecture'] | ||
1092 | 114 | ).rstrip().decode('UTF-8') | ||
1093 | diff --git a/hooks/charmhelpers/core/kernel.py b/hooks/charmhelpers/core/kernel.py | |||
1094 | index 2d40452..e01f4f8 100644 | |||
1095 | --- a/hooks/charmhelpers/core/kernel.py | |||
1096 | +++ b/hooks/charmhelpers/core/kernel.py | |||
1097 | @@ -26,12 +26,12 @@ from charmhelpers.core.hookenv import ( | |||
1098 | 26 | 26 | ||
1099 | 27 | __platform__ = get_platform() | 27 | __platform__ = get_platform() |
1100 | 28 | if __platform__ == "ubuntu": | 28 | if __platform__ == "ubuntu": |
1102 | 29 | from charmhelpers.core.kernel_factory.ubuntu import ( | 29 | from charmhelpers.core.kernel_factory.ubuntu import ( # NOQA:F401 |
1103 | 30 | persistent_modprobe, | 30 | persistent_modprobe, |
1104 | 31 | update_initramfs, | 31 | update_initramfs, |
1105 | 32 | ) # flake8: noqa -- ignore F401 for this import | 32 | ) # flake8: noqa -- ignore F401 for this import |
1106 | 33 | elif __platform__ == "centos": | 33 | elif __platform__ == "centos": |
1108 | 34 | from charmhelpers.core.kernel_factory.centos import ( | 34 | from charmhelpers.core.kernel_factory.centos import ( # NOQA:F401 |
1109 | 35 | persistent_modprobe, | 35 | persistent_modprobe, |
1110 | 36 | update_initramfs, | 36 | update_initramfs, |
1111 | 37 | ) # flake8: noqa -- ignore F401 for this import | 37 | ) # flake8: noqa -- ignore F401 for this import |
1112 | diff --git a/hooks/charmhelpers/core/services/base.py b/hooks/charmhelpers/core/services/base.py | |||
1113 | index ca9dc99..179ad4f 100644 | |||
1114 | --- a/hooks/charmhelpers/core/services/base.py | |||
1115 | +++ b/hooks/charmhelpers/core/services/base.py | |||
1116 | @@ -307,23 +307,34 @@ class PortManagerCallback(ManagerCallback): | |||
1117 | 307 | """ | 307 | """ |
1118 | 308 | def __call__(self, manager, service_name, event_name): | 308 | def __call__(self, manager, service_name, event_name): |
1119 | 309 | service = manager.get_service(service_name) | 309 | service = manager.get_service(service_name) |
1121 | 310 | new_ports = service.get('ports', []) | 310 | # turn this generator into a list, |
1122 | 311 | # as we'll be going over it multiple times | ||
1123 | 312 | new_ports = list(service.get('ports', [])) | ||
1124 | 311 | port_file = os.path.join(hookenv.charm_dir(), '.{}.ports'.format(service_name)) | 313 | port_file = os.path.join(hookenv.charm_dir(), '.{}.ports'.format(service_name)) |
1125 | 312 | if os.path.exists(port_file): | 314 | if os.path.exists(port_file): |
1126 | 313 | with open(port_file) as fp: | 315 | with open(port_file) as fp: |
1127 | 314 | old_ports = fp.read().split(',') | 316 | old_ports = fp.read().split(',') |
1128 | 315 | for old_port in old_ports: | 317 | for old_port in old_ports: |
1133 | 316 | if bool(old_port): | 318 | if bool(old_port) and not self.ports_contains(old_port, new_ports): |
1134 | 317 | old_port = int(old_port) | 319 | hookenv.close_port(old_port) |
1131 | 318 | if old_port not in new_ports: | ||
1132 | 319 | hookenv.close_port(old_port) | ||
1135 | 320 | with open(port_file, 'w') as fp: | 320 | with open(port_file, 'w') as fp: |
1136 | 321 | fp.write(','.join(str(port) for port in new_ports)) | 321 | fp.write(','.join(str(port) for port in new_ports)) |
1137 | 322 | for port in new_ports: | 322 | for port in new_ports: |
1138 | 323 | # A port is either a number or 'ICMP' | ||
1139 | 324 | protocol = 'TCP' | ||
1140 | 325 | if str(port).upper() == 'ICMP': | ||
1141 | 326 | protocol = 'ICMP' | ||
1142 | 323 | if event_name == 'start': | 327 | if event_name == 'start': |
1144 | 324 | hookenv.open_port(port) | 328 | hookenv.open_port(port, protocol) |
1145 | 325 | elif event_name == 'stop': | 329 | elif event_name == 'stop': |
1147 | 326 | hookenv.close_port(port) | 330 | hookenv.close_port(port, protocol) |
1148 | 331 | |||
1149 | 332 | def ports_contains(self, port, ports): | ||
1150 | 333 | if not bool(port): | ||
1151 | 334 | return False | ||
1152 | 335 | if str(port).upper() != 'ICMP': | ||
1153 | 336 | port = int(port) | ||
1154 | 337 | return port in ports | ||
1155 | 327 | 338 | ||
1156 | 328 | 339 | ||
1157 | 329 | def service_stop(service_name): | 340 | def service_stop(service_name): |
1158 | diff --git a/hooks/charmhelpers/core/strutils.py b/hooks/charmhelpers/core/strutils.py | |||
1159 | index 685dabd..e8df045 100644 | |||
1160 | --- a/hooks/charmhelpers/core/strutils.py | |||
1161 | +++ b/hooks/charmhelpers/core/strutils.py | |||
1162 | @@ -61,13 +61,19 @@ def bytes_from_string(value): | |||
1163 | 61 | if isinstance(value, six.string_types): | 61 | if isinstance(value, six.string_types): |
1164 | 62 | value = six.text_type(value) | 62 | value = six.text_type(value) |
1165 | 63 | else: | 63 | else: |
1167 | 64 | msg = "Unable to interpret non-string value '%s' as boolean" % (value) | 64 | msg = "Unable to interpret non-string value '%s' as bytes" % (value) |
1168 | 65 | raise ValueError(msg) | 65 | raise ValueError(msg) |
1169 | 66 | matches = re.match("([0-9]+)([a-zA-Z]+)", value) | 66 | matches = re.match("([0-9]+)([a-zA-Z]+)", value) |
1174 | 67 | if not matches: | 67 | if matches: |
1175 | 68 | msg = "Unable to interpret string value '%s' as bytes" % (value) | 68 | size = int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)]) |
1176 | 69 | raise ValueError(msg) | 69 | else: |
1177 | 70 | return int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)]) | 70 | # Assume that value passed in is bytes |
1178 | 71 | try: | ||
1179 | 72 | size = int(value) | ||
1180 | 73 | except ValueError: | ||
1181 | 74 | msg = "Unable to interpret string value '%s' as bytes" % (value) | ||
1182 | 75 | raise ValueError(msg) | ||
1183 | 76 | return size | ||
1184 | 71 | 77 | ||
1185 | 72 | 78 | ||
1186 | 73 | class BasicStringComparator(object): | 79 | class BasicStringComparator(object): |
1187 | diff --git a/hooks/charmhelpers/core/sysctl.py b/hooks/charmhelpers/core/sysctl.py | |||
1188 | index 6e413e3..f1f4a28 100644 | |||
1189 | --- a/hooks/charmhelpers/core/sysctl.py | |||
1190 | +++ b/hooks/charmhelpers/core/sysctl.py | |||
1191 | @@ -28,27 +28,38 @@ from charmhelpers.core.hookenv import ( | |||
1192 | 28 | __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>' | 28 | __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>' |
1193 | 29 | 29 | ||
1194 | 30 | 30 | ||
1196 | 31 | def create(sysctl_dict, sysctl_file): | 31 | def create(sysctl_dict, sysctl_file, ignore=False): |
1197 | 32 | """Creates a sysctl.conf file from a YAML associative array | 32 | """Creates a sysctl.conf file from a YAML associative array |
1198 | 33 | 33 | ||
1200 | 34 | :param sysctl_dict: a YAML-formatted string of sysctl options eg "{ 'kernel.max_pid': 1337 }" | 34 | :param sysctl_dict: a dict or YAML-formatted string of sysctl |
1201 | 35 | options eg "{ 'kernel.max_pid': 1337 }" | ||
1202 | 35 | :type sysctl_dict: str | 36 | :type sysctl_dict: str |
1203 | 36 | :param sysctl_file: path to the sysctl file to be saved | 37 | :param sysctl_file: path to the sysctl file to be saved |
1204 | 37 | :type sysctl_file: str or unicode | 38 | :type sysctl_file: str or unicode |
1205 | 39 | :param ignore: If True, ignore "unknown variable" errors. | ||
1206 | 40 | :type ignore: bool | ||
1207 | 38 | :returns: None | 41 | :returns: None |
1208 | 39 | """ | 42 | """ |
1215 | 40 | try: | 43 | if type(sysctl_dict) is not dict: |
1216 | 41 | sysctl_dict_parsed = yaml.safe_load(sysctl_dict) | 44 | try: |
1217 | 42 | except yaml.YAMLError: | 45 | sysctl_dict_parsed = yaml.safe_load(sysctl_dict) |
1218 | 43 | log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict), | 46 | except yaml.YAMLError: |
1219 | 44 | level=ERROR) | 47 | log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict), |
1220 | 45 | return | 48 | level=ERROR) |
1221 | 49 | return | ||
1222 | 50 | else: | ||
1223 | 51 | sysctl_dict_parsed = sysctl_dict | ||
1224 | 46 | 52 | ||
1225 | 47 | with open(sysctl_file, "w") as fd: | 53 | with open(sysctl_file, "w") as fd: |
1226 | 48 | for key, value in sysctl_dict_parsed.items(): | 54 | for key, value in sysctl_dict_parsed.items(): |
1227 | 49 | fd.write("{}={}\n".format(key, value)) | 55 | fd.write("{}={}\n".format(key, value)) |
1228 | 50 | 56 | ||
1230 | 51 | log("Updating sysctl_file: %s values: %s" % (sysctl_file, sysctl_dict_parsed), | 57 | log("Updating sysctl_file: {} values: {}".format(sysctl_file, |
1231 | 58 | sysctl_dict_parsed), | ||
1232 | 52 | level=DEBUG) | 59 | level=DEBUG) |
1233 | 53 | 60 | ||
1235 | 54 | check_call(["sysctl", "-p", sysctl_file]) | 61 | call = ["sysctl", "-p", sysctl_file] |
1236 | 62 | if ignore: | ||
1237 | 63 | call.append("-e") | ||
1238 | 64 | |||
1239 | 65 | check_call(call) | ||
1240 | diff --git a/hooks/charmhelpers/core/templating.py b/hooks/charmhelpers/core/templating.py | |||
1241 | index 7b801a3..9014015 100644 | |||
1242 | --- a/hooks/charmhelpers/core/templating.py | |||
1243 | +++ b/hooks/charmhelpers/core/templating.py | |||
1244 | @@ -20,7 +20,8 @@ from charmhelpers.core import hookenv | |||
1245 | 20 | 20 | ||
1246 | 21 | 21 | ||
1247 | 22 | def render(source, target, context, owner='root', group='root', | 22 | def render(source, target, context, owner='root', group='root', |
1249 | 23 | perms=0o444, templates_dir=None, encoding='UTF-8', template_loader=None): | 23 | perms=0o444, templates_dir=None, encoding='UTF-8', |
1250 | 24 | template_loader=None, config_template=None): | ||
1251 | 24 | """ | 25 | """ |
1252 | 25 | Render a template. | 26 | Render a template. |
1253 | 26 | 27 | ||
1254 | @@ -32,6 +33,9 @@ def render(source, target, context, owner='root', group='root', | |||
1255 | 32 | The context should be a dict containing the values to be replaced in the | 33 | The context should be a dict containing the values to be replaced in the |
1256 | 33 | template. | 34 | template. |
1257 | 34 | 35 | ||
1258 | 36 | config_template may be provided to render from a provided template instead | ||
1259 | 37 | of loading from a file. | ||
1260 | 38 | |||
1261 | 35 | The `owner`, `group`, and `perms` options will be passed to `write_file`. | 39 | The `owner`, `group`, and `perms` options will be passed to `write_file`. |
1262 | 36 | 40 | ||
1263 | 37 | If omitted, `templates_dir` defaults to the `templates` folder in the charm. | 41 | If omitted, `templates_dir` defaults to the `templates` folder in the charm. |
1264 | @@ -65,14 +69,19 @@ def render(source, target, context, owner='root', group='root', | |||
1265 | 65 | if templates_dir is None: | 69 | if templates_dir is None: |
1266 | 66 | templates_dir = os.path.join(hookenv.charm_dir(), 'templates') | 70 | templates_dir = os.path.join(hookenv.charm_dir(), 'templates') |
1267 | 67 | template_env = Environment(loader=FileSystemLoader(templates_dir)) | 71 | template_env = Environment(loader=FileSystemLoader(templates_dir)) |
1276 | 68 | try: | 72 | |
1277 | 69 | source = source | 73 | # load from a string if provided explicitly |
1278 | 70 | template = template_env.get_template(source) | 74 | if config_template is not None: |
1279 | 71 | except exceptions.TemplateNotFound as e: | 75 | template = template_env.from_string(config_template) |
1280 | 72 | hookenv.log('Could not load template %s from %s.' % | 76 | else: |
1281 | 73 | (source, templates_dir), | 77 | try: |
1282 | 74 | level=hookenv.ERROR) | 78 | source = source |
1283 | 75 | raise e | 79 | template = template_env.get_template(source) |
1284 | 80 | except exceptions.TemplateNotFound as e: | ||
1285 | 81 | hookenv.log('Could not load template %s from %s.' % | ||
1286 | 82 | (source, templates_dir), | ||
1287 | 83 | level=hookenv.ERROR) | ||
1288 | 84 | raise e | ||
1289 | 76 | content = template.render(context) | 85 | content = template.render(context) |
1290 | 77 | if target is not None: | 86 | if target is not None: |
1291 | 78 | target_dir = os.path.dirname(target) | 87 | target_dir = os.path.dirname(target) |
1292 | diff --git a/hooks/charmhelpers/core/unitdata.py b/hooks/charmhelpers/core/unitdata.py | |||
1293 | index 54ec969..ab55432 100644 | |||
1294 | --- a/hooks/charmhelpers/core/unitdata.py | |||
1295 | +++ b/hooks/charmhelpers/core/unitdata.py | |||
1296 | @@ -166,6 +166,10 @@ class Storage(object): | |||
1297 | 166 | 166 | ||
1298 | 167 | To support dicts, lists, integer, floats, and booleans values | 167 | To support dicts, lists, integer, floats, and booleans values |
1299 | 168 | are automatically json encoded/decoded. | 168 | are automatically json encoded/decoded. |
1300 | 169 | |||
1301 | 170 | Note: to facilitate unit testing, ':memory:' can be passed as the | ||
1302 | 171 | path parameter which causes sqlite3 to only build the db in memory. | ||
1303 | 172 | This should only be used for testing purposes. | ||
1304 | 169 | """ | 173 | """ |
1305 | 170 | def __init__(self, path=None): | 174 | def __init__(self, path=None): |
1306 | 171 | self.db_path = path | 175 | self.db_path = path |
1307 | @@ -175,6 +179,9 @@ class Storage(object): | |||
1308 | 175 | else: | 179 | else: |
1309 | 176 | self.db_path = os.path.join( | 180 | self.db_path = os.path.join( |
1310 | 177 | os.environ.get('CHARM_DIR', ''), '.unit-state.db') | 181 | os.environ.get('CHARM_DIR', ''), '.unit-state.db') |
1311 | 182 | if self.db_path != ':memory:': | ||
1312 | 183 | with open(self.db_path, 'a') as f: | ||
1313 | 184 | os.fchmod(f.fileno(), 0o600) | ||
1314 | 178 | self.conn = sqlite3.connect('%s' % self.db_path) | 185 | self.conn = sqlite3.connect('%s' % self.db_path) |
1315 | 179 | self.cursor = self.conn.cursor() | 186 | self.cursor = self.conn.cursor() |
1316 | 180 | self.revision = None | 187 | self.revision = None |
1317 | @@ -358,7 +365,7 @@ class Storage(object): | |||
1318 | 358 | try: | 365 | try: |
1319 | 359 | yield self.revision | 366 | yield self.revision |
1320 | 360 | self.revision = None | 367 | self.revision = None |
1322 | 361 | except: | 368 | except Exception: |
1323 | 362 | self.flush(False) | 369 | self.flush(False) |
1324 | 363 | self.revision = None | 370 | self.revision = None |
1325 | 364 | raise | 371 | raise |
1326 | diff --git a/hooks/charmhelpers/fetch/__init__.py b/hooks/charmhelpers/fetch/__init__.py | |||
1327 | index 480a627..8572d34 100644 | |||
1328 | --- a/hooks/charmhelpers/fetch/__init__.py | |||
1329 | +++ b/hooks/charmhelpers/fetch/__init__.py | |||
1330 | @@ -84,6 +84,7 @@ module = "charmhelpers.fetch.%s" % __platform__ | |||
1331 | 84 | fetch = importlib.import_module(module) | 84 | fetch = importlib.import_module(module) |
1332 | 85 | 85 | ||
1333 | 86 | filter_installed_packages = fetch.filter_installed_packages | 86 | filter_installed_packages = fetch.filter_installed_packages |
1334 | 87 | filter_missing_packages = fetch.filter_missing_packages | ||
1335 | 87 | install = fetch.apt_install | 88 | install = fetch.apt_install |
1336 | 88 | upgrade = fetch.apt_upgrade | 89 | upgrade = fetch.apt_upgrade |
1337 | 89 | update = _fetch_update = fetch.apt_update | 90 | update = _fetch_update = fetch.apt_update |
1338 | @@ -96,6 +97,7 @@ if __platform__ == "ubuntu": | |||
1339 | 96 | apt_update = fetch.apt_update | 97 | apt_update = fetch.apt_update |
1340 | 97 | apt_upgrade = fetch.apt_upgrade | 98 | apt_upgrade = fetch.apt_upgrade |
1341 | 98 | apt_purge = fetch.apt_purge | 99 | apt_purge = fetch.apt_purge |
1342 | 100 | apt_autoremove = fetch.apt_autoremove | ||
1343 | 99 | apt_mark = fetch.apt_mark | 101 | apt_mark = fetch.apt_mark |
1344 | 100 | apt_hold = fetch.apt_hold | 102 | apt_hold = fetch.apt_hold |
1345 | 101 | apt_unhold = fetch.apt_unhold | 103 | apt_unhold = fetch.apt_unhold |
1346 | diff --git a/hooks/charmhelpers/fetch/archiveurl.py b/hooks/charmhelpers/fetch/archiveurl.py | |||
1347 | index dd24f9e..d25587a 100644 | |||
1348 | --- a/hooks/charmhelpers/fetch/archiveurl.py | |||
1349 | +++ b/hooks/charmhelpers/fetch/archiveurl.py | |||
1350 | @@ -89,7 +89,7 @@ class ArchiveUrlFetchHandler(BaseFetchHandler): | |||
1351 | 89 | :param str source: URL pointing to an archive file. | 89 | :param str source: URL pointing to an archive file. |
1352 | 90 | :param str dest: Local path location to download archive file to. | 90 | :param str dest: Local path location to download archive file to. |
1353 | 91 | """ | 91 | """ |
1355 | 92 | # propogate all exceptions | 92 | # propagate all exceptions |
1356 | 93 | # URLError, OSError, etc | 93 | # URLError, OSError, etc |
1357 | 94 | proto, netloc, path, params, query, fragment = urlparse(source) | 94 | proto, netloc, path, params, query, fragment = urlparse(source) |
1358 | 95 | if proto in ('http', 'https'): | 95 | if proto in ('http', 'https'): |
1359 | diff --git a/hooks/charmhelpers/fetch/bzrurl.py b/hooks/charmhelpers/fetch/bzrurl.py | |||
1360 | index 07cd029..c4ab3ff 100644 | |||
1361 | --- a/hooks/charmhelpers/fetch/bzrurl.py | |||
1362 | +++ b/hooks/charmhelpers/fetch/bzrurl.py | |||
1363 | @@ -13,7 +13,7 @@ | |||
1364 | 13 | # limitations under the License. | 13 | # limitations under the License. |
1365 | 14 | 14 | ||
1366 | 15 | import os | 15 | import os |
1368 | 16 | from subprocess import check_call | 16 | from subprocess import STDOUT, check_output |
1369 | 17 | from charmhelpers.fetch import ( | 17 | from charmhelpers.fetch import ( |
1370 | 18 | BaseFetchHandler, | 18 | BaseFetchHandler, |
1371 | 19 | UnhandledSource, | 19 | UnhandledSource, |
1372 | @@ -55,7 +55,7 @@ class BzrUrlFetchHandler(BaseFetchHandler): | |||
1373 | 55 | cmd = ['bzr', 'branch'] | 55 | cmd = ['bzr', 'branch'] |
1374 | 56 | cmd += cmd_opts | 56 | cmd += cmd_opts |
1375 | 57 | cmd += [source, dest] | 57 | cmd += [source, dest] |
1377 | 58 | check_call(cmd) | 58 | check_output(cmd, stderr=STDOUT) |
1378 | 59 | 59 | ||
1379 | 60 | def install(self, source, dest=None, revno=None): | 60 | def install(self, source, dest=None, revno=None): |
1380 | 61 | url_parts = self.parse_url(source) | 61 | url_parts = self.parse_url(source) |
1381 | diff --git a/hooks/charmhelpers/fetch/giturl.py b/hooks/charmhelpers/fetch/giturl.py | |||
1382 | index 4cf21bc..070ca9b 100644 | |||
1383 | --- a/hooks/charmhelpers/fetch/giturl.py | |||
1384 | +++ b/hooks/charmhelpers/fetch/giturl.py | |||
1385 | @@ -13,7 +13,7 @@ | |||
1386 | 13 | # limitations under the License. | 13 | # limitations under the License. |
1387 | 14 | 14 | ||
1388 | 15 | import os | 15 | import os |
1390 | 16 | from subprocess import check_call, CalledProcessError | 16 | from subprocess import check_output, CalledProcessError, STDOUT |
1391 | 17 | from charmhelpers.fetch import ( | 17 | from charmhelpers.fetch import ( |
1392 | 18 | BaseFetchHandler, | 18 | BaseFetchHandler, |
1393 | 19 | UnhandledSource, | 19 | UnhandledSource, |
1394 | @@ -50,7 +50,7 @@ class GitUrlFetchHandler(BaseFetchHandler): | |||
1395 | 50 | cmd = ['git', 'clone', source, dest, '--branch', branch] | 50 | cmd = ['git', 'clone', source, dest, '--branch', branch] |
1396 | 51 | if depth: | 51 | if depth: |
1397 | 52 | cmd.extend(['--depth', depth]) | 52 | cmd.extend(['--depth', depth]) |
1399 | 53 | check_call(cmd) | 53 | check_output(cmd, stderr=STDOUT) |
1400 | 54 | 54 | ||
1401 | 55 | def install(self, source, branch="master", dest=None, depth=None): | 55 | def install(self, source, branch="master", dest=None, depth=None): |
1402 | 56 | url_parts = self.parse_url(source) | 56 | url_parts = self.parse_url(source) |
1403 | diff --git a/hooks/charmhelpers/fetch/python/__init__.py b/hooks/charmhelpers/fetch/python/__init__.py | |||
1404 | 57 | new file mode 100644 | 57 | new file mode 100644 |
1405 | index 0000000..bff99dc | |||
1406 | --- /dev/null | |||
1407 | +++ b/hooks/charmhelpers/fetch/python/__init__.py | |||
1408 | @@ -0,0 +1,13 @@ | |||
1409 | 1 | # Copyright 2014-2019 Canonical Limited. | ||
1410 | 2 | # | ||
1411 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
1412 | 4 | # you may not use this file except in compliance with the License. | ||
1413 | 5 | # You may obtain a copy of the License at | ||
1414 | 6 | # | ||
1415 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
1416 | 8 | # | ||
1417 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
1418 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
1419 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
1420 | 12 | # See the License for the specific language governing permissions and | ||
1421 | 13 | # limitations under the License. | ||
1422 | diff --git a/hooks/charmhelpers/fetch/python/debug.py b/hooks/charmhelpers/fetch/python/debug.py | |||
1423 | 0 | new file mode 100644 | 14 | new file mode 100644 |
1424 | index 0000000..757135e | |||
1425 | --- /dev/null | |||
1426 | +++ b/hooks/charmhelpers/fetch/python/debug.py | |||
1427 | @@ -0,0 +1,54 @@ | |||
1428 | 1 | #!/usr/bin/env python | ||
1429 | 2 | # coding: utf-8 | ||
1430 | 3 | |||
1431 | 4 | # Copyright 2014-2015 Canonical Limited. | ||
1432 | 5 | # | ||
1433 | 6 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
1434 | 7 | # you may not use this file except in compliance with the License. | ||
1435 | 8 | # You may obtain a copy of the License at | ||
1436 | 9 | # | ||
1437 | 10 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
1438 | 11 | # | ||
1439 | 12 | # Unless required by applicable law or agreed to in writing, software | ||
1440 | 13 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
1441 | 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
1442 | 15 | # See the License for the specific language governing permissions and | ||
1443 | 16 | # limitations under the License. | ||
1444 | 17 | |||
1445 | 18 | from __future__ import print_function | ||
1446 | 19 | |||
1447 | 20 | import atexit | ||
1448 | 21 | import sys | ||
1449 | 22 | |||
1450 | 23 | from charmhelpers.fetch.python.rpdb import Rpdb | ||
1451 | 24 | from charmhelpers.core.hookenv import ( | ||
1452 | 25 | open_port, | ||
1453 | 26 | close_port, | ||
1454 | 27 | ERROR, | ||
1455 | 28 | log | ||
1456 | 29 | ) | ||
1457 | 30 | |||
1458 | 31 | __author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" | ||
1459 | 32 | |||
1460 | 33 | DEFAULT_ADDR = "0.0.0.0" | ||
1461 | 34 | DEFAULT_PORT = 4444 | ||
1462 | 35 | |||
1463 | 36 | |||
1464 | 37 | def _error(message): | ||
1465 | 38 | log(message, level=ERROR) | ||
1466 | 39 | |||
1467 | 40 | |||
1468 | 41 | def set_trace(addr=DEFAULT_ADDR, port=DEFAULT_PORT): | ||
1469 | 42 | """ | ||
1470 | 43 | Set a trace point using the remote debugger | ||
1471 | 44 | """ | ||
1472 | 45 | atexit.register(close_port, port) | ||
1473 | 46 | try: | ||
1474 | 47 | log("Starting a remote python debugger session on %s:%s" % (addr, | ||
1475 | 48 | port)) | ||
1476 | 49 | open_port(port) | ||
1477 | 50 | debugger = Rpdb(addr=addr, port=port) | ||
1478 | 51 | debugger.set_trace(sys._getframe().f_back) | ||
1479 | 52 | except Exception: | ||
1480 | 53 | _error("Cannot start a remote debug session on %s:%s" % (addr, | ||
1481 | 54 | port)) | ||
1482 | diff --git a/hooks/charmhelpers/fetch/python/packages.py b/hooks/charmhelpers/fetch/python/packages.py | |||
1483 | 0 | new file mode 100644 | 55 | new file mode 100644 |
1484 | index 0000000..6e95028 | |||
1485 | --- /dev/null | |||
1486 | +++ b/hooks/charmhelpers/fetch/python/packages.py | |||
1487 | @@ -0,0 +1,154 @@ | |||
1488 | 1 | #!/usr/bin/env python | ||
1489 | 2 | # coding: utf-8 | ||
1490 | 3 | |||
1491 | 4 | # Copyright 2014-2015 Canonical Limited. | ||
1492 | 5 | # | ||
1493 | 6 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
1494 | 7 | # you may not use this file except in compliance with the License. | ||
1495 | 8 | # You may obtain a copy of the License at | ||
1496 | 9 | # | ||
1497 | 10 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
1498 | 11 | # | ||
1499 | 12 | # Unless required by applicable law or agreed to in writing, software | ||
1500 | 13 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
1501 | 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
1502 | 15 | # See the License for the specific language governing permissions and | ||
1503 | 16 | # limitations under the License. | ||
1504 | 17 | |||
1505 | 18 | import os | ||
1506 | 19 | import six | ||
1507 | 20 | import subprocess | ||
1508 | 21 | import sys | ||
1509 | 22 | |||
1510 | 23 | from charmhelpers.fetch import apt_install, apt_update | ||
1511 | 24 | from charmhelpers.core.hookenv import charm_dir, log | ||
1512 | 25 | |||
1513 | 26 | __author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" | ||
1514 | 27 | |||
1515 | 28 | |||
1516 | 29 | def pip_execute(*args, **kwargs): | ||
1517 | 30 | """Overriden pip_execute() to stop sys.path being changed. | ||
1518 | 31 | |||
1519 | 32 | The act of importing main from the pip module seems to cause add wheels | ||
1520 | 33 | from the /usr/share/python-wheels which are installed by various tools. | ||
1521 | 34 | This function ensures that sys.path remains the same after the call is | ||
1522 | 35 | executed. | ||
1523 | 36 | """ | ||
1524 | 37 | try: | ||
1525 | 38 | _path = sys.path | ||
1526 | 39 | try: | ||
1527 | 40 | from pip import main as _pip_execute | ||
1528 | 41 | except ImportError: | ||
1529 | 42 | apt_update() | ||
1530 | 43 | if six.PY2: | ||
1531 | 44 | apt_install('python-pip') | ||
1532 | 45 | else: | ||
1533 | 46 | apt_install('python3-pip') | ||
1534 | 47 | from pip import main as _pip_execute | ||
1535 | 48 | _pip_execute(*args, **kwargs) | ||
1536 | 49 | finally: | ||
1537 | 50 | sys.path = _path | ||
1538 | 51 | |||
1539 | 52 | |||
1540 | 53 | def parse_options(given, available): | ||
1541 | 54 | """Given a set of options, check if available""" | ||
1542 | 55 | for key, value in sorted(given.items()): | ||
1543 | 56 | if not value: | ||
1544 | 57 | continue | ||
1545 | 58 | if key in available: | ||
1546 | 59 | yield "--{0}={1}".format(key, value) | ||
1547 | 60 | |||
1548 | 61 | |||
1549 | 62 | def pip_install_requirements(requirements, constraints=None, **options): | ||
1550 | 63 | """Install a requirements file. | ||
1551 | 64 | |||
1552 | 65 | :param constraints: Path to pip constraints file. | ||
1553 | 66 | http://pip.readthedocs.org/en/stable/user_guide/#constraints-files | ||
1554 | 67 | """ | ||
1555 | 68 | command = ["install"] | ||
1556 | 69 | |||
1557 | 70 | available_options = ('proxy', 'src', 'log', ) | ||
1558 | 71 | for option in parse_options(options, available_options): | ||
1559 | 72 | command.append(option) | ||
1560 | 73 | |||
1561 | 74 | command.append("-r {0}".format(requirements)) | ||
1562 | 75 | if constraints: | ||
1563 | 76 | command.append("-c {0}".format(constraints)) | ||
1564 | 77 | log("Installing from file: {} with constraints {} " | ||
1565 | 78 | "and options: {}".format(requirements, constraints, command)) | ||
1566 | 79 | else: | ||
1567 | 80 | log("Installing from file: {} with options: {}".format(requirements, | ||
1568 | 81 | command)) | ||
1569 | 82 | pip_execute(command) | ||
1570 | 83 | |||
1571 | 84 | |||
1572 | 85 | def pip_install(package, fatal=False, upgrade=False, venv=None, | ||
1573 | 86 | constraints=None, **options): | ||
1574 | 87 | """Install a python package""" | ||
1575 | 88 | if venv: | ||
1576 | 89 | venv_python = os.path.join(venv, 'bin/pip') | ||
1577 | 90 | command = [venv_python, "install"] | ||
1578 | 91 | else: | ||
1579 | 92 | command = ["install"] | ||
1580 | 93 | |||
1581 | 94 | available_options = ('proxy', 'src', 'log', 'index-url', ) | ||
1582 | 95 | for option in parse_options(options, available_options): | ||
1583 | 96 | command.append(option) | ||
1584 | 97 | |||
1585 | 98 | if upgrade: | ||
1586 | 99 | command.append('--upgrade') | ||
1587 | 100 | |||
1588 | 101 | if constraints: | ||
1589 | 102 | command.extend(['-c', constraints]) | ||
1590 | 103 | |||
1591 | 104 | if isinstance(package, list): | ||
1592 | 105 | command.extend(package) | ||
1593 | 106 | else: | ||
1594 | 107 | command.append(package) | ||
1595 | 108 | |||
1596 | 109 | log("Installing {} package with options: {}".format(package, | ||
1597 | 110 | command)) | ||
1598 | 111 | if venv: | ||
1599 | 112 | subprocess.check_call(command) | ||
1600 | 113 | else: | ||
1601 | 114 | pip_execute(command) | ||
1602 | 115 | |||
1603 | 116 | |||
1604 | 117 | def pip_uninstall(package, **options): | ||
1605 | 118 | """Uninstall a python package""" | ||
1606 | 119 | command = ["uninstall", "-q", "-y"] | ||
1607 | 120 | |||
1608 | 121 | available_options = ('proxy', 'log', ) | ||
1609 | 122 | for option in parse_options(options, available_options): | ||
1610 | 123 | command.append(option) | ||
1611 | 124 | |||
1612 | 125 | if isinstance(package, list): | ||
1613 | 126 | command.extend(package) | ||
1614 | 127 | else: | ||
1615 | 128 | command.append(package) | ||
1616 | 129 | |||
1617 | 130 | log("Uninstalling {} package with options: {}".format(package, | ||
1618 | 131 | command)) | ||
1619 | 132 | pip_execute(command) | ||
1620 | 133 | |||
1621 | 134 | |||
1622 | 135 | def pip_list(): | ||
1623 | 136 | """Returns the list of current python installed packages | ||
1624 | 137 | """ | ||
1625 | 138 | return pip_execute(["list"]) | ||
1626 | 139 | |||
1627 | 140 | |||
1628 | 141 | def pip_create_virtualenv(path=None): | ||
1629 | 142 | """Create an isolated Python environment.""" | ||
1630 | 143 | if six.PY2: | ||
1631 | 144 | apt_install('python-virtualenv') | ||
1632 | 145 | else: | ||
1633 | 146 | apt_install('python3-virtualenv') | ||
1634 | 147 | |||
1635 | 148 | if path: | ||
1636 | 149 | venv_path = path | ||
1637 | 150 | else: | ||
1638 | 151 | venv_path = os.path.join(charm_dir(), 'venv') | ||
1639 | 152 | |||
1640 | 153 | if not os.path.exists(venv_path): | ||
1641 | 154 | subprocess.check_call(['virtualenv', venv_path]) | ||
1642 | diff --git a/hooks/charmhelpers/fetch/python/rpdb.py b/hooks/charmhelpers/fetch/python/rpdb.py | |||
1643 | 0 | new file mode 100644 | 155 | new file mode 100644 |
1644 | index 0000000..9b31610 | |||
1645 | --- /dev/null | |||
1646 | +++ b/hooks/charmhelpers/fetch/python/rpdb.py | |||
1647 | @@ -0,0 +1,56 @@ | |||
1648 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
1649 | 2 | # | ||
1650 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
1651 | 4 | # you may not use this file except in compliance with the License. | ||
1652 | 5 | # You may obtain a copy of the License at | ||
1653 | 6 | # | ||
1654 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
1655 | 8 | # | ||
1656 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
1657 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
1658 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
1659 | 12 | # See the License for the specific language governing permissions and | ||
1660 | 13 | # limitations under the License. | ||
1661 | 14 | |||
1662 | 15 | """Remote Python Debugger (pdb wrapper).""" | ||
1663 | 16 | |||
1664 | 17 | import pdb | ||
1665 | 18 | import socket | ||
1666 | 19 | import sys | ||
1667 | 20 | |||
1668 | 21 | __author__ = "Bertrand Janin <b@janin.com>" | ||
1669 | 22 | __version__ = "0.1.3" | ||
1670 | 23 | |||
1671 | 24 | |||
1672 | 25 | class Rpdb(pdb.Pdb): | ||
1673 | 26 | |||
1674 | 27 | def __init__(self, addr="127.0.0.1", port=4444): | ||
1675 | 28 | """Initialize the socket and initialize pdb.""" | ||
1676 | 29 | |||
1677 | 30 | # Backup stdin and stdout before replacing them by the socket handle | ||
1678 | 31 | self.old_stdout = sys.stdout | ||
1679 | 32 | self.old_stdin = sys.stdin | ||
1680 | 33 | |||
1681 | 34 | # Open a 'reusable' socket to let the webapp reload on the same port | ||
1682 | 35 | self.skt = socket.socket(socket.AF_INET, socket.SOCK_STREAM) | ||
1683 | 36 | self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True) | ||
1684 | 37 | self.skt.bind((addr, port)) | ||
1685 | 38 | self.skt.listen(1) | ||
1686 | 39 | (clientsocket, address) = self.skt.accept() | ||
1687 | 40 | handle = clientsocket.makefile('rw') | ||
1688 | 41 | pdb.Pdb.__init__(self, completekey='tab', stdin=handle, stdout=handle) | ||
1689 | 42 | sys.stdout = sys.stdin = handle | ||
1690 | 43 | |||
1691 | 44 | def shutdown(self): | ||
1692 | 45 | """Revert stdin and stdout, close the socket.""" | ||
1693 | 46 | sys.stdout = self.old_stdout | ||
1694 | 47 | sys.stdin = self.old_stdin | ||
1695 | 48 | self.skt.close() | ||
1696 | 49 | self.set_continue() | ||
1697 | 50 | |||
1698 | 51 | def do_continue(self, arg): | ||
1699 | 52 | """Stop all operation on ``continue``.""" | ||
1700 | 53 | self.shutdown() | ||
1701 | 54 | return 1 | ||
1702 | 55 | |||
1703 | 56 | do_EOF = do_quit = do_exit = do_c = do_cont = do_continue | ||
1704 | diff --git a/hooks/charmhelpers/fetch/python/version.py b/hooks/charmhelpers/fetch/python/version.py | |||
1705 | 0 | new file mode 100644 | 57 | new file mode 100644 |
1706 | index 0000000..3eb4210 | |||
1707 | --- /dev/null | |||
1708 | +++ b/hooks/charmhelpers/fetch/python/version.py | |||
1709 | @@ -0,0 +1,32 @@ | |||
1710 | 1 | #!/usr/bin/env python | ||
1711 | 2 | # coding: utf-8 | ||
1712 | 3 | |||
1713 | 4 | # Copyright 2014-2015 Canonical Limited. | ||
1714 | 5 | # | ||
1715 | 6 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
1716 | 7 | # you may not use this file except in compliance with the License. | ||
1717 | 8 | # You may obtain a copy of the License at | ||
1718 | 9 | # | ||
1719 | 10 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
1720 | 11 | # | ||
1721 | 12 | # Unless required by applicable law or agreed to in writing, software | ||
1722 | 13 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
1723 | 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
1724 | 15 | # See the License for the specific language governing permissions and | ||
1725 | 16 | # limitations under the License. | ||
1726 | 17 | |||
1727 | 18 | import sys | ||
1728 | 19 | |||
1729 | 20 | __author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" | ||
1730 | 21 | |||
1731 | 22 | |||
1732 | 23 | def current_version(): | ||
1733 | 24 | """Current system python version""" | ||
1734 | 25 | return sys.version_info | ||
1735 | 26 | |||
1736 | 27 | |||
1737 | 28 | def current_version_string(): | ||
1738 | 29 | """Current system python version as string major.minor.micro""" | ||
1739 | 30 | return "{0}.{1}.{2}".format(sys.version_info.major, | ||
1740 | 31 | sys.version_info.minor, | ||
1741 | 32 | sys.version_info.micro) | ||
1742 | diff --git a/hooks/charmhelpers/fetch/snap.py b/hooks/charmhelpers/fetch/snap.py | |||
1743 | index 23c707b..395836c 100644 | |||
1744 | --- a/hooks/charmhelpers/fetch/snap.py | |||
1745 | +++ b/hooks/charmhelpers/fetch/snap.py | |||
1746 | @@ -18,21 +18,33 @@ If writing reactive charms, use the snap layer: | |||
1747 | 18 | https://lists.ubuntu.com/archives/snapcraft/2016-September/001114.html | 18 | https://lists.ubuntu.com/archives/snapcraft/2016-September/001114.html |
1748 | 19 | """ | 19 | """ |
1749 | 20 | import subprocess | 20 | import subprocess |
1751 | 21 | from os import environ | 21 | import os |
1752 | 22 | from time import sleep | 22 | from time import sleep |
1753 | 23 | from charmhelpers.core.hookenv import log | 23 | from charmhelpers.core.hookenv import log |
1754 | 24 | 24 | ||
1755 | 25 | __author__ = 'Joseph Borg <joseph.borg@canonical.com>' | 25 | __author__ = 'Joseph Borg <joseph.borg@canonical.com>' |
1756 | 26 | 26 | ||
1758 | 27 | SNAP_NO_LOCK = 1 # The return code for "couldn't acquire lock" in Snap (hopefully this will be improved). | 27 | # The return code for "couldn't acquire lock" in Snap |
1759 | 28 | # (hopefully this will be improved). | ||
1760 | 29 | SNAP_NO_LOCK = 1 | ||
1761 | 28 | SNAP_NO_LOCK_RETRY_DELAY = 10 # Wait X seconds between Snap lock checks. | 30 | SNAP_NO_LOCK_RETRY_DELAY = 10 # Wait X seconds between Snap lock checks. |
1762 | 29 | SNAP_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times. | 31 | SNAP_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times. |
1763 | 32 | SNAP_CHANNELS = [ | ||
1764 | 33 | 'edge', | ||
1765 | 34 | 'beta', | ||
1766 | 35 | 'candidate', | ||
1767 | 36 | 'stable', | ||
1768 | 37 | ] | ||
1769 | 30 | 38 | ||
1770 | 31 | 39 | ||
1771 | 32 | class CouldNotAcquireLockException(Exception): | 40 | class CouldNotAcquireLockException(Exception): |
1772 | 33 | pass | 41 | pass |
1773 | 34 | 42 | ||
1774 | 35 | 43 | ||
1775 | 44 | class InvalidSnapChannel(Exception): | ||
1776 | 45 | pass | ||
1777 | 46 | |||
1778 | 47 | |||
1779 | 36 | def _snap_exec(commands): | 48 | def _snap_exec(commands): |
1780 | 37 | """ | 49 | """ |
1781 | 38 | Execute snap commands. | 50 | Execute snap commands. |
1782 | @@ -47,13 +59,17 @@ def _snap_exec(commands): | |||
1783 | 47 | 59 | ||
1784 | 48 | while return_code is None or return_code == SNAP_NO_LOCK: | 60 | while return_code is None or return_code == SNAP_NO_LOCK: |
1785 | 49 | try: | 61 | try: |
1787 | 50 | return_code = subprocess.check_call(['snap'] + commands, env=environ) | 62 | return_code = subprocess.check_call(['snap'] + commands, |
1788 | 63 | env=os.environ) | ||
1789 | 51 | except subprocess.CalledProcessError as e: | 64 | except subprocess.CalledProcessError as e: |
1790 | 52 | retry_count += + 1 | 65 | retry_count += + 1 |
1791 | 53 | if retry_count > SNAP_NO_LOCK_RETRY_COUNT: | 66 | if retry_count > SNAP_NO_LOCK_RETRY_COUNT: |
1793 | 54 | raise CouldNotAcquireLockException('Could not aquire lock after %s attempts' % SNAP_NO_LOCK_RETRY_COUNT) | 67 | raise CouldNotAcquireLockException( |
1794 | 68 | 'Could not aquire lock after {} attempts' | ||
1795 | 69 | .format(SNAP_NO_LOCK_RETRY_COUNT)) | ||
1796 | 55 | return_code = e.returncode | 70 | return_code = e.returncode |
1798 | 56 | log('Snap failed to acquire lock, trying again in %s seconds.' % SNAP_NO_LOCK_RETRY_DELAY, level='WARN') | 71 | log('Snap failed to acquire lock, trying again in {} seconds.' |
1799 | 72 | .format(SNAP_NO_LOCK_RETRY_DELAY, level='WARN')) | ||
1800 | 57 | sleep(SNAP_NO_LOCK_RETRY_DELAY) | 73 | sleep(SNAP_NO_LOCK_RETRY_DELAY) |
1801 | 58 | 74 | ||
1802 | 59 | return return_code | 75 | return return_code |
1803 | @@ -120,3 +136,15 @@ def snap_refresh(packages, *flags): | |||
1804 | 120 | 136 | ||
1805 | 121 | log(message, level='INFO') | 137 | log(message, level='INFO') |
1806 | 122 | return _snap_exec(['refresh'] + flags + packages) | 138 | return _snap_exec(['refresh'] + flags + packages) |
1807 | 139 | |||
1808 | 140 | |||
1809 | 141 | def valid_snap_channel(channel): | ||
1810 | 142 | """ Validate snap channel exists | ||
1811 | 143 | |||
1812 | 144 | :raises InvalidSnapChannel: When channel does not exist | ||
1813 | 145 | :return: Boolean | ||
1814 | 146 | """ | ||
1815 | 147 | if channel.lower() in SNAP_CHANNELS: | ||
1816 | 148 | return True | ||
1817 | 149 | else: | ||
1818 | 150 | raise InvalidSnapChannel("Invalid Snap Channel: {}".format(channel)) | ||
1819 | diff --git a/hooks/charmhelpers/fetch/ubuntu.py b/hooks/charmhelpers/fetch/ubuntu.py | |||
1820 | index 57b5fb6..24c76e3 100644 | |||
1821 | --- a/hooks/charmhelpers/fetch/ubuntu.py | |||
1822 | +++ b/hooks/charmhelpers/fetch/ubuntu.py | |||
1823 | @@ -19,14 +19,14 @@ import re | |||
1824 | 19 | import six | 19 | import six |
1825 | 20 | import time | 20 | import time |
1826 | 21 | import subprocess | 21 | import subprocess |
1827 | 22 | from tempfile import NamedTemporaryFile | ||
1828 | 23 | 22 | ||
1832 | 24 | from charmhelpers.core.host import ( | 23 | from charmhelpers.core.host import get_distrib_codename |
1833 | 25 | lsb_release | 24 | |
1831 | 26 | ) | ||
1834 | 27 | from charmhelpers.core.hookenv import ( | 25 | from charmhelpers.core.hookenv import ( |
1835 | 28 | log, | 26 | log, |
1836 | 29 | DEBUG, | 27 | DEBUG, |
1837 | 28 | WARNING, | ||
1838 | 29 | env_proxy_settings, | ||
1839 | 30 | ) | 30 | ) |
1840 | 31 | from charmhelpers.fetch import SourceConfigError, GPGKeyError | 31 | from charmhelpers.fetch import SourceConfigError, GPGKeyError |
1841 | 32 | 32 | ||
1842 | @@ -43,6 +43,7 @@ ARCH_TO_PROPOSED_POCKET = { | |||
1843 | 43 | 'x86_64': PROPOSED_POCKET, | 43 | 'x86_64': PROPOSED_POCKET, |
1844 | 44 | 'ppc64le': PROPOSED_PORTS_POCKET, | 44 | 'ppc64le': PROPOSED_PORTS_POCKET, |
1845 | 45 | 'aarch64': PROPOSED_PORTS_POCKET, | 45 | 'aarch64': PROPOSED_PORTS_POCKET, |
1846 | 46 | 's390x': PROPOSED_PORTS_POCKET, | ||
1847 | 46 | } | 47 | } |
1848 | 47 | CLOUD_ARCHIVE_URL = "http://ubuntu-cloud.archive.canonical.com/ubuntu" | 48 | CLOUD_ARCHIVE_URL = "http://ubuntu-cloud.archive.canonical.com/ubuntu" |
1849 | 48 | CLOUD_ARCHIVE_KEY_ID = '5EDB1B62EC4926EA' | 49 | CLOUD_ARCHIVE_KEY_ID = '5EDB1B62EC4926EA' |
1850 | @@ -139,7 +140,7 @@ CLOUD_ARCHIVE_POCKETS = { | |||
1851 | 139 | 'xenial-updates/ocata': 'xenial-updates/ocata', | 140 | 'xenial-updates/ocata': 'xenial-updates/ocata', |
1852 | 140 | 'ocata/proposed': 'xenial-proposed/ocata', | 141 | 'ocata/proposed': 'xenial-proposed/ocata', |
1853 | 141 | 'xenial-ocata/proposed': 'xenial-proposed/ocata', | 142 | 'xenial-ocata/proposed': 'xenial-proposed/ocata', |
1855 | 142 | 'xenial-ocata/newton': 'xenial-proposed/ocata', | 143 | 'xenial-proposed/ocata': 'xenial-proposed/ocata', |
1856 | 143 | # Pike | 144 | # Pike |
1857 | 144 | 'pike': 'xenial-updates/pike', | 145 | 'pike': 'xenial-updates/pike', |
1858 | 145 | 'xenial-pike': 'xenial-updates/pike', | 146 | 'xenial-pike': 'xenial-updates/pike', |
1859 | @@ -147,7 +148,7 @@ CLOUD_ARCHIVE_POCKETS = { | |||
1860 | 147 | 'xenial-updates/pike': 'xenial-updates/pike', | 148 | 'xenial-updates/pike': 'xenial-updates/pike', |
1861 | 148 | 'pike/proposed': 'xenial-proposed/pike', | 149 | 'pike/proposed': 'xenial-proposed/pike', |
1862 | 149 | 'xenial-pike/proposed': 'xenial-proposed/pike', | 150 | 'xenial-pike/proposed': 'xenial-proposed/pike', |
1864 | 150 | 'xenial-pike/newton': 'xenial-proposed/pike', | 151 | 'xenial-proposed/pike': 'xenial-proposed/pike', |
1865 | 151 | # Queens | 152 | # Queens |
1866 | 152 | 'queens': 'xenial-updates/queens', | 153 | 'queens': 'xenial-updates/queens', |
1867 | 153 | 'xenial-queens': 'xenial-updates/queens', | 154 | 'xenial-queens': 'xenial-updates/queens', |
1868 | @@ -155,13 +156,37 @@ CLOUD_ARCHIVE_POCKETS = { | |||
1869 | 155 | 'xenial-updates/queens': 'xenial-updates/queens', | 156 | 'xenial-updates/queens': 'xenial-updates/queens', |
1870 | 156 | 'queens/proposed': 'xenial-proposed/queens', | 157 | 'queens/proposed': 'xenial-proposed/queens', |
1871 | 157 | 'xenial-queens/proposed': 'xenial-proposed/queens', | 158 | 'xenial-queens/proposed': 'xenial-proposed/queens', |
1873 | 158 | 'xenial-queens/newton': 'xenial-proposed/queens', | 159 | 'xenial-proposed/queens': 'xenial-proposed/queens', |
1874 | 160 | # Rocky | ||
1875 | 161 | 'rocky': 'bionic-updates/rocky', | ||
1876 | 162 | 'bionic-rocky': 'bionic-updates/rocky', | ||
1877 | 163 | 'bionic-rocky/updates': 'bionic-updates/rocky', | ||
1878 | 164 | 'bionic-updates/rocky': 'bionic-updates/rocky', | ||
1879 | 165 | 'rocky/proposed': 'bionic-proposed/rocky', | ||
1880 | 166 | 'bionic-rocky/proposed': 'bionic-proposed/rocky', | ||
1881 | 167 | 'bionic-proposed/rocky': 'bionic-proposed/rocky', | ||
1882 | 168 | # Stein | ||
1883 | 169 | 'stein': 'bionic-updates/stein', | ||
1884 | 170 | 'bionic-stein': 'bionic-updates/stein', | ||
1885 | 171 | 'bionic-stein/updates': 'bionic-updates/stein', | ||
1886 | 172 | 'bionic-updates/stein': 'bionic-updates/stein', | ||
1887 | 173 | 'stein/proposed': 'bionic-proposed/stein', | ||
1888 | 174 | 'bionic-stein/proposed': 'bionic-proposed/stein', | ||
1889 | 175 | 'bionic-proposed/stein': 'bionic-proposed/stein', | ||
1890 | 176 | # Train | ||
1891 | 177 | 'train': 'bionic-updates/train', | ||
1892 | 178 | 'bionic-train': 'bionic-updates/train', | ||
1893 | 179 | 'bionic-train/updates': 'bionic-updates/train', | ||
1894 | 180 | 'bionic-updates/train': 'bionic-updates/train', | ||
1895 | 181 | 'train/proposed': 'bionic-proposed/train', | ||
1896 | 182 | 'bionic-train/proposed': 'bionic-proposed/train', | ||
1897 | 183 | 'bionic-proposed/train': 'bionic-proposed/train', | ||
1898 | 159 | } | 184 | } |
1899 | 160 | 185 | ||
1900 | 161 | 186 | ||
1901 | 162 | APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT. | 187 | APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT. |
1902 | 163 | CMD_RETRY_DELAY = 10 # Wait 10 seconds between command retries. | 188 | CMD_RETRY_DELAY = 10 # Wait 10 seconds between command retries. |
1904 | 164 | CMD_RETRY_COUNT = 30 # Retry a failing fatal command X times. | 189 | CMD_RETRY_COUNT = 3 # Retry a failing fatal command X times. |
1905 | 165 | 190 | ||
1906 | 166 | 191 | ||
1907 | 167 | def filter_installed_packages(packages): | 192 | def filter_installed_packages(packages): |
1908 | @@ -179,6 +204,18 @@ def filter_installed_packages(packages): | |||
1909 | 179 | return _pkgs | 204 | return _pkgs |
1910 | 180 | 205 | ||
1911 | 181 | 206 | ||
1912 | 207 | def filter_missing_packages(packages): | ||
1913 | 208 | """Return a list of packages that are installed. | ||
1914 | 209 | |||
1915 | 210 | :param packages: list of packages to evaluate. | ||
1916 | 211 | :returns list: Packages that are installed. | ||
1917 | 212 | """ | ||
1918 | 213 | return list( | ||
1919 | 214 | set(packages) - | ||
1920 | 215 | set(filter_installed_packages(packages)) | ||
1921 | 216 | ) | ||
1922 | 217 | |||
1923 | 218 | |||
1924 | 182 | def apt_cache(in_memory=True, progress=None): | 219 | def apt_cache(in_memory=True, progress=None): |
1925 | 183 | """Build and return an apt cache.""" | 220 | """Build and return an apt cache.""" |
1926 | 184 | from apt import apt_pkg | 221 | from apt import apt_pkg |
1927 | @@ -238,6 +275,14 @@ def apt_purge(packages, fatal=False): | |||
1928 | 238 | _run_apt_command(cmd, fatal) | 275 | _run_apt_command(cmd, fatal) |
1929 | 239 | 276 | ||
1930 | 240 | 277 | ||
1931 | 278 | def apt_autoremove(purge=True, fatal=False): | ||
1932 | 279 | """Purge one or more packages.""" | ||
1933 | 280 | cmd = ['apt-get', '--assume-yes', 'autoremove'] | ||
1934 | 281 | if purge: | ||
1935 | 282 | cmd.append('--purge') | ||
1936 | 283 | _run_apt_command(cmd, fatal) | ||
1937 | 284 | |||
1938 | 285 | |||
1939 | 241 | def apt_mark(packages, mark, fatal=False): | 286 | def apt_mark(packages, mark, fatal=False): |
1940 | 242 | """Flag one or more packages using apt-mark.""" | 287 | """Flag one or more packages using apt-mark.""" |
1941 | 243 | log("Marking {} as {}".format(packages, mark)) | 288 | log("Marking {} as {}".format(packages, mark)) |
1942 | @@ -261,42 +306,156 @@ def apt_unhold(packages, fatal=False): | |||
1943 | 261 | return apt_mark(packages, 'unhold', fatal=fatal) | 306 | return apt_mark(packages, 'unhold', fatal=fatal) |
1944 | 262 | 307 | ||
1945 | 263 | 308 | ||
1948 | 264 | def import_key(keyid): | 309 | def import_key(key): |
1949 | 265 | """Import a key in either ASCII Armor or Radix64 format. | 310 | """Import an ASCII Armor key. |
1950 | 266 | 311 | ||
1953 | 267 | `keyid` is either the keyid to fetch from a PGP server, or | 312 | A Radix64 format keyid is also supported for backwards |
1954 | 268 | the key in ASCII armor foramt. | 313 | compatibility. In this case Ubuntu keyserver will be |
1955 | 314 | queried for a key via HTTPS by its keyid. This method | ||
1956 | 315 | is less preferrable because https proxy servers may | ||
1957 | 316 | require traffic decryption which is equivalent to a | ||
1958 | 317 | man-in-the-middle attack (a proxy server impersonates | ||
1959 | 318 | keyserver TLS certificates and has to be explicitly | ||
1960 | 319 | trusted by the system). | ||
1961 | 269 | 320 | ||
1963 | 270 | :param keyid: String of key (or key id). | 321 | :param key: A GPG key in ASCII armor format, |
1964 | 322 | including BEGIN and END markers or a keyid. | ||
1965 | 323 | :type key: (bytes, str) | ||
1966 | 271 | :raises: GPGKeyError if the key could not be imported | 324 | :raises: GPGKeyError if the key could not be imported |
1967 | 272 | """ | 325 | """ |
1971 | 273 | key = keyid.strip() | 326 | key = key.strip() |
1972 | 274 | if (key.startswith('-----BEGIN PGP PUBLIC KEY BLOCK-----') and | 327 | if '-' in key or '\n' in key: |
1973 | 275 | key.endswith('-----END PGP PUBLIC KEY BLOCK-----')): | 328 | # Send everything not obviously a keyid to GPG to import, as |
1974 | 329 | # we trust its validation better than our own. eg. handling | ||
1975 | 330 | # comments before the key. | ||
1976 | 276 | log("PGP key found (looks like ASCII Armor format)", level=DEBUG) | 331 | log("PGP key found (looks like ASCII Armor format)", level=DEBUG) |
1989 | 277 | log("Importing ASCII Armor PGP key", level=DEBUG) | 332 | if ('-----BEGIN PGP PUBLIC KEY BLOCK-----' in key and |
1990 | 278 | with NamedTemporaryFile() as keyfile: | 333 | '-----END PGP PUBLIC KEY BLOCK-----' in key): |
1991 | 279 | with open(keyfile.name, 'w') as fd: | 334 | log("Writing provided PGP key in the binary format", level=DEBUG) |
1992 | 280 | fd.write(key) | 335 | if six.PY3: |
1993 | 281 | fd.write("\n") | 336 | key_bytes = key.encode('utf-8') |
1994 | 282 | cmd = ['apt-key', 'add', keyfile.name] | 337 | else: |
1995 | 283 | try: | 338 | key_bytes = key |
1996 | 284 | subprocess.check_call(cmd) | 339 | key_name = _get_keyid_by_gpg_key(key_bytes) |
1997 | 285 | except subprocess.CalledProcessError: | 340 | key_gpg = _dearmor_gpg_key(key_bytes) |
1998 | 286 | error = "Error importing PGP key '{}'".format(key) | 341 | _write_apt_gpg_keyfile(key_name=key_name, key_material=key_gpg) |
1999 | 287 | log(error) | 342 | else: |
2000 | 288 | raise GPGKeyError(error) | 343 | raise GPGKeyError("ASCII armor markers missing from GPG key") |
2001 | 289 | else: | 344 | else: |
2012 | 290 | log("PGP key found (looks like Radix64 format)", level=DEBUG) | 345 | log("PGP key found (looks like Radix64 format)", level=WARNING) |
2013 | 291 | log("Importing PGP key from keyserver", level=DEBUG) | 346 | log("SECURELY importing PGP key from keyserver; " |
2014 | 292 | cmd = ['apt-key', 'adv', '--keyserver', | 347 | "full key not provided.", level=WARNING) |
2015 | 293 | 'hkp://keyserver.ubuntu.com:80', '--recv-keys', key] | 348 | # as of bionic add-apt-repository uses curl with an HTTPS keyserver URL |
2016 | 294 | try: | 349 | # to retrieve GPG keys. `apt-key adv` command is deprecated as is |
2017 | 295 | subprocess.check_call(cmd) | 350 | # apt-key in general as noted in its manpage. See lp:1433761 for more |
2018 | 296 | except subprocess.CalledProcessError: | 351 | # history. Instead, /etc/apt/trusted.gpg.d is used directly to drop |
2019 | 297 | error = "Error importing PGP key '{}'".format(key) | 352 | # gpg |
2020 | 298 | log(error) | 353 | key_asc = _get_key_by_keyid(key) |
2021 | 299 | raise GPGKeyError(error) | 354 | # write the key in GPG format so that apt-key list shows it |
2022 | 355 | key_gpg = _dearmor_gpg_key(key_asc) | ||
2023 | 356 | _write_apt_gpg_keyfile(key_name=key, key_material=key_gpg) | ||
2024 | 357 | |||
2025 | 358 | |||
2026 | 359 | def _get_keyid_by_gpg_key(key_material): | ||
2027 | 360 | """Get a GPG key fingerprint by GPG key material. | ||
2028 | 361 | Gets a GPG key fingerprint (40-digit, 160-bit) by the ASCII armor-encoded | ||
2029 | 362 | or binary GPG key material. Can be used, for example, to generate file | ||
2030 | 363 | names for keys passed via charm options. | ||
2031 | 364 | |||
2032 | 365 | :param key_material: ASCII armor-encoded or binary GPG key material | ||
2033 | 366 | :type key_material: bytes | ||
2034 | 367 | :raises: GPGKeyError if invalid key material has been provided | ||
2035 | 368 | :returns: A GPG key fingerprint | ||
2036 | 369 | :rtype: str | ||
2037 | 370 | """ | ||
2038 | 371 | # Use the same gpg command for both Xenial and Bionic | ||
2039 | 372 | cmd = 'gpg --with-colons --with-fingerprint' | ||
2040 | 373 | ps = subprocess.Popen(cmd.split(), | ||
2041 | 374 | stdout=subprocess.PIPE, | ||
2042 | 375 | stderr=subprocess.PIPE, | ||
2043 | 376 | stdin=subprocess.PIPE) | ||
2044 | 377 | out, err = ps.communicate(input=key_material) | ||
2045 | 378 | if six.PY3: | ||
2046 | 379 | out = out.decode('utf-8') | ||
2047 | 380 | err = err.decode('utf-8') | ||
2048 | 381 | if 'gpg: no valid OpenPGP data found.' in err: | ||
2049 | 382 | raise GPGKeyError('Invalid GPG key material provided') | ||
2050 | 383 | # from gnupg2 docs: fpr :: Fingerprint (fingerprint is in field 10) | ||
2051 | 384 | return re.search(r"^fpr:{9}([0-9A-F]{40}):$", out, re.MULTILINE).group(1) | ||
2052 | 385 | |||
2053 | 386 | |||
2054 | 387 | def _get_key_by_keyid(keyid): | ||
2055 | 388 | """Get a key via HTTPS from the Ubuntu keyserver. | ||
2056 | 389 | Different key ID formats are supported by SKS keyservers (the longer ones | ||
2057 | 390 | are more secure, see "dead beef attack" and https://evil32.com/). Since | ||
2058 | 391 | HTTPS is used, if SSLBump-like HTTPS proxies are in place, they will | ||
2059 | 392 | impersonate keyserver.ubuntu.com and generate a certificate with | ||
2060 | 393 | keyserver.ubuntu.com in the CN field or in SubjAltName fields of a | ||
2061 | 394 | certificate. If such proxy behavior is expected it is necessary to add the | ||
2062 | 395 | CA certificate chain containing the intermediate CA of the SSLBump proxy to | ||
2063 | 396 | every machine that this code runs on via ca-certs cloud-init directive (via | ||
2064 | 397 | cloudinit-userdata model-config) or via other means (such as through a | ||
2065 | 398 | custom charm option). Also note that DNS resolution for the hostname in a | ||
2066 | 399 | URL is done at a proxy server - not at the client side. | ||
2067 | 400 | |||
2068 | 401 | 8-digit (32 bit) key ID | ||
2069 | 402 | https://keyserver.ubuntu.com/pks/lookup?search=0x4652B4E6 | ||
2070 | 403 | 16-digit (64 bit) key ID | ||
2071 | 404 | https://keyserver.ubuntu.com/pks/lookup?search=0x6E85A86E4652B4E6 | ||
2072 | 405 | 40-digit key ID: | ||
2073 | 406 | https://keyserver.ubuntu.com/pks/lookup?search=0x35F77D63B5CEC106C577ED856E85A86E4652B4E6 | ||
2074 | 407 | |||
2075 | 408 | :param keyid: An 8, 16 or 40 hex digit keyid to find a key for | ||
2076 | 409 | :type keyid: (bytes, str) | ||
2077 | 410 | :returns: A key material for the specified GPG key id | ||
2078 | 411 | :rtype: (str, bytes) | ||
2079 | 412 | :raises: subprocess.CalledProcessError | ||
2080 | 413 | """ | ||
2081 | 414 | # options=mr - machine-readable output (disables html wrappers) | ||
2082 | 415 | keyserver_url = ('https://keyserver.ubuntu.com' | ||
2083 | 416 | '/pks/lookup?op=get&options=mr&exact=on&search=0x{}') | ||
2084 | 417 | curl_cmd = ['curl', keyserver_url.format(keyid)] | ||
2085 | 418 | # use proxy server settings in order to retrieve the key | ||
2086 | 419 | return subprocess.check_output(curl_cmd, | ||
2087 | 420 | env=env_proxy_settings(['https'])) | ||
2088 | 421 | |||
2089 | 422 | |||
2090 | 423 | def _dearmor_gpg_key(key_asc): | ||
2091 | 424 | """Converts a GPG key in the ASCII armor format to the binary format. | ||
2092 | 425 | |||
2093 | 426 | :param key_asc: A GPG key in ASCII armor format. | ||
2094 | 427 | :type key_asc: (str, bytes) | ||
2095 | 428 | :returns: A GPG key in binary format | ||
2096 | 429 | :rtype: (str, bytes) | ||
2097 | 430 | :raises: GPGKeyError | ||
2098 | 431 | """ | ||
2099 | 432 | ps = subprocess.Popen(['gpg', '--dearmor'], | ||
2100 | 433 | stdout=subprocess.PIPE, | ||
2101 | 434 | stderr=subprocess.PIPE, | ||
2102 | 435 | stdin=subprocess.PIPE) | ||
2103 | 436 | out, err = ps.communicate(input=key_asc) | ||
2104 | 437 | # no need to decode output as it is binary (invalid utf-8), only error | ||
2105 | 438 | if six.PY3: | ||
2106 | 439 | err = err.decode('utf-8') | ||
2107 | 440 | if 'gpg: no valid OpenPGP data found.' in err: | ||
2108 | 441 | raise GPGKeyError('Invalid GPG key material. Check your network setup' | ||
2109 | 442 | ' (MTU, routing, DNS) and/or proxy server settings' | ||
2110 | 443 | ' as well as destination keyserver status.') | ||
2111 | 444 | else: | ||
2112 | 445 | return out | ||
2113 | 446 | |||
2114 | 447 | |||
2115 | 448 | def _write_apt_gpg_keyfile(key_name, key_material): | ||
2116 | 449 | """Writes GPG key material into a file at a provided path. | ||
2117 | 450 | |||
2118 | 451 | :param key_name: A key name to use for a key file (could be a fingerprint) | ||
2119 | 452 | :type key_name: str | ||
2120 | 453 | :param key_material: A GPG key material (binary) | ||
2121 | 454 | :type key_material: (str, bytes) | ||
2122 | 455 | """ | ||
2123 | 456 | with open('/etc/apt/trusted.gpg.d/{}.gpg'.format(key_name), | ||
2124 | 457 | 'wb') as keyf: | ||
2125 | 458 | keyf.write(key_material) | ||
2126 | 300 | 459 | ||
2127 | 301 | 460 | ||
2128 | 302 | def add_source(source, key=None, fail_invalid=False): | 461 | def add_source(source, key=None, fail_invalid=False): |
2129 | @@ -364,20 +523,23 @@ def add_source(source, key=None, fail_invalid=False): | |||
2130 | 364 | (r"^cloud:(.*)-(.*)\/staging$", _add_cloud_staging), | 523 | (r"^cloud:(.*)-(.*)\/staging$", _add_cloud_staging), |
2131 | 365 | (r"^cloud:(.*)-(.*)$", _add_cloud_distro_check), | 524 | (r"^cloud:(.*)-(.*)$", _add_cloud_distro_check), |
2132 | 366 | (r"^cloud:(.*)$", _add_cloud_pocket), | 525 | (r"^cloud:(.*)$", _add_cloud_pocket), |
2133 | 526 | (r"^snap:.*-(.*)-(.*)$", _add_cloud_distro_check), | ||
2134 | 367 | ]) | 527 | ]) |
2135 | 368 | if source is None: | 528 | if source is None: |
2136 | 369 | source = '' | 529 | source = '' |
2137 | 370 | for r, fn in six.iteritems(_mapping): | 530 | for r, fn in six.iteritems(_mapping): |
2138 | 371 | m = re.match(r, source) | 531 | m = re.match(r, source) |
2139 | 372 | if m: | 532 | if m: |
2140 | 373 | # call the assoicated function with the captured groups | ||
2141 | 374 | # raises SourceConfigError on error. | ||
2142 | 375 | fn(*m.groups()) | ||
2143 | 376 | if key: | 533 | if key: |
2144 | 534 | # Import key before adding the source which depends on it, | ||
2145 | 535 | # as refreshing packages could fail otherwise. | ||
2146 | 377 | try: | 536 | try: |
2147 | 378 | import_key(key) | 537 | import_key(key) |
2148 | 379 | except GPGKeyError as e: | 538 | except GPGKeyError as e: |
2149 | 380 | raise SourceConfigError(str(e)) | 539 | raise SourceConfigError(str(e)) |
2150 | 540 | # call the associated function with the captured groups | ||
2151 | 541 | # raises SourceConfigError on error. | ||
2152 | 542 | fn(*m.groups()) | ||
2153 | 381 | break | 543 | break |
2154 | 382 | else: | 544 | else: |
2155 | 383 | # nothing matched. log an error and maybe sys.exit | 545 | # nothing matched. log an error and maybe sys.exit |
2156 | @@ -390,13 +552,13 @@ def add_source(source, key=None, fail_invalid=False): | |||
2157 | 390 | def _add_proposed(): | 552 | def _add_proposed(): |
2158 | 391 | """Add the PROPOSED_POCKET as /etc/apt/source.list.d/proposed.list | 553 | """Add the PROPOSED_POCKET as /etc/apt/source.list.d/proposed.list |
2159 | 392 | 554 | ||
2161 | 393 | Uses lsb_release()['DISTRIB_CODENAME'] to determine the correct staza for | 555 | Uses get_distrib_codename to determine the correct stanza for |
2162 | 394 | the deb line. | 556 | the deb line. |
2163 | 395 | 557 | ||
2164 | 396 | For intel architecutres PROPOSED_POCKET is used for the release, but for | 558 | For intel architecutres PROPOSED_POCKET is used for the release, but for |
2165 | 397 | other architectures PROPOSED_PORTS_POCKET is used for the release. | 559 | other architectures PROPOSED_PORTS_POCKET is used for the release. |
2166 | 398 | """ | 560 | """ |
2168 | 399 | release = lsb_release()['DISTRIB_CODENAME'] | 561 | release = get_distrib_codename() |
2169 | 400 | arch = platform.machine() | 562 | arch = platform.machine() |
2170 | 401 | if arch not in six.iterkeys(ARCH_TO_PROPOSED_POCKET): | 563 | if arch not in six.iterkeys(ARCH_TO_PROPOSED_POCKET): |
2171 | 402 | raise SourceConfigError("Arch {} not supported for (distro-)proposed" | 564 | raise SourceConfigError("Arch {} not supported for (distro-)proposed" |
2172 | @@ -409,8 +571,16 @@ def _add_apt_repository(spec): | |||
2173 | 409 | """Add the spec using add_apt_repository | 571 | """Add the spec using add_apt_repository |
2174 | 410 | 572 | ||
2175 | 411 | :param spec: the parameter to pass to add_apt_repository | 573 | :param spec: the parameter to pass to add_apt_repository |
2176 | 574 | :type spec: str | ||
2177 | 412 | """ | 575 | """ |
2179 | 413 | _run_with_retries(['add-apt-repository', '--yes', spec]) | 576 | if '{series}' in spec: |
2180 | 577 | series = get_distrib_codename() | ||
2181 | 578 | spec = spec.replace('{series}', series) | ||
2182 | 579 | # software-properties package for bionic properly reacts to proxy settings | ||
2183 | 580 | # passed as environment variables (See lp:1433761). This is not the case | ||
2184 | 581 | # LTS and non-LTS releases below bionic. | ||
2185 | 582 | _run_with_retries(['add-apt-repository', '--yes', spec], | ||
2186 | 583 | cmd_env=env_proxy_settings(['https'])) | ||
2187 | 414 | 584 | ||
2188 | 415 | 585 | ||
2189 | 416 | def _add_cloud_pocket(pocket): | 586 | def _add_cloud_pocket(pocket): |
2190 | @@ -479,7 +649,7 @@ def _verify_is_ubuntu_rel(release, os_release): | |||
2191 | 479 | :raises: SourceConfigError if the release is not the same as the ubuntu | 649 | :raises: SourceConfigError if the release is not the same as the ubuntu |
2192 | 480 | release. | 650 | release. |
2193 | 481 | """ | 651 | """ |
2195 | 482 | ubuntu_rel = lsb_release()['DISTRIB_CODENAME'] | 652 | ubuntu_rel = get_distrib_codename() |
2196 | 483 | if release != ubuntu_rel: | 653 | if release != ubuntu_rel: |
2197 | 484 | raise SourceConfigError( | 654 | raise SourceConfigError( |
2198 | 485 | 'Invalid Cloud Archive release specified: {}-{} on this Ubuntu' | 655 | 'Invalid Cloud Archive release specified: {}-{} on this Ubuntu' |
2199 | @@ -557,7 +727,7 @@ def get_upstream_version(package): | |||
2200 | 557 | cache = apt_cache() | 727 | cache = apt_cache() |
2201 | 558 | try: | 728 | try: |
2202 | 559 | pkg = cache[package] | 729 | pkg = cache[package] |
2204 | 560 | except: | 730 | except Exception: |
2205 | 561 | # the package is unknown to the current apt cache. | 731 | # the package is unknown to the current apt cache. |
2206 | 562 | return None | 732 | return None |
2207 | 563 | 733 |
This merge proposal is being monitored by mergebot. Change the status to Approved to merge.