Merge ~xavpaice/charm-nagios:fix-lp1677580 into ~nagios-charmers/charm-nagios:master
- Git
- lp:~xavpaice/charm-nagios
- fix-lp1677580
- Merge into master
Proposed by
Xav Paice
Status: | Merged | ||||
---|---|---|---|---|---|
Approved by: | James Hebden | ||||
Approved revision: | dbc7c76615b10b763c5f5ca355a83ac8afee8b8b | ||||
Merged at revision: | 38f049516d4865a1e3c1fec5289f6f189fff0631 | ||||
Proposed branch: | ~xavpaice/charm-nagios:fix-lp1677580 | ||||
Merge into: | ~nagios-charmers/charm-nagios:master | ||||
Diff against target: |
6573 lines (+5335/-405) 39 files modified
Makefile (+14/-0) bin/charm_helpers_sync.py (+252/-0) charm-helpers.yaml (+2/-1) config.yaml (+21/-0) hooks/charmhelpers/__init__.py (+97/-0) hooks/charmhelpers/contrib/__init__.py (+13/-0) hooks/charmhelpers/contrib/ssl/__init__.py (+15/-1) hooks/charmhelpers/contrib/ssl/service.py (+28/-18) hooks/charmhelpers/core/__init__.py (+13/-0) hooks/charmhelpers/core/decorators.py (+55/-0) hooks/charmhelpers/core/files.py (+43/-0) hooks/charmhelpers/core/fstab.py (+132/-0) hooks/charmhelpers/core/hookenv.py (+742/-35) hooks/charmhelpers/core/host.py (+755/-104) hooks/charmhelpers/core/host_factory/__init__.py (+0/-0) hooks/charmhelpers/core/host_factory/centos.py (+72/-0) hooks/charmhelpers/core/host_factory/ubuntu.py (+89/-0) hooks/charmhelpers/core/hugepage.py (+69/-0) hooks/charmhelpers/core/kernel.py (+72/-0) hooks/charmhelpers/core/kernel_factory/__init__.py (+0/-0) hooks/charmhelpers/core/kernel_factory/centos.py (+17/-0) hooks/charmhelpers/core/kernel_factory/ubuntu.py (+13/-0) hooks/charmhelpers/core/services/__init__.py (+16/-0) hooks/charmhelpers/core/services/base.py (+351/-0) hooks/charmhelpers/core/services/helpers.py (+290/-0) hooks/charmhelpers/core/strutils.py (+123/-0) hooks/charmhelpers/core/sysctl.py (+54/-0) hooks/charmhelpers/core/templating.py (+84/-0) hooks/charmhelpers/core/unitdata.py (+518/-0) hooks/charmhelpers/fetch/__init__.py (+135/-211) hooks/charmhelpers/fetch/archiveurl.py (+126/-9) hooks/charmhelpers/fetch/bzrurl.py (+52/-25) hooks/charmhelpers/fetch/centos.py (+171/-0) hooks/charmhelpers/fetch/giturl.py (+69/-0) hooks/charmhelpers/fetch/snap.py (+134/-0) hooks/charmhelpers/fetch/ubuntu.py (+583/-0) hooks/charmhelpers/osplatform.py (+25/-0) hooks/templates/localhost_nagios2.cfg.tmpl (+70/-0) hooks/upgrade-charm (+20/-1) |
||||
Related bugs: |
|
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
James Hebden (community) | Approve | ||
Review via email: mp+329236@code.launchpad.net |
Commit message
Description of the change
To post a comment you must log in.
Revision history for this message
James Hebden (ec0) wrote : | # |
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | diff --git a/Makefile b/Makefile | |||
2 | index c75b2e9..9d48829 100644 | |||
3 | --- a/Makefile | |||
4 | +++ b/Makefile | |||
5 | @@ -1,3 +1,7 @@ | |||
6 | 1 | #!/usr/bin/make | ||
7 | 2 | PYTHON := /usr/bin/python3 | ||
8 | 3 | export PYTHONPATH := hooks | ||
9 | 4 | |||
10 | 1 | default: | 5 | default: |
11 | 2 | echo Nothing to do | 6 | echo Nothing to do |
12 | 3 | 7 | ||
13 | @@ -12,3 +16,13 @@ test: | |||
14 | 12 | tests/22-extraconfig-test | 16 | tests/22-extraconfig-test |
15 | 13 | tests/23-livestatus-test | 17 | tests/23-livestatus-test |
16 | 14 | tests/24-pagerduty-test | 18 | tests/24-pagerduty-test |
17 | 19 | |||
18 | 20 | bin/charm_helpers_sync.py: | ||
19 | 21 | @mkdir -p bin | ||
20 | 22 | @bzr cat lp:charm-helpers/tools/charm_helpers_sync/charm_helpers_sync.py \ | ||
21 | 23 | > bin/charm_helpers_sync.py | ||
22 | 24 | |||
23 | 25 | sync: bin/charm_helpers_sync.py | ||
24 | 26 | @$(PYTHON) bin/charm_helpers_sync.py -c charm-helpers.yaml | ||
25 | 27 | |||
26 | 28 | |||
27 | diff --git a/bin/charm_helpers_sync.py b/bin/charm_helpers_sync.py | |||
28 | 15 | new file mode 100644 | 29 | new file mode 100644 |
29 | index 0000000..bd79460 | |||
30 | --- /dev/null | |||
31 | +++ b/bin/charm_helpers_sync.py | |||
32 | @@ -0,0 +1,252 @@ | |||
33 | 1 | #!/usr/bin/python | ||
34 | 2 | |||
35 | 3 | # Copyright 2014-2015 Canonical Limited. | ||
36 | 4 | # | ||
37 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
38 | 6 | # you may not use this file except in compliance with the License. | ||
39 | 7 | # You may obtain a copy of the License at | ||
40 | 8 | # | ||
41 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
42 | 10 | # | ||
43 | 11 | # Unless required by applicable law or agreed to in writing, software | ||
44 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
45 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
46 | 14 | # See the License for the specific language governing permissions and | ||
47 | 15 | # limitations under the License. | ||
48 | 16 | |||
49 | 17 | # Authors: | ||
50 | 18 | # Adam Gandelman <adamg@ubuntu.com> | ||
51 | 19 | |||
52 | 20 | import logging | ||
53 | 21 | import optparse | ||
54 | 22 | import os | ||
55 | 23 | import subprocess | ||
56 | 24 | import shutil | ||
57 | 25 | import sys | ||
58 | 26 | import tempfile | ||
59 | 27 | import yaml | ||
60 | 28 | from fnmatch import fnmatch | ||
61 | 29 | |||
62 | 30 | import six | ||
63 | 31 | |||
64 | 32 | CHARM_HELPERS_BRANCH = 'lp:charm-helpers' | ||
65 | 33 | |||
66 | 34 | |||
67 | 35 | def parse_config(conf_file): | ||
68 | 36 | if not os.path.isfile(conf_file): | ||
69 | 37 | logging.error('Invalid config file: %s.' % conf_file) | ||
70 | 38 | return False | ||
71 | 39 | return yaml.load(open(conf_file).read()) | ||
72 | 40 | |||
73 | 41 | |||
74 | 42 | def clone_helpers(work_dir, branch): | ||
75 | 43 | dest = os.path.join(work_dir, 'charm-helpers') | ||
76 | 44 | logging.info('Checking out %s to %s.' % (branch, dest)) | ||
77 | 45 | cmd = ['bzr', 'checkout', '--lightweight', branch, dest] | ||
78 | 46 | subprocess.check_call(cmd) | ||
79 | 47 | return dest | ||
80 | 48 | |||
81 | 49 | |||
82 | 50 | def _module_path(module): | ||
83 | 51 | return os.path.join(*module.split('.')) | ||
84 | 52 | |||
85 | 53 | |||
86 | 54 | def _src_path(src, module): | ||
87 | 55 | return os.path.join(src, 'charmhelpers', _module_path(module)) | ||
88 | 56 | |||
89 | 57 | |||
90 | 58 | def _dest_path(dest, module): | ||
91 | 59 | return os.path.join(dest, _module_path(module)) | ||
92 | 60 | |||
93 | 61 | |||
94 | 62 | def _is_pyfile(path): | ||
95 | 63 | return os.path.isfile(path + '.py') | ||
96 | 64 | |||
97 | 65 | |||
98 | 66 | def ensure_init(path): | ||
99 | 67 | ''' | ||
100 | 68 | ensure directories leading up to path are importable, omitting | ||
101 | 69 | parent directory, eg path='/hooks/helpers/foo'/: | ||
102 | 70 | hooks/ | ||
103 | 71 | hooks/helpers/__init__.py | ||
104 | 72 | hooks/helpers/foo/__init__.py | ||
105 | 73 | ''' | ||
106 | 74 | for d, dirs, files in os.walk(os.path.join(*path.split('/')[:2])): | ||
107 | 75 | _i = os.path.join(d, '__init__.py') | ||
108 | 76 | if not os.path.exists(_i): | ||
109 | 77 | logging.info('Adding missing __init__.py: %s' % _i) | ||
110 | 78 | open(_i, 'wb').close() | ||
111 | 79 | |||
112 | 80 | |||
113 | 81 | def sync_pyfile(src, dest): | ||
114 | 82 | src = src + '.py' | ||
115 | 83 | src_dir = os.path.dirname(src) | ||
116 | 84 | logging.info('Syncing pyfile: %s -> %s.' % (src, dest)) | ||
117 | 85 | if not os.path.exists(dest): | ||
118 | 86 | os.makedirs(dest) | ||
119 | 87 | shutil.copy(src, dest) | ||
120 | 88 | if os.path.isfile(os.path.join(src_dir, '__init__.py')): | ||
121 | 89 | shutil.copy(os.path.join(src_dir, '__init__.py'), | ||
122 | 90 | dest) | ||
123 | 91 | ensure_init(dest) | ||
124 | 92 | |||
125 | 93 | |||
126 | 94 | def get_filter(opts=None): | ||
127 | 95 | opts = opts or [] | ||
128 | 96 | if 'inc=*' in opts: | ||
129 | 97 | # do not filter any files, include everything | ||
130 | 98 | return None | ||
131 | 99 | |||
132 | 100 | def _filter(dir, ls): | ||
133 | 101 | incs = [opt.split('=').pop() for opt in opts if 'inc=' in opt] | ||
134 | 102 | _filter = [] | ||
135 | 103 | for f in ls: | ||
136 | 104 | _f = os.path.join(dir, f) | ||
137 | 105 | |||
138 | 106 | if not os.path.isdir(_f) and not _f.endswith('.py') and incs: | ||
139 | 107 | if True not in [fnmatch(_f, inc) for inc in incs]: | ||
140 | 108 | logging.debug('Not syncing %s, does not match include ' | ||
141 | 109 | 'filters (%s)' % (_f, incs)) | ||
142 | 110 | _filter.append(f) | ||
143 | 111 | else: | ||
144 | 112 | logging.debug('Including file, which matches include ' | ||
145 | 113 | 'filters (%s): %s' % (incs, _f)) | ||
146 | 114 | elif (os.path.isfile(_f) and not _f.endswith('.py')): | ||
147 | 115 | logging.debug('Not syncing file: %s' % f) | ||
148 | 116 | _filter.append(f) | ||
149 | 117 | elif (os.path.isdir(_f) and not | ||
150 | 118 | os.path.isfile(os.path.join(_f, '__init__.py'))): | ||
151 | 119 | logging.debug('Not syncing directory: %s' % f) | ||
152 | 120 | _filter.append(f) | ||
153 | 121 | return _filter | ||
154 | 122 | return _filter | ||
155 | 123 | |||
156 | 124 | |||
157 | 125 | def sync_directory(src, dest, opts=None): | ||
158 | 126 | if os.path.exists(dest): | ||
159 | 127 | logging.debug('Removing existing directory: %s' % dest) | ||
160 | 128 | shutil.rmtree(dest) | ||
161 | 129 | logging.info('Syncing directory: %s -> %s.' % (src, dest)) | ||
162 | 130 | |||
163 | 131 | shutil.copytree(src, dest, ignore=get_filter(opts)) | ||
164 | 132 | ensure_init(dest) | ||
165 | 133 | |||
166 | 134 | |||
167 | 135 | def sync(src, dest, module, opts=None): | ||
168 | 136 | |||
169 | 137 | # Sync charmhelpers/__init__.py for bootstrap code. | ||
170 | 138 | sync_pyfile(_src_path(src, '__init__'), dest) | ||
171 | 139 | |||
172 | 140 | # Sync other __init__.py files in the path leading to module. | ||
173 | 141 | m = [] | ||
174 | 142 | steps = module.split('.')[:-1] | ||
175 | 143 | while steps: | ||
176 | 144 | m.append(steps.pop(0)) | ||
177 | 145 | init = '.'.join(m + ['__init__']) | ||
178 | 146 | sync_pyfile(_src_path(src, init), | ||
179 | 147 | os.path.dirname(_dest_path(dest, init))) | ||
180 | 148 | |||
181 | 149 | # Sync the module, or maybe a .py file. | ||
182 | 150 | if os.path.isdir(_src_path(src, module)): | ||
183 | 151 | sync_directory(_src_path(src, module), _dest_path(dest, module), opts) | ||
184 | 152 | elif _is_pyfile(_src_path(src, module)): | ||
185 | 153 | sync_pyfile(_src_path(src, module), | ||
186 | 154 | os.path.dirname(_dest_path(dest, module))) | ||
187 | 155 | else: | ||
188 | 156 | logging.warn('Could not sync: %s. Neither a pyfile or directory, ' | ||
189 | 157 | 'does it even exist?' % module) | ||
190 | 158 | |||
191 | 159 | |||
192 | 160 | def parse_sync_options(options): | ||
193 | 161 | if not options: | ||
194 | 162 | return [] | ||
195 | 163 | return options.split(',') | ||
196 | 164 | |||
197 | 165 | |||
198 | 166 | def extract_options(inc, global_options=None): | ||
199 | 167 | global_options = global_options or [] | ||
200 | 168 | if global_options and isinstance(global_options, six.string_types): | ||
201 | 169 | global_options = [global_options] | ||
202 | 170 | if '|' not in inc: | ||
203 | 171 | return (inc, global_options) | ||
204 | 172 | inc, opts = inc.split('|') | ||
205 | 173 | return (inc, parse_sync_options(opts) + global_options) | ||
206 | 174 | |||
207 | 175 | |||
208 | 176 | def sync_helpers(include, src, dest, options=None): | ||
209 | 177 | if not os.path.isdir(dest): | ||
210 | 178 | os.makedirs(dest) | ||
211 | 179 | |||
212 | 180 | global_options = parse_sync_options(options) | ||
213 | 181 | |||
214 | 182 | for inc in include: | ||
215 | 183 | if isinstance(inc, str): | ||
216 | 184 | inc, opts = extract_options(inc, global_options) | ||
217 | 185 | sync(src, dest, inc, opts) | ||
218 | 186 | elif isinstance(inc, dict): | ||
219 | 187 | # could also do nested dicts here. | ||
220 | 188 | for k, v in six.iteritems(inc): | ||
221 | 189 | if isinstance(v, list): | ||
222 | 190 | for m in v: | ||
223 | 191 | inc, opts = extract_options(m, global_options) | ||
224 | 192 | sync(src, dest, '%s.%s' % (k, inc), opts) | ||
225 | 193 | |||
226 | 194 | |||
227 | 195 | if __name__ == '__main__': | ||
228 | 196 | parser = optparse.OptionParser() | ||
229 | 197 | parser.add_option('-c', '--config', action='store', dest='config', | ||
230 | 198 | default=None, help='helper config file') | ||
231 | 199 | parser.add_option('-D', '--debug', action='store_true', dest='debug', | ||
232 | 200 | default=False, help='debug') | ||
233 | 201 | parser.add_option('-b', '--branch', action='store', dest='branch', | ||
234 | 202 | help='charm-helpers bzr branch (overrides config)') | ||
235 | 203 | parser.add_option('-d', '--destination', action='store', dest='dest_dir', | ||
236 | 204 | help='sync destination dir (overrides config)') | ||
237 | 205 | (opts, args) = parser.parse_args() | ||
238 | 206 | |||
239 | 207 | if opts.debug: | ||
240 | 208 | logging.basicConfig(level=logging.DEBUG) | ||
241 | 209 | else: | ||
242 | 210 | logging.basicConfig(level=logging.INFO) | ||
243 | 211 | |||
244 | 212 | if opts.config: | ||
245 | 213 | logging.info('Loading charm helper config from %s.' % opts.config) | ||
246 | 214 | config = parse_config(opts.config) | ||
247 | 215 | if not config: | ||
248 | 216 | logging.error('Could not parse config from %s.' % opts.config) | ||
249 | 217 | sys.exit(1) | ||
250 | 218 | else: | ||
251 | 219 | config = {} | ||
252 | 220 | |||
253 | 221 | if 'branch' not in config: | ||
254 | 222 | config['branch'] = CHARM_HELPERS_BRANCH | ||
255 | 223 | if opts.branch: | ||
256 | 224 | config['branch'] = opts.branch | ||
257 | 225 | if opts.dest_dir: | ||
258 | 226 | config['destination'] = opts.dest_dir | ||
259 | 227 | |||
260 | 228 | if 'destination' not in config: | ||
261 | 229 | logging.error('No destination dir. specified as option or config.') | ||
262 | 230 | sys.exit(1) | ||
263 | 231 | |||
264 | 232 | if 'include' not in config: | ||
265 | 233 | if not args: | ||
266 | 234 | logging.error('No modules to sync specified as option or config.') | ||
267 | 235 | sys.exit(1) | ||
268 | 236 | config['include'] = [] | ||
269 | 237 | [config['include'].append(a) for a in args] | ||
270 | 238 | |||
271 | 239 | sync_options = None | ||
272 | 240 | if 'options' in config: | ||
273 | 241 | sync_options = config['options'] | ||
274 | 242 | tmpd = tempfile.mkdtemp() | ||
275 | 243 | try: | ||
276 | 244 | checkout = clone_helpers(tmpd, config['branch']) | ||
277 | 245 | sync_helpers(config['include'], checkout, config['destination'], | ||
278 | 246 | options=sync_options) | ||
279 | 247 | except Exception as e: | ||
280 | 248 | logging.error("Could not sync: %s" % e) | ||
281 | 249 | raise e | ||
282 | 250 | finally: | ||
283 | 251 | logging.debug('Cleaning up %s' % tmpd) | ||
284 | 252 | shutil.rmtree(tmpd) | ||
285 | diff --git a/charm-helpers.yaml b/charm-helpers.yaml | |||
286 | index 4c97181..e5f7760 100644 | |||
287 | --- a/charm-helpers.yaml | |||
288 | +++ b/charm-helpers.yaml | |||
289 | @@ -1,6 +1,7 @@ | |||
290 | 1 | destination: hooks/charmhelpers | 1 | destination: hooks/charmhelpers |
292 | 2 | branch: lp:~openstack-charmers/charm-helpers/ssl-everywhere | 2 | branch: lp:charm-helpers |
293 | 3 | include: | 3 | include: |
294 | 4 | - core | 4 | - core |
295 | 5 | - fetch | 5 | - fetch |
296 | 6 | - osplatform | ||
297 | 6 | - contrib.ssl | 7 | - contrib.ssl |
298 | diff --git a/config.yaml b/config.yaml | |||
299 | index 3eb834a..77dc2a1 100644 | |||
300 | --- a/config.yaml | |||
301 | +++ b/config.yaml | |||
302 | @@ -144,3 +144,24 @@ options: | |||
303 | 144 | Password to use for Nagios administrative access. If not | 144 | Password to use for Nagios administrative access. If not |
304 | 145 | provided, a password will be generated (see documentation for | 145 | provided, a password will be generated (see documentation for |
305 | 146 | instructions on retrieving the generated password.) | 146 | instructions on retrieving the generated password.) |
306 | 147 | monitor_self: | ||
307 | 148 | type: boolean | ||
308 | 149 | default: true | ||
309 | 150 | description: | | ||
310 | 151 | If true, enable monitoring of the nagios unit itself. | ||
311 | 152 | nagios_host_context: | ||
312 | 153 | default: "juju" | ||
313 | 154 | type: string | ||
314 | 155 | description: | | ||
315 | 156 | a string that will be prepended to instance name to set the host name | ||
316 | 157 | in nagios. So for instance the hostname would be something like: | ||
317 | 158 | juju-postgresql-0 | ||
318 | 159 | If you're running multiple environments with the same services in them | ||
319 | 160 | this allows you to differentiate between them. | ||
320 | 161 | load_monitor: | ||
321 | 162 | default: '5.0!4.0!3.0!10.0!6.0!4.0' | ||
322 | 163 | type: string | ||
323 | 164 | description: | | ||
324 | 165 | A string to pass to the Nagios load monitoring command. Default is | ||
325 | 166 | to report warning at 5.0, 4.0 and 3.0 averages, critical at 10.0, | ||
326 | 167 | 6.0 and 4.0. | ||
327 | diff --git a/hooks/charmhelpers/__init__.py b/hooks/charmhelpers/__init__.py | |||
328 | index e69de29..e7aa471 100644 | |||
329 | --- a/hooks/charmhelpers/__init__.py | |||
330 | +++ b/hooks/charmhelpers/__init__.py | |||
331 | @@ -0,0 +1,97 @@ | |||
332 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
333 | 2 | # | ||
334 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
335 | 4 | # you may not use this file except in compliance with the License. | ||
336 | 5 | # You may obtain a copy of the License at | ||
337 | 6 | # | ||
338 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
339 | 8 | # | ||
340 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
341 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
342 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
343 | 12 | # See the License for the specific language governing permissions and | ||
344 | 13 | # limitations under the License. | ||
345 | 14 | |||
346 | 15 | # Bootstrap charm-helpers, installing its dependencies if necessary using | ||
347 | 16 | # only standard libraries. | ||
348 | 17 | from __future__ import print_function | ||
349 | 18 | from __future__ import absolute_import | ||
350 | 19 | |||
351 | 20 | import functools | ||
352 | 21 | import inspect | ||
353 | 22 | import subprocess | ||
354 | 23 | import sys | ||
355 | 24 | |||
356 | 25 | try: | ||
357 | 26 | import six # flake8: noqa | ||
358 | 27 | except ImportError: | ||
359 | 28 | if sys.version_info.major == 2: | ||
360 | 29 | subprocess.check_call(['apt-get', 'install', '-y', 'python-six']) | ||
361 | 30 | else: | ||
362 | 31 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-six']) | ||
363 | 32 | import six # flake8: noqa | ||
364 | 33 | |||
365 | 34 | try: | ||
366 | 35 | import yaml # flake8: noqa | ||
367 | 36 | except ImportError: | ||
368 | 37 | if sys.version_info.major == 2: | ||
369 | 38 | subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml']) | ||
370 | 39 | else: | ||
371 | 40 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml']) | ||
372 | 41 | import yaml # flake8: noqa | ||
373 | 42 | |||
374 | 43 | |||
375 | 44 | # Holds a list of mapping of mangled function names that have been deprecated | ||
376 | 45 | # using the @deprecate decorator below. This is so that the warning is only | ||
377 | 46 | # printed once for each usage of the function. | ||
378 | 47 | __deprecated_functions = {} | ||
379 | 48 | |||
380 | 49 | |||
381 | 50 | def deprecate(warning, date=None, log=None): | ||
382 | 51 | """Add a deprecation warning the first time the function is used. | ||
383 | 52 | The date, which is a string in semi-ISO8660 format indicate the year-month | ||
384 | 53 | that the function is officially going to be removed. | ||
385 | 54 | |||
386 | 55 | usage: | ||
387 | 56 | |||
388 | 57 | @deprecate('use core/fetch/add_source() instead', '2017-04') | ||
389 | 58 | def contributed_add_source_thing(...): | ||
390 | 59 | ... | ||
391 | 60 | |||
392 | 61 | And it then prints to the log ONCE that the function is deprecated. | ||
393 | 62 | The reason for passing the logging function (log) is so that hookenv.log | ||
394 | 63 | can be used for a charm if needed. | ||
395 | 64 | |||
396 | 65 | :param warning: String to indicate where it has moved to. | ||
397 | 66 | :param date: optional string, in YYYY-MM format to indicate when the | ||
398 | 67 | function will definitely (probably) be removed. | ||
399 | 68 | :param log: The log function to call to log. If not, logs to stdout | ||
400 | 69 | """ | ||
401 | 70 | def wrap(f): | ||
402 | 71 | |||
403 | 72 | @functools.wraps(f) | ||
404 | 73 | def wrapped_f(*args, **kwargs): | ||
405 | 74 | try: | ||
406 | 75 | module = inspect.getmodule(f) | ||
407 | 76 | file = inspect.getsourcefile(f) | ||
408 | 77 | lines = inspect.getsourcelines(f) | ||
409 | 78 | f_name = "{}-{}-{}..{}-{}".format( | ||
410 | 79 | module.__name__, file, lines[0], lines[-1], f.__name__) | ||
411 | 80 | except (IOError, TypeError): | ||
412 | 81 | # assume it was local, so just use the name of the function | ||
413 | 82 | f_name = f.__name__ | ||
414 | 83 | if f_name not in __deprecated_functions: | ||
415 | 84 | __deprecated_functions[f_name] = True | ||
416 | 85 | s = "DEPRECATION WARNING: Function {} is being removed".format( | ||
417 | 86 | f.__name__) | ||
418 | 87 | if date: | ||
419 | 88 | s = "{} on/around {}".format(s, date) | ||
420 | 89 | if warning: | ||
421 | 90 | s = "{} : {}".format(s, warning) | ||
422 | 91 | if log: | ||
423 | 92 | log(s) | ||
424 | 93 | else: | ||
425 | 94 | print(s) | ||
426 | 95 | return f(*args, **kwargs) | ||
427 | 96 | return wrapped_f | ||
428 | 97 | return wrap | ||
429 | diff --git a/hooks/charmhelpers/contrib/__init__.py b/hooks/charmhelpers/contrib/__init__.py | |||
430 | index e69de29..d7567b8 100644 | |||
431 | --- a/hooks/charmhelpers/contrib/__init__.py | |||
432 | +++ b/hooks/charmhelpers/contrib/__init__.py | |||
433 | @@ -0,0 +1,13 @@ | |||
434 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
435 | 2 | # | ||
436 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
437 | 4 | # you may not use this file except in compliance with the License. | ||
438 | 5 | # You may obtain a copy of the License at | ||
439 | 6 | # | ||
440 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
441 | 8 | # | ||
442 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
443 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
444 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
445 | 12 | # See the License for the specific language governing permissions and | ||
446 | 13 | # limitations under the License. | ||
447 | diff --git a/hooks/charmhelpers/contrib/ssl/__init__.py b/hooks/charmhelpers/contrib/ssl/__init__.py | |||
448 | index 2999c0a..1d238b5 100644 | |||
449 | --- a/hooks/charmhelpers/contrib/ssl/__init__.py | |||
450 | +++ b/hooks/charmhelpers/contrib/ssl/__init__.py | |||
451 | @@ -1,3 +1,17 @@ | |||
452 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
453 | 2 | # | ||
454 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
455 | 4 | # you may not use this file except in compliance with the License. | ||
456 | 5 | # You may obtain a copy of the License at | ||
457 | 6 | # | ||
458 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
459 | 8 | # | ||
460 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
461 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
462 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
463 | 12 | # See the License for the specific language governing permissions and | ||
464 | 13 | # limitations under the License. | ||
465 | 14 | |||
466 | 1 | import subprocess | 15 | import subprocess |
467 | 2 | from charmhelpers.core import hookenv | 16 | from charmhelpers.core import hookenv |
468 | 3 | 17 | ||
469 | @@ -74,5 +88,5 @@ def generate_selfsigned(keyfile, certfile, keysize="1024", config=None, subject= | |||
470 | 74 | subprocess.check_call(cmd) | 88 | subprocess.check_call(cmd) |
471 | 75 | return True | 89 | return True |
472 | 76 | except Exception as e: | 90 | except Exception as e: |
474 | 77 | print "Execution of openssl command failed:\n{}".format(e) | 91 | print("Execution of openssl command failed:\n{}".format(e)) |
475 | 78 | return False | 92 | return False |
476 | diff --git a/hooks/charmhelpers/contrib/ssl/service.py b/hooks/charmhelpers/contrib/ssl/service.py | |||
477 | index 295f721..06b534f 100644 | |||
478 | --- a/hooks/charmhelpers/contrib/ssl/service.py | |||
479 | +++ b/hooks/charmhelpers/contrib/ssl/service.py | |||
480 | @@ -1,13 +1,23 @@ | |||
482 | 1 | import logging | 1 | # Copyright 2014-2015 Canonical Limited. |
483 | 2 | # | ||
484 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
485 | 4 | # you may not use this file except in compliance with the License. | ||
486 | 5 | # You may obtain a copy of the License at | ||
487 | 6 | # | ||
488 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
489 | 8 | # | ||
490 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
491 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
492 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
493 | 12 | # See the License for the specific language governing permissions and | ||
494 | 13 | # limitations under the License. | ||
495 | 14 | |||
496 | 2 | import os | 15 | import os |
497 | 3 | from os.path import join as path_join | 16 | from os.path import join as path_join |
498 | 4 | from os.path import exists | 17 | from os.path import exists |
499 | 5 | import subprocess | 18 | import subprocess |
500 | 6 | 19 | ||
505 | 7 | 20 | from charmhelpers.core.hookenv import log, DEBUG | |
502 | 8 | log = logging.getLogger("service_ca") | ||
503 | 9 | |||
504 | 10 | logging.basicConfig(level=logging.DEBUG) | ||
506 | 11 | 21 | ||
507 | 12 | STD_CERT = "standard" | 22 | STD_CERT = "standard" |
508 | 13 | 23 | ||
509 | @@ -46,7 +56,7 @@ class ServiceCA(object): | |||
510 | 46 | ############### | 56 | ############### |
511 | 47 | 57 | ||
512 | 48 | def init(self): | 58 | def init(self): |
514 | 49 | log.debug("initializing service ca") | 59 | log("initializing service ca", level=DEBUG) |
515 | 50 | if not exists(self.ca_dir): | 60 | if not exists(self.ca_dir): |
516 | 51 | self._init_ca_dir(self.ca_dir) | 61 | self._init_ca_dir(self.ca_dir) |
517 | 52 | self._init_ca() | 62 | self._init_ca() |
518 | @@ -75,23 +85,23 @@ class ServiceCA(object): | |||
519 | 75 | os.mkdir(sd) | 85 | os.mkdir(sd) |
520 | 76 | 86 | ||
521 | 77 | if not exists(path_join(ca_dir, 'serial')): | 87 | if not exists(path_join(ca_dir, 'serial')): |
523 | 78 | with open(path_join(ca_dir, 'serial'), 'wb') as fh: | 88 | with open(path_join(ca_dir, 'serial'), 'w') as fh: |
524 | 79 | fh.write('02\n') | 89 | fh.write('02\n') |
525 | 80 | 90 | ||
526 | 81 | if not exists(path_join(ca_dir, 'index.txt')): | 91 | if not exists(path_join(ca_dir, 'index.txt')): |
528 | 82 | with open(path_join(ca_dir, 'index.txt'), 'wb') as fh: | 92 | with open(path_join(ca_dir, 'index.txt'), 'w') as fh: |
529 | 83 | fh.write('') | 93 | fh.write('') |
530 | 84 | 94 | ||
531 | 85 | def _init_ca(self): | 95 | def _init_ca(self): |
532 | 86 | """Generate the root ca's cert and key. | 96 | """Generate the root ca's cert and key. |
533 | 87 | """ | 97 | """ |
534 | 88 | if not exists(path_join(self.ca_dir, 'ca.cnf')): | 98 | if not exists(path_join(self.ca_dir, 'ca.cnf')): |
536 | 89 | with open(path_join(self.ca_dir, 'ca.cnf'), 'wb') as fh: | 99 | with open(path_join(self.ca_dir, 'ca.cnf'), 'w') as fh: |
537 | 90 | fh.write( | 100 | fh.write( |
538 | 91 | CA_CONF_TEMPLATE % (self.get_conf_variables())) | 101 | CA_CONF_TEMPLATE % (self.get_conf_variables())) |
539 | 92 | 102 | ||
540 | 93 | if not exists(path_join(self.ca_dir, 'signing.cnf')): | 103 | if not exists(path_join(self.ca_dir, 'signing.cnf')): |
542 | 94 | with open(path_join(self.ca_dir, 'signing.cnf'), 'wb') as fh: | 104 | with open(path_join(self.ca_dir, 'signing.cnf'), 'w') as fh: |
543 | 95 | fh.write( | 105 | fh.write( |
544 | 96 | SIGNING_CONF_TEMPLATE % (self.get_conf_variables())) | 106 | SIGNING_CONF_TEMPLATE % (self.get_conf_variables())) |
545 | 97 | 107 | ||
546 | @@ -103,7 +113,7 @@ class ServiceCA(object): | |||
547 | 103 | '-keyout', self.ca_key, '-out', self.ca_cert, | 113 | '-keyout', self.ca_key, '-out', self.ca_cert, |
548 | 104 | '-outform', 'PEM'] | 114 | '-outform', 'PEM'] |
549 | 105 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) | 115 | output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) |
551 | 106 | log.debug("CA Init:\n %s", output) | 116 | log("CA Init:\n %s" % output, level=DEBUG) |
552 | 107 | 117 | ||
553 | 108 | def get_conf_variables(self): | 118 | def get_conf_variables(self): |
554 | 109 | return dict( | 119 | return dict( |
555 | @@ -127,7 +137,7 @@ class ServiceCA(object): | |||
556 | 127 | return self.get_certificate(common_name) | 137 | return self.get_certificate(common_name) |
557 | 128 | 138 | ||
558 | 129 | def get_certificate(self, common_name): | 139 | def get_certificate(self, common_name): |
560 | 130 | if not common_name in self: | 140 | if common_name not in self: |
561 | 131 | raise ValueError("No certificate for %s" % common_name) | 141 | raise ValueError("No certificate for %s" % common_name) |
562 | 132 | key_p = path_join(self.ca_dir, "certs", "%s.key" % common_name) | 142 | key_p = path_join(self.ca_dir, "certs", "%s.key" % common_name) |
563 | 133 | crt_p = path_join(self.ca_dir, "certs", "%s.crt" % common_name) | 143 | crt_p = path_join(self.ca_dir, "certs", "%s.crt" % common_name) |
564 | @@ -147,15 +157,15 @@ class ServiceCA(object): | |||
565 | 147 | subj = '/O=%(org_name)s/OU=%(org_unit_name)s/CN=%(common_name)s' % ( | 157 | subj = '/O=%(org_name)s/OU=%(org_unit_name)s/CN=%(common_name)s' % ( |
566 | 148 | template_vars) | 158 | template_vars) |
567 | 149 | 159 | ||
569 | 150 | log.debug("CA Create Cert %s", common_name) | 160 | log("CA Create Cert %s" % common_name, level=DEBUG) |
570 | 151 | cmd = ['openssl', 'req', '-sha1', '-newkey', 'rsa:2048', | 161 | cmd = ['openssl', 'req', '-sha1', '-newkey', 'rsa:2048', |
571 | 152 | '-nodes', '-days', self.default_expiry, | 162 | '-nodes', '-days', self.default_expiry, |
572 | 153 | '-keyout', key_p, '-out', csr_p, '-subj', subj] | 163 | '-keyout', key_p, '-out', csr_p, '-subj', subj] |
574 | 154 | subprocess.check_call(cmd) | 164 | subprocess.check_call(cmd, stderr=subprocess.PIPE) |
575 | 155 | cmd = ['openssl', 'rsa', '-in', key_p, '-out', key_p] | 165 | cmd = ['openssl', 'rsa', '-in', key_p, '-out', key_p] |
577 | 156 | subprocess.check_call(cmd) | 166 | subprocess.check_call(cmd, stderr=subprocess.PIPE) |
578 | 157 | 167 | ||
580 | 158 | log.debug("CA Sign Cert %s", common_name) | 168 | log("CA Sign Cert %s" % common_name, level=DEBUG) |
581 | 159 | if self.cert_type == MYSQL_CERT: | 169 | if self.cert_type == MYSQL_CERT: |
582 | 160 | cmd = ['openssl', 'x509', '-req', | 170 | cmd = ['openssl', 'x509', '-req', |
583 | 161 | '-in', csr_p, '-days', self.default_expiry, | 171 | '-in', csr_p, '-days', self.default_expiry, |
584 | @@ -166,8 +176,8 @@ class ServiceCA(object): | |||
585 | 166 | '-extensions', 'req_extensions', | 176 | '-extensions', 'req_extensions', |
586 | 167 | '-days', self.default_expiry, '-notext', | 177 | '-days', self.default_expiry, '-notext', |
587 | 168 | '-in', csr_p, '-out', crt_p, '-subj', subj, '-batch'] | 178 | '-in', csr_p, '-out', crt_p, '-subj', subj, '-batch'] |
590 | 169 | log.debug("running %s", " ".join(cmd)) | 179 | log("running %s" % " ".join(cmd), level=DEBUG) |
591 | 170 | subprocess.check_call(cmd) | 180 | subprocess.check_call(cmd, stderr=subprocess.PIPE) |
592 | 171 | 181 | ||
593 | 172 | def get_ca_bundle(self): | 182 | def get_ca_bundle(self): |
594 | 173 | with open(self.ca_cert) as fh: | 183 | with open(self.ca_cert) as fh: |
595 | diff --git a/hooks/charmhelpers/core/__init__.py b/hooks/charmhelpers/core/__init__.py | |||
596 | index e69de29..d7567b8 100644 | |||
597 | --- a/hooks/charmhelpers/core/__init__.py | |||
598 | +++ b/hooks/charmhelpers/core/__init__.py | |||
599 | @@ -0,0 +1,13 @@ | |||
600 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
601 | 2 | # | ||
602 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
603 | 4 | # you may not use this file except in compliance with the License. | ||
604 | 5 | # You may obtain a copy of the License at | ||
605 | 6 | # | ||
606 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
607 | 8 | # | ||
608 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
609 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
610 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
611 | 12 | # See the License for the specific language governing permissions and | ||
612 | 13 | # limitations under the License. | ||
613 | diff --git a/hooks/charmhelpers/core/decorators.py b/hooks/charmhelpers/core/decorators.py | |||
614 | 0 | new file mode 100644 | 14 | new file mode 100644 |
615 | index 0000000..6ad41ee | |||
616 | --- /dev/null | |||
617 | +++ b/hooks/charmhelpers/core/decorators.py | |||
618 | @@ -0,0 +1,55 @@ | |||
619 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
620 | 2 | # | ||
621 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
622 | 4 | # you may not use this file except in compliance with the License. | ||
623 | 5 | # You may obtain a copy of the License at | ||
624 | 6 | # | ||
625 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
626 | 8 | # | ||
627 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
628 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
629 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
630 | 12 | # See the License for the specific language governing permissions and | ||
631 | 13 | # limitations under the License. | ||
632 | 14 | |||
633 | 15 | # | ||
634 | 16 | # Copyright 2014 Canonical Ltd. | ||
635 | 17 | # | ||
636 | 18 | # Authors: | ||
637 | 19 | # Edward Hope-Morley <opentastic@gmail.com> | ||
638 | 20 | # | ||
639 | 21 | |||
640 | 22 | import time | ||
641 | 23 | |||
642 | 24 | from charmhelpers.core.hookenv import ( | ||
643 | 25 | log, | ||
644 | 26 | INFO, | ||
645 | 27 | ) | ||
646 | 28 | |||
647 | 29 | |||
648 | 30 | def retry_on_exception(num_retries, base_delay=0, exc_type=Exception): | ||
649 | 31 | """If the decorated function raises exception exc_type, allow num_retries | ||
650 | 32 | retry attempts before raise the exception. | ||
651 | 33 | """ | ||
652 | 34 | def _retry_on_exception_inner_1(f): | ||
653 | 35 | def _retry_on_exception_inner_2(*args, **kwargs): | ||
654 | 36 | retries = num_retries | ||
655 | 37 | multiplier = 1 | ||
656 | 38 | while True: | ||
657 | 39 | try: | ||
658 | 40 | return f(*args, **kwargs) | ||
659 | 41 | except exc_type: | ||
660 | 42 | if not retries: | ||
661 | 43 | raise | ||
662 | 44 | |||
663 | 45 | delay = base_delay * multiplier | ||
664 | 46 | multiplier += 1 | ||
665 | 47 | log("Retrying '%s' %d more times (delay=%s)" % | ||
666 | 48 | (f.__name__, retries, delay), level=INFO) | ||
667 | 49 | retries -= 1 | ||
668 | 50 | if delay: | ||
669 | 51 | time.sleep(delay) | ||
670 | 52 | |||
671 | 53 | return _retry_on_exception_inner_2 | ||
672 | 54 | |||
673 | 55 | return _retry_on_exception_inner_1 | ||
674 | diff --git a/hooks/charmhelpers/core/files.py b/hooks/charmhelpers/core/files.py | |||
675 | 0 | new file mode 100644 | 56 | new file mode 100644 |
676 | index 0000000..fdd82b7 | |||
677 | --- /dev/null | |||
678 | +++ b/hooks/charmhelpers/core/files.py | |||
679 | @@ -0,0 +1,43 @@ | |||
680 | 1 | #!/usr/bin/env python | ||
681 | 2 | # -*- coding: utf-8 -*- | ||
682 | 3 | |||
683 | 4 | # Copyright 2014-2015 Canonical Limited. | ||
684 | 5 | # | ||
685 | 6 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
686 | 7 | # you may not use this file except in compliance with the License. | ||
687 | 8 | # You may obtain a copy of the License at | ||
688 | 9 | # | ||
689 | 10 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
690 | 11 | # | ||
691 | 12 | # Unless required by applicable law or agreed to in writing, software | ||
692 | 13 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
693 | 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
694 | 15 | # See the License for the specific language governing permissions and | ||
695 | 16 | # limitations under the License. | ||
696 | 17 | |||
697 | 18 | __author__ = 'Jorge Niedbalski <niedbalski@ubuntu.com>' | ||
698 | 19 | |||
699 | 20 | import os | ||
700 | 21 | import subprocess | ||
701 | 22 | |||
702 | 23 | |||
703 | 24 | def sed(filename, before, after, flags='g'): | ||
704 | 25 | """ | ||
705 | 26 | Search and replaces the given pattern on filename. | ||
706 | 27 | |||
707 | 28 | :param filename: relative or absolute file path. | ||
708 | 29 | :param before: expression to be replaced (see 'man sed') | ||
709 | 30 | :param after: expression to replace with (see 'man sed') | ||
710 | 31 | :param flags: sed-compatible regex flags in example, to make | ||
711 | 32 | the search and replace case insensitive, specify ``flags="i"``. | ||
712 | 33 | The ``g`` flag is always specified regardless, so you do not | ||
713 | 34 | need to remember to include it when overriding this parameter. | ||
714 | 35 | :returns: If the sed command exit code was zero then return, | ||
715 | 36 | otherwise raise CalledProcessError. | ||
716 | 37 | """ | ||
717 | 38 | expression = r's/{0}/{1}/{2}'.format(before, | ||
718 | 39 | after, flags) | ||
719 | 40 | |||
720 | 41 | return subprocess.check_call(["sed", "-i", "-r", "-e", | ||
721 | 42 | expression, | ||
722 | 43 | os.path.expanduser(filename)]) | ||
723 | diff --git a/hooks/charmhelpers/core/fstab.py b/hooks/charmhelpers/core/fstab.py | |||
724 | 0 | new file mode 100644 | 44 | new file mode 100644 |
725 | index 0000000..d9fa915 | |||
726 | --- /dev/null | |||
727 | +++ b/hooks/charmhelpers/core/fstab.py | |||
728 | @@ -0,0 +1,132 @@ | |||
729 | 1 | #!/usr/bin/env python | ||
730 | 2 | # -*- coding: utf-8 -*- | ||
731 | 3 | |||
732 | 4 | # Copyright 2014-2015 Canonical Limited. | ||
733 | 5 | # | ||
734 | 6 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
735 | 7 | # you may not use this file except in compliance with the License. | ||
736 | 8 | # You may obtain a copy of the License at | ||
737 | 9 | # | ||
738 | 10 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
739 | 11 | # | ||
740 | 12 | # Unless required by applicable law or agreed to in writing, software | ||
741 | 13 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
742 | 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
743 | 15 | # See the License for the specific language governing permissions and | ||
744 | 16 | # limitations under the License. | ||
745 | 17 | |||
746 | 18 | import io | ||
747 | 19 | import os | ||
748 | 20 | |||
749 | 21 | __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>' | ||
750 | 22 | |||
751 | 23 | |||
752 | 24 | class Fstab(io.FileIO): | ||
753 | 25 | """This class extends file in order to implement a file reader/writer | ||
754 | 26 | for file `/etc/fstab` | ||
755 | 27 | """ | ||
756 | 28 | |||
757 | 29 | class Entry(object): | ||
758 | 30 | """Entry class represents a non-comment line on the `/etc/fstab` file | ||
759 | 31 | """ | ||
760 | 32 | def __init__(self, device, mountpoint, filesystem, | ||
761 | 33 | options, d=0, p=0): | ||
762 | 34 | self.device = device | ||
763 | 35 | self.mountpoint = mountpoint | ||
764 | 36 | self.filesystem = filesystem | ||
765 | 37 | |||
766 | 38 | if not options: | ||
767 | 39 | options = "defaults" | ||
768 | 40 | |||
769 | 41 | self.options = options | ||
770 | 42 | self.d = int(d) | ||
771 | 43 | self.p = int(p) | ||
772 | 44 | |||
773 | 45 | def __eq__(self, o): | ||
774 | 46 | return str(self) == str(o) | ||
775 | 47 | |||
776 | 48 | def __str__(self): | ||
777 | 49 | return "{} {} {} {} {} {}".format(self.device, | ||
778 | 50 | self.mountpoint, | ||
779 | 51 | self.filesystem, | ||
780 | 52 | self.options, | ||
781 | 53 | self.d, | ||
782 | 54 | self.p) | ||
783 | 55 | |||
784 | 56 | DEFAULT_PATH = os.path.join(os.path.sep, 'etc', 'fstab') | ||
785 | 57 | |||
786 | 58 | def __init__(self, path=None): | ||
787 | 59 | if path: | ||
788 | 60 | self._path = path | ||
789 | 61 | else: | ||
790 | 62 | self._path = self.DEFAULT_PATH | ||
791 | 63 | super(Fstab, self).__init__(self._path, 'rb+') | ||
792 | 64 | |||
793 | 65 | def _hydrate_entry(self, line): | ||
794 | 66 | # NOTE: use split with no arguments to split on any | ||
795 | 67 | # whitespace including tabs | ||
796 | 68 | return Fstab.Entry(*filter( | ||
797 | 69 | lambda x: x not in ('', None), | ||
798 | 70 | line.strip("\n").split())) | ||
799 | 71 | |||
800 | 72 | @property | ||
801 | 73 | def entries(self): | ||
802 | 74 | self.seek(0) | ||
803 | 75 | for line in self.readlines(): | ||
804 | 76 | line = line.decode('us-ascii') | ||
805 | 77 | try: | ||
806 | 78 | if line.strip() and not line.strip().startswith("#"): | ||
807 | 79 | yield self._hydrate_entry(line) | ||
808 | 80 | except ValueError: | ||
809 | 81 | pass | ||
810 | 82 | |||
811 | 83 | def get_entry_by_attr(self, attr, value): | ||
812 | 84 | for entry in self.entries: | ||
813 | 85 | e_attr = getattr(entry, attr) | ||
814 | 86 | if e_attr == value: | ||
815 | 87 | return entry | ||
816 | 88 | return None | ||
817 | 89 | |||
818 | 90 | def add_entry(self, entry): | ||
819 | 91 | if self.get_entry_by_attr('device', entry.device): | ||
820 | 92 | return False | ||
821 | 93 | |||
822 | 94 | self.write((str(entry) + '\n').encode('us-ascii')) | ||
823 | 95 | self.truncate() | ||
824 | 96 | return entry | ||
825 | 97 | |||
826 | 98 | def remove_entry(self, entry): | ||
827 | 99 | self.seek(0) | ||
828 | 100 | |||
829 | 101 | lines = [l.decode('us-ascii') for l in self.readlines()] | ||
830 | 102 | |||
831 | 103 | found = False | ||
832 | 104 | for index, line in enumerate(lines): | ||
833 | 105 | if line.strip() and not line.strip().startswith("#"): | ||
834 | 106 | if self._hydrate_entry(line) == entry: | ||
835 | 107 | found = True | ||
836 | 108 | break | ||
837 | 109 | |||
838 | 110 | if not found: | ||
839 | 111 | return False | ||
840 | 112 | |||
841 | 113 | lines.remove(line) | ||
842 | 114 | |||
843 | 115 | self.seek(0) | ||
844 | 116 | self.write(''.join(lines).encode('us-ascii')) | ||
845 | 117 | self.truncate() | ||
846 | 118 | return True | ||
847 | 119 | |||
848 | 120 | @classmethod | ||
849 | 121 | def remove_by_mountpoint(cls, mountpoint, path=None): | ||
850 | 122 | fstab = cls(path=path) | ||
851 | 123 | entry = fstab.get_entry_by_attr('mountpoint', mountpoint) | ||
852 | 124 | if entry: | ||
853 | 125 | return fstab.remove_entry(entry) | ||
854 | 126 | return False | ||
855 | 127 | |||
856 | 128 | @classmethod | ||
857 | 129 | def add(cls, device, mountpoint, filesystem, options=None, path=None): | ||
858 | 130 | return cls(path=path).add_entry(Fstab.Entry(device, | ||
859 | 131 | mountpoint, filesystem, | ||
860 | 132 | options=options)) | ||
861 | diff --git a/hooks/charmhelpers/core/hookenv.py b/hooks/charmhelpers/core/hookenv.py | |||
862 | index 505c202..12f37b2 100644 | |||
863 | --- a/hooks/charmhelpers/core/hookenv.py | |||
864 | +++ b/hooks/charmhelpers/core/hookenv.py | |||
865 | @@ -1,22 +1,49 @@ | |||
866 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
867 | 2 | # | ||
868 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
869 | 4 | # you may not use this file except in compliance with the License. | ||
870 | 5 | # You may obtain a copy of the License at | ||
871 | 6 | # | ||
872 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
873 | 8 | # | ||
874 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
875 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
876 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
877 | 12 | # See the License for the specific language governing permissions and | ||
878 | 13 | # limitations under the License. | ||
879 | 14 | |||
880 | 1 | "Interactions with the Juju environment" | 15 | "Interactions with the Juju environment" |
881 | 2 | # Copyright 2013 Canonical Ltd. | 16 | # Copyright 2013 Canonical Ltd. |
882 | 3 | # | 17 | # |
883 | 4 | # Authors: | 18 | # Authors: |
884 | 5 | # Charm Helpers Developers <juju@lists.ubuntu.com> | 19 | # Charm Helpers Developers <juju@lists.ubuntu.com> |
885 | 6 | 20 | ||
886 | 21 | from __future__ import print_function | ||
887 | 22 | import copy | ||
888 | 23 | from distutils.version import LooseVersion | ||
889 | 24 | from functools import wraps | ||
890 | 25 | import glob | ||
891 | 7 | import os | 26 | import os |
892 | 8 | import json | 27 | import json |
893 | 9 | import yaml | 28 | import yaml |
894 | 10 | import subprocess | 29 | import subprocess |
895 | 11 | import sys | 30 | import sys |
897 | 12 | import UserDict | 31 | import errno |
898 | 32 | import tempfile | ||
899 | 13 | from subprocess import CalledProcessError | 33 | from subprocess import CalledProcessError |
900 | 14 | 34 | ||
901 | 35 | import six | ||
902 | 36 | if not six.PY3: | ||
903 | 37 | from UserDict import UserDict | ||
904 | 38 | else: | ||
905 | 39 | from collections import UserDict | ||
906 | 40 | |||
907 | 15 | CRITICAL = "CRITICAL" | 41 | CRITICAL = "CRITICAL" |
908 | 16 | ERROR = "ERROR" | 42 | ERROR = "ERROR" |
909 | 17 | WARNING = "WARNING" | 43 | WARNING = "WARNING" |
910 | 18 | INFO = "INFO" | 44 | INFO = "INFO" |
911 | 19 | DEBUG = "DEBUG" | 45 | DEBUG = "DEBUG" |
912 | 46 | TRACE = "TRACE" | ||
913 | 20 | MARKER = object() | 47 | MARKER = object() |
914 | 21 | 48 | ||
915 | 22 | cache = {} | 49 | cache = {} |
916 | @@ -25,7 +52,7 @@ cache = {} | |||
917 | 25 | def cached(func): | 52 | def cached(func): |
918 | 26 | """Cache return values for multiple executions of func + args | 53 | """Cache return values for multiple executions of func + args |
919 | 27 | 54 | ||
921 | 28 | For example: | 55 | For example:: |
922 | 29 | 56 | ||
923 | 30 | @cached | 57 | @cached |
924 | 31 | def unit_get(attribute): | 58 | def unit_get(attribute): |
925 | @@ -35,15 +62,18 @@ def cached(func): | |||
926 | 35 | 62 | ||
927 | 36 | will cache the result of unit_get + 'test' for future calls. | 63 | will cache the result of unit_get + 'test' for future calls. |
928 | 37 | """ | 64 | """ |
929 | 65 | @wraps(func) | ||
930 | 38 | def wrapper(*args, **kwargs): | 66 | def wrapper(*args, **kwargs): |
931 | 39 | global cache | 67 | global cache |
932 | 40 | key = str((func, args, kwargs)) | 68 | key = str((func, args, kwargs)) |
933 | 41 | try: | 69 | try: |
934 | 42 | return cache[key] | 70 | return cache[key] |
935 | 43 | except KeyError: | 71 | except KeyError: |
939 | 44 | res = func(*args, **kwargs) | 72 | pass # Drop out of the exception handler scope. |
940 | 45 | cache[key] = res | 73 | res = func(*args, **kwargs) |
941 | 46 | return res | 74 | cache[key] = res |
942 | 75 | return res | ||
943 | 76 | wrapper._wrapped = func | ||
944 | 47 | return wrapper | 77 | return wrapper |
945 | 48 | 78 | ||
946 | 49 | 79 | ||
947 | @@ -63,16 +93,29 @@ def log(message, level=None): | |||
948 | 63 | command = ['juju-log'] | 93 | command = ['juju-log'] |
949 | 64 | if level: | 94 | if level: |
950 | 65 | command += ['-l', level] | 95 | command += ['-l', level] |
951 | 96 | if not isinstance(message, six.string_types): | ||
952 | 97 | message = repr(message) | ||
953 | 66 | command += [message] | 98 | command += [message] |
955 | 67 | subprocess.call(command) | 99 | # Missing juju-log should not cause failures in unit tests |
956 | 100 | # Send log output to stderr | ||
957 | 101 | try: | ||
958 | 102 | subprocess.call(command) | ||
959 | 103 | except OSError as e: | ||
960 | 104 | if e.errno == errno.ENOENT: | ||
961 | 105 | if level: | ||
962 | 106 | message = "{}: {}".format(level, message) | ||
963 | 107 | message = "juju-log: {}".format(message) | ||
964 | 108 | print(message, file=sys.stderr) | ||
965 | 109 | else: | ||
966 | 110 | raise | ||
967 | 68 | 111 | ||
968 | 69 | 112 | ||
970 | 70 | class Serializable(UserDict.IterableUserDict): | 113 | class Serializable(UserDict): |
971 | 71 | """Wrapper, an object that can be serialized to yaml or json""" | 114 | """Wrapper, an object that can be serialized to yaml or json""" |
972 | 72 | 115 | ||
973 | 73 | def __init__(self, obj): | 116 | def __init__(self, obj): |
974 | 74 | # wrap the object | 117 | # wrap the object |
976 | 75 | UserDict.IterableUserDict.__init__(self) | 118 | UserDict.__init__(self) |
977 | 76 | self.data = obj | 119 | self.data = obj |
978 | 77 | 120 | ||
979 | 78 | def __getattr__(self, attr): | 121 | def __getattr__(self, attr): |
980 | @@ -130,9 +173,19 @@ def relation_type(): | |||
981 | 130 | return os.environ.get('JUJU_RELATION', None) | 173 | return os.environ.get('JUJU_RELATION', None) |
982 | 131 | 174 | ||
983 | 132 | 175 | ||
987 | 133 | def relation_id(): | 176 | @cached |
988 | 134 | """The relation ID for the current relation hook""" | 177 | def relation_id(relation_name=None, service_or_unit=None): |
989 | 135 | return os.environ.get('JUJU_RELATION_ID', None) | 178 | """The relation ID for the current or a specified relation""" |
990 | 179 | if not relation_name and not service_or_unit: | ||
991 | 180 | return os.environ.get('JUJU_RELATION_ID', None) | ||
992 | 181 | elif relation_name and service_or_unit: | ||
993 | 182 | service_name = service_or_unit.split('/')[0] | ||
994 | 183 | for relid in relation_ids(relation_name): | ||
995 | 184 | remote_service = remote_service_name(relid) | ||
996 | 185 | if remote_service == service_name: | ||
997 | 186 | return relid | ||
998 | 187 | else: | ||
999 | 188 | raise ValueError('Must specify neither or both of relation_name and service_or_unit') | ||
1000 | 136 | 189 | ||
1001 | 137 | 190 | ||
1002 | 138 | def local_unit(): | 191 | def local_unit(): |
1003 | @@ -142,7 +195,7 @@ def local_unit(): | |||
1004 | 142 | 195 | ||
1005 | 143 | def remote_unit(): | 196 | def remote_unit(): |
1006 | 144 | """The remote unit for the current relation hook""" | 197 | """The remote unit for the current relation hook""" |
1008 | 145 | return os.environ['JUJU_REMOTE_UNIT'] | 198 | return os.environ.get('JUJU_REMOTE_UNIT', None) |
1009 | 146 | 199 | ||
1010 | 147 | 200 | ||
1011 | 148 | def service_name(): | 201 | def service_name(): |
1012 | @@ -150,9 +203,149 @@ def service_name(): | |||
1013 | 150 | return local_unit().split('/')[0] | 203 | return local_unit().split('/')[0] |
1014 | 151 | 204 | ||
1015 | 152 | 205 | ||
1016 | 206 | def principal_unit(): | ||
1017 | 207 | """Returns the principal unit of this unit, otherwise None""" | ||
1018 | 208 | # Juju 2.2 and above provides JUJU_PRINCIPAL_UNIT | ||
1019 | 209 | principal_unit = os.environ.get('JUJU_PRINCIPAL_UNIT', None) | ||
1020 | 210 | # If it's empty, then this unit is the principal | ||
1021 | 211 | if principal_unit == '': | ||
1022 | 212 | return os.environ['JUJU_UNIT_NAME'] | ||
1023 | 213 | elif principal_unit is not None: | ||
1024 | 214 | return principal_unit | ||
1025 | 215 | # For Juju 2.1 and below, let's try work out the principle unit by | ||
1026 | 216 | # the various charms' metadata.yaml. | ||
1027 | 217 | for reltype in relation_types(): | ||
1028 | 218 | for rid in relation_ids(reltype): | ||
1029 | 219 | for unit in related_units(rid): | ||
1030 | 220 | md = _metadata_unit(unit) | ||
1031 | 221 | subordinate = md.pop('subordinate', None) | ||
1032 | 222 | if not subordinate: | ||
1033 | 223 | return unit | ||
1034 | 224 | return None | ||
1035 | 225 | |||
1036 | 226 | |||
1037 | 227 | @cached | ||
1038 | 228 | def remote_service_name(relid=None): | ||
1039 | 229 | """The remote service name for a given relation-id (or the current relation)""" | ||
1040 | 230 | if relid is None: | ||
1041 | 231 | unit = remote_unit() | ||
1042 | 232 | else: | ||
1043 | 233 | units = related_units(relid) | ||
1044 | 234 | unit = units[0] if units else None | ||
1045 | 235 | return unit.split('/')[0] if unit else None | ||
1046 | 236 | |||
1047 | 237 | |||
1048 | 153 | def hook_name(): | 238 | def hook_name(): |
1049 | 154 | """The name of the currently executing hook""" | 239 | """The name of the currently executing hook""" |
1051 | 155 | return os.path.basename(sys.argv[0]) | 240 | return os.environ.get('JUJU_HOOK_NAME', os.path.basename(sys.argv[0])) |
1052 | 241 | |||
1053 | 242 | |||
1054 | 243 | class Config(dict): | ||
1055 | 244 | """A dictionary representation of the charm's config.yaml, with some | ||
1056 | 245 | extra features: | ||
1057 | 246 | |||
1058 | 247 | - See which values in the dictionary have changed since the previous hook. | ||
1059 | 248 | - For values that have changed, see what the previous value was. | ||
1060 | 249 | - Store arbitrary data for use in a later hook. | ||
1061 | 250 | |||
1062 | 251 | NOTE: Do not instantiate this object directly - instead call | ||
1063 | 252 | ``hookenv.config()``, which will return an instance of :class:`Config`. | ||
1064 | 253 | |||
1065 | 254 | Example usage:: | ||
1066 | 255 | |||
1067 | 256 | >>> # inside a hook | ||
1068 | 257 | >>> from charmhelpers.core import hookenv | ||
1069 | 258 | >>> config = hookenv.config() | ||
1070 | 259 | >>> config['foo'] | ||
1071 | 260 | 'bar' | ||
1072 | 261 | >>> # store a new key/value for later use | ||
1073 | 262 | >>> config['mykey'] = 'myval' | ||
1074 | 263 | |||
1075 | 264 | |||
1076 | 265 | >>> # user runs `juju set mycharm foo=baz` | ||
1077 | 266 | >>> # now we're inside subsequent config-changed hook | ||
1078 | 267 | >>> config = hookenv.config() | ||
1079 | 268 | >>> config['foo'] | ||
1080 | 269 | 'baz' | ||
1081 | 270 | >>> # test to see if this val has changed since last hook | ||
1082 | 271 | >>> config.changed('foo') | ||
1083 | 272 | True | ||
1084 | 273 | >>> # what was the previous value? | ||
1085 | 274 | >>> config.previous('foo') | ||
1086 | 275 | 'bar' | ||
1087 | 276 | >>> # keys/values that we add are preserved across hooks | ||
1088 | 277 | >>> config['mykey'] | ||
1089 | 278 | 'myval' | ||
1090 | 279 | |||
1091 | 280 | """ | ||
1092 | 281 | CONFIG_FILE_NAME = '.juju-persistent-config' | ||
1093 | 282 | |||
1094 | 283 | def __init__(self, *args, **kw): | ||
1095 | 284 | super(Config, self).__init__(*args, **kw) | ||
1096 | 285 | self.implicit_save = True | ||
1097 | 286 | self._prev_dict = None | ||
1098 | 287 | self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME) | ||
1099 | 288 | if os.path.exists(self.path): | ||
1100 | 289 | self.load_previous() | ||
1101 | 290 | atexit(self._implicit_save) | ||
1102 | 291 | |||
1103 | 292 | def load_previous(self, path=None): | ||
1104 | 293 | """Load previous copy of config from disk. | ||
1105 | 294 | |||
1106 | 295 | In normal usage you don't need to call this method directly - it | ||
1107 | 296 | is called automatically at object initialization. | ||
1108 | 297 | |||
1109 | 298 | :param path: | ||
1110 | 299 | |||
1111 | 300 | File path from which to load the previous config. If `None`, | ||
1112 | 301 | config is loaded from the default location. If `path` is | ||
1113 | 302 | specified, subsequent `save()` calls will write to the same | ||
1114 | 303 | path. | ||
1115 | 304 | |||
1116 | 305 | """ | ||
1117 | 306 | self.path = path or self.path | ||
1118 | 307 | with open(self.path) as f: | ||
1119 | 308 | self._prev_dict = json.load(f) | ||
1120 | 309 | for k, v in copy.deepcopy(self._prev_dict).items(): | ||
1121 | 310 | if k not in self: | ||
1122 | 311 | self[k] = v | ||
1123 | 312 | |||
1124 | 313 | def changed(self, key): | ||
1125 | 314 | """Return True if the current value for this key is different from | ||
1126 | 315 | the previous value. | ||
1127 | 316 | |||
1128 | 317 | """ | ||
1129 | 318 | if self._prev_dict is None: | ||
1130 | 319 | return True | ||
1131 | 320 | return self.previous(key) != self.get(key) | ||
1132 | 321 | |||
1133 | 322 | def previous(self, key): | ||
1134 | 323 | """Return previous value for this key, or None if there | ||
1135 | 324 | is no previous value. | ||
1136 | 325 | |||
1137 | 326 | """ | ||
1138 | 327 | if self._prev_dict: | ||
1139 | 328 | return self._prev_dict.get(key) | ||
1140 | 329 | return None | ||
1141 | 330 | |||
1142 | 331 | def save(self): | ||
1143 | 332 | """Save this config to disk. | ||
1144 | 333 | |||
1145 | 334 | If the charm is using the :mod:`Services Framework <services.base>` | ||
1146 | 335 | or :meth:'@hook <Hooks.hook>' decorator, this | ||
1147 | 336 | is called automatically at the end of successful hook execution. | ||
1148 | 337 | Otherwise, it should be called directly by user code. | ||
1149 | 338 | |||
1150 | 339 | To disable automatic saves, set ``implicit_save=False`` on this | ||
1151 | 340 | instance. | ||
1152 | 341 | |||
1153 | 342 | """ | ||
1154 | 343 | with open(self.path, 'w') as f: | ||
1155 | 344 | json.dump(self, f) | ||
1156 | 345 | |||
1157 | 346 | def _implicit_save(self): | ||
1158 | 347 | if self.implicit_save: | ||
1159 | 348 | self.save() | ||
1160 | 156 | 349 | ||
1161 | 157 | 350 | ||
1162 | 158 | @cached | 351 | @cached |
1163 | @@ -161,9 +354,15 @@ def config(scope=None): | |||
1164 | 161 | config_cmd_line = ['config-get'] | 354 | config_cmd_line = ['config-get'] |
1165 | 162 | if scope is not None: | 355 | if scope is not None: |
1166 | 163 | config_cmd_line.append(scope) | 356 | config_cmd_line.append(scope) |
1167 | 357 | else: | ||
1168 | 358 | config_cmd_line.append('--all') | ||
1169 | 164 | config_cmd_line.append('--format=json') | 359 | config_cmd_line.append('--format=json') |
1170 | 165 | try: | 360 | try: |
1172 | 166 | return json.loads(subprocess.check_output(config_cmd_line)) | 361 | config_data = json.loads( |
1173 | 362 | subprocess.check_output(config_cmd_line).decode('UTF-8')) | ||
1174 | 363 | if scope is not None: | ||
1175 | 364 | return config_data | ||
1176 | 365 | return Config(config_data) | ||
1177 | 167 | except ValueError: | 366 | except ValueError: |
1178 | 168 | return None | 367 | return None |
1179 | 169 | 368 | ||
1180 | @@ -179,30 +378,62 @@ def relation_get(attribute=None, unit=None, rid=None): | |||
1181 | 179 | if unit: | 378 | if unit: |
1182 | 180 | _args.append(unit) | 379 | _args.append(unit) |
1183 | 181 | try: | 380 | try: |
1185 | 182 | return json.loads(subprocess.check_output(_args)) | 381 | return json.loads(subprocess.check_output(_args).decode('UTF-8')) |
1186 | 183 | except ValueError: | 382 | except ValueError: |
1187 | 184 | return None | 383 | return None |
1189 | 185 | except CalledProcessError, e: | 384 | except CalledProcessError as e: |
1190 | 186 | if e.returncode == 2: | 385 | if e.returncode == 2: |
1191 | 187 | return None | 386 | return None |
1192 | 188 | raise | 387 | raise |
1193 | 189 | 388 | ||
1194 | 190 | 389 | ||
1196 | 191 | def relation_set(relation_id=None, relation_settings={}, **kwargs): | 390 | def relation_set(relation_id=None, relation_settings=None, **kwargs): |
1197 | 192 | """Set relation information for the current unit""" | 391 | """Set relation information for the current unit""" |
1198 | 392 | relation_settings = relation_settings if relation_settings else {} | ||
1199 | 193 | relation_cmd_line = ['relation-set'] | 393 | relation_cmd_line = ['relation-set'] |
1200 | 394 | accepts_file = "--file" in subprocess.check_output( | ||
1201 | 395 | relation_cmd_line + ["--help"], universal_newlines=True) | ||
1202 | 194 | if relation_id is not None: | 396 | if relation_id is not None: |
1203 | 195 | relation_cmd_line.extend(('-r', relation_id)) | 397 | relation_cmd_line.extend(('-r', relation_id)) |
1210 | 196 | for k, v in (relation_settings.items() + kwargs.items()): | 398 | settings = relation_settings.copy() |
1211 | 197 | if v is None: | 399 | settings.update(kwargs) |
1212 | 198 | relation_cmd_line.append('{}='.format(k)) | 400 | for key, value in settings.items(): |
1213 | 199 | else: | 401 | # Force value to be a string: it always should, but some call |
1214 | 200 | relation_cmd_line.append('{}={}'.format(k, v)) | 402 | # sites pass in things like dicts or numbers. |
1215 | 201 | subprocess.check_call(relation_cmd_line) | 403 | if value is not None: |
1216 | 404 | settings[key] = "{}".format(value) | ||
1217 | 405 | if accepts_file: | ||
1218 | 406 | # --file was introduced in Juju 1.23.2. Use it by default if | ||
1219 | 407 | # available, since otherwise we'll break if the relation data is | ||
1220 | 408 | # too big. Ideally we should tell relation-set to read the data from | ||
1221 | 409 | # stdin, but that feature is broken in 1.23.2: Bug #1454678. | ||
1222 | 410 | with tempfile.NamedTemporaryFile(delete=False) as settings_file: | ||
1223 | 411 | settings_file.write(yaml.safe_dump(settings).encode("utf-8")) | ||
1224 | 412 | subprocess.check_call( | ||
1225 | 413 | relation_cmd_line + ["--file", settings_file.name]) | ||
1226 | 414 | os.remove(settings_file.name) | ||
1227 | 415 | else: | ||
1228 | 416 | for key, value in settings.items(): | ||
1229 | 417 | if value is None: | ||
1230 | 418 | relation_cmd_line.append('{}='.format(key)) | ||
1231 | 419 | else: | ||
1232 | 420 | relation_cmd_line.append('{}={}'.format(key, value)) | ||
1233 | 421 | subprocess.check_call(relation_cmd_line) | ||
1234 | 202 | # Flush cache of any relation-gets for local unit | 422 | # Flush cache of any relation-gets for local unit |
1235 | 203 | flush(local_unit()) | 423 | flush(local_unit()) |
1236 | 204 | 424 | ||
1237 | 205 | 425 | ||
1238 | 426 | def relation_clear(r_id=None): | ||
1239 | 427 | ''' Clears any relation data already set on relation r_id ''' | ||
1240 | 428 | settings = relation_get(rid=r_id, | ||
1241 | 429 | unit=local_unit()) | ||
1242 | 430 | for setting in settings: | ||
1243 | 431 | if setting not in ['public-address', 'private-address']: | ||
1244 | 432 | settings[setting] = None | ||
1245 | 433 | relation_set(relation_id=r_id, | ||
1246 | 434 | **settings) | ||
1247 | 435 | |||
1248 | 436 | |||
1249 | 206 | @cached | 437 | @cached |
1250 | 207 | def relation_ids(reltype=None): | 438 | def relation_ids(reltype=None): |
1251 | 208 | """A list of relation_ids""" | 439 | """A list of relation_ids""" |
1252 | @@ -210,7 +441,8 @@ def relation_ids(reltype=None): | |||
1253 | 210 | relid_cmd_line = ['relation-ids', '--format=json'] | 441 | relid_cmd_line = ['relation-ids', '--format=json'] |
1254 | 211 | if reltype is not None: | 442 | if reltype is not None: |
1255 | 212 | relid_cmd_line.append(reltype) | 443 | relid_cmd_line.append(reltype) |
1257 | 213 | return json.loads(subprocess.check_output(relid_cmd_line)) or [] | 444 | return json.loads( |
1258 | 445 | subprocess.check_output(relid_cmd_line).decode('UTF-8')) or [] | ||
1259 | 214 | return [] | 446 | return [] |
1260 | 215 | 447 | ||
1261 | 216 | 448 | ||
1262 | @@ -221,7 +453,8 @@ def related_units(relid=None): | |||
1263 | 221 | units_cmd_line = ['relation-list', '--format=json'] | 453 | units_cmd_line = ['relation-list', '--format=json'] |
1264 | 222 | if relid is not None: | 454 | if relid is not None: |
1265 | 223 | units_cmd_line.extend(('-r', relid)) | 455 | units_cmd_line.extend(('-r', relid)) |
1267 | 224 | return json.loads(subprocess.check_output(units_cmd_line)) or [] | 456 | return json.loads( |
1268 | 457 | subprocess.check_output(units_cmd_line).decode('UTF-8')) or [] | ||
1269 | 225 | 458 | ||
1270 | 226 | 459 | ||
1271 | 227 | @cached | 460 | @cached |
1272 | @@ -261,21 +494,116 @@ def relations_of_type(reltype=None): | |||
1273 | 261 | 494 | ||
1274 | 262 | 495 | ||
1275 | 263 | @cached | 496 | @cached |
1276 | 497 | def metadata(): | ||
1277 | 498 | """Get the current charm metadata.yaml contents as a python object""" | ||
1278 | 499 | with open(os.path.join(charm_dir(), 'metadata.yaml')) as md: | ||
1279 | 500 | return yaml.safe_load(md) | ||
1280 | 501 | |||
1281 | 502 | |||
1282 | 503 | def _metadata_unit(unit): | ||
1283 | 504 | """Given the name of a unit (e.g. apache2/0), get the unit charm's | ||
1284 | 505 | metadata.yaml. Very similar to metadata() but allows us to inspect | ||
1285 | 506 | other units. Unit needs to be co-located, such as a subordinate or | ||
1286 | 507 | principal/primary. | ||
1287 | 508 | |||
1288 | 509 | :returns: metadata.yaml as a python object. | ||
1289 | 510 | |||
1290 | 511 | """ | ||
1291 | 512 | basedir = os.sep.join(charm_dir().split(os.sep)[:-2]) | ||
1292 | 513 | unitdir = 'unit-{}'.format(unit.replace(os.sep, '-')) | ||
1293 | 514 | with open(os.path.join(basedir, unitdir, 'charm', 'metadata.yaml')) as md: | ||
1294 | 515 | return yaml.safe_load(md) | ||
1295 | 516 | |||
1296 | 517 | |||
1297 | 518 | @cached | ||
1298 | 264 | def relation_types(): | 519 | def relation_types(): |
1299 | 265 | """Get a list of relation types supported by this charm""" | 520 | """Get a list of relation types supported by this charm""" |
1300 | 266 | charmdir = os.environ.get('CHARM_DIR', '') | ||
1301 | 267 | mdf = open(os.path.join(charmdir, 'metadata.yaml')) | ||
1302 | 268 | md = yaml.safe_load(mdf) | ||
1303 | 269 | rel_types = [] | 521 | rel_types = [] |
1304 | 522 | md = metadata() | ||
1305 | 270 | for key in ('provides', 'requires', 'peers'): | 523 | for key in ('provides', 'requires', 'peers'): |
1306 | 271 | section = md.get(key) | 524 | section = md.get(key) |
1307 | 272 | if section: | 525 | if section: |
1308 | 273 | rel_types.extend(section.keys()) | 526 | rel_types.extend(section.keys()) |
1309 | 274 | mdf.close() | ||
1310 | 275 | return rel_types | 527 | return rel_types |
1311 | 276 | 528 | ||
1312 | 277 | 529 | ||
1313 | 278 | @cached | 530 | @cached |
1314 | 531 | def peer_relation_id(): | ||
1315 | 532 | '''Get the peers relation id if a peers relation has been joined, else None.''' | ||
1316 | 533 | md = metadata() | ||
1317 | 534 | section = md.get('peers') | ||
1318 | 535 | if section: | ||
1319 | 536 | for key in section: | ||
1320 | 537 | relids = relation_ids(key) | ||
1321 | 538 | if relids: | ||
1322 | 539 | return relids[0] | ||
1323 | 540 | return None | ||
1324 | 541 | |||
1325 | 542 | |||
1326 | 543 | @cached | ||
1327 | 544 | def relation_to_interface(relation_name): | ||
1328 | 545 | """ | ||
1329 | 546 | Given the name of a relation, return the interface that relation uses. | ||
1330 | 547 | |||
1331 | 548 | :returns: The interface name, or ``None``. | ||
1332 | 549 | """ | ||
1333 | 550 | return relation_to_role_and_interface(relation_name)[1] | ||
1334 | 551 | |||
1335 | 552 | |||
1336 | 553 | @cached | ||
1337 | 554 | def relation_to_role_and_interface(relation_name): | ||
1338 | 555 | """ | ||
1339 | 556 | Given the name of a relation, return the role and the name of the interface | ||
1340 | 557 | that relation uses (where role is one of ``provides``, ``requires``, or ``peers``). | ||
1341 | 558 | |||
1342 | 559 | :returns: A tuple containing ``(role, interface)``, or ``(None, None)``. | ||
1343 | 560 | """ | ||
1344 | 561 | _metadata = metadata() | ||
1345 | 562 | for role in ('provides', 'requires', 'peers'): | ||
1346 | 563 | interface = _metadata.get(role, {}).get(relation_name, {}).get('interface') | ||
1347 | 564 | if interface: | ||
1348 | 565 | return role, interface | ||
1349 | 566 | return None, None | ||
1350 | 567 | |||
1351 | 568 | |||
1352 | 569 | @cached | ||
1353 | 570 | def role_and_interface_to_relations(role, interface_name): | ||
1354 | 571 | """ | ||
1355 | 572 | Given a role and interface name, return a list of relation names for the | ||
1356 | 573 | current charm that use that interface under that role (where role is one | ||
1357 | 574 | of ``provides``, ``requires``, or ``peers``). | ||
1358 | 575 | |||
1359 | 576 | :returns: A list of relation names. | ||
1360 | 577 | """ | ||
1361 | 578 | _metadata = metadata() | ||
1362 | 579 | results = [] | ||
1363 | 580 | for relation_name, relation in _metadata.get(role, {}).items(): | ||
1364 | 581 | if relation['interface'] == interface_name: | ||
1365 | 582 | results.append(relation_name) | ||
1366 | 583 | return results | ||
1367 | 584 | |||
1368 | 585 | |||
1369 | 586 | @cached | ||
1370 | 587 | def interface_to_relations(interface_name): | ||
1371 | 588 | """ | ||
1372 | 589 | Given an interface, return a list of relation names for the current | ||
1373 | 590 | charm that use that interface. | ||
1374 | 591 | |||
1375 | 592 | :returns: A list of relation names. | ||
1376 | 593 | """ | ||
1377 | 594 | results = [] | ||
1378 | 595 | for role in ('provides', 'requires', 'peers'): | ||
1379 | 596 | results.extend(role_and_interface_to_relations(role, interface_name)) | ||
1380 | 597 | return results | ||
1381 | 598 | |||
1382 | 599 | |||
1383 | 600 | @cached | ||
1384 | 601 | def charm_name(): | ||
1385 | 602 | """Get the name of the current charm as is specified on metadata.yaml""" | ||
1386 | 603 | return metadata().get('name') | ||
1387 | 604 | |||
1388 | 605 | |||
1389 | 606 | @cached | ||
1390 | 279 | def relations(): | 607 | def relations(): |
1391 | 280 | """Get a nested dictionary of relation data for all related units""" | 608 | """Get a nested dictionary of relation data for all related units""" |
1392 | 281 | rels = {} | 609 | rels = {} |
1393 | @@ -325,21 +653,72 @@ def close_port(port, protocol="TCP"): | |||
1394 | 325 | subprocess.check_call(_args) | 653 | subprocess.check_call(_args) |
1395 | 326 | 654 | ||
1396 | 327 | 655 | ||
1397 | 656 | def open_ports(start, end, protocol="TCP"): | ||
1398 | 657 | """Opens a range of service network ports""" | ||
1399 | 658 | _args = ['open-port'] | ||
1400 | 659 | _args.append('{}-{}/{}'.format(start, end, protocol)) | ||
1401 | 660 | subprocess.check_call(_args) | ||
1402 | 661 | |||
1403 | 662 | |||
1404 | 663 | def close_ports(start, end, protocol="TCP"): | ||
1405 | 664 | """Close a range of service network ports""" | ||
1406 | 665 | _args = ['close-port'] | ||
1407 | 666 | _args.append('{}-{}/{}'.format(start, end, protocol)) | ||
1408 | 667 | subprocess.check_call(_args) | ||
1409 | 668 | |||
1410 | 669 | |||
1411 | 328 | @cached | 670 | @cached |
1412 | 329 | def unit_get(attribute): | 671 | def unit_get(attribute): |
1413 | 330 | """Get the unit ID for the remote unit""" | 672 | """Get the unit ID for the remote unit""" |
1414 | 331 | _args = ['unit-get', '--format=json', attribute] | 673 | _args = ['unit-get', '--format=json', attribute] |
1415 | 332 | try: | 674 | try: |
1417 | 333 | return json.loads(subprocess.check_output(_args)) | 675 | return json.loads(subprocess.check_output(_args).decode('UTF-8')) |
1418 | 334 | except ValueError: | 676 | except ValueError: |
1419 | 335 | return None | 677 | return None |
1420 | 336 | 678 | ||
1421 | 337 | 679 | ||
1422 | 680 | def unit_public_ip(): | ||
1423 | 681 | """Get this unit's public IP address""" | ||
1424 | 682 | return unit_get('public-address') | ||
1425 | 683 | |||
1426 | 684 | |||
1427 | 338 | def unit_private_ip(): | 685 | def unit_private_ip(): |
1428 | 339 | """Get this unit's private IP address""" | 686 | """Get this unit's private IP address""" |
1429 | 340 | return unit_get('private-address') | 687 | return unit_get('private-address') |
1430 | 341 | 688 | ||
1431 | 342 | 689 | ||
1432 | 690 | @cached | ||
1433 | 691 | def storage_get(attribute=None, storage_id=None): | ||
1434 | 692 | """Get storage attributes""" | ||
1435 | 693 | _args = ['storage-get', '--format=json'] | ||
1436 | 694 | if storage_id: | ||
1437 | 695 | _args.extend(('-s', storage_id)) | ||
1438 | 696 | if attribute: | ||
1439 | 697 | _args.append(attribute) | ||
1440 | 698 | try: | ||
1441 | 699 | return json.loads(subprocess.check_output(_args).decode('UTF-8')) | ||
1442 | 700 | except ValueError: | ||
1443 | 701 | return None | ||
1444 | 702 | |||
1445 | 703 | |||
1446 | 704 | @cached | ||
1447 | 705 | def storage_list(storage_name=None): | ||
1448 | 706 | """List the storage IDs for the unit""" | ||
1449 | 707 | _args = ['storage-list', '--format=json'] | ||
1450 | 708 | if storage_name: | ||
1451 | 709 | _args.append(storage_name) | ||
1452 | 710 | try: | ||
1453 | 711 | return json.loads(subprocess.check_output(_args).decode('UTF-8')) | ||
1454 | 712 | except ValueError: | ||
1455 | 713 | return None | ||
1456 | 714 | except OSError as e: | ||
1457 | 715 | import errno | ||
1458 | 716 | if e.errno == errno.ENOENT: | ||
1459 | 717 | # storage-list does not exist | ||
1460 | 718 | return [] | ||
1461 | 719 | raise | ||
1462 | 720 | |||
1463 | 721 | |||
1464 | 343 | class UnregisteredHookError(Exception): | 722 | class UnregisteredHookError(Exception): |
1465 | 344 | """Raised when an undefined hook is called""" | 723 | """Raised when an undefined hook is called""" |
1466 | 345 | pass | 724 | pass |
1467 | @@ -348,37 +727,50 @@ class UnregisteredHookError(Exception): | |||
1468 | 348 | class Hooks(object): | 727 | class Hooks(object): |
1469 | 349 | """A convenient handler for hook functions. | 728 | """A convenient handler for hook functions. |
1470 | 350 | 729 | ||
1472 | 351 | Example: | 730 | Example:: |
1473 | 731 | |||
1474 | 352 | hooks = Hooks() | 732 | hooks = Hooks() |
1475 | 353 | 733 | ||
1476 | 354 | # register a hook, taking its name from the function name | 734 | # register a hook, taking its name from the function name |
1477 | 355 | @hooks.hook() | 735 | @hooks.hook() |
1478 | 356 | def install(): | 736 | def install(): |
1480 | 357 | ... | 737 | pass # your code here |
1481 | 358 | 738 | ||
1482 | 359 | # register a hook, providing a custom hook name | 739 | # register a hook, providing a custom hook name |
1483 | 360 | @hooks.hook("config-changed") | 740 | @hooks.hook("config-changed") |
1484 | 361 | def config_changed(): | 741 | def config_changed(): |
1486 | 362 | ... | 742 | pass # your code here |
1487 | 363 | 743 | ||
1488 | 364 | if __name__ == "__main__": | 744 | if __name__ == "__main__": |
1489 | 365 | # execute a hook based on the name the program is called by | 745 | # execute a hook based on the name the program is called by |
1490 | 366 | hooks.execute(sys.argv) | 746 | hooks.execute(sys.argv) |
1491 | 367 | """ | 747 | """ |
1492 | 368 | 748 | ||
1494 | 369 | def __init__(self): | 749 | def __init__(self, config_save=None): |
1495 | 370 | super(Hooks, self).__init__() | 750 | super(Hooks, self).__init__() |
1496 | 371 | self._hooks = {} | 751 | self._hooks = {} |
1497 | 372 | 752 | ||
1498 | 753 | # For unknown reasons, we allow the Hooks constructor to override | ||
1499 | 754 | # config().implicit_save. | ||
1500 | 755 | if config_save is not None: | ||
1501 | 756 | config().implicit_save = config_save | ||
1502 | 757 | |||
1503 | 373 | def register(self, name, function): | 758 | def register(self, name, function): |
1504 | 374 | """Register a hook""" | 759 | """Register a hook""" |
1505 | 375 | self._hooks[name] = function | 760 | self._hooks[name] = function |
1506 | 376 | 761 | ||
1507 | 377 | def execute(self, args): | 762 | def execute(self, args): |
1508 | 378 | """Execute a registered hook based on args[0]""" | 763 | """Execute a registered hook based on args[0]""" |
1509 | 764 | _run_atstart() | ||
1510 | 379 | hook_name = os.path.basename(args[0]) | 765 | hook_name = os.path.basename(args[0]) |
1511 | 380 | if hook_name in self._hooks: | 766 | if hook_name in self._hooks: |
1513 | 381 | self._hooks[hook_name]() | 767 | try: |
1514 | 768 | self._hooks[hook_name]() | ||
1515 | 769 | except SystemExit as x: | ||
1516 | 770 | if x.code is None or x.code == 0: | ||
1517 | 771 | _run_atexit() | ||
1518 | 772 | raise | ||
1519 | 773 | _run_atexit() | ||
1520 | 382 | else: | 774 | else: |
1521 | 383 | raise UnregisteredHookError(hook_name) | 775 | raise UnregisteredHookError(hook_name) |
1522 | 384 | 776 | ||
1523 | @@ -398,4 +790,319 @@ class Hooks(object): | |||
1524 | 398 | 790 | ||
1525 | 399 | def charm_dir(): | 791 | def charm_dir(): |
1526 | 400 | """Return the root directory of the current charm""" | 792 | """Return the root directory of the current charm""" |
1527 | 793 | d = os.environ.get('JUJU_CHARM_DIR') | ||
1528 | 794 | if d is not None: | ||
1529 | 795 | return d | ||
1530 | 401 | return os.environ.get('CHARM_DIR') | 796 | return os.environ.get('CHARM_DIR') |
1531 | 797 | |||
1532 | 798 | |||
1533 | 799 | @cached | ||
1534 | 800 | def action_get(key=None): | ||
1535 | 801 | """Gets the value of an action parameter, or all key/value param pairs""" | ||
1536 | 802 | cmd = ['action-get'] | ||
1537 | 803 | if key is not None: | ||
1538 | 804 | cmd.append(key) | ||
1539 | 805 | cmd.append('--format=json') | ||
1540 | 806 | action_data = json.loads(subprocess.check_output(cmd).decode('UTF-8')) | ||
1541 | 807 | return action_data | ||
1542 | 808 | |||
1543 | 809 | |||
1544 | 810 | def action_set(values): | ||
1545 | 811 | """Sets the values to be returned after the action finishes""" | ||
1546 | 812 | cmd = ['action-set'] | ||
1547 | 813 | for k, v in list(values.items()): | ||
1548 | 814 | cmd.append('{}={}'.format(k, v)) | ||
1549 | 815 | subprocess.check_call(cmd) | ||
1550 | 816 | |||
1551 | 817 | |||
1552 | 818 | def action_fail(message): | ||
1553 | 819 | """Sets the action status to failed and sets the error message. | ||
1554 | 820 | |||
1555 | 821 | The results set by action_set are preserved.""" | ||
1556 | 822 | subprocess.check_call(['action-fail', message]) | ||
1557 | 823 | |||
1558 | 824 | |||
1559 | 825 | def action_name(): | ||
1560 | 826 | """Get the name of the currently executing action.""" | ||
1561 | 827 | return os.environ.get('JUJU_ACTION_NAME') | ||
1562 | 828 | |||
1563 | 829 | |||
1564 | 830 | def action_uuid(): | ||
1565 | 831 | """Get the UUID of the currently executing action.""" | ||
1566 | 832 | return os.environ.get('JUJU_ACTION_UUID') | ||
1567 | 833 | |||
1568 | 834 | |||
1569 | 835 | def action_tag(): | ||
1570 | 836 | """Get the tag for the currently executing action.""" | ||
1571 | 837 | return os.environ.get('JUJU_ACTION_TAG') | ||
1572 | 838 | |||
1573 | 839 | |||
1574 | 840 | def status_set(workload_state, message): | ||
1575 | 841 | """Set the workload state with a message | ||
1576 | 842 | |||
1577 | 843 | Use status-set to set the workload state with a message which is visible | ||
1578 | 844 | to the user via juju status. If the status-set command is not found then | ||
1579 | 845 | assume this is juju < 1.23 and juju-log the message instead. | ||
1580 | 846 | |||
1581 | 847 | workload_state -- valid juju workload state. | ||
1582 | 848 | message -- status update message | ||
1583 | 849 | """ | ||
1584 | 850 | valid_states = ['maintenance', 'blocked', 'waiting', 'active'] | ||
1585 | 851 | if workload_state not in valid_states: | ||
1586 | 852 | raise ValueError( | ||
1587 | 853 | '{!r} is not a valid workload state'.format(workload_state) | ||
1588 | 854 | ) | ||
1589 | 855 | cmd = ['status-set', workload_state, message] | ||
1590 | 856 | try: | ||
1591 | 857 | ret = subprocess.call(cmd) | ||
1592 | 858 | if ret == 0: | ||
1593 | 859 | return | ||
1594 | 860 | except OSError as e: | ||
1595 | 861 | if e.errno != errno.ENOENT: | ||
1596 | 862 | raise | ||
1597 | 863 | log_message = 'status-set failed: {} {}'.format(workload_state, | ||
1598 | 864 | message) | ||
1599 | 865 | log(log_message, level='INFO') | ||
1600 | 866 | |||
1601 | 867 | |||
1602 | 868 | def status_get(): | ||
1603 | 869 | """Retrieve the previously set juju workload state and message | ||
1604 | 870 | |||
1605 | 871 | If the status-get command is not found then assume this is juju < 1.23 and | ||
1606 | 872 | return 'unknown', "" | ||
1607 | 873 | |||
1608 | 874 | """ | ||
1609 | 875 | cmd = ['status-get', "--format=json", "--include-data"] | ||
1610 | 876 | try: | ||
1611 | 877 | raw_status = subprocess.check_output(cmd) | ||
1612 | 878 | except OSError as e: | ||
1613 | 879 | if e.errno == errno.ENOENT: | ||
1614 | 880 | return ('unknown', "") | ||
1615 | 881 | else: | ||
1616 | 882 | raise | ||
1617 | 883 | else: | ||
1618 | 884 | status = json.loads(raw_status.decode("UTF-8")) | ||
1619 | 885 | return (status["status"], status["message"]) | ||
1620 | 886 | |||
1621 | 887 | |||
1622 | 888 | def translate_exc(from_exc, to_exc): | ||
1623 | 889 | def inner_translate_exc1(f): | ||
1624 | 890 | @wraps(f) | ||
1625 | 891 | def inner_translate_exc2(*args, **kwargs): | ||
1626 | 892 | try: | ||
1627 | 893 | return f(*args, **kwargs) | ||
1628 | 894 | except from_exc: | ||
1629 | 895 | raise to_exc | ||
1630 | 896 | |||
1631 | 897 | return inner_translate_exc2 | ||
1632 | 898 | |||
1633 | 899 | return inner_translate_exc1 | ||
1634 | 900 | |||
1635 | 901 | |||
1636 | 902 | def application_version_set(version): | ||
1637 | 903 | """Charm authors may trigger this command from any hook to output what | ||
1638 | 904 | version of the application is running. This could be a package version, | ||
1639 | 905 | for instance postgres version 9.5. It could also be a build number or | ||
1640 | 906 | version control revision identifier, for instance git sha 6fb7ba68. """ | ||
1641 | 907 | |||
1642 | 908 | cmd = ['application-version-set'] | ||
1643 | 909 | cmd.append(version) | ||
1644 | 910 | try: | ||
1645 | 911 | subprocess.check_call(cmd) | ||
1646 | 912 | except OSError: | ||
1647 | 913 | log("Application Version: {}".format(version)) | ||
1648 | 914 | |||
1649 | 915 | |||
1650 | 916 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | ||
1651 | 917 | def is_leader(): | ||
1652 | 918 | """Does the current unit hold the juju leadership | ||
1653 | 919 | |||
1654 | 920 | Uses juju to determine whether the current unit is the leader of its peers | ||
1655 | 921 | """ | ||
1656 | 922 | cmd = ['is-leader', '--format=json'] | ||
1657 | 923 | return json.loads(subprocess.check_output(cmd).decode('UTF-8')) | ||
1658 | 924 | |||
1659 | 925 | |||
1660 | 926 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | ||
1661 | 927 | def leader_get(attribute=None): | ||
1662 | 928 | """Juju leader get value(s)""" | ||
1663 | 929 | cmd = ['leader-get', '--format=json'] + [attribute or '-'] | ||
1664 | 930 | return json.loads(subprocess.check_output(cmd).decode('UTF-8')) | ||
1665 | 931 | |||
1666 | 932 | |||
1667 | 933 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | ||
1668 | 934 | def leader_set(settings=None, **kwargs): | ||
1669 | 935 | """Juju leader set value(s)""" | ||
1670 | 936 | # Don't log secrets. | ||
1671 | 937 | # log("Juju leader-set '%s'" % (settings), level=DEBUG) | ||
1672 | 938 | cmd = ['leader-set'] | ||
1673 | 939 | settings = settings or {} | ||
1674 | 940 | settings.update(kwargs) | ||
1675 | 941 | for k, v in settings.items(): | ||
1676 | 942 | if v is None: | ||
1677 | 943 | cmd.append('{}='.format(k)) | ||
1678 | 944 | else: | ||
1679 | 945 | cmd.append('{}={}'.format(k, v)) | ||
1680 | 946 | subprocess.check_call(cmd) | ||
1681 | 947 | |||
1682 | 948 | |||
1683 | 949 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | ||
1684 | 950 | def payload_register(ptype, klass, pid): | ||
1685 | 951 | """ is used while a hook is running to let Juju know that a | ||
1686 | 952 | payload has been started.""" | ||
1687 | 953 | cmd = ['payload-register'] | ||
1688 | 954 | for x in [ptype, klass, pid]: | ||
1689 | 955 | cmd.append(x) | ||
1690 | 956 | subprocess.check_call(cmd) | ||
1691 | 957 | |||
1692 | 958 | |||
1693 | 959 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | ||
1694 | 960 | def payload_unregister(klass, pid): | ||
1695 | 961 | """ is used while a hook is running to let Juju know | ||
1696 | 962 | that a payload has been manually stopped. The <class> and <id> provided | ||
1697 | 963 | must match a payload that has been previously registered with juju using | ||
1698 | 964 | payload-register.""" | ||
1699 | 965 | cmd = ['payload-unregister'] | ||
1700 | 966 | for x in [klass, pid]: | ||
1701 | 967 | cmd.append(x) | ||
1702 | 968 | subprocess.check_call(cmd) | ||
1703 | 969 | |||
1704 | 970 | |||
1705 | 971 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | ||
1706 | 972 | def payload_status_set(klass, pid, status): | ||
1707 | 973 | """is used to update the current status of a registered payload. | ||
1708 | 974 | The <class> and <id> provided must match a payload that has been previously | ||
1709 | 975 | registered with juju using payload-register. The <status> must be one of the | ||
1710 | 976 | following: starting, started, stopping, stopped""" | ||
1711 | 977 | cmd = ['payload-status-set'] | ||
1712 | 978 | for x in [klass, pid, status]: | ||
1713 | 979 | cmd.append(x) | ||
1714 | 980 | subprocess.check_call(cmd) | ||
1715 | 981 | |||
1716 | 982 | |||
1717 | 983 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | ||
1718 | 984 | def resource_get(name): | ||
1719 | 985 | """used to fetch the resource path of the given name. | ||
1720 | 986 | |||
1721 | 987 | <name> must match a name of defined resource in metadata.yaml | ||
1722 | 988 | |||
1723 | 989 | returns either a path or False if resource not available | ||
1724 | 990 | """ | ||
1725 | 991 | if not name: | ||
1726 | 992 | return False | ||
1727 | 993 | |||
1728 | 994 | cmd = ['resource-get', name] | ||
1729 | 995 | try: | ||
1730 | 996 | return subprocess.check_output(cmd).decode('UTF-8') | ||
1731 | 997 | except subprocess.CalledProcessError: | ||
1732 | 998 | return False | ||
1733 | 999 | |||
1734 | 1000 | |||
1735 | 1001 | @cached | ||
1736 | 1002 | def juju_version(): | ||
1737 | 1003 | """Full version string (eg. '1.23.3.1-trusty-amd64')""" | ||
1738 | 1004 | # Per https://bugs.launchpad.net/juju-core/+bug/1455368/comments/1 | ||
1739 | 1005 | jujud = glob.glob('/var/lib/juju/tools/machine-*/jujud')[0] | ||
1740 | 1006 | return subprocess.check_output([jujud, 'version'], | ||
1741 | 1007 | universal_newlines=True).strip() | ||
1742 | 1008 | |||
1743 | 1009 | |||
1744 | 1010 | @cached | ||
1745 | 1011 | def has_juju_version(minimum_version): | ||
1746 | 1012 | """Return True if the Juju version is at least the provided version""" | ||
1747 | 1013 | return LooseVersion(juju_version()) >= LooseVersion(minimum_version) | ||
1748 | 1014 | |||
1749 | 1015 | |||
1750 | 1016 | _atexit = [] | ||
1751 | 1017 | _atstart = [] | ||
1752 | 1018 | |||
1753 | 1019 | |||
1754 | 1020 | def atstart(callback, *args, **kwargs): | ||
1755 | 1021 | '''Schedule a callback to run before the main hook. | ||
1756 | 1022 | |||
1757 | 1023 | Callbacks are run in the order they were added. | ||
1758 | 1024 | |||
1759 | 1025 | This is useful for modules and classes to perform initialization | ||
1760 | 1026 | and inject behavior. In particular: | ||
1761 | 1027 | |||
1762 | 1028 | - Run common code before all of your hooks, such as logging | ||
1763 | 1029 | the hook name or interesting relation data. | ||
1764 | 1030 | - Defer object or module initialization that requires a hook | ||
1765 | 1031 | context until we know there actually is a hook context, | ||
1766 | 1032 | making testing easier. | ||
1767 | 1033 | - Rather than requiring charm authors to include boilerplate to | ||
1768 | 1034 | invoke your helper's behavior, have it run automatically if | ||
1769 | 1035 | your object is instantiated or module imported. | ||
1770 | 1036 | |||
1771 | 1037 | This is not at all useful after your hook framework has been launched. | ||
1772 | 1038 | ''' | ||
1773 | 1039 | global _atstart | ||
1774 | 1040 | _atstart.append((callback, args, kwargs)) | ||
1775 | 1041 | |||
1776 | 1042 | |||
1777 | 1043 | def atexit(callback, *args, **kwargs): | ||
1778 | 1044 | '''Schedule a callback to run on successful hook completion. | ||
1779 | 1045 | |||
1780 | 1046 | Callbacks are run in the reverse order that they were added.''' | ||
1781 | 1047 | _atexit.append((callback, args, kwargs)) | ||
1782 | 1048 | |||
1783 | 1049 | |||
1784 | 1050 | def _run_atstart(): | ||
1785 | 1051 | '''Hook frameworks must invoke this before running the main hook body.''' | ||
1786 | 1052 | global _atstart | ||
1787 | 1053 | for callback, args, kwargs in _atstart: | ||
1788 | 1054 | callback(*args, **kwargs) | ||
1789 | 1055 | del _atstart[:] | ||
1790 | 1056 | |||
1791 | 1057 | |||
1792 | 1058 | def _run_atexit(): | ||
1793 | 1059 | '''Hook frameworks must invoke this after the main hook body has | ||
1794 | 1060 | successfully completed. Do not invoke it if the hook fails.''' | ||
1795 | 1061 | global _atexit | ||
1796 | 1062 | for callback, args, kwargs in reversed(_atexit): | ||
1797 | 1063 | callback(*args, **kwargs) | ||
1798 | 1064 | del _atexit[:] | ||
1799 | 1065 | |||
1800 | 1066 | |||
1801 | 1067 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | ||
1802 | 1068 | def network_get_primary_address(binding): | ||
1803 | 1069 | ''' | ||
1804 | 1070 | Retrieve the primary network address for a named binding | ||
1805 | 1071 | |||
1806 | 1072 | :param binding: string. The name of a relation of extra-binding | ||
1807 | 1073 | :return: string. The primary IP address for the named binding | ||
1808 | 1074 | :raise: NotImplementedError if run on Juju < 2.0 | ||
1809 | 1075 | ''' | ||
1810 | 1076 | cmd = ['network-get', '--primary-address', binding] | ||
1811 | 1077 | return subprocess.check_output(cmd).decode('UTF-8').strip() | ||
1812 | 1078 | |||
1813 | 1079 | |||
1814 | 1080 | def add_metric(*args, **kwargs): | ||
1815 | 1081 | """Add metric values. Values may be expressed with keyword arguments. For | ||
1816 | 1082 | metric names containing dashes, these may be expressed as one or more | ||
1817 | 1083 | 'key=value' positional arguments. May only be called from the collect-metrics | ||
1818 | 1084 | hook.""" | ||
1819 | 1085 | _args = ['add-metric'] | ||
1820 | 1086 | _kvpairs = [] | ||
1821 | 1087 | _kvpairs.extend(args) | ||
1822 | 1088 | _kvpairs.extend(['{}={}'.format(k, v) for k, v in kwargs.items()]) | ||
1823 | 1089 | _args.extend(sorted(_kvpairs)) | ||
1824 | 1090 | try: | ||
1825 | 1091 | subprocess.check_call(_args) | ||
1826 | 1092 | return | ||
1827 | 1093 | except EnvironmentError as e: | ||
1828 | 1094 | if e.errno != errno.ENOENT: | ||
1829 | 1095 | raise | ||
1830 | 1096 | log_message = 'add-metric failed: {}'.format(' '.join(_kvpairs)) | ||
1831 | 1097 | log(log_message, level='INFO') | ||
1832 | 1098 | |||
1833 | 1099 | |||
1834 | 1100 | def meter_status(): | ||
1835 | 1101 | """Get the meter status, if running in the meter-status-changed hook.""" | ||
1836 | 1102 | return os.environ.get('JUJU_METER_STATUS') | ||
1837 | 1103 | |||
1838 | 1104 | |||
1839 | 1105 | def meter_info(): | ||
1840 | 1106 | """Get the meter status information, if running in the meter-status-changed | ||
1841 | 1107 | hook.""" | ||
1842 | 1108 | return os.environ.get('JUJU_METER_INFO') | ||
1843 | diff --git a/hooks/charmhelpers/core/host.py b/hooks/charmhelpers/core/host.py | |||
1844 | index cfd2684..5656e2f 100644 | |||
1845 | --- a/hooks/charmhelpers/core/host.py | |||
1846 | +++ b/hooks/charmhelpers/core/host.py | |||
1847 | @@ -1,3 +1,17 @@ | |||
1848 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
1849 | 2 | # | ||
1850 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
1851 | 4 | # you may not use this file except in compliance with the License. | ||
1852 | 5 | # You may obtain a copy of the License at | ||
1853 | 6 | # | ||
1854 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
1855 | 8 | # | ||
1856 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
1857 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
1858 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
1859 | 12 | # See the License for the specific language governing permissions and | ||
1860 | 13 | # limitations under the License. | ||
1861 | 14 | |||
1862 | 1 | """Tools for working with the host system""" | 15 | """Tools for working with the host system""" |
1863 | 2 | # Copyright 2012 Canonical Ltd. | 16 | # Copyright 2012 Canonical Ltd. |
1864 | 3 | # | 17 | # |
1865 | @@ -6,68 +20,332 @@ | |||
1866 | 6 | # Matthew Wedgwood <matthew.wedgwood@canonical.com> | 20 | # Matthew Wedgwood <matthew.wedgwood@canonical.com> |
1867 | 7 | 21 | ||
1868 | 8 | import os | 22 | import os |
1869 | 23 | import re | ||
1870 | 9 | import pwd | 24 | import pwd |
1871 | 25 | import glob | ||
1872 | 10 | import grp | 26 | import grp |
1873 | 11 | import random | 27 | import random |
1874 | 12 | import string | 28 | import string |
1875 | 13 | import subprocess | 29 | import subprocess |
1876 | 14 | import hashlib | 30 | import hashlib |
1877 | 31 | import functools | ||
1878 | 32 | import itertools | ||
1879 | 33 | import six | ||
1880 | 15 | 34 | ||
1881 | 35 | from contextlib import contextmanager | ||
1882 | 16 | from collections import OrderedDict | 36 | from collections import OrderedDict |
1883 | 37 | from .hookenv import log, DEBUG | ||
1884 | 38 | from .fstab import Fstab | ||
1885 | 39 | from charmhelpers.osplatform import get_platform | ||
1886 | 40 | |||
1887 | 41 | __platform__ = get_platform() | ||
1888 | 42 | if __platform__ == "ubuntu": | ||
1889 | 43 | from charmhelpers.core.host_factory.ubuntu import ( | ||
1890 | 44 | service_available, | ||
1891 | 45 | add_new_group, | ||
1892 | 46 | lsb_release, | ||
1893 | 47 | cmp_pkgrevno, | ||
1894 | 48 | CompareHostReleases, | ||
1895 | 49 | ) # flake8: noqa -- ignore F401 for this import | ||
1896 | 50 | elif __platform__ == "centos": | ||
1897 | 51 | from charmhelpers.core.host_factory.centos import ( | ||
1898 | 52 | service_available, | ||
1899 | 53 | add_new_group, | ||
1900 | 54 | lsb_release, | ||
1901 | 55 | cmp_pkgrevno, | ||
1902 | 56 | CompareHostReleases, | ||
1903 | 57 | ) # flake8: noqa -- ignore F401 for this import | ||
1904 | 58 | |||
1905 | 59 | UPDATEDB_PATH = '/etc/updatedb.conf' | ||
1906 | 60 | |||
1907 | 61 | def service_start(service_name, **kwargs): | ||
1908 | 62 | """Start a system service. | ||
1909 | 63 | |||
1910 | 64 | The specified service name is managed via the system level init system. | ||
1911 | 65 | Some init systems (e.g. upstart) require that additional arguments be | ||
1912 | 66 | provided in order to directly control service instances whereas other init | ||
1913 | 67 | systems allow for addressing instances of a service directly by name (e.g. | ||
1914 | 68 | systemd). | ||
1915 | 69 | |||
1916 | 70 | The kwargs allow for the additional parameters to be passed to underlying | ||
1917 | 71 | init systems for those systems which require/allow for them. For example, | ||
1918 | 72 | the ceph-osd upstart script requires the id parameter to be passed along | ||
1919 | 73 | in order to identify which running daemon should be reloaded. The follow- | ||
1920 | 74 | ing example stops the ceph-osd service for instance id=4: | ||
1921 | 75 | |||
1922 | 76 | service_stop('ceph-osd', id=4) | ||
1923 | 77 | |||
1924 | 78 | :param service_name: the name of the service to stop | ||
1925 | 79 | :param **kwargs: additional parameters to pass to the init system when | ||
1926 | 80 | managing services. These will be passed as key=value | ||
1927 | 81 | parameters to the init system's commandline. kwargs | ||
1928 | 82 | are ignored for systemd enabled systems. | ||
1929 | 83 | """ | ||
1930 | 84 | return service('start', service_name, **kwargs) | ||
1931 | 17 | 85 | ||
1932 | 18 | from hookenv import log | ||
1933 | 19 | 86 | ||
1934 | 87 | def service_stop(service_name, **kwargs): | ||
1935 | 88 | """Stop a system service. | ||
1936 | 89 | |||
1937 | 90 | The specified service name is managed via the system level init system. | ||
1938 | 91 | Some init systems (e.g. upstart) require that additional arguments be | ||
1939 | 92 | provided in order to directly control service instances whereas other init | ||
1940 | 93 | systems allow for addressing instances of a service directly by name (e.g. | ||
1941 | 94 | systemd). | ||
1942 | 95 | |||
1943 | 96 | The kwargs allow for the additional parameters to be passed to underlying | ||
1944 | 97 | init systems for those systems which require/allow for them. For example, | ||
1945 | 98 | the ceph-osd upstart script requires the id parameter to be passed along | ||
1946 | 99 | in order to identify which running daemon should be reloaded. The follow- | ||
1947 | 100 | ing example stops the ceph-osd service for instance id=4: | ||
1948 | 101 | |||
1949 | 102 | service_stop('ceph-osd', id=4) | ||
1950 | 103 | |||
1951 | 104 | :param service_name: the name of the service to stop | ||
1952 | 105 | :param **kwargs: additional parameters to pass to the init system when | ||
1953 | 106 | managing services. These will be passed as key=value | ||
1954 | 107 | parameters to the init system's commandline. kwargs | ||
1955 | 108 | are ignored for systemd enabled systems. | ||
1956 | 109 | """ | ||
1957 | 110 | return service('stop', service_name, **kwargs) | ||
1958 | 20 | 111 | ||
1959 | 21 | def service_start(service_name): | ||
1960 | 22 | """Start a system service""" | ||
1961 | 23 | return service('start', service_name) | ||
1962 | 24 | 112 | ||
1963 | 113 | def service_restart(service_name, **kwargs): | ||
1964 | 114 | """Restart a system service. | ||
1965 | 25 | 115 | ||
1969 | 26 | def service_stop(service_name): | 116 | The specified service name is managed via the system level init system. |
1970 | 27 | """Stop a system service""" | 117 | Some init systems (e.g. upstart) require that additional arguments be |
1971 | 28 | return service('stop', service_name) | 118 | provided in order to directly control service instances whereas other init |
1972 | 119 | systems allow for addressing instances of a service directly by name (e.g. | ||
1973 | 120 | systemd). | ||
1974 | 29 | 121 | ||
1975 | 122 | The kwargs allow for the additional parameters to be passed to underlying | ||
1976 | 123 | init systems for those systems which require/allow for them. For example, | ||
1977 | 124 | the ceph-osd upstart script requires the id parameter to be passed along | ||
1978 | 125 | in order to identify which running daemon should be restarted. The follow- | ||
1979 | 126 | ing example restarts the ceph-osd service for instance id=4: | ||
1980 | 30 | 127 | ||
1983 | 31 | def service_restart(service_name): | 128 | service_restart('ceph-osd', id=4) |
1984 | 32 | """Restart a system service""" | 129 | |
1985 | 130 | :param service_name: the name of the service to restart | ||
1986 | 131 | :param **kwargs: additional parameters to pass to the init system when | ||
1987 | 132 | managing services. These will be passed as key=value | ||
1988 | 133 | parameters to the init system's commandline. kwargs | ||
1989 | 134 | are ignored for init systems not allowing additional | ||
1990 | 135 | parameters via the commandline (systemd). | ||
1991 | 136 | """ | ||
1992 | 33 | return service('restart', service_name) | 137 | return service('restart', service_name) |
1993 | 34 | 138 | ||
1994 | 35 | 139 | ||
1998 | 36 | def service_reload(service_name, restart_on_failure=False): | 140 | def service_reload(service_name, restart_on_failure=False, **kwargs): |
1999 | 37 | """Reload a system service, optionally falling back to restart if reload fails""" | 141 | """Reload a system service, optionally falling back to restart if |
2000 | 38 | service_result = service('reload', service_name) | 142 | reload fails. |
2001 | 143 | |||
2002 | 144 | The specified service name is managed via the system level init system. | ||
2003 | 145 | Some init systems (e.g. upstart) require that additional arguments be | ||
2004 | 146 | provided in order to directly control service instances whereas other init | ||
2005 | 147 | systems allow for addressing instances of a service directly by name (e.g. | ||
2006 | 148 | systemd). | ||
2007 | 149 | |||
2008 | 150 | The kwargs allow for the additional parameters to be passed to underlying | ||
2009 | 151 | init systems for those systems which require/allow for them. For example, | ||
2010 | 152 | the ceph-osd upstart script requires the id parameter to be passed along | ||
2011 | 153 | in order to identify which running daemon should be reloaded. The follow- | ||
2012 | 154 | ing example restarts the ceph-osd service for instance id=4: | ||
2013 | 155 | |||
2014 | 156 | service_reload('ceph-osd', id=4) | ||
2015 | 157 | |||
2016 | 158 | :param service_name: the name of the service to reload | ||
2017 | 159 | :param restart_on_failure: boolean indicating whether to fallback to a | ||
2018 | 160 | restart if the reload fails. | ||
2019 | 161 | :param **kwargs: additional parameters to pass to the init system when | ||
2020 | 162 | managing services. These will be passed as key=value | ||
2021 | 163 | parameters to the init system's commandline. kwargs | ||
2022 | 164 | are ignored for init systems not allowing additional | ||
2023 | 165 | parameters via the commandline (systemd). | ||
2024 | 166 | """ | ||
2025 | 167 | service_result = service('reload', service_name, **kwargs) | ||
2026 | 39 | if not service_result and restart_on_failure: | 168 | if not service_result and restart_on_failure: |
2028 | 40 | service_result = service('restart', service_name) | 169 | service_result = service('restart', service_name, **kwargs) |
2029 | 41 | return service_result | 170 | return service_result |
2030 | 42 | 171 | ||
2031 | 43 | 172 | ||
2035 | 44 | def service(action, service_name): | 173 | def service_pause(service_name, init_dir="/etc/init", initd_dir="/etc/init.d", |
2036 | 45 | """Control a system service""" | 174 | **kwargs): |
2037 | 46 | cmd = ['service', service_name, action] | 175 | """Pause a system service. |
2038 | 176 | |||
2039 | 177 | Stop it, and prevent it from starting again at boot. | ||
2040 | 178 | |||
2041 | 179 | :param service_name: the name of the service to pause | ||
2042 | 180 | :param init_dir: path to the upstart init directory | ||
2043 | 181 | :param initd_dir: path to the sysv init directory | ||
2044 | 182 | :param **kwargs: additional parameters to pass to the init system when | ||
2045 | 183 | managing services. These will be passed as key=value | ||
2046 | 184 | parameters to the init system's commandline. kwargs | ||
2047 | 185 | are ignored for init systems which do not support | ||
2048 | 186 | key=value arguments via the commandline. | ||
2049 | 187 | """ | ||
2050 | 188 | stopped = True | ||
2051 | 189 | if service_running(service_name, **kwargs): | ||
2052 | 190 | stopped = service_stop(service_name, **kwargs) | ||
2053 | 191 | upstart_file = os.path.join(init_dir, "{}.conf".format(service_name)) | ||
2054 | 192 | sysv_file = os.path.join(initd_dir, service_name) | ||
2055 | 193 | if init_is_systemd(): | ||
2056 | 194 | service('disable', service_name) | ||
2057 | 195 | service('mask', service_name) | ||
2058 | 196 | elif os.path.exists(upstart_file): | ||
2059 | 197 | override_path = os.path.join( | ||
2060 | 198 | init_dir, '{}.override'.format(service_name)) | ||
2061 | 199 | with open(override_path, 'w') as fh: | ||
2062 | 200 | fh.write("manual\n") | ||
2063 | 201 | elif os.path.exists(sysv_file): | ||
2064 | 202 | subprocess.check_call(["update-rc.d", service_name, "disable"]) | ||
2065 | 203 | else: | ||
2066 | 204 | raise ValueError( | ||
2067 | 205 | "Unable to detect {0} as SystemD, Upstart {1} or" | ||
2068 | 206 | " SysV {2}".format( | ||
2069 | 207 | service_name, upstart_file, sysv_file)) | ||
2070 | 208 | return stopped | ||
2071 | 209 | |||
2072 | 210 | |||
2073 | 211 | def service_resume(service_name, init_dir="/etc/init", | ||
2074 | 212 | initd_dir="/etc/init.d", **kwargs): | ||
2075 | 213 | """Resume a system service. | ||
2076 | 214 | |||
2077 | 215 | Reenable starting again at boot. Start the service. | ||
2078 | 216 | |||
2079 | 217 | :param service_name: the name of the service to resume | ||
2080 | 218 | :param init_dir: the path to the init dir | ||
2081 | 219 | :param initd dir: the path to the initd dir | ||
2082 | 220 | :param **kwargs: additional parameters to pass to the init system when | ||
2083 | 221 | managing services. These will be passed as key=value | ||
2084 | 222 | parameters to the init system's commandline. kwargs | ||
2085 | 223 | are ignored for systemd enabled systems. | ||
2086 | 224 | """ | ||
2087 | 225 | upstart_file = os.path.join(init_dir, "{}.conf".format(service_name)) | ||
2088 | 226 | sysv_file = os.path.join(initd_dir, service_name) | ||
2089 | 227 | if init_is_systemd(): | ||
2090 | 228 | service('unmask', service_name) | ||
2091 | 229 | service('enable', service_name) | ||
2092 | 230 | elif os.path.exists(upstart_file): | ||
2093 | 231 | override_path = os.path.join( | ||
2094 | 232 | init_dir, '{}.override'.format(service_name)) | ||
2095 | 233 | if os.path.exists(override_path): | ||
2096 | 234 | os.unlink(override_path) | ||
2097 | 235 | elif os.path.exists(sysv_file): | ||
2098 | 236 | subprocess.check_call(["update-rc.d", service_name, "enable"]) | ||
2099 | 237 | else: | ||
2100 | 238 | raise ValueError( | ||
2101 | 239 | "Unable to detect {0} as SystemD, Upstart {1} or" | ||
2102 | 240 | " SysV {2}".format( | ||
2103 | 241 | service_name, upstart_file, sysv_file)) | ||
2104 | 242 | started = service_running(service_name, **kwargs) | ||
2105 | 243 | |||
2106 | 244 | if not started: | ||
2107 | 245 | started = service_start(service_name, **kwargs) | ||
2108 | 246 | return started | ||
2109 | 247 | |||
2110 | 248 | |||
2111 | 249 | def service(action, service_name, **kwargs): | ||
2112 | 250 | """Control a system service. | ||
2113 | 251 | |||
2114 | 252 | :param action: the action to take on the service | ||
2115 | 253 | :param service_name: the name of the service to perform th action on | ||
2116 | 254 | :param **kwargs: additional params to be passed to the service command in | ||
2117 | 255 | the form of key=value. | ||
2118 | 256 | """ | ||
2119 | 257 | if init_is_systemd(): | ||
2120 | 258 | cmd = ['systemctl', action, service_name] | ||
2121 | 259 | else: | ||
2122 | 260 | cmd = ['service', service_name, action] | ||
2123 | 261 | for key, value in six.iteritems(kwargs): | ||
2124 | 262 | parameter = '%s=%s' % (key, value) | ||
2125 | 263 | cmd.append(parameter) | ||
2126 | 47 | return subprocess.call(cmd) == 0 | 264 | return subprocess.call(cmd) == 0 |
2127 | 48 | 265 | ||
2128 | 49 | 266 | ||
2135 | 50 | def service_running(service): | 267 | _UPSTART_CONF = "/etc/init/{}.conf" |
2136 | 51 | """Determine whether a system service is running""" | 268 | _INIT_D_CONF = "/etc/init.d/{}" |
2137 | 52 | try: | 269 | |
2138 | 53 | output = subprocess.check_output(['service', service, 'status']) | 270 | |
2139 | 54 | except subprocess.CalledProcessError: | 271 | def service_running(service_name, **kwargs): |
2140 | 55 | return False | 272 | """Determine whether a system service is running. |
2141 | 273 | |||
2142 | 274 | :param service_name: the name of the service | ||
2143 | 275 | :param **kwargs: additional args to pass to the service command. This is | ||
2144 | 276 | used to pass additional key=value arguments to the | ||
2145 | 277 | service command line for managing specific instance | ||
2146 | 278 | units (e.g. service ceph-osd status id=2). The kwargs | ||
2147 | 279 | are ignored in systemd services. | ||
2148 | 280 | """ | ||
2149 | 281 | if init_is_systemd(): | ||
2150 | 282 | return service('is-active', service_name) | ||
2151 | 56 | else: | 283 | else: |
2156 | 57 | if ("start/running" in output or "is running" in output): | 284 | if os.path.exists(_UPSTART_CONF.format(service_name)): |
2157 | 58 | return True | 285 | try: |
2158 | 59 | else: | 286 | cmd = ['status', service_name] |
2159 | 60 | return False | 287 | for key, value in six.iteritems(kwargs): |
2160 | 288 | parameter = '%s=%s' % (key, value) | ||
2161 | 289 | cmd.append(parameter) | ||
2162 | 290 | output = subprocess.check_output(cmd, | ||
2163 | 291 | stderr=subprocess.STDOUT).decode('UTF-8') | ||
2164 | 292 | except subprocess.CalledProcessError: | ||
2165 | 293 | return False | ||
2166 | 294 | else: | ||
2167 | 295 | # This works for upstart scripts where the 'service' command | ||
2168 | 296 | # returns a consistent string to represent running | ||
2169 | 297 | # 'start/running' | ||
2170 | 298 | if ("start/running" in output or | ||
2171 | 299 | "is running" in output or | ||
2172 | 300 | "up and running" in output): | ||
2173 | 301 | return True | ||
2174 | 302 | elif os.path.exists(_INIT_D_CONF.format(service_name)): | ||
2175 | 303 | # Check System V scripts init script return codes | ||
2176 | 304 | return service('status', service_name) | ||
2177 | 305 | return False | ||
2178 | 306 | |||
2179 | 61 | 307 | ||
2180 | 308 | SYSTEMD_SYSTEM = '/run/systemd/system' | ||
2181 | 62 | 309 | ||
2184 | 63 | def adduser(username, password=None, shell='/bin/bash', system_user=False): | 310 | |
2185 | 64 | """Add a user to the system""" | 311 | def init_is_systemd(): |
2186 | 312 | """Return True if the host system uses systemd, False otherwise.""" | ||
2187 | 313 | if lsb_release()['DISTRIB_CODENAME'] == 'trusty': | ||
2188 | 314 | return False | ||
2189 | 315 | return os.path.isdir(SYSTEMD_SYSTEM) | ||
2190 | 316 | |||
2191 | 317 | |||
2192 | 318 | def adduser(username, password=None, shell='/bin/bash', | ||
2193 | 319 | system_user=False, primary_group=None, | ||
2194 | 320 | secondary_groups=None, uid=None, home_dir=None): | ||
2195 | 321 | """Add a user to the system. | ||
2196 | 322 | |||
2197 | 323 | Will log but otherwise succeed if the user already exists. | ||
2198 | 324 | |||
2199 | 325 | :param str username: Username to create | ||
2200 | 326 | :param str password: Password for user; if ``None``, create a system user | ||
2201 | 327 | :param str shell: The default shell for the user | ||
2202 | 328 | :param bool system_user: Whether to create a login or system user | ||
2203 | 329 | :param str primary_group: Primary group for user; defaults to username | ||
2204 | 330 | :param list secondary_groups: Optional list of additional groups | ||
2205 | 331 | :param int uid: UID for user being created | ||
2206 | 332 | :param str home_dir: Home directory for user | ||
2207 | 333 | |||
2208 | 334 | :returns: The password database entry struct, as returned by `pwd.getpwnam` | ||
2209 | 335 | """ | ||
2210 | 65 | try: | 336 | try: |
2211 | 66 | user_info = pwd.getpwnam(username) | 337 | user_info = pwd.getpwnam(username) |
2212 | 67 | log('user {0} already exists!'.format(username)) | 338 | log('user {0} already exists!'.format(username)) |
2213 | 339 | if uid: | ||
2214 | 340 | user_info = pwd.getpwuid(int(uid)) | ||
2215 | 341 | log('user with uid {0} already exists!'.format(uid)) | ||
2216 | 68 | except KeyError: | 342 | except KeyError: |
2217 | 69 | log('creating user {0}'.format(username)) | 343 | log('creating user {0}'.format(username)) |
2218 | 70 | cmd = ['useradd'] | 344 | cmd = ['useradd'] |
2219 | 345 | if uid: | ||
2220 | 346 | cmd.extend(['--uid', str(uid)]) | ||
2221 | 347 | if home_dir: | ||
2222 | 348 | cmd.extend(['--home', str(home_dir)]) | ||
2223 | 71 | if system_user or password is None: | 349 | if system_user or password is None: |
2224 | 72 | cmd.append('--system') | 350 | cmd.append('--system') |
2225 | 73 | else: | 351 | else: |
2226 | @@ -76,32 +354,104 @@ def adduser(username, password=None, shell='/bin/bash', system_user=False): | |||
2227 | 76 | '--shell', shell, | 354 | '--shell', shell, |
2228 | 77 | '--password', password, | 355 | '--password', password, |
2229 | 78 | ]) | 356 | ]) |
2230 | 357 | if not primary_group: | ||
2231 | 358 | try: | ||
2232 | 359 | grp.getgrnam(username) | ||
2233 | 360 | primary_group = username # avoid "group exists" error | ||
2234 | 361 | except KeyError: | ||
2235 | 362 | pass | ||
2236 | 363 | if primary_group: | ||
2237 | 364 | cmd.extend(['-g', primary_group]) | ||
2238 | 365 | if secondary_groups: | ||
2239 | 366 | cmd.extend(['-G', ','.join(secondary_groups)]) | ||
2240 | 79 | cmd.append(username) | 367 | cmd.append(username) |
2241 | 80 | subprocess.check_call(cmd) | 368 | subprocess.check_call(cmd) |
2242 | 81 | user_info = pwd.getpwnam(username) | 369 | user_info = pwd.getpwnam(username) |
2243 | 82 | return user_info | 370 | return user_info |
2244 | 83 | 371 | ||
2245 | 84 | 372 | ||
2246 | 373 | def user_exists(username): | ||
2247 | 374 | """Check if a user exists""" | ||
2248 | 375 | try: | ||
2249 | 376 | pwd.getpwnam(username) | ||
2250 | 377 | user_exists = True | ||
2251 | 378 | except KeyError: | ||
2252 | 379 | user_exists = False | ||
2253 | 380 | return user_exists | ||
2254 | 381 | |||
2255 | 382 | |||
2256 | 383 | def uid_exists(uid): | ||
2257 | 384 | """Check if a uid exists""" | ||
2258 | 385 | try: | ||
2259 | 386 | pwd.getpwuid(uid) | ||
2260 | 387 | uid_exists = True | ||
2261 | 388 | except KeyError: | ||
2262 | 389 | uid_exists = False | ||
2263 | 390 | return uid_exists | ||
2264 | 391 | |||
2265 | 392 | |||
2266 | 393 | def group_exists(groupname): | ||
2267 | 394 | """Check if a group exists""" | ||
2268 | 395 | try: | ||
2269 | 396 | grp.getgrnam(groupname) | ||
2270 | 397 | group_exists = True | ||
2271 | 398 | except KeyError: | ||
2272 | 399 | group_exists = False | ||
2273 | 400 | return group_exists | ||
2274 | 401 | |||
2275 | 402 | |||
2276 | 403 | def gid_exists(gid): | ||
2277 | 404 | """Check if a gid exists""" | ||
2278 | 405 | try: | ||
2279 | 406 | grp.getgrgid(gid) | ||
2280 | 407 | gid_exists = True | ||
2281 | 408 | except KeyError: | ||
2282 | 409 | gid_exists = False | ||
2283 | 410 | return gid_exists | ||
2284 | 411 | |||
2285 | 412 | |||
2286 | 413 | def add_group(group_name, system_group=False, gid=None): | ||
2287 | 414 | """Add a group to the system | ||
2288 | 415 | |||
2289 | 416 | Will log but otherwise succeed if the group already exists. | ||
2290 | 417 | |||
2291 | 418 | :param str group_name: group to create | ||
2292 | 419 | :param bool system_group: Create system group | ||
2293 | 420 | :param int gid: GID for user being created | ||
2294 | 421 | |||
2295 | 422 | :returns: The password database entry struct, as returned by `grp.getgrnam` | ||
2296 | 423 | """ | ||
2297 | 424 | try: | ||
2298 | 425 | group_info = grp.getgrnam(group_name) | ||
2299 | 426 | log('group {0} already exists!'.format(group_name)) | ||
2300 | 427 | if gid: | ||
2301 | 428 | group_info = grp.getgrgid(gid) | ||
2302 | 429 | log('group with gid {0} already exists!'.format(gid)) | ||
2303 | 430 | except KeyError: | ||
2304 | 431 | log('creating group {0}'.format(group_name)) | ||
2305 | 432 | add_new_group(group_name, system_group, gid) | ||
2306 | 433 | group_info = grp.getgrnam(group_name) | ||
2307 | 434 | return group_info | ||
2308 | 435 | |||
2309 | 436 | |||
2310 | 85 | def add_user_to_group(username, group): | 437 | def add_user_to_group(username, group): |
2311 | 86 | """Add a user to a group""" | 438 | """Add a user to a group""" |
2317 | 87 | cmd = [ | 439 | cmd = ['gpasswd', '-a', username, group] |
2313 | 88 | 'gpasswd', '-a', | ||
2314 | 89 | username, | ||
2315 | 90 | group | ||
2316 | 91 | ] | ||
2318 | 92 | log("Adding user {} to group {}".format(username, group)) | 440 | log("Adding user {} to group {}".format(username, group)) |
2319 | 93 | subprocess.check_call(cmd) | 441 | subprocess.check_call(cmd) |
2320 | 94 | 442 | ||
2321 | 95 | 443 | ||
2323 | 96 | def rsync(from_path, to_path, flags='-r', options=None): | 444 | def rsync(from_path, to_path, flags='-r', options=None, timeout=None): |
2324 | 97 | """Replicate the contents of a path""" | 445 | """Replicate the contents of a path""" |
2325 | 98 | options = options or ['--delete', '--executability'] | 446 | options = options or ['--delete', '--executability'] |
2326 | 99 | cmd = ['/usr/bin/rsync', flags] | 447 | cmd = ['/usr/bin/rsync', flags] |
2327 | 448 | if timeout: | ||
2328 | 449 | cmd = ['timeout', str(timeout)] + cmd | ||
2329 | 100 | cmd.extend(options) | 450 | cmd.extend(options) |
2330 | 101 | cmd.append(from_path) | 451 | cmd.append(from_path) |
2331 | 102 | cmd.append(to_path) | 452 | cmd.append(to_path) |
2332 | 103 | log(" ".join(cmd)) | 453 | log(" ".join(cmd)) |
2334 | 104 | return subprocess.check_output(cmd).strip() | 454 | return subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode('UTF-8').strip() |
2335 | 105 | 455 | ||
2336 | 106 | 456 | ||
2337 | 107 | def symlink(source, destination): | 457 | def symlink(source, destination): |
2338 | @@ -116,34 +466,71 @@ def symlink(source, destination): | |||
2339 | 116 | subprocess.check_call(cmd) | 466 | subprocess.check_call(cmd) |
2340 | 117 | 467 | ||
2341 | 118 | 468 | ||
2343 | 119 | def mkdir(path, owner='root', group='root', perms=0555, force=False): | 469 | def mkdir(path, owner='root', group='root', perms=0o555, force=False): |
2344 | 120 | """Create a directory""" | 470 | """Create a directory""" |
2345 | 121 | log("Making dir {} {}:{} {:o}".format(path, owner, group, | 471 | log("Making dir {} {}:{} {:o}".format(path, owner, group, |
2346 | 122 | perms)) | 472 | perms)) |
2347 | 123 | uid = pwd.getpwnam(owner).pw_uid | 473 | uid = pwd.getpwnam(owner).pw_uid |
2348 | 124 | gid = grp.getgrnam(group).gr_gid | 474 | gid = grp.getgrnam(group).gr_gid |
2349 | 125 | realpath = os.path.abspath(path) | 475 | realpath = os.path.abspath(path) |
2352 | 126 | if os.path.exists(realpath): | 476 | path_exists = os.path.exists(realpath) |
2353 | 127 | if force and not os.path.isdir(realpath): | 477 | if path_exists and force: |
2354 | 478 | if not os.path.isdir(realpath): | ||
2355 | 128 | log("Removing non-directory file {} prior to mkdir()".format(path)) | 479 | log("Removing non-directory file {} prior to mkdir()".format(path)) |
2356 | 129 | os.unlink(realpath) | 480 | os.unlink(realpath) |
2358 | 130 | else: | 481 | os.makedirs(realpath, perms) |
2359 | 482 | elif not path_exists: | ||
2360 | 131 | os.makedirs(realpath, perms) | 483 | os.makedirs(realpath, perms) |
2361 | 132 | os.chown(realpath, uid, gid) | 484 | os.chown(realpath, uid, gid) |
2362 | 485 | os.chmod(realpath, perms) | ||
2363 | 133 | 486 | ||
2364 | 134 | 487 | ||
2368 | 135 | def write_file(path, content, owner='root', group='root', perms=0444): | 488 | def write_file(path, content, owner='root', group='root', perms=0o444): |
2369 | 136 | """Create or overwrite a file with the contents of a string""" | 489 | """Create or overwrite a file with the contents of a byte string.""" |
2367 | 137 | log("Writing file {} {}:{} {:o}".format(path, owner, group, perms)) | ||
2370 | 138 | uid = pwd.getpwnam(owner).pw_uid | 490 | uid = pwd.getpwnam(owner).pw_uid |
2371 | 139 | gid = grp.getgrnam(group).gr_gid | 491 | gid = grp.getgrnam(group).gr_gid |
2379 | 140 | with open(path, 'w') as target: | 492 | # lets see if we can grab the file and compare the context, to avoid doing |
2380 | 141 | os.fchown(target.fileno(), uid, gid) | 493 | # a write. |
2381 | 142 | os.fchmod(target.fileno(), perms) | 494 | existing_content = None |
2382 | 143 | target.write(content) | 495 | existing_uid, existing_gid = None, None |
2383 | 144 | 496 | try: | |
2384 | 145 | 497 | with open(path, 'rb') as target: | |
2385 | 146 | def mount(device, mountpoint, options=None, persist=False): | 498 | existing_content = target.read() |
2386 | 499 | stat = os.stat(path) | ||
2387 | 500 | existing_uid, existing_gid = stat.st_uid, stat.st_gid | ||
2388 | 501 | except: | ||
2389 | 502 | pass | ||
2390 | 503 | if content != existing_content: | ||
2391 | 504 | log("Writing file {} {}:{} {:o}".format(path, owner, group, perms), | ||
2392 | 505 | level=DEBUG) | ||
2393 | 506 | with open(path, 'wb') as target: | ||
2394 | 507 | os.fchown(target.fileno(), uid, gid) | ||
2395 | 508 | os.fchmod(target.fileno(), perms) | ||
2396 | 509 | target.write(content) | ||
2397 | 510 | return | ||
2398 | 511 | # the contents were the same, but we might still need to change the | ||
2399 | 512 | # ownership. | ||
2400 | 513 | if existing_uid != uid: | ||
2401 | 514 | log("Changing uid on already existing content: {} -> {}" | ||
2402 | 515 | .format(existing_uid, uid), level=DEBUG) | ||
2403 | 516 | os.chown(path, uid, -1) | ||
2404 | 517 | if existing_gid != gid: | ||
2405 | 518 | log("Changing gid on already existing content: {} -> {}" | ||
2406 | 519 | .format(existing_gid, gid), level=DEBUG) | ||
2407 | 520 | os.chown(path, -1, gid) | ||
2408 | 521 | |||
2409 | 522 | |||
2410 | 523 | def fstab_remove(mp): | ||
2411 | 524 | """Remove the given mountpoint entry from /etc/fstab""" | ||
2412 | 525 | return Fstab.remove_by_mountpoint(mp) | ||
2413 | 526 | |||
2414 | 527 | |||
2415 | 528 | def fstab_add(dev, mp, fs, options=None): | ||
2416 | 529 | """Adds the given device entry to the /etc/fstab file""" | ||
2417 | 530 | return Fstab.add(dev, mp, fs, options=options) | ||
2418 | 531 | |||
2419 | 532 | |||
2420 | 533 | def mount(device, mountpoint, options=None, persist=False, filesystem="ext3"): | ||
2421 | 147 | """Mount a filesystem at a particular mountpoint""" | 534 | """Mount a filesystem at a particular mountpoint""" |
2422 | 148 | cmd_args = ['mount'] | 535 | cmd_args = ['mount'] |
2423 | 149 | if options is not None: | 536 | if options is not None: |
2424 | @@ -151,12 +538,12 @@ def mount(device, mountpoint, options=None, persist=False): | |||
2425 | 151 | cmd_args.extend([device, mountpoint]) | 538 | cmd_args.extend([device, mountpoint]) |
2426 | 152 | try: | 539 | try: |
2427 | 153 | subprocess.check_output(cmd_args) | 540 | subprocess.check_output(cmd_args) |
2429 | 154 | except subprocess.CalledProcessError, e: | 541 | except subprocess.CalledProcessError as e: |
2430 | 155 | log('Error mounting {} at {}\n{}'.format(device, mountpoint, e.output)) | 542 | log('Error mounting {} at {}\n{}'.format(device, mountpoint, e.output)) |
2431 | 156 | return False | 543 | return False |
2432 | 544 | |||
2433 | 157 | if persist: | 545 | if persist: |
2436 | 158 | # TODO: update fstab | 546 | return fstab_add(device, mountpoint, filesystem, options=options) |
2435 | 159 | pass | ||
2437 | 160 | return True | 547 | return True |
2438 | 161 | 548 | ||
2439 | 162 | 549 | ||
2440 | @@ -165,12 +552,12 @@ def umount(mountpoint, persist=False): | |||
2441 | 165 | cmd_args = ['umount', mountpoint] | 552 | cmd_args = ['umount', mountpoint] |
2442 | 166 | try: | 553 | try: |
2443 | 167 | subprocess.check_output(cmd_args) | 554 | subprocess.check_output(cmd_args) |
2445 | 168 | except subprocess.CalledProcessError, e: | 555 | except subprocess.CalledProcessError as e: |
2446 | 169 | log('Error unmounting {}\n{}'.format(mountpoint, e.output)) | 556 | log('Error unmounting {}\n{}'.format(mountpoint, e.output)) |
2447 | 170 | return False | 557 | return False |
2448 | 558 | |||
2449 | 171 | if persist: | 559 | if persist: |
2452 | 172 | # TODO: update fstab | 560 | return fstab_remove(mountpoint) |
2451 | 173 | pass | ||
2453 | 174 | return True | 561 | return True |
2454 | 175 | 562 | ||
2455 | 176 | 563 | ||
2456 | @@ -183,102 +570,240 @@ def mounts(): | |||
2457 | 183 | return system_mounts | 570 | return system_mounts |
2458 | 184 | 571 | ||
2459 | 185 | 572 | ||
2462 | 186 | def file_hash(path): | 573 | def fstab_mount(mountpoint): |
2463 | 187 | """Generate a md5 hash of the contents of 'path' or None if not found """ | 574 | """Mount filesystem using fstab""" |
2464 | 575 | cmd_args = ['mount', mountpoint] | ||
2465 | 576 | try: | ||
2466 | 577 | subprocess.check_output(cmd_args) | ||
2467 | 578 | except subprocess.CalledProcessError as e: | ||
2468 | 579 | log('Error unmounting {}\n{}'.format(mountpoint, e.output)) | ||
2469 | 580 | return False | ||
2470 | 581 | return True | ||
2471 | 582 | |||
2472 | 583 | |||
2473 | 584 | def file_hash(path, hash_type='md5'): | ||
2474 | 585 | """Generate a hash checksum of the contents of 'path' or None if not found. | ||
2475 | 586 | |||
2476 | 587 | :param str hash_type: Any hash alrgorithm supported by :mod:`hashlib`, | ||
2477 | 588 | such as md5, sha1, sha256, sha512, etc. | ||
2478 | 589 | """ | ||
2479 | 188 | if os.path.exists(path): | 590 | if os.path.exists(path): |
2483 | 189 | h = hashlib.md5() | 591 | h = getattr(hashlib, hash_type)() |
2484 | 190 | with open(path, 'r') as source: | 592 | with open(path, 'rb') as source: |
2485 | 191 | h.update(source.read()) # IGNORE:E1101 - it does have update | 593 | h.update(source.read()) |
2486 | 192 | return h.hexdigest() | 594 | return h.hexdigest() |
2487 | 193 | else: | 595 | else: |
2488 | 194 | return None | 596 | return None |
2489 | 195 | 597 | ||
2490 | 196 | 598 | ||
2492 | 197 | def restart_on_change(restart_map, stopstart=False): | 599 | def path_hash(path): |
2493 | 600 | """Generate a hash checksum of all files matching 'path'. Standard | ||
2494 | 601 | wildcards like '*' and '?' are supported, see documentation for the 'glob' | ||
2495 | 602 | module for more information. | ||
2496 | 603 | |||
2497 | 604 | :return: dict: A { filename: hash } dictionary for all matched files. | ||
2498 | 605 | Empty if none found. | ||
2499 | 606 | """ | ||
2500 | 607 | return { | ||
2501 | 608 | filename: file_hash(filename) | ||
2502 | 609 | for filename in glob.iglob(path) | ||
2503 | 610 | } | ||
2504 | 611 | |||
2505 | 612 | |||
2506 | 613 | def check_hash(path, checksum, hash_type='md5'): | ||
2507 | 614 | """Validate a file using a cryptographic checksum. | ||
2508 | 615 | |||
2509 | 616 | :param str checksum: Value of the checksum used to validate the file. | ||
2510 | 617 | :param str hash_type: Hash algorithm used to generate `checksum`. | ||
2511 | 618 | Can be any hash alrgorithm supported by :mod:`hashlib`, | ||
2512 | 619 | such as md5, sha1, sha256, sha512, etc. | ||
2513 | 620 | :raises ChecksumError: If the file fails the checksum | ||
2514 | 621 | |||
2515 | 622 | """ | ||
2516 | 623 | actual_checksum = file_hash(path, hash_type) | ||
2517 | 624 | if checksum != actual_checksum: | ||
2518 | 625 | raise ChecksumError("'%s' != '%s'" % (checksum, actual_checksum)) | ||
2519 | 626 | |||
2520 | 627 | |||
2521 | 628 | class ChecksumError(ValueError): | ||
2522 | 629 | """A class derived from Value error to indicate the checksum failed.""" | ||
2523 | 630 | pass | ||
2524 | 631 | |||
2525 | 632 | |||
def restart_on_change(restart_map, stopstart=False, restart_functions=None):
    """Restart services based on configuration files changing

    This function is used a decorator, for example::

        @restart_on_change({
            '/etc/ceph/ceph.conf': [ 'cinder-api', 'cinder-volume' ]
            '/etc/apache/sites-enabled/*': [ 'apache2' ]
            })
        def config_changed():
            pass  # your code here

    In this example, the cinder-api and cinder-volume services
    would be restarted if /etc/ceph/ceph.conf is changed by the
    decorated function.  The apache2 service would be restarted if
    any file matching the pattern got changed, created or removed.
    Standard wildcards are supported, see documentation for the
    'glob' module for more information.

    @param restart_map: {path_file_name: [service_name, ...]
    @param stopstart: DEFAULT false; whether to stop, start OR restart
    @param restart_functions: nonstandard functions to use to restart services
                              {svc: func, ...}
    @returns result from decorated function
    """
    def decorator(func):
        @functools.wraps(func)
        def inner(*args, **kwargs):
            # defer all of the checksum/restart machinery to the helper so
            # other decorators can reuse the same logic
            return restart_on_change_helper(
                (lambda: func(*args, **kwargs)), restart_map, stopstart,
                restart_functions)
        return inner
    return decorator
2580 | 232 | 666 | ||
2581 | 233 | 667 | ||
def restart_on_change_helper(lambda_f, restart_map, stopstart=False,
                             restart_functions=None):
    """Helper function to perform the restart_on_change function.

    This is provided for decorators to restart services if files described
    in the restart_map have changed after an invocation of lambda_f().

    @param lambda_f: function to call.
    @param restart_map: {file: [service, ...]}
    @param stopstart: whether to stop, start or restart a service
    @param restart_functions: nonstandard functions to use to restart services
                              {svc: func, ...}
    @returns result of lambda_f()
    """
    restart_functions = restart_functions or {}
    # snapshot hashes before running the wrapped function
    before = {path: path_hash(path) for path in restart_map}
    result = lambda_f()
    # collect the service lists for every path whose hash changed
    changed = [restart_map[path]
               for path in restart_map
               if path_hash(path) != before[path]]
    # flatten into an ordered, de-duplicated list of services
    to_restart = list(OrderedDict.fromkeys(itertools.chain(*changed)))
    if to_restart:
        actions = ('stop', 'start') if stopstart else ('restart',)
        for svc in to_restart:
            if svc in restart_functions:
                # caller supplied a custom restarter for this service
                restart_functions[svc](svc)
            else:
                for action in actions:
                    service(action, svc)
    return result
2623 | 242 | 701 | ||
2624 | 243 | 702 | ||
def pwgen(length=None):
    """Generate a random password of *length* unambiguous alphanumerics."""
    if length is None:
        # A random length is ok to use a weak PRNG
        length = random.choice(range(35, 45))
    # drop visually-ambiguous characters and vowels
    alphabet = [c for c in (string.ascii_letters + string.digits)
                if c not in 'l0QD1vAEIOUaeiou']
    # Use a crypto-friendly PRNG (e.g. /dev/urandom) for the actual password
    rng = random.SystemRandom()
    return ''.join(rng.choice(alphabet) for _ in range(length))
2641 | 254 | 717 | ||
2642 | 255 | 718 | ||
def is_phy_iface(interface):
    """Return True if *interface* names a physical (non-virtual) NIC."""
    if not interface:
        return False
    sys_net = '/sys/class/net'
    if not os.path.isdir(sys_net):
        return False
    for entry in glob.glob(os.path.join(sys_net, '*')):
        # virtual devices resolve under .../devices/virtual/...
        if '/virtual/' in os.path.realpath(entry):
            continue
        if os.path.basename(entry) == interface:
            return True
    return False
2659 | 732 | |||
2660 | 733 | |||
def get_bond_master(interface):
    """Returns bond master if interface is bond slave otherwise None.

    NOTE: the provided interface is expected to be physical
    """
    if not interface:
        return None
    iface_path = '/sys/class/net/%s' % (interface)
    if not os.path.exists(iface_path):
        return None
    # virtual interfaces cannot be bond slaves
    if '/virtual/' in os.path.realpath(iface_path):
        return None
    master = os.path.join(iface_path, 'master')
    if not os.path.exists(master):
        return None
    master = os.path.realpath(master)
    # make sure it is a bond master, not some other upper device
    if os.path.exists(os.path.join(master, 'bonding')):
        return os.path.basename(master)
    return None
2680 | 753 | |||
2681 | 754 | |||
def list_nics(nic_type=None):
    """Return a list of nics of given type(s).

    :param nic_type: a string or list of strings of interface-name prefixes
        (e.g. 'eth'); if None, all interfaces are returned.
    :returns: list of interface names (VLAN-qualified e.g. 'eth0.100'
        where applicable).
    """
    if isinstance(nic_type, six.string_types):
        int_types = [nic_type]
    else:
        int_types = nic_type

    interfaces = []
    if nic_type:
        for int_type in int_types:
            cmd = ['ip', 'addr', 'show', 'label', int_type + '*']
            ip_output = subprocess.check_output(cmd).decode('UTF-8')
            ip_output = ip_output.split('\n')
            ip_output = (line for line in ip_output if line)
            for line in ip_output:
                if line.split()[1].startswith(int_type):
                    # prefer the VLAN-qualified name (e.g. 'eth0.100') when
                    # the interface line is of the form 'eth0.100@eth0'
                    matched = re.search(r'.*: (' + int_type +
                                        r'[0-9]+\.[0-9]+)@.*', line)
                    if matched:
                        iface = matched.groups()[0]
                    else:
                        iface = line.split()[1].replace(":", "")

                    if iface not in interfaces:
                        interfaces.append(iface)
    else:
        cmd = ['ip', 'a']
        ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n')
        ip_output = (line.strip() for line in ip_output if line)

        # raw string: '\s' in a plain string is an invalid escape and warns
        # on modern Python versions
        key = re.compile(r'^[0-9]+:\s+(.+):')
        for line in ip_output:
            matched = re.search(key, line)
            if matched:
                iface = matched.group(1)
                # strip any '@parent' suffix from VLAN/veth names
                iface = iface.partition("@")[0]
                if iface not in interfaces:
                    interfaces.append(iface)

    return interfaces
2728 | 271 | 795 | ||
2729 | 272 | 796 | ||
def set_nic_mtu(nic, mtu):
    """Set the Maximum Transmission Unit (MTU) on a network interface."""
    subprocess.check_call(['ip', 'link', 'set', nic, 'mtu', mtu])
2735 | 277 | 801 | ||
2736 | 278 | 802 | ||
2737 | 279 | def get_nic_mtu(nic): | 803 | def get_nic_mtu(nic): |
2738 | 804 | """Return the Maximum Transmission Unit (MTU) for a network interface.""" | ||
2739 | 280 | cmd = ['ip', 'addr', 'show', nic] | 805 | cmd = ['ip', 'addr', 'show', nic] |
2741 | 281 | ip_output = subprocess.check_output(cmd).split('\n') | 806 | ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n') |
2742 | 282 | mtu = "" | 807 | mtu = "" |
2743 | 283 | for line in ip_output: | 808 | for line in ip_output: |
2744 | 284 | words = line.split() | 809 | words = line.split() |
2745 | @@ -288,10 +813,136 @@ def get_nic_mtu(nic): | |||
2746 | 288 | 813 | ||
2747 | 289 | 814 | ||
def get_nic_hwaddr(nic):
    """Return the Media Access Control (MAC) for a network interface.

    Returns an empty string if no link/ether address is reported.
    """
    output = subprocess.check_output(
        ['ip', '-o', '-0', 'addr', 'show', nic]).decode('UTF-8')
    tokens = output.split()
    if 'link/ether' not in tokens:
        return ""
    # the MAC immediately follows the 'link/ether' token
    return tokens[tokens.index('link/ether') + 1]
2758 | 824 | |||
2759 | 825 | |||
@contextmanager
def chdir(directory):
    """Change the current working directory to a different directory for a code
    block and restore the previous directory after the block exits.  Useful to
    run commands from a specified directory.

    :param str directory: The directory path to change to for this context.
    """
    previous = os.getcwd()
    os.chdir(directory)
    try:
        yield
    finally:
        os.chdir(previous)
2773 | 839 | |||
2774 | 840 | |||
def chownr(path, owner, group, follow_links=True, chowntopdir=False):
    """Recursively change user and group ownership of files and directories
    in given path. Doesn't chown path itself by default, only its children.

    :param str path: The string path to start changing ownership.
    :param str owner: The owner string to use when looking up the uid.
    :param str group: The group string to use when looking up the gid.
    :param bool follow_links: Also follow and chown links if True
    :param bool chowntopdir: Also chown path itself if True
    """
    uid = pwd.getpwnam(owner).pw_uid
    gid = grp.getgrnam(group).gr_gid
    chown = os.chown if follow_links else os.lchown

    def _chown_unless_broken(target):
        # a broken symlink lexists but does not exist; chowning it would fail
        if not (os.path.lexists(target) and not os.path.exists(target)):
            chown(target, uid, gid)

    if chowntopdir:
        _chown_unless_broken(path)
    for root, dirs, files in os.walk(path, followlinks=follow_links):
        for name in dirs + files:
            _chown_unless_broken(os.path.join(root, name))
2802 | 868 | |||
2803 | 869 | |||
def lchownr(path, owner, group):
    """Recursively change user and group ownership of files and directories
    in a given path, not following symbolic links. See the documentation for
    'os.lchown' for more information.

    :param str path: The string path to start changing ownership.
    :param str owner: The owner string to use when looking up the uid.
    :param str group: The group string to use when looking up the gid.
    """
    # delegate to chownr with symlink-following disabled
    chownr(path, owner, group, follow_links=False)
2814 | 880 | |||
2815 | 881 | |||
def owner(path):
    """Returns a tuple containing the username & groupname owning the path.

    :param str path: the string path to retrieve the ownership
    :return tuple(str, str): A (username, groupname) tuple containing the
                             name of the user and group owning the path.
    :raises OSError: if the specified path does not exist
    """
    info = os.stat(path)
    username = pwd.getpwuid(info.st_uid)[0]
    groupname = grp.getgrgid(info.st_gid)[0]
    return username, groupname
2828 | 894 | |||
2829 | 895 | |||
def get_total_ram():
    """The total amount of system RAM in bytes.

    This is what is reported by the OS, and may be overcommitted when
    there are multiple containers hosted on the same machine.
    """
    with open('/proc/meminfo', 'r') as meminfo:
        for line in meminfo:
            if not line:
                continue
            key, value, unit = line.split()
            if key == 'MemTotal:':
                assert unit == 'kB', 'Unknown unit'
                # classic bytes (value * 1024), not KiB
                return int(value) * 1024
    raise NotImplementedError()
2844 | 910 | |||
2845 | 911 | |||
UPSTART_CONTAINER_TYPE = '/run/container_type'


def is_container():
    """Determine whether unit is running in a container

    @return: boolean indicating if unit is in a container
    """
    if init_is_systemd():
        # systemd host: ask systemd-detect-virt about container virt
        return subprocess.call(['systemd-detect-virt',
                                '--container']) == 0
    # upstart host: the marker file exists only inside containers
    return os.path.exists(UPSTART_CONTAINER_TYPE)
2861 | 927 | |||
2862 | 928 | |||
def add_to_updatedb_prunepath(path, updatedb_path=UPDATEDB_PATH):
    """Add *path* to PRUNEPATHS in the updatedb config file, in place."""
    with open(updatedb_path, 'r+') as conf:
        current = conf.read()
        rewritten = updatedb(current, path)
        # rewrite the file in place, truncating any leftover tail
        conf.seek(0)
        conf.write(rewritten)
        conf.truncate()
2870 | 936 | |||
2871 | 937 | |||
def updatedb(updatedb_text, new_path):
    """Return *updatedb_text* with *new_path* added to PRUNEPATHS.

    Non-PRUNEPATHS lines are preserved unchanged; the path is only
    appended if not already listed.
    """
    lines = updatedb_text.split("\n")
    for idx, line in enumerate(lines):
        if not line.startswith("PRUNEPATHS="):
            continue
        paths = line.split("=")[1].replace('"', '').split(" ")
        if new_path not in paths:
            paths.append(new_path)
        lines[idx] = 'PRUNEPATHS="{}"'.format(' '.join(paths))
    return "\n".join(lines)
2883 | diff --git a/hooks/charmhelpers/core/host_factory/__init__.py b/hooks/charmhelpers/core/host_factory/__init__.py | |||
2884 | 298 | new file mode 100644 | 949 | new file mode 100644 |
2885 | index 0000000..e69de29 | |||
2886 | --- /dev/null | |||
2887 | +++ b/hooks/charmhelpers/core/host_factory/__init__.py | |||
2888 | diff --git a/hooks/charmhelpers/core/host_factory/centos.py b/hooks/charmhelpers/core/host_factory/centos.py | |||
2889 | 299 | new file mode 100644 | 950 | new file mode 100644 |
2890 | index 0000000..7781a39 | |||
2891 | --- /dev/null | |||
2892 | +++ b/hooks/charmhelpers/core/host_factory/centos.py | |||
2893 | @@ -0,0 +1,72 @@ | |||
2894 | 1 | import subprocess | ||
2895 | 2 | import yum | ||
2896 | 3 | import os | ||
2897 | 4 | |||
2898 | 5 | from charmhelpers.core.strutils import BasicStringComparator | ||
2899 | 6 | |||
2900 | 7 | |||
class CompareHostReleases(BasicStringComparator):
    """Provide comparisons of Host releases.

    Not supported on CentOS: instantiating this comparator always raises.
    """

    def __init__(self, item):
        raise NotImplementedError(
            "CompareHostReleases() is not implemented for CentOS")
2913 | 20 | |||
2914 | 21 | |||
def service_available(service_name):
    """Determine whether a system service is available.

    :param str service_name: name of the service to check.
    :returns: True if the init system reports the service as enabled.
    """
    # systemd hosts expose /run/systemd/system; otherwise fall back to SysV
    if os.path.isdir('/run/systemd/system'):
        cmd = ['systemctl', 'is-enabled', service_name]
    else:
        cmd = ['service', service_name, 'is-enabled']
    return subprocess.call(cmd) == 0
2922 | 29 | |||
2923 | 30 | |||
def add_new_group(group_name, system_group=False, gid=None):
    """Create a group via groupadd, optionally a system group / fixed gid."""
    cmd = ['groupadd']
    if gid:
        cmd += ['--gid', str(gid)]
    if system_group:
        cmd.append('-r')
    subprocess.check_call(cmd + [group_name])
2932 | 39 | |||
2933 | 40 | |||
def lsb_release():
    """Return /etc/os-release in a dict."""
    release = {}
    with open('/etc/os-release', 'r') as fh:
        for line in fh:
            parts = line.split('=')
            # skip blank or malformed lines (not exactly KEY=value)
            if len(parts) != 2:
                continue
            release[parts[0].strip()] = parts[1].strip()
    return release
2944 | 51 | |||
2945 | 52 | |||
def cmp_pkgrevno(package, revno, pkgcache=None):
    """Compare supplied revno with the revno of the installed package.

    * 1 => Installed revno is greater than supplied arg
    * 0 => Installed revno is the same as supplied arg
    * -1 => Installed revno is less than supplied arg

    This function imports YumBase function if the pkgcache argument
    is None.

    :param str package: name of the installed package to look up.
    :param str revno: version string to compare against.
    :param pkgcache: optional mapping of package name -> version string;
        built from yum's installed-package list when omitted.
    :raises KeyError: if *package* is not in the cache.
    """
    if not pkgcache:
        y = yum.YumBase()
        packages = y.doPackageLists()
        # NOTE(review): relies on 'Name'/'version' attributes of yum package
        # objects -- confirm attribute casing against the yum API in use.
        pkgcache = {i.Name: i.version for i in packages['installed']}
    pkg = pkgcache[package]
    # NOTE(review): this is a lexical string comparison, not an RPM version
    # compare ('10' < '9' lexically) -- verify acceptable for callers.
    if pkg > revno:
        return 1
    if pkg < revno:
        return -1
    return 0
2966 | diff --git a/hooks/charmhelpers/core/host_factory/ubuntu.py b/hooks/charmhelpers/core/host_factory/ubuntu.py | |||
2967 | 0 | new file mode 100644 | 73 | new file mode 100644 |
2968 | index 0000000..d8dc378 | |||
2969 | --- /dev/null | |||
2970 | +++ b/hooks/charmhelpers/core/host_factory/ubuntu.py | |||
2971 | @@ -0,0 +1,89 @@ | |||
2972 | 1 | import subprocess | ||
2973 | 2 | |||
2974 | 3 | from charmhelpers.core.strutils import BasicStringComparator | ||
2975 | 4 | |||
2976 | 5 | |||
# Ubuntu release codenames in chronological order.  The tuple order is
# significant: CompareHostReleases uses index position for ordering
# comparisons (earlier in the tuple == older release).
UBUNTU_RELEASES = (
    'lucid',
    'maverick',
    'natty',
    'oneiric',
    'precise',
    'quantal',
    'raring',
    'saucy',
    'trusty',
    'utopic',
    'vivid',
    'wily',
    'xenial',
    'yakkety',
    'zesty',
    'artful',
)
2995 | 24 | |||
2996 | 25 | |||
class CompareHostReleases(BasicStringComparator):
    """Provide comparisons of Ubuntu releases.

    Use in the form of

        if CompareHostReleases(release) > 'trusty':
            # do something only valid on xenial or later

    Ordering follows the chronological UBUNTU_RELEASES tuple.
    """
    # the comparator base class orders items by their index in _list
    _list = UBUNTU_RELEASES
3006 | 35 | |||
3007 | 36 | |||
def service_available(service_name):
    """Determine whether a system service is available"""
    cmd = ['service', service_name, 'status']
    try:
        subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode('UTF-8')
    except subprocess.CalledProcessError as exc:
        # an unknown service reports 'unrecognized service' in its output
        return b'unrecognized service' not in exc.output
    return True
3018 | 47 | |||
3019 | 48 | |||
def add_new_group(group_name, system_group=False, gid=None):
    """Create a group via addgroup, optionally a system group / fixed gid."""
    cmd = ['addgroup']
    if gid:
        cmd += ['--gid', str(gid)]
    # '--system' creates a system group; '--group' forces group creation
    cmd.append('--system' if system_group else '--group')
    cmd.append(group_name)
    subprocess.check_call(cmd)
3032 | 61 | |||
3033 | 62 | |||
def lsb_release():
    """Return /etc/lsb-release in a dict.

    :returns: dict mapping keys such as 'DISTRIB_CODENAME' to their values.
    """
    d = {}
    with open('/etc/lsb-release', 'r') as lsb:
        for line in lsb:
            # split on the first '=' only, and skip blank/malformed lines
            # instead of raising ValueError on them
            k, sep, v = line.partition('=')
            if not sep:
                continue
            d[k.strip()] = v.strip()
    return d
3042 | 71 | |||
3043 | 72 | |||
def cmp_pkgrevno(package, revno, pkgcache=None):
    """Compare supplied revno with the revno of the installed package.

    * 1 => Installed revno is greater than supplied arg
    * 0 => Installed revno is the same as supplied arg
    * -1 => Installed revno is less than supplied arg

    This function imports apt_cache function from charmhelpers.fetch if
    the pkgcache argument is None. Be sure to add charmhelpers.fetch if
    you call this function, or pass an apt_pkg.Cache() instance.

    :param str package: name of the installed package to look up.
    :param str revno: Debian version string to compare against.
    :param pkgcache: optional apt cache; built on demand when omitted.
    """
    import apt_pkg
    if not pkgcache:
        from charmhelpers.fetch import apt_cache
        pkgcache = apt_cache()
    pkg = pkgcache[package]
    # NOTE(review): apt_pkg.version_compare returns any negative/zero/positive
    # int per Debian version ordering, not strictly -1/0/1 -- callers should
    # compare against 0, not against ±1.
    return apt_pkg.version_compare(pkg.current_ver.ver_str, revno)
3061 | diff --git a/hooks/charmhelpers/core/hugepage.py b/hooks/charmhelpers/core/hugepage.py | |||
3062 | 0 | new file mode 100644 | 90 | new file mode 100644 |
3063 | index 0000000..54b5b5e | |||
3064 | --- /dev/null | |||
3065 | +++ b/hooks/charmhelpers/core/hugepage.py | |||
3066 | @@ -0,0 +1,69 @@ | |||
3067 | 1 | # -*- coding: utf-8 -*- | ||
3068 | 2 | |||
3069 | 3 | # Copyright 2014-2015 Canonical Limited. | ||
3070 | 4 | # | ||
3071 | 5 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
3072 | 6 | # you may not use this file except in compliance with the License. | ||
3073 | 7 | # You may obtain a copy of the License at | ||
3074 | 8 | # | ||
3075 | 9 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
3076 | 10 | # | ||
3077 | 11 | # Unless required by applicable law or agreed to in writing, software | ||
3078 | 12 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
3079 | 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
3080 | 14 | # See the License for the specific language governing permissions and | ||
3081 | 15 | # limitations under the License. | ||
3082 | 16 | |||
3083 | 17 | import yaml | ||
3084 | 18 | from charmhelpers.core import fstab | ||
3085 | 19 | from charmhelpers.core import sysctl | ||
3086 | 20 | from charmhelpers.core.host import ( | ||
3087 | 21 | add_group, | ||
3088 | 22 | add_user_to_group, | ||
3089 | 23 | fstab_mount, | ||
3090 | 24 | mkdir, | ||
3091 | 25 | ) | ||
3092 | 26 | from charmhelpers.core.strutils import bytes_from_string | ||
3093 | 27 | from subprocess import check_output | ||
3094 | 28 | |||
3095 | 29 | |||
def hugepage_support(user, group='hugetlb', nr_hugepages=256,
                     max_map_count=65536, mnt_point='/run/hugepages/kvm',
                     pagesize='2MB', mount=True, set_shmmax=False):
    """Enable hugepages on system.

    Args:
        user (str) -- Username to allow access to hugepages to
        group (str) -- Group name to own hugepages
        nr_hugepages (int) -- Number of pages to reserve
        max_map_count (int) -- Number of Virtual Memory Areas a process can own
        mnt_point (str) -- Directory to mount hugepages on
        pagesize (str) -- Size of hugepages
        mount (bool) -- Whether to Mount hugepages
        set_shmmax (bool) -- Raise kernel.shmmax if smaller than the total
            hugepage allocation
    """
    group_info = add_group(group)
    gid = group_info.gr_gid
    add_user_to_group(user, group)
    # ensure the VMA limit can accommodate the reserved pages
    if max_map_count < 2 * nr_hugepages:
        max_map_count = 2 * nr_hugepages
    sysctl_settings = {
        'vm.nr_hugepages': nr_hugepages,
        'vm.max_map_count': max_map_count,
        'vm.hugetlb_shm_group': gid,
    }
    if set_shmmax:
        shmmax_current = int(check_output(['sysctl', '-n', 'kernel.shmmax']))
        shmmax_minsize = bytes_from_string(pagesize) * nr_hugepages
        if shmmax_minsize > shmmax_current:
            sysctl_settings['kernel.shmmax'] = shmmax_minsize
    sysctl.create(yaml.dump(sysctl_settings), '/etc/sysctl.d/10-hugepage.conf')
    mkdir(mnt_point, owner='root', group='root', perms=0o755, force=False)
    # replace any existing fstab entry for the mount point with ours
    lfstab = fstab.Fstab()
    fstab_entry = lfstab.get_entry_by_attr('mountpoint', mnt_point)
    if fstab_entry:
        lfstab.remove_entry(fstab_entry)
    entry = lfstab.Entry('nodev', mnt_point, 'hugetlbfs',
                         'mode=1770,gid={},pagesize={}'.format(gid, pagesize), 0, 0)
    lfstab.add_entry(entry)
    if mount:
        fstab_mount(mnt_point)
3136 | diff --git a/hooks/charmhelpers/core/kernel.py b/hooks/charmhelpers/core/kernel.py | |||
3137 | 0 | new file mode 100644 | 70 | new file mode 100644 |
3138 | index 0000000..2d40452 | |||
3139 | --- /dev/null | |||
3140 | +++ b/hooks/charmhelpers/core/kernel.py | |||
3141 | @@ -0,0 +1,72 @@ | |||
3142 | 1 | #!/usr/bin/env python | ||
3143 | 2 | # -*- coding: utf-8 -*- | ||
3144 | 3 | |||
3145 | 4 | # Copyright 2014-2015 Canonical Limited. | ||
3146 | 5 | # | ||
3147 | 6 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
3148 | 7 | # you may not use this file except in compliance with the License. | ||
3149 | 8 | # You may obtain a copy of the License at | ||
3150 | 9 | # | ||
3151 | 10 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
3152 | 11 | # | ||
3153 | 12 | # Unless required by applicable law or agreed to in writing, software | ||
3154 | 13 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
3155 | 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
3156 | 15 | # See the License for the specific language governing permissions and | ||
3157 | 16 | # limitations under the License. | ||
3158 | 17 | |||
3159 | 18 | import re | ||
3160 | 19 | import subprocess | ||
3161 | 20 | |||
3162 | 21 | from charmhelpers.osplatform import get_platform | ||
3163 | 22 | from charmhelpers.core.hookenv import ( | ||
3164 | 23 | log, | ||
3165 | 24 | INFO | ||
3166 | 25 | ) | ||
3167 | 26 | |||
3168 | 27 | __platform__ = get_platform() | ||
3169 | 28 | if __platform__ == "ubuntu": | ||
3170 | 29 | from charmhelpers.core.kernel_factory.ubuntu import ( | ||
3171 | 30 | persistent_modprobe, | ||
3172 | 31 | update_initramfs, | ||
3173 | 32 | ) # flake8: noqa -- ignore F401 for this import | ||
3174 | 33 | elif __platform__ == "centos": | ||
3175 | 34 | from charmhelpers.core.kernel_factory.centos import ( | ||
3176 | 35 | persistent_modprobe, | ||
3177 | 36 | update_initramfs, | ||
3178 | 37 | ) # flake8: noqa -- ignore F401 for this import | ||
3179 | 38 | |||
3180 | 39 | __author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" | ||
3181 | 40 | |||
3182 | 41 | |||
def modprobe(module, persist=True):
    """Load a kernel module and configure for auto-load on reboot."""
    log('Loading kernel module %s' % module, level=INFO)
    subprocess.check_call(['modprobe', module])
    if persist:
        # record the module so it is reloaded after a reboot
        persistent_modprobe(module)
3192 | 51 | |||
3193 | 52 | |||
def rmmod(module, force=False):
    """Remove a module from the linux kernel

    :param str module: module name to unload.
    :param bool force: pass -f to rmmod (dangerous; may crash the kernel).
    """
    cmd = ['rmmod', '-f', module] if force else ['rmmod', module]
    log('Removing kernel module %s' % module, level=INFO)
    return subprocess.check_call(cmd)
3202 | 61 | |||
3203 | 62 | |||
def lsmod():
    """Return the output of ``lsmod``: the currently loaded kernel modules."""
    return subprocess.check_output(
        ['lsmod'], universal_newlines=True)
3208 | 67 | |||
3209 | 68 | |||
def is_module_loaded(module):
    """Checks if a kernel module is already loaded.

    :param str module: module name exactly as shown by ``lsmod``.
    :returns: True if the module appears in the loaded-module list.
    """
    # escape the name so regex metacharacters in it are matched literally
    matches = re.findall(r'^%s[ ]+' % re.escape(module), lsmod(), re.M)
    return len(matches) > 0
3214 | diff --git a/hooks/charmhelpers/core/kernel_factory/__init__.py b/hooks/charmhelpers/core/kernel_factory/__init__.py | |||
3215 | 0 | new file mode 100644 | 73 | new file mode 100644 |
3216 | index 0000000..e69de29 | |||
3217 | --- /dev/null | |||
3218 | +++ b/hooks/charmhelpers/core/kernel_factory/__init__.py | |||
3219 | diff --git a/hooks/charmhelpers/core/kernel_factory/centos.py b/hooks/charmhelpers/core/kernel_factory/centos.py | |||
3220 | 1 | new file mode 100644 | 74 | new file mode 100644 |
3221 | index 0000000..1c402c1 | |||
3222 | --- /dev/null | |||
3223 | +++ b/hooks/charmhelpers/core/kernel_factory/centos.py | |||
3224 | @@ -0,0 +1,17 @@ | |||
3225 | 1 | import subprocess | ||
3226 | 2 | import os | ||
3227 | 3 | |||
3228 | 4 | |||
def persistent_modprobe(module):
    """Register *module* in /etc/rc.modules so it is loaded at boot.

    Creates /etc/rc.modules as an executable script if it does not yet
    exist, then appends a ``modprobe <module>`` line unless one is
    already present.

    :param module: name of the kernel module to persist
    """
    if not os.path.exists('/etc/rc.modules'):
        # Close the handle explicitly rather than leaking it, and use
        # octal 0o111 (--x--x--x). The previous code passed decimal
        # 111, which is 0o157 -- not the intended execute-only mode.
        open('/etc/rc.modules', 'a').close()
        os.chmod('/etc/rc.modules', 0o111)
    with open('/etc/rc.modules', 'r+') as modules:
        if module not in modules.read():
            modules.write('modprobe %s\n' % module)
3237 | 13 | |||
3238 | 14 | |||
def update_initramfs(version='all'):
    """Regenerate the initramfs image(s) using dracut (CentOS)."""
    cmd = ["dracut", "-f", version]
    return subprocess.check_call(cmd)
3242 | diff --git a/hooks/charmhelpers/core/kernel_factory/ubuntu.py b/hooks/charmhelpers/core/kernel_factory/ubuntu.py | |||
3243 | 0 | new file mode 100644 | 18 | new file mode 100644 |
3244 | index 0000000..3de372f | |||
3245 | --- /dev/null | |||
3246 | +++ b/hooks/charmhelpers/core/kernel_factory/ubuntu.py | |||
3247 | @@ -0,0 +1,13 @@ | |||
3248 | 1 | import subprocess | ||
3249 | 2 | |||
3250 | 3 | |||
def persistent_modprobe(module):
    """Register *module* in /etc/modules so it is loaded at boot.

    Compares whole lines rather than doing a raw substring test: the
    previous ``module not in modules.read()`` check would falsely skip
    writing e.g. ``ip`` when ``ipv6`` was already listed.

    :param module: name of the kernel module to persist
    """
    with open('/etc/modules', 'r+') as modules:
        entries = [line.strip() for line in modules.read().splitlines()]
        if module not in entries:
            modules.write(module + "\n")
3256 | 9 | |||
3257 | 10 | |||
def update_initramfs(version='all'):
    """Rebuild the initramfs for the given kernel version (Ubuntu).

    :param version: kernel version to update, or 'all'
    """
    cmd = ["update-initramfs", "-k", version, "-u"]
    return subprocess.check_call(cmd)
3261 | diff --git a/hooks/charmhelpers/core/services/__init__.py b/hooks/charmhelpers/core/services/__init__.py | |||
3262 | 0 | new file mode 100644 | 14 | new file mode 100644 |
3263 | index 0000000..61fd074 | |||
3264 | --- /dev/null | |||
3265 | +++ b/hooks/charmhelpers/core/services/__init__.py | |||
3266 | @@ -0,0 +1,16 @@ | |||
3267 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
3268 | 2 | # | ||
3269 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
3270 | 4 | # you may not use this file except in compliance with the License. | ||
3271 | 5 | # You may obtain a copy of the License at | ||
3272 | 6 | # | ||
3273 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
3274 | 8 | # | ||
3275 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
3276 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
3277 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
3278 | 12 | # See the License for the specific language governing permissions and | ||
3279 | 13 | # limitations under the License. | ||
3280 | 14 | |||
3281 | 15 | from .base import * # NOQA | ||
3282 | 16 | from .helpers import * # NOQA | ||
3283 | diff --git a/hooks/charmhelpers/core/services/base.py b/hooks/charmhelpers/core/services/base.py | |||
3284 | 0 | new file mode 100644 | 17 | new file mode 100644 |
3285 | index 0000000..ca9dc99 | |||
3286 | --- /dev/null | |||
3287 | +++ b/hooks/charmhelpers/core/services/base.py | |||
3288 | @@ -0,0 +1,351 @@ | |||
3289 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
3290 | 2 | # | ||
3291 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
3292 | 4 | # you may not use this file except in compliance with the License. | ||
3293 | 5 | # You may obtain a copy of the License at | ||
3294 | 6 | # | ||
3295 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
3296 | 8 | # | ||
3297 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
3298 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
3299 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
3300 | 12 | # See the License for the specific language governing permissions and | ||
3301 | 13 | # limitations under the License. | ||
3302 | 14 | |||
3303 | 15 | import os | ||
3304 | 16 | import json | ||
3305 | 17 | from inspect import getargspec | ||
3306 | 18 | from collections import Iterable, OrderedDict | ||
3307 | 19 | |||
3308 | 20 | from charmhelpers.core import host | ||
3309 | 21 | from charmhelpers.core import hookenv | ||
3310 | 22 | |||
3311 | 23 | |||
3312 | 24 | __all__ = ['ServiceManager', 'ManagerCallback', | ||
3313 | 25 | 'PortManagerCallback', 'open_ports', 'close_ports', 'manage_ports', | ||
3314 | 26 | 'service_restart', 'service_stop'] | ||
3315 | 27 | |||
3316 | 28 | |||
class ServiceManager(object):
    # Orchestrates charm services for the current hook: collects required
    # data, fires data_ready/data_lost/start/stop callbacks, and persists
    # readiness state in READY-SERVICES.json between hook invocations.
    def __init__(self, services=None):
        """
        Register a list of services, given their definitions.

        Service definitions are dicts in the following formats (all keys except
        'service' are optional)::

            {
                "service": <service name>,
                "required_data": <list of required data contexts>,
                "provided_data": <list of provided data contexts>,
                "data_ready": <one or more callbacks>,
                "data_lost": <one or more callbacks>,
                "start": <one or more callbacks>,
                "stop": <one or more callbacks>,
                "ports": <list of ports to manage>,
            }

        The 'required_data' list should contain dicts of required data (or
        dependency managers that act like dicts and know how to collect the data).
        Only when all items in the 'required_data' list are populated are the list
        of 'data_ready' and 'start' callbacks executed.  See `is_ready()` for more
        information.

        The 'provided_data' list should contain relation data providers, most likely
        a subclass of :class:`charmhelpers.core.services.helpers.RelationContext`,
        that will indicate a set of data to set on a given relation.

        The 'data_ready' value should be either a single callback, or a list of
        callbacks, to be called when all items in 'required_data' pass `is_ready()`.
        Each callback will be called with the service name as the only parameter.
        After all of the 'data_ready' callbacks are called, the 'start' callbacks
        are fired.

        The 'data_lost' value should be either a single callback, or a list of
        callbacks, to be called when a 'required_data' item no longer passes
        `is_ready()`.  Each callback will be called with the service name as the
        only parameter.  After all of the 'data_lost' callbacks are called,
        the 'stop' callbacks are fired.

        The 'start' value should be either a single callback, or a list of
        callbacks, to be called when starting the service, after the 'data_ready'
        callbacks are complete.  Each callback will be called with the service
        name as the only parameter.  This defaults to
        `[host.service_start, services.open_ports]`.

        The 'stop' value should be either a single callback, or a list of
        callbacks, to be called when stopping the service.  If the service is
        being stopped because it no longer has all of its 'required_data', this
        will be called after all of the 'data_lost' callbacks are complete.
        Each callback will be called with the service name as the only parameter.
        This defaults to `[services.close_ports, host.service_stop]`.

        The 'ports' value should be a list of ports to manage.  The default
        'start' handler will open the ports after the service is started,
        and the default 'stop' handler will close the ports prior to stopping
        the service.


        Examples:

        The following registers an Upstart service called bingod that depends on
        a mongodb relation and which runs a custom `db_migrate` function prior to
        restarting the service, and a Runit service called spadesd::

            manager = services.ServiceManager([
                {
                    'service': 'bingod',
                    'ports': [80, 443],
                    'required_data': [MongoRelation(), config(), {'my': 'data'}],
                    'data_ready': [
                        services.template(source='bingod.conf'),
                        services.template(source='bingod.ini',
                                          target='/etc/bingod.ini',
                                          owner='bingo', perms=0400),
                    ],
                },
                {
                    'service': 'spadesd',
                    'data_ready': services.template(source='spadesd_run.j2',
                                                    target='/etc/sv/spadesd/run',
                                                    perms=0555),
                    'start': runit_start,
                    'stop': runit_stop,
                },
            ])
            manager.manage()
        """
        # Readiness state persisted across hooks; loaded lazily on first use.
        self._ready_file = os.path.join(hookenv.charm_dir(), 'READY-SERVICES.json')
        self._ready = None
        # OrderedDict preserves registration order for deterministic
        # callback/event firing across services.
        self.services = OrderedDict()
        for service in services or []:
            service_name = service['service']
            self.services[service_name] = service

    def manage(self):
        """
        Handle the current hook by doing The Right Thing with the registered services.
        """
        hookenv._run_atstart()
        try:
            hook_name = hookenv.hook_name()
            if hook_name == 'stop':
                self.stop_services()
            else:
                self.reconfigure_services()
                self.provide_data()
        except SystemExit as x:
            # A clean sys.exit() from a callback still gets atexit handling.
            # NOTE(review): the SystemExit is swallowed here (not re-raised),
            # and execution then falls through to the unconditional
            # _run_atexit() below, so atexit callbacks may run twice on a
            # clean exit -- confirm this matches upstream intent.
            if x.code is None or x.code == 0:
                hookenv._run_atexit()
        hookenv._run_atexit()

    def provide_data(self):
        """
        Set the relation data for each provider in the ``provided_data`` list.

        A provider must have a `name` attribute, which indicates which relation
        to set data on, and a `provide_data()` method, which returns a dict of
        data to set.

        The `provide_data()` method can optionally accept two parameters:

        * ``remote_service`` The name of the remote service that the data will
          be provided to.  The `provide_data()` method will be called once
          for each connected service (not unit).  This allows the method to
          tailor its data to the given service.
        * ``service_ready`` Whether or not the service definition had all of
          its requirements met, and thus the ``data_ready`` callbacks run.

        Note that the ``provided_data`` methods are now called **after** the
        ``data_ready`` callbacks are run.  This gives the ``data_ready`` callbacks
        a chance to generate any data necessary for the providing to the remote
        services.
        """
        for service_name, service in self.services.items():
            service_ready = self.is_ready(service_name)
            for provider in service.get('provided_data', []):
                for relid in hookenv.relation_ids(provider.name):
                    units = hookenv.related_units(relid)
                    if not units:
                        continue
                    # One call per connected *service*; the remote service
                    # name is derived from the first unit's name.
                    remote_service = units[0].split('/')[0]
                    # Inspect the provider's signature to decide whether to
                    # pass the optional (remote_service, service_ready) args.
                    argspec = getargspec(provider.provide_data)
                    if len(argspec.args) > 1:
                        data = provider.provide_data(remote_service, service_ready)
                    else:
                        data = provider.provide_data()
                    if data:
                        hookenv.relation_set(relid, data)

    def reconfigure_services(self, *service_names):
        """
        Update all files for one or more registered services, and,
        if ready, optionally restart them.

        If no service names are given, reconfigures all registered services.
        """
        for service_name in service_names or self.services.keys():
            if self.is_ready(service_name):
                self.fire_event('data_ready', service_name)
                self.fire_event('start', service_name, default=[
                    service_restart,
                    manage_ports])
                self.save_ready(service_name)
            else:
                # Only fire data_lost/stop if the service had previously
                # been ready -- avoids stopping something never started.
                if self.was_ready(service_name):
                    self.fire_event('data_lost', service_name)
                self.fire_event('stop', service_name, default=[
                    manage_ports,
                    service_stop])
                self.save_lost(service_name)

    def stop_services(self, *service_names):
        """
        Stop one or more registered services, by name.

        If no service names are given, stops all registered services.
        """
        for service_name in service_names or self.services.keys():
            self.fire_event('stop', service_name, default=[
                manage_ports,
                service_stop])

    def get_service(self, service_name):
        """
        Given the name of a registered service, return its service definition.

        :raises KeyError: if the service was never registered
        """
        service = self.services.get(service_name)
        if not service:
            raise KeyError('Service not registered: %s' % service_name)
        return service

    def fire_event(self, event_name, service_name, default=None):
        """
        Fire a data_ready, data_lost, start, or stop event on a given service.
        """
        service = self.get_service(service_name)
        callbacks = service.get(event_name, default)
        if not callbacks:
            return
        # Normalize a single callback to a one-element list.
        if not isinstance(callbacks, Iterable):
            callbacks = [callbacks]
        for callback in callbacks:
            # ManagerCallback subclasses receive the manager and event
            # name; plain callables get only the service name.
            if isinstance(callback, ManagerCallback):
                callback(self, service_name, event_name)
            else:
                callback(service_name)

    def is_ready(self, service_name):
        """
        Determine if a registered service is ready, by checking its 'required_data'.

        A 'required_data' item can be any mapping type, and is considered ready
        if `bool(item)` evaluates as True.
        """
        service = self.get_service(service_name)
        reqs = service.get('required_data', [])
        return all(bool(req) for req in reqs)

    def _load_ready_file(self):
        # Lazily populate self._ready from the JSON state file; a missing
        # file means no service has ever been marked ready.
        if self._ready is not None:
            return
        if os.path.exists(self._ready_file):
            with open(self._ready_file) as fp:
                self._ready = set(json.load(fp))
        else:
            self._ready = set()

    def _save_ready_file(self):
        # No-op until _load_ready_file has initialized the state.
        if self._ready is None:
            return
        with open(self._ready_file, 'w') as fp:
            json.dump(list(self._ready), fp)

    def save_ready(self, service_name):
        """
        Save an indicator that the given service is now data_ready.
        """
        self._load_ready_file()
        self._ready.add(service_name)
        self._save_ready_file()

    def save_lost(self, service_name):
        """
        Save an indicator that the given service is no longer data_ready.
        """
        self._load_ready_file()
        self._ready.discard(service_name)
        self._save_ready_file()

    def was_ready(self, service_name):
        """
        Determine if the given service was previously data_ready.
        """
        self._load_ready_file()
        return service_name in self._ready
3575 | 287 | |||
class ManagerCallback(object):
    """
    Base class for callbacks that need access to the `ServiceManager`
    itself, not just the service name.

    Subclasses should implement `__call__` which should accept three parameters:

    * `manager` The `ServiceManager` instance
    * `service_name` The name of the service it's being triggered for
    * `event_name` The name of the event that this callback is handling
    """
    def __call__(self, manager, service_name, event_name):
        # Subclasses must override; the base class is abstract.
        raise NotImplementedError()
3589 | 301 | |||
3590 | 302 | |||
class PortManagerCallback(ManagerCallback):
    """
    Callback class that will open or close ports, for use as either
    a start or stop action.
    """
    def __call__(self, manager, service_name, event_name):
        service = manager.get_service(service_name)
        # Ports the service should expose now, per its definition.
        new_ports = service.get('ports', [])
        # Per-service state file recording the ports opened last time, so
        # ports removed from the definition can be closed on this run.
        port_file = os.path.join(hookenv.charm_dir(), '.{}.ports'.format(service_name))
        if os.path.exists(port_file):
            with open(port_file) as fp:
                old_ports = fp.read().split(',')
            for old_port in old_ports:
                if bool(old_port):  # skip the empty string ''.split(',') yields
                    old_port = int(old_port)
                    if old_port not in new_ports:
                        hookenv.close_port(old_port)
        # Persist the current port list for the next invocation.
        with open(port_file, 'w') as fp:
            fp.write(','.join(str(port) for port in new_ports))
        for port in new_ports:
            if event_name == 'start':
                hookenv.open_port(port)
            elif event_name == 'stop':
                hookenv.close_port(port)
3615 | 327 | |||
3616 | 328 | |||
def service_stop(service_name):
    """
    Stop a service only if it is actually running, avoiding spurious
    "unknown service" messages from host.service_stop in the logs.
    """
    if not host.service_running(service_name):
        return
    host.service_stop(service_name)
3624 | 336 | |||
3625 | 337 | |||
def service_restart(service_name):
    """
    Restart a service if running, otherwise start it -- but only when
    the service is installed at all, to prevent spurious "unknown
    service" messages in the logs.
    """
    if not host.service_available(service_name):
        return
    if host.service_running(service_name):
        host.service_restart(service_name)
    else:
        host.service_start(service_name)
3636 | 348 | |||
3637 | 349 | |||
# Convenience aliases: a single PortManagerCallback instance serves as the
# default open/close/manage-ports handler for start and stop events.
open_ports = close_ports = manage_ports = PortManagerCallback()
3640 | diff --git a/hooks/charmhelpers/core/services/helpers.py b/hooks/charmhelpers/core/services/helpers.py | |||
3641 | 0 | new file mode 100644 | 352 | new file mode 100644 |
3642 | index 0000000..3e6e30d | |||
3643 | --- /dev/null | |||
3644 | +++ b/hooks/charmhelpers/core/services/helpers.py | |||
3645 | @@ -0,0 +1,290 @@ | |||
3646 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
3647 | 2 | # | ||
3648 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
3649 | 4 | # you may not use this file except in compliance with the License. | ||
3650 | 5 | # You may obtain a copy of the License at | ||
3651 | 6 | # | ||
3652 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
3653 | 8 | # | ||
3654 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
3655 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
3656 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
3657 | 12 | # See the License for the specific language governing permissions and | ||
3658 | 13 | # limitations under the License. | ||
3659 | 14 | |||
3660 | 15 | import os | ||
3661 | 16 | import yaml | ||
3662 | 17 | |||
3663 | 18 | from charmhelpers.core import hookenv | ||
3664 | 19 | from charmhelpers.core import host | ||
3665 | 20 | from charmhelpers.core import templating | ||
3666 | 21 | |||
3667 | 22 | from charmhelpers.core.services.base import ManagerCallback | ||
3668 | 23 | |||
3669 | 24 | |||
3670 | 25 | __all__ = ['RelationContext', 'TemplateCallback', | ||
3671 | 26 | 'render_template', 'template'] | ||
3672 | 27 | |||
3673 | 28 | |||
class RelationContext(dict):
    """
    Base class for a context generator that gets relation data from juju.

    Subclasses must provide the attributes `name`, which is the name of the
    interface of interest, `interface`, which is the type of the interface of
    interest, and `required_keys`, which is the set of keys required for the
    relation to be considered complete.  The data for all interfaces matching
    the `name` attribute that are complete will used to populate the dictionary
    values (see `get_data`, below).

    The generated context will be namespaced under the relation :attr:`name`,
    to prevent potential naming conflicts.

    :param str name: Override the relation :attr:`name`, since it can vary from charm to charm
    :param list additional_required_keys: Extend the list of :attr:`required_keys`
    """
    name = None
    interface = None

    def __init__(self, name=None, additional_required_keys=None):
        # Subclasses typically set required_keys before calling this
        # __init__; fall back to an empty list otherwise.
        if not hasattr(self, 'required_keys'):
            self.required_keys = []

        if name is not None:
            self.name = name
        if additional_required_keys:
            self.required_keys.extend(additional_required_keys)
        # Populate self[self.name] immediately from the current relations.
        self.get_data()

    def __bool__(self):
        """
        Returns True if all of the required_keys are available.
        """
        return self.is_ready()

    # Python 2 truthiness protocol delegates to __bool__.
    __nonzero__ = __bool__

    def __repr__(self):
        return super(RelationContext, self).__repr__()

    def is_ready(self):
        """
        Returns True if all of the `required_keys` are available from any units.
        """
        # get_data only appends unit data that passed _is_ready, so a
        # non-empty list under self.name implies readiness.
        ready = len(self.get(self.name, [])) > 0
        if not ready:
            hookenv.log('Incomplete relation: {}'.format(self.__class__.__name__), hookenv.DEBUG)
        return ready

    def _is_ready(self, unit_data):
        """
        Helper method that tests a set of relation data and returns True if
        all of the `required_keys` are present.
        """
        return set(unit_data.keys()).issuperset(set(self.required_keys))

    def get_data(self):
        """
        Retrieve the relation data for each unit involved in a relation and,
        if complete, store it in a list under `self[self.name]`.  This
        is automatically called when the RelationContext is instantiated.

        The units are sorted lexographically first by the service ID, then by
        the unit ID.  Thus, if an interface has two other services, 'db:1'
        and 'db:2', with 'db:1' having two units, 'wordpress/0' and 'wordpress/1',
        and 'db:2' having one unit, 'mediawiki/0', all of which have a complete
        set of data, the relation data for the units will be stored in the
        order: 'wordpress/0', 'wordpress/1', 'mediawiki/0'.

        If you only care about a single unit on the relation, you can just
        access it as `{{ interface[0]['key'] }}`.  However, if you can at all
        support multiple units on a relation, you should iterate over the list,
        like::

            {% for unit in interface -%}
                {{ unit['key'] }}{% if not loop.last %},{% endif %}
            {%- endfor %}

        Note that since all sets of relation data from all related services and
        units are in a single list, if you need to know which service or unit a
        set of data came from, you'll need to extend this class to preserve
        that information.
        """
        if not hookenv.relation_ids(self.name):
            return

        ns = self.setdefault(self.name, [])
        for rid in sorted(hookenv.relation_ids(self.name)):
            for unit in sorted(hookenv.related_units(rid)):
                reldata = hookenv.relation_get(rid=rid, unit=unit)
                # Only complete unit data (all required_keys present) is
                # exposed to templates/consumers.
                if self._is_ready(reldata):
                    ns.append(reldata)

    def provide_data(self):
        """
        Return data to be relation_set for this interface.
        """
        return {}
3773 | 128 | |||
3774 | 129 | |||
class MysqlRelation(RelationContext):
    """
    Relation context for the `mysql` interface.

    :param str name: Override the relation :attr:`name`, since it can vary from charm to charm
    :param list additional_required_keys: Extend the list of :attr:`required_keys`
    """
    name = 'db'
    interface = 'mysql'

    def __init__(self, *args, **kwargs):
        # All four keys must be present before the relation is "ready".
        self.required_keys = ['host', 'user', 'password', 'database']
        super(MysqlRelation, self).__init__(*args, **kwargs)
3788 | 143 | |||
3789 | 144 | |||
class HttpRelation(RelationContext):
    """
    Relation context for the `http` interface.

    :param str name: Override the relation :attr:`name`, since it can vary from charm to charm
    :param list additional_required_keys: Extend the list of :attr:`required_keys`
    """
    name = 'website'
    interface = 'http'

    def __init__(self, *args, **kwargs):
        # A website relation is complete once host and port are known.
        self.required_keys = ['host', 'port']
        super(HttpRelation, self).__init__(*args, **kwargs)

    def provide_data(self):
        # Advertise this unit's address on the default HTTP port.
        return {
            'host': hookenv.unit_get('private-address'),
            'port': 80,
        }
3809 | 164 | |||
3810 | 165 | |||
class RequiredConfig(dict):
    """
    Data context that loads config options with one or more mandatory options.

    Once the required options have been changed from their default values, all
    config options will be available, namespaced under `config` to prevent
    potential naming conflicts (for example, between a config option and a
    relation property).

    :param list *args: List of options that must be changed from their default values.
    """

    def __init__(self, *args):
        self.required_options = args
        self['config'] = hookenv.config()
        with open(os.path.join(hookenv.charm_dir(), 'config.yaml')) as fp:
            # safe_load: config.yaml is plain data; yaml.load without an
            # explicit Loader is deprecated and can construct arbitrary
            # Python objects.
            self.config = yaml.safe_load(fp).get('options', {})

    def __bool__(self):
        """Return True only when every required option has been changed
        from its declared default (and is non-empty)."""
        for option in self.required_options:
            if option not in self['config']:
                return False
            current_value = self['config'][option]
            default_value = self.config[option].get('default')
            if current_value == default_value:
                return False
            # Treat None and '' as interchangeable "unset" values.
            if current_value in (None, '') and default_value in (None, ''):
                return False
        return True

    def __nonzero__(self):
        # Python 2 truthiness protocol.
        return self.__bool__()
3843 | 198 | |||
3844 | 199 | |||
class StoredContext(dict):
    """
    A data context that always returns the data that it was first created with.

    This is useful to do a one-time generation of things like passwords, that
    will thereafter use the same value that was originally generated, instead
    of generating a new value each time it is run.
    """
    def __init__(self, file_name, config_data):
        """
        If the file exists, populate `self` with the data from the file.
        Otherwise, populate with the given data and persist it to the file.
        """
        if os.path.exists(file_name):
            self.update(self.read_context(file_name))
        else:
            self.store_context(file_name, config_data)
            self.update(config_data)

    def store_context(self, file_name, config_data):
        """Persist config_data as YAML, mode 0600 (may hold secrets)."""
        if not os.path.isabs(file_name):
            file_name = os.path.join(hookenv.charm_dir(), file_name)
        with open(file_name, 'w') as file_stream:
            # Restrict permissions before writing any content.
            os.fchmod(file_stream.fileno(), 0o600)
            yaml.dump(config_data, file_stream)

    def read_context(self, file_name):
        """Load previously stored context data from the YAML file.

        :raises OSError: if the file exists but is empty
        """
        if not os.path.isabs(file_name):
            file_name = os.path.join(hookenv.charm_dir(), file_name)
        with open(file_name, 'r') as file_stream:
            # safe_load: stored data is plain scalars/mappings written by
            # store_context; yaml.load without a Loader is deprecated and
            # can execute arbitrary object construction.
            data = yaml.safe_load(file_stream)
            if not data:
                raise OSError("%s is empty" % file_name)
            return data
3879 | 234 | |||
3880 | 235 | |||
class TemplateCallback(ManagerCallback):
    """
    Callback class that will render a Jinja2 template, for use as a ready
    action.

    :param str source: The template source file, relative to
        `$CHARM_DIR/templates`
    :param str target: The target to write the rendered template to (or None)
    :param str owner: The owner of the rendered file
    :param str group: The group of the rendered file
    :param int perms: The permissions of the rendered file
    :param partial on_change_action: functools partial to be executed when
        rendered file changes
    :param jinja2 loader template_loader: A jinja2 template loader

    :return str: The rendered template
    """
    def __init__(self, source, target,
                 owner='root', group='root', perms=0o444,
                 on_change_action=None, template_loader=None):
        self.source = source
        self.target = target
        self.owner = owner
        self.group = group
        self.perms = perms
        self.on_change_action = on_change_action
        self.template_loader = template_loader

    def __call__(self, manager, service_name, event_name):
        # Remember the target's current checksum so that, after rendering,
        # we can tell whether anything actually changed on disk.
        pre_checksum = ''
        if self.on_change_action and os.path.isfile(self.target):
            pre_checksum = host.file_hash(self.target)

        # Merge every required_data context into one mapping; the merged
        # data is additionally exposed under the 'ctx' key.
        template_context = {'ctx': {}}
        service = manager.get_service(service_name)
        for data in service.get('required_data', []):
            template_context.update(data)
            template_context['ctx'].update(data)

        rendered = templating.render(self.source, self.target,
                                     template_context,
                                     self.owner, self.group, self.perms,
                                     template_loader=self.template_loader)

        if self.on_change_action:
            if pre_checksum == host.file_hash(self.target):
                hookenv.log(
                    'No change detected: {}'.format(self.target),
                    hookenv.DEBUG)
            else:
                self.on_change_action()

        return rendered
3932 | 287 | |||
3933 | 288 | |||
# Convenience aliases for templates: both names refer to the
# TemplateCallback class defined above.
render_template = template = TemplateCallback
3936 | diff --git a/hooks/charmhelpers/core/strutils.py b/hooks/charmhelpers/core/strutils.py | |||
3937 | 0 | new file mode 100644 | 291 | new file mode 100644 |
3938 | index 0000000..685dabd | |||
3939 | --- /dev/null | |||
3940 | +++ b/hooks/charmhelpers/core/strutils.py | |||
3941 | @@ -0,0 +1,123 @@ | |||
3942 | 1 | #!/usr/bin/env python | ||
3943 | 2 | # -*- coding: utf-8 -*- | ||
3944 | 3 | |||
3945 | 4 | # Copyright 2014-2015 Canonical Limited. | ||
3946 | 5 | # | ||
3947 | 6 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
3948 | 7 | # you may not use this file except in compliance with the License. | ||
3949 | 8 | # You may obtain a copy of the License at | ||
3950 | 9 | # | ||
3951 | 10 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
3952 | 11 | # | ||
3953 | 12 | # Unless required by applicable law or agreed to in writing, software | ||
3954 | 13 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
3955 | 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
3956 | 15 | # See the License for the specific language governing permissions and | ||
3957 | 16 | # limitations under the License. | ||
3958 | 17 | |||
3959 | 18 | import six | ||
3960 | 19 | import re | ||
3961 | 20 | |||
3962 | 21 | |||
def bool_from_string(value):
    """Interpret string value as boolean.

    Returns True if value translates to True otherwise False.

    :raises ValueError: if value is not a string, or is a string that does
        not translate to a boolean
    """
    if not isinstance(value, six.string_types):
        raise ValueError(
            "Unable to interpret non-string value '%s' as boolean" % (value))

    normalized = six.text_type(value).strip().lower()

    truthy = ('y', 'yes', 'true', 't', 'on')
    falsey = ('n', 'no', 'false', 'f', 'off')
    if normalized in truthy:
        return True
    if normalized in falsey:
        return False

    raise ValueError(
        "Unable to interpret string value '%s' as boolean" % (normalized))
3983 | 42 | |||
3984 | 43 | |||
def bytes_from_string(value):
    """Interpret a human readable string (e.g. '10KB', '2M') as bytes.

    Suffixes K/KB, M/MB, G/GB, T/TB and P/PB denote powers of 1024.

    :param value: string such as '512M'
    :returns: int number of bytes
    :raises ValueError: if value is not a string or cannot be parsed
    :raises KeyError: if the suffix is not one of the recognised units
    """
    BYTE_POWER = {
        'K': 1,
        'KB': 1,
        'M': 2,
        'MB': 2,
        'G': 3,
        'GB': 3,
        'T': 4,
        'TB': 4,
        'P': 5,
        'PB': 5,
    }
    if isinstance(value, six.string_types):
        value = six.text_type(value)
    else:
        # Bug fix: this message previously said 'as boolean' — a copy-paste
        # from bool_from_string.
        msg = "Unable to interpret non-string value '%s' as bytes" % (value)
        raise ValueError(msg)
    matches = re.match("([0-9]+)([a-zA-Z]+)", value)
    if not matches:
        msg = "Unable to interpret string value '%s' as bytes" % (value)
        raise ValueError(msg)
    return int(matches.group(1)) * (1024 ** BYTE_POWER[matches.group(2)])
4012 | 71 | |||
4013 | 72 | |||
class BasicStringComparator(object):
    """Rich comparisons for strings whose ordering is defined by position
    in a class-supplied sequence rather than alphabetically — e.g.
    OpenStack or Ubuntu release names after the z-wrap.

    Subclasses must set ``_list`` to the ordered sequence of valid items.
    """

    _list = None

    def __init__(self, item):
        if self._list is None:
            raise Exception("Must define the _list in the class definition!")
        try:
            self.index = self._list.index(item)
        except Exception:
            raise KeyError("Item '{}' is not in list '{}'"
                           .format(item, self._list))

    def _rank(self, other):
        # Position of `other` within the ordering. `other` may be a plain
        # string or another comparator instance (whose __eq__ against
        # strings resolves it during list.index's scan).
        return self._list.index(other)

    def __eq__(self, other):
        assert isinstance(other, str) or isinstance(other, self.__class__)
        return self.index == self._rank(other)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __lt__(self, other):
        assert isinstance(other, str) or isinstance(other, self.__class__)
        return self.index < self._rank(other)

    def __gt__(self, other):
        assert isinstance(other, str) or isinstance(other, self.__class__)
        return self.index > self._rank(other)

    def __ge__(self, other):
        return not self.__lt__(other)

    def __le__(self, other):
        return not self.__gt__(other)

    def __str__(self):
        """Return the item string at this comparator's index so instances
        can be used interchangeably with the raw item in messages.

        @returns: <string>
        """
        return self._list[self.index]
4065 | diff --git a/hooks/charmhelpers/core/sysctl.py b/hooks/charmhelpers/core/sysctl.py | |||
4066 | 0 | new file mode 100644 | 124 | new file mode 100644 |
4067 | index 0000000..6e413e3 | |||
4068 | --- /dev/null | |||
4069 | +++ b/hooks/charmhelpers/core/sysctl.py | |||
4070 | @@ -0,0 +1,54 @@ | |||
4071 | 1 | #!/usr/bin/env python | ||
4072 | 2 | # -*- coding: utf-8 -*- | ||
4073 | 3 | |||
4074 | 4 | # Copyright 2014-2015 Canonical Limited. | ||
4075 | 5 | # | ||
4076 | 6 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
4077 | 7 | # you may not use this file except in compliance with the License. | ||
4078 | 8 | # You may obtain a copy of the License at | ||
4079 | 9 | # | ||
4080 | 10 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
4081 | 11 | # | ||
4082 | 12 | # Unless required by applicable law or agreed to in writing, software | ||
4083 | 13 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
4084 | 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
4085 | 15 | # See the License for the specific language governing permissions and | ||
4086 | 16 | # limitations under the License. | ||
4087 | 17 | |||
4088 | 18 | import yaml | ||
4089 | 19 | |||
4090 | 20 | from subprocess import check_call | ||
4091 | 21 | |||
4092 | 22 | from charmhelpers.core.hookenv import ( | ||
4093 | 23 | log, | ||
4094 | 24 | DEBUG, | ||
4095 | 25 | ERROR, | ||
4096 | 26 | ) | ||
4097 | 27 | |||
4098 | 28 | __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>' | ||
4099 | 29 | |||
4100 | 30 | |||
def create(sysctl_dict, sysctl_file):
    """Creates a sysctl.conf file from a YAML associative array

    :param sysctl_dict: a YAML-formatted string of sysctl options eg "{ 'kernel.max_pid': 1337 }"
    :type sysctl_dict: str
    :param sysctl_file: path to the sysctl file to be saved
    :type sysctl_file: str or unicode
    :returns: None
    """
    try:
        # Guard against empty input: yaml.safe_load('') returns None,
        # which previously crashed on .items() below.
        sysctl_dict_parsed = yaml.safe_load(sysctl_dict) or {}
    except yaml.YAMLError:
        log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict),
            level=ERROR)
        return

    # Write one "key=value" line per setting.
    with open(sysctl_file, "w") as fd:
        for key, value in sysctl_dict_parsed.items():
            fd.write("{}={}\n".format(key, value))

    log("Updating sysctl_file: %s values: %s" % (sysctl_file, sysctl_dict_parsed),
        level=DEBUG)

    # Apply the settings immediately; raises CalledProcessError on failure.
    check_call(["sysctl", "-p", sysctl_file])
4125 | diff --git a/hooks/charmhelpers/core/templating.py b/hooks/charmhelpers/core/templating.py | |||
4126 | 0 | new file mode 100644 | 55 | new file mode 100644 |
4127 | index 0000000..7b801a3 | |||
4128 | --- /dev/null | |||
4129 | +++ b/hooks/charmhelpers/core/templating.py | |||
4130 | @@ -0,0 +1,84 @@ | |||
4131 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
4132 | 2 | # | ||
4133 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
4134 | 4 | # you may not use this file except in compliance with the License. | ||
4135 | 5 | # You may obtain a copy of the License at | ||
4136 | 6 | # | ||
4137 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
4138 | 8 | # | ||
4139 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
4140 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
4141 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
4142 | 12 | # See the License for the specific language governing permissions and | ||
4143 | 13 | # limitations under the License. | ||
4144 | 14 | |||
4145 | 15 | import os | ||
4146 | 16 | import sys | ||
4147 | 17 | |||
4148 | 18 | from charmhelpers.core import host | ||
4149 | 19 | from charmhelpers.core import hookenv | ||
4150 | 20 | |||
4151 | 21 | |||
def render(source, target, context, owner='root', group='root',
           perms=0o444, templates_dir=None, encoding='UTF-8', template_loader=None):
    """
    Render a Jinja2 template.

    The `source` path, if not absolute, is relative to the `templates_dir`.

    The `target` path should be absolute. It can also be `None`, in which
    case no file will be written.

    The context should be a dict containing the values to be replaced in the
    template.

    The `owner`, `group`, and `perms` options will be passed to `write_file`.

    If omitted, `templates_dir` defaults to the `templates` folder in the charm.

    The rendered template will be written to the file as well as being returned
    as a string.

    Note: Using this requires python-jinja2 or python3-jinja2; if it is not
    installed, calling this will attempt to use charmhelpers.fetch.apt_install
    to install it.
    """
    try:
        from jinja2 import FileSystemLoader, Environment, exceptions
    except ImportError:
        try:
            from charmhelpers.fetch import apt_install
        except ImportError:
            hookenv.log('Could not import jinja2, and could not import '
                        'charmhelpers.fetch to install it',
                        level=hookenv.ERROR)
            raise
        # Install the interpreter-appropriate jinja2 package, then retry.
        if sys.version_info.major == 2:
            apt_install('python-jinja2', fatal=True)
        else:
            apt_install('python3-jinja2', fatal=True)
        from jinja2 import FileSystemLoader, Environment, exceptions

    if template_loader:
        template_env = Environment(loader=template_loader)
    else:
        if templates_dir is None:
            templates_dir = os.path.join(hookenv.charm_dir(), 'templates')
        template_env = Environment(loader=FileSystemLoader(templates_dir))

    try:
        # (removed a no-op `source = source` statement that was here)
        template = template_env.get_template(source)
    except exceptions.TemplateNotFound as e:
        hookenv.log('Could not load template %s from %s.' %
                    (source, templates_dir),
                    level=hookenv.ERROR)
        raise e

    content = template.render(context)
    if target is not None:
        target_dir = os.path.dirname(target)
        if not os.path.exists(target_dir):
            # 0o755 on the directory: individual files (which may contain
            # secrets) must rely on their own `perms`, not the directory's.
            host.mkdir(target_dir, owner, group, perms=0o755)
        host.write_file(target, content.encode(encoding), owner, group, perms)
    return content
4215 | diff --git a/hooks/charmhelpers/core/unitdata.py b/hooks/charmhelpers/core/unitdata.py | |||
4216 | 0 | new file mode 100644 | 85 | new file mode 100644 |
4217 | index 0000000..54ec969 | |||
4218 | --- /dev/null | |||
4219 | +++ b/hooks/charmhelpers/core/unitdata.py | |||
4220 | @@ -0,0 +1,518 @@ | |||
4221 | 1 | #!/usr/bin/env python | ||
4222 | 2 | # -*- coding: utf-8 -*- | ||
4223 | 3 | # | ||
4224 | 4 | # Copyright 2014-2015 Canonical Limited. | ||
4225 | 5 | # | ||
4226 | 6 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
4227 | 7 | # you may not use this file except in compliance with the License. | ||
4228 | 8 | # You may obtain a copy of the License at | ||
4229 | 9 | # | ||
4230 | 10 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
4231 | 11 | # | ||
4232 | 12 | # Unless required by applicable law or agreed to in writing, software | ||
4233 | 13 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
4234 | 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
4235 | 15 | # See the License for the specific language governing permissions and | ||
4236 | 16 | # limitations under the License. | ||
4237 | 17 | # | ||
4238 | 18 | # Authors: | ||
4239 | 19 | # Kapil Thangavelu <kapil.foss@gmail.com> | ||
4240 | 20 | # | ||
4241 | 21 | """ | ||
4242 | 22 | Intro | ||
4243 | 23 | ----- | ||
4244 | 24 | |||
4245 | 25 | A simple way to store state in units. This provides a key value | ||
4246 | 26 | storage with support for versioned, transactional operation, | ||
4247 | 27 | and can calculate deltas from previous values to simplify unit logic | ||
4248 | 28 | when processing changes. | ||
4249 | 29 | |||
4250 | 30 | |||
4251 | 31 | Hook Integration | ||
4252 | 32 | ---------------- | ||
4253 | 33 | |||
4254 | 34 | There are several extant frameworks for hook execution, including | ||
4255 | 35 | |||
4256 | 36 | - charmhelpers.core.hookenv.Hooks | ||
4257 | 37 | - charmhelpers.core.services.ServiceManager | ||
4258 | 38 | |||
4259 | 39 | The storage classes are framework agnostic, one simple integration is | ||
4260 | 40 | via the HookData contextmanager. It will record the current hook | ||
4261 | 41 | execution environment (including relation data, config data, etc.), | ||
4262 | 42 | setup a transaction and allow easy access to the changes from | ||
4263 | 43 | previously seen values. One consequence of the integration is the | ||
4264 | 44 | reservation of particular keys ('rels', 'unit', 'env', 'config', | ||
4265 | 45 | 'charm_revisions') for their respective values. | ||
4266 | 46 | |||
4267 | 47 | Here's a fully worked integration example using hookenv.Hooks:: | ||
4268 | 48 | |||
4269 | 49 | from charmhelper.core import hookenv, unitdata | ||
4270 | 50 | |||
4271 | 51 | hook_data = unitdata.HookData() | ||
4272 | 52 | db = unitdata.kv() | ||
4273 | 53 | hooks = hookenv.Hooks() | ||
4274 | 54 | |||
4275 | 55 | @hooks.hook | ||
4276 | 56 | def config_changed(): | ||
4277 | 57 | # Print all changes to configuration from previously seen | ||
4278 | 58 | # values. | ||
4279 | 59 | for changed, (prev, cur) in hook_data.conf.items(): | ||
4280 | 60 | print('config changed', changed, | ||
4281 | 61 | 'previous value', prev, | ||
4282 | 62 | 'current value', cur) | ||
4283 | 63 | |||
4284 | 64 | # Get some unit specific bookeeping | ||
4285 | 65 | if not db.get('pkg_key'): | ||
4286 | 66 | key = urllib.urlopen('https://example.com/pkg_key').read() | ||
4287 | 67 | db.set('pkg_key', key) | ||
4288 | 68 | |||
4289 | 69 | # Directly access all charm config as a mapping. | ||
4290 | 70 | conf = db.getrange('config', True) | ||
4291 | 71 | |||
4292 | 72 | # Directly access all relation data as a mapping | ||
4293 | 73 | rels = db.getrange('rels', True) | ||
4294 | 74 | |||
4295 | 75 | if __name__ == '__main__': | ||
4296 | 76 | with hook_data(): | ||
4297 | 77 | hook.execute() | ||
4298 | 78 | |||
4299 | 79 | |||
4300 | 80 | A more basic integration is via the hook_scope context manager which simply | ||
4301 | 81 | manages transaction scope (and records hook name, and timestamp):: | ||
4302 | 82 | |||
4303 | 83 | >>> from unitdata import kv | ||
4304 | 84 | >>> db = kv() | ||
4305 | 85 | >>> with db.hook_scope('install'): | ||
4306 | 86 | ... # do work, in transactional scope. | ||
4307 | 87 | ... db.set('x', 1) | ||
4308 | 88 | >>> db.get('x') | ||
4309 | 89 | 1 | ||
4310 | 90 | |||
4311 | 91 | |||
4312 | 92 | Usage | ||
4313 | 93 | ----- | ||
4314 | 94 | |||
4315 | 95 | Values are automatically json de/serialized to preserve basic typing | ||
4316 | 96 | and complex data struct capabilities (dicts, lists, ints, booleans, etc). | ||
4317 | 97 | |||
4318 | 98 | Individual values can be manipulated via get/set:: | ||
4319 | 99 | |||
4320 | 100 | >>> kv.set('y', True) | ||
4321 | 101 | >>> kv.get('y') | ||
4322 | 102 | True | ||
4323 | 103 | |||
4324 | 104 | # We can set complex values (dicts, lists) as a single key. | ||
4325 | 105 | >>> kv.set('config', {'a': 1, 'b': True'}) | ||
4326 | 106 | |||
4327 | 107 | # Also supports returning dictionaries as a record which | ||
4328 | 108 | # provides attribute access. | ||
4329 | 109 | >>> config = kv.get('config', record=True) | ||
4330 | 110 | >>> config.b | ||
4331 | 111 | True | ||
4332 | 112 | |||
4333 | 113 | |||
4334 | 114 | Groups of keys can be manipulated with update/getrange:: | ||
4335 | 115 | |||
4336 | 116 | >>> kv.update({'z': 1, 'y': 2}, prefix="gui.") | ||
4337 | 117 | >>> kv.getrange('gui.', strip=True) | ||
4338 | 118 | {'z': 1, 'y': 2} | ||
4339 | 119 | |||
4340 | 120 | When updating values, its very helpful to understand which values | ||
4341 | 121 | have actually changed and how have they changed. The storage | ||
4342 | 122 | provides a delta method to provide for this:: | ||
4343 | 123 | |||
4344 | 124 | >>> data = {'debug': True, 'option': 2} | ||
4345 | 125 | >>> delta = kv.delta(data, 'config.') | ||
4346 | 126 | >>> delta.debug.previous | ||
4347 | 127 | None | ||
4348 | 128 | >>> delta.debug.current | ||
4349 | 129 | True | ||
4350 | 130 | >>> delta | ||
4351 | 131 | {'debug': (None, True), 'option': (None, 2)} | ||
4352 | 132 | |||
4353 | 133 | Note the delta method does not persist the actual change, it needs to | ||
4354 | 134 | be explicitly saved via 'update' method:: | ||
4355 | 135 | |||
4356 | 136 | >>> kv.update(data, 'config.') | ||
4357 | 137 | |||
4358 | 138 | Values modified in the context of a hook scope retain historical values | ||
4359 | 139 | associated to the hookname. | ||
4360 | 140 | |||
4361 | 141 | >>> with db.hook_scope('config-changed'): | ||
4362 | 142 | ... db.set('x', 42) | ||
4363 | 143 | >>> db.gethistory('x') | ||
4364 | 144 | [(1, u'x', 1, u'install', u'2015-01-21T16:49:30.038372'), | ||
4365 | 145 | (2, u'x', 42, u'config-changed', u'2015-01-21T16:49:30.038786')] | ||
4366 | 146 | |||
4367 | 147 | """ | ||
4368 | 148 | |||
4369 | 149 | import collections | ||
4370 | 150 | import contextlib | ||
4371 | 151 | import datetime | ||
4372 | 152 | import itertools | ||
4373 | 153 | import json | ||
4374 | 154 | import os | ||
4375 | 155 | import pprint | ||
4376 | 156 | import sqlite3 | ||
4377 | 157 | import sys | ||
4378 | 158 | |||
4379 | 159 | __author__ = 'Kapil Thangavelu <kapil.foss@gmail.com>' | ||
4380 | 160 | |||
4381 | 161 | |||
class Storage(object):
    """Simple key value database for local unit state within charms.

    Modifications are not persisted unless :meth:`flush` is called.

    To support dicts, lists, integer, floats, and booleans values
    are automatically json encoded/decoded.
    """
    def __init__(self, path=None):
        # Resolve the backing sqlite file: an explicit `path` wins, then
        # the UNIT_STATE_DB environment variable, then a default file in
        # the charm directory.
        self.db_path = path
        if path is None:
            if 'UNIT_STATE_DB' in os.environ:
                self.db_path = os.environ['UNIT_STATE_DB']
            else:
                self.db_path = os.path.join(
                    os.environ.get('CHARM_DIR', ''), '.unit-state.db')
        self.conn = sqlite3.connect('%s' % self.db_path)
        self.cursor = self.conn.cursor()
        # Set while inside hook_scope(); when set, every mutation is also
        # recorded in the kv_revisions history table.
        self.revision = None
        self._closed = False
        self._init()

    def close(self):
        """Roll back any uncommitted changes and close the connection.

        Safe to call more than once.
        """
        if self._closed:
            return
        # flush(False) rolls back rather than committing.
        self.flush(False)
        self.cursor.close()
        self.conn.close()
        self._closed = True

    def get(self, key, default=None, record=False):
        """Return the json-decoded value for `key`, or `default` if unset.

        :param bool record: wrap the decoded value in a Record (defined
            elsewhere in this module) to provide attribute access.
        """
        self.cursor.execute('select data from kv where key=?', [key])
        result = self.cursor.fetchone()
        if not result:
            return default
        if record:
            return Record(json.loads(result[0]))
        return json.loads(result[0])

    def getrange(self, key_prefix, strip=False):
        """
        Get a range of keys starting with a common prefix as a mapping of
        keys to values.

        :param str key_prefix: Common prefix among all keys
        :param bool strip: Optionally strip the common prefix from the key
            names in the returned dict
        :return dict: A (possibly empty) dict of key-value mappings
        """
        self.cursor.execute("select key, data from kv where key like ?",
                            ['%s%%' % key_prefix])
        result = self.cursor.fetchall()

        if not result:
            return {}
        if not strip:
            # Stripping zero characters leaves the keys untouched.
            key_prefix = ''
        return dict([
            (k[len(key_prefix):], json.loads(v)) for k, v in result])

    def update(self, mapping, prefix=""):
        """
        Set the values of multiple keys at once.

        :param dict mapping: Mapping of keys to values
        :param str prefix: Optional prefix to apply to all keys in `mapping`
            before setting
        """
        for k, v in mapping.items():
            self.set("%s%s" % (prefix, k), v)

    def unset(self, key):
        """
        Remove a key from the database entirely.
        """
        self.cursor.execute('delete from kv where key=?', [key])
        # Inside a hook scope, record the deletion in the history table
        # (columns are key, revision, data).
        if self.revision and self.cursor.rowcount:
            self.cursor.execute(
                'insert into kv_revisions values (?, ?, ?)',
                [key, self.revision, json.dumps('DELETED')])

    def unsetrange(self, keys=None, prefix=""):
        """
        Remove a range of keys starting with a common prefix, from the database
        entirely.

        :param list keys: List of keys to remove.
        :param str prefix: Optional prefix to apply to all keys in ``keys``
            before removing.
        """
        if keys is not None:
            # Explicit key list: delete them in one statement, and (inside
            # a hook scope) record one history row per deleted key via a
            # single multi-row insert.
            keys = ['%s%s' % (prefix, key) for key in keys]
            self.cursor.execute('delete from kv where key in (%s)' % ','.join(['?'] * len(keys)), keys)
            if self.revision and self.cursor.rowcount:
                self.cursor.execute(
                    'insert into kv_revisions values %s' % ','.join(['(?, ?, ?)'] * len(keys)),
                    list(itertools.chain.from_iterable((key, self.revision, json.dumps('DELETED')) for key in keys)))
        else:
            # No key list: delete everything matching the prefix, and
            # record a single wildcard history row for the whole range.
            self.cursor.execute('delete from kv where key like ?',
                                ['%s%%' % prefix])
            if self.revision and self.cursor.rowcount:
                self.cursor.execute(
                    'insert into kv_revisions values (?, ?, ?)',
                    ['%s%%' % prefix, self.revision, json.dumps('DELETED')])

    def set(self, key, value):
        """
        Set a value in the database.

        :param str key: Key to set the value for
        :param value: Any JSON-serializable value to be set
        """
        serialized = json.dumps(value)

        self.cursor.execute('select data from kv where key=?', [key])
        exists = self.cursor.fetchone()

        # Skip mutations to the same value
        if exists:
            if exists[0] == serialized:
                return value

        if not exists:
            self.cursor.execute(
                'insert into kv (key, data) values (?, ?)',
                (key, serialized))
        else:
            self.cursor.execute('''
                update kv
                set data = ?
                where key = ?''', [serialized, key])

        # Save
        if not self.revision:
            # Not inside a hook scope: no history bookkeeping required.
            return value

        # Upsert the history row for (key, current revision) so repeated
        # sets within one hook keep only the final value.
        self.cursor.execute(
            'select 1 from kv_revisions where key=? and revision=?',
            [key, self.revision])
        exists = self.cursor.fetchone()

        if not exists:
            self.cursor.execute(
                '''insert into kv_revisions (
                revision, key, data) values (?, ?, ?)''',
                (self.revision, key, serialized))
        else:
            self.cursor.execute(
                '''
                update kv_revisions
                set data = ?
                where key = ?
                and revision = ?''',
                [serialized, key, self.revision])

        return value

    def delta(self, mapping, prefix):
        """
        return a delta containing values that have changed.

        Compares `mapping` against the currently stored values under
        `prefix`; nothing is persisted — call update() to save.
        """
        previous = self.getrange(prefix, strip=True)
        if not previous:
            pk = set()
        else:
            pk = set(previous.keys())
        ck = set(mapping.keys())
        delta = DeltaSet()

        # added
        for k in ck.difference(pk):
            delta[k] = Delta(None, mapping[k])

        # removed
        for k in pk.difference(ck):
            delta[k] = Delta(previous[k], None)

        # changed
        for k in pk.intersection(ck):
            c = mapping[k]
            p = previous[k]
            if c != p:
                delta[k] = Delta(p, c)

        return delta

    @contextlib.contextmanager
    def hook_scope(self, name=""):
        """Scope all future interactions to the current hook execution
        revision."""
        assert not self.revision
        self.cursor.execute(
            'insert into hooks (hook, date) values (?, ?)',
            (name or sys.argv[0],
             datetime.datetime.utcnow().isoformat()))
        self.revision = self.cursor.lastrowid
        try:
            yield self.revision
            self.revision = None
        except:
            # On any failure in the scoped block, discard uncommitted
            # changes before re-raising.
            self.flush(False)
            self.revision = None
            raise
        else:
            # Success: commit everything done within the scope.
            self.flush()

    def flush(self, save=True):
        """Commit pending changes (save=True) or roll them back.

        A rollback after close() is a no-op.
        """
        if save:
            self.conn.commit()
        elif self._closed:
            return
        else:
            self.conn.rollback()

    def _init(self):
        # Create the schema on first use; idempotent thereafter.
        self.cursor.execute('''
            create table if not exists kv (
               key text,
               data text,
               primary key (key)
               )''')
        self.cursor.execute('''
            create table if not exists kv_revisions (
               key text,
               revision integer,
               data text,
               primary key (key, revision)
               )''')
        self.cursor.execute('''
            create table if not exists hooks (
               version integer primary key autoincrement,
               hook text,
               date text
               )''')
        self.conn.commit()

    def gethistory(self, key, deserialize=False):
        """Return the recorded (revision, key, data, hook, date) rows for
        `key`; with deserialize=True each row is parsed via _parse_history.
        """
        self.cursor.execute(
            '''
            select kv.revision, kv.key, kv.data, h.hook, h.date
            from kv_revisions kv,
                 hooks h
            where kv.key=?
             and kv.revision = h.version
            ''', [key])
        if deserialize is False:
            return self.cursor.fetchall()
        return map(_parse_history, self.cursor.fetchall())

    def debug(self, fh=sys.stderr):
        """Dump the kv and kv_revisions tables to `fh` (stderr by default)."""
        self.cursor.execute('select * from kv')
        pprint.pprint(self.cursor.fetchall(), stream=fh)
        self.cursor.execute('select * from kv_revisions')
        pprint.pprint(self.cursor.fetchall(), stream=fh)
4636 | 416 | |||
4637 | 417 | |||
4638 | 418 | def _parse_history(d): | ||
4639 | 419 | return (d[0], d[1], json.loads(d[2]), d[3], | ||
4640 | 420 | datetime.datetime.strptime(d[-1], "%Y-%m-%dT%H:%M:%S.%f")) | ||
4641 | 421 | |||
4642 | 422 | |||
4643 | 423 | class HookData(object): | ||
4644 | 424 | """Simple integration for existing hook exec frameworks. | ||
4645 | 425 | |||
4646 | 426 | Records all unit information, and stores deltas for processing | ||
4647 | 427 | by the hook. | ||
4648 | 428 | |||
4649 | 429 | Sample:: | ||
4650 | 430 | |||
4651 | 431 | from charmhelpers.core import hookenv, unitdata | ||
4652 | 432 | |||
4653 | 433 | changes = unitdata.HookData() | ||
4654 | 434 | db = unitdata.kv() | ||
4655 | 435 | hooks = hookenv.Hooks() | ||
4656 | 436 | |||
4657 | 437 | @hooks.hook | ||
4658 | 438 | def config_changed(): | ||
4659 | 439 | # View all changes to configuration | ||
4660 | 440 | for changed, (prev, cur) in changes.conf.items(): | ||
4661 | 441 | print('config changed', changed, | ||
4662 | 442 | 'previous value', prev, | ||
4663 | 443 | 'current value', cur) | ||
4664 | 444 | |||
4665 | 445 | # Get some unit specific bookkeeping | ||
4666 | 446 | if not db.get('pkg_key'): | ||
4667 | 447 | key = urllib.urlopen('https://example.com/pkg_key').read() | ||
4668 | 448 | db.set('pkg_key', key) | ||
4669 | 449 | |||
4670 | 450 | if __name__ == '__main__': | ||
4671 | 451 | with changes(): | ||
4672 | 452 | hooks.execute() | ||
4673 | 453 | |||
4674 | 454 | """ | ||
4675 | 455 | def __init__(self): | ||
4676 | 456 | self.kv = kv() | ||
4677 | 457 | self.conf = None | ||
4678 | 458 | self.rels = None | ||
4679 | 459 | |||
4680 | 460 | @contextlib.contextmanager | ||
4681 | 461 | def __call__(self): | ||
4682 | 462 | from charmhelpers.core import hookenv | ||
4683 | 463 | hook_name = hookenv.hook_name() | ||
4684 | 464 | |||
4685 | 465 | with self.kv.hook_scope(hook_name): | ||
4686 | 466 | self._record_charm_version(hookenv.charm_dir()) | ||
4687 | 467 | delta_config, delta_relation = self._record_hook(hookenv) | ||
4688 | 468 | yield self.kv, delta_config, delta_relation | ||
4689 | 469 | |||
4690 | 470 | def _record_charm_version(self, charm_dir): | ||
4691 | 471 | # Record revisions.. charm revisions are meaningless | ||
4692 | 472 | # to charm authors as they don't control the revision. | ||
4693 | 473 | # so logic dependent on revision is not particularly | ||
4694 | 474 | # useful, however it is useful for debugging analysis. | ||
4695 | 475 | charm_rev = open( | ||
4696 | 476 | os.path.join(charm_dir, 'revision')).read().strip() | ||
4697 | 477 | charm_rev = charm_rev or '0' | ||
4698 | 478 | revs = self.kv.get('charm_revisions', []) | ||
4699 | 479 | if charm_rev not in revs: | ||
4700 | 480 | revs.append(charm_rev.strip() or '0') | ||
4701 | 481 | self.kv.set('charm_revisions', revs) | ||
4702 | 482 | |||
4703 | 483 | def _record_hook(self, hookenv): | ||
4704 | 484 | data = hookenv.execution_environment() | ||
4705 | 485 | self.conf = conf_delta = self.kv.delta(data['conf'], 'config') | ||
4706 | 486 | self.rels = rels_delta = self.kv.delta(data['rels'], 'rels') | ||
4707 | 487 | self.kv.set('env', dict(data['env'])) | ||
4708 | 488 | self.kv.set('unit', data['unit']) | ||
4709 | 489 | self.kv.set('relid', data.get('relid')) | ||
4710 | 490 | return conf_delta, rels_delta | ||
4711 | 491 | |||
4712 | 492 | |||
4713 | 493 | class Record(dict): | ||
4714 | 494 | |||
4715 | 495 | __slots__ = () | ||
4716 | 496 | |||
4717 | 497 | def __getattr__(self, k): | ||
4718 | 498 | if k in self: | ||
4719 | 499 | return self[k] | ||
4720 | 500 | raise AttributeError(k) | ||
4721 | 501 | |||
4722 | 502 | |||
4723 | 503 | class DeltaSet(Record): | ||
4724 | 504 | |||
4725 | 505 | __slots__ = () | ||
4726 | 506 | |||
4727 | 507 | |||
4728 | 508 | Delta = collections.namedtuple('Delta', ['previous', 'current']) | ||
4729 | 509 | |||
4730 | 510 | |||
4731 | 511 | _KV = None | ||
4732 | 512 | |||
4733 | 513 | |||
4734 | 514 | def kv(): | ||
4735 | 515 | global _KV | ||
4736 | 516 | if _KV is None: | ||
4737 | 517 | _KV = Storage() | ||
4738 | 518 | return _KV | ||
4739 | diff --git a/hooks/charmhelpers/fetch/__init__.py b/hooks/charmhelpers/fetch/__init__.py | |||
4740 | index 07bb707..480a627 100644 | |||
4741 | --- a/hooks/charmhelpers/fetch/__init__.py | |||
4742 | +++ b/hooks/charmhelpers/fetch/__init__.py | |||
4743 | @@ -1,267 +1,191 @@ | |||
4744 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
4745 | 2 | # | ||
4746 | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
4747 | 4 | # you may not use this file except in compliance with the License. | ||
4748 | 5 | # You may obtain a copy of the License at | ||
4749 | 6 | # | ||
4750 | 7 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
4751 | 8 | # | ||
4752 | 9 | # Unless required by applicable law or agreed to in writing, software | ||
4753 | 10 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
4754 | 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
4755 | 12 | # See the License for the specific language governing permissions and | ||
4756 | 13 | # limitations under the License. | ||
4757 | 14 | |||
4758 | 1 | import importlib | 15 | import importlib |
4759 | 16 | from charmhelpers.osplatform import get_platform | ||
4760 | 2 | from yaml import safe_load | 17 | from yaml import safe_load |
4761 | 3 | from charmhelpers.core.host import ( | ||
4762 | 4 | lsb_release | ||
4763 | 5 | ) | ||
4764 | 6 | from urlparse import ( | ||
4765 | 7 | urlparse, | ||
4766 | 8 | urlunparse, | ||
4767 | 9 | ) | ||
4768 | 10 | import subprocess | ||
4769 | 11 | from charmhelpers.core.hookenv import ( | 18 | from charmhelpers.core.hookenv import ( |
4770 | 12 | config, | 19 | config, |
4771 | 13 | log, | 20 | log, |
4772 | 14 | ) | 21 | ) |
4773 | 15 | import apt_pkg | ||
4774 | 16 | import os | ||
4775 | 17 | |||
4776 | 18 | CLOUD_ARCHIVE = """# Ubuntu Cloud Archive | ||
4777 | 19 | deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main | ||
4778 | 20 | """ | ||
4779 | 21 | PROPOSED_POCKET = """# Proposed | ||
4780 | 22 | deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted | ||
4781 | 23 | """ | ||
4782 | 24 | CLOUD_ARCHIVE_POCKETS = { | ||
4783 | 25 | # Folsom | ||
4784 | 26 | 'folsom': 'precise-updates/folsom', | ||
4785 | 27 | 'precise-folsom': 'precise-updates/folsom', | ||
4786 | 28 | 'precise-folsom/updates': 'precise-updates/folsom', | ||
4787 | 29 | 'precise-updates/folsom': 'precise-updates/folsom', | ||
4788 | 30 | 'folsom/proposed': 'precise-proposed/folsom', | ||
4789 | 31 | 'precise-folsom/proposed': 'precise-proposed/folsom', | ||
4790 | 32 | 'precise-proposed/folsom': 'precise-proposed/folsom', | ||
4791 | 33 | # Grizzly | ||
4792 | 34 | 'grizzly': 'precise-updates/grizzly', | ||
4793 | 35 | 'precise-grizzly': 'precise-updates/grizzly', | ||
4794 | 36 | 'precise-grizzly/updates': 'precise-updates/grizzly', | ||
4795 | 37 | 'precise-updates/grizzly': 'precise-updates/grizzly', | ||
4796 | 38 | 'grizzly/proposed': 'precise-proposed/grizzly', | ||
4797 | 39 | 'precise-grizzly/proposed': 'precise-proposed/grizzly', | ||
4798 | 40 | 'precise-proposed/grizzly': 'precise-proposed/grizzly', | ||
4799 | 41 | # Havana | ||
4800 | 42 | 'havana': 'precise-updates/havana', | ||
4801 | 43 | 'precise-havana': 'precise-updates/havana', | ||
4802 | 44 | 'precise-havana/updates': 'precise-updates/havana', | ||
4803 | 45 | 'precise-updates/havana': 'precise-updates/havana', | ||
4804 | 46 | 'havana/proposed': 'precise-proposed/havana', | ||
4805 | 47 | 'precise-havana/proposed': 'precise-proposed/havana', | ||
4806 | 48 | 'precise-proposed/havana': 'precise-proposed/havana', | ||
4807 | 49 | # Icehouse | ||
4808 | 50 | 'icehouse': 'precise-updates/icehouse', | ||
4809 | 51 | 'precise-icehouse': 'precise-updates/icehouse', | ||
4810 | 52 | 'precise-icehouse/updates': 'precise-updates/icehouse', | ||
4811 | 53 | 'precise-updates/icehouse': 'precise-updates/icehouse', | ||
4812 | 54 | 'icehouse/proposed': 'precise-proposed/icehouse', | ||
4813 | 55 | 'precise-icehouse/proposed': 'precise-proposed/icehouse', | ||
4814 | 56 | 'precise-proposed/icehouse': 'precise-proposed/icehouse', | ||
4815 | 57 | } | ||
4816 | 58 | |||
4817 | 59 | |||
4818 | 60 | def filter_installed_packages(packages): | ||
4819 | 61 | """Returns a list of packages that require installation""" | ||
4820 | 62 | apt_pkg.init() | ||
4821 | 63 | cache = apt_pkg.Cache() | ||
4822 | 64 | _pkgs = [] | ||
4823 | 65 | for package in packages: | ||
4824 | 66 | try: | ||
4825 | 67 | p = cache[package] | ||
4826 | 68 | p.current_ver or _pkgs.append(package) | ||
4827 | 69 | except KeyError: | ||
4828 | 70 | log('Package {} has no installation candidate.'.format(package), | ||
4829 | 71 | level='WARNING') | ||
4830 | 72 | _pkgs.append(package) | ||
4831 | 73 | return _pkgs | ||
4832 | 74 | 22 | ||
4833 | 23 | import six | ||
4834 | 24 | if six.PY3: | ||
4835 | 25 | from urllib.parse import urlparse, urlunparse | ||
4836 | 26 | else: | ||
4837 | 27 | from urlparse import urlparse, urlunparse | ||
4838 | 75 | 28 | ||
4839 | 76 | def apt_install(packages, options=None, fatal=False): | ||
4840 | 77 | """Install one or more packages""" | ||
4841 | 78 | if options is None: | ||
4842 | 79 | options = ['--option=Dpkg::Options::=--force-confold'] | ||
4843 | 80 | 29 | ||
4861 | 81 | cmd = ['apt-get', '--assume-yes'] | 30 | # The order of this list is very important. Handlers should be listed from |
4862 | 82 | cmd.extend(options) | 31 | # least- to most-specific URL matching. |
4863 | 83 | cmd.append('install') | 32 | FETCH_HANDLERS = ( |
4864 | 84 | if isinstance(packages, basestring): | 33 | 'charmhelpers.fetch.archiveurl.ArchiveUrlFetchHandler', |
4865 | 85 | cmd.append(packages) | 34 | 'charmhelpers.fetch.bzrurl.BzrUrlFetchHandler', |
4866 | 86 | else: | 35 | 'charmhelpers.fetch.giturl.GitUrlFetchHandler', |
4867 | 87 | cmd.extend(packages) | 36 | ) |
4851 | 88 | log("Installing {} with options: {}".format(packages, | ||
4852 | 89 | options)) | ||
4853 | 90 | env = os.environ.copy() | ||
4854 | 91 | if 'DEBIAN_FRONTEND' not in env: | ||
4855 | 92 | env['DEBIAN_FRONTEND'] = 'noninteractive' | ||
4856 | 93 | |||
4857 | 94 | if fatal: | ||
4858 | 95 | subprocess.check_call(cmd, env=env) | ||
4859 | 96 | else: | ||
4860 | 97 | subprocess.call(cmd, env=env) | ||
4868 | 98 | 37 | ||
4869 | 99 | 38 | ||
4877 | 100 | def apt_update(fatal=False): | 39 | class SourceConfigError(Exception): |
4878 | 101 | """Update local apt cache""" | 40 | pass |
4872 | 102 | cmd = ['apt-get', 'update'] | ||
4873 | 103 | if fatal: | ||
4874 | 104 | subprocess.check_call(cmd) | ||
4875 | 105 | else: | ||
4876 | 106 | subprocess.call(cmd) | ||
4879 | 107 | 41 | ||
4880 | 108 | 42 | ||
4893 | 109 | def apt_purge(packages, fatal=False): | 43 | class UnhandledSource(Exception): |
4894 | 110 | """Purge one or more packages""" | 44 | pass |
4883 | 111 | cmd = ['apt-get', '--assume-yes', 'purge'] | ||
4884 | 112 | if isinstance(packages, basestring): | ||
4885 | 113 | cmd.append(packages) | ||
4886 | 114 | else: | ||
4887 | 115 | cmd.extend(packages) | ||
4888 | 116 | log("Purging {}".format(packages)) | ||
4889 | 117 | if fatal: | ||
4890 | 118 | subprocess.check_call(cmd) | ||
4891 | 119 | else: | ||
4892 | 120 | subprocess.call(cmd) | ||
4895 | 121 | 45 | ||
4896 | 122 | 46 | ||
4936 | 123 | def apt_hold(packages, fatal=False): | 47 | class AptLockError(Exception): |
4937 | 124 | """Hold one or more packages""" | 48 | pass |
4899 | 125 | cmd = ['apt-mark', 'hold'] | ||
4900 | 126 | if isinstance(packages, basestring): | ||
4901 | 127 | cmd.append(packages) | ||
4902 | 128 | else: | ||
4903 | 129 | cmd.extend(packages) | ||
4904 | 130 | log("Holding {}".format(packages)) | ||
4905 | 131 | if fatal: | ||
4906 | 132 | subprocess.check_call(cmd) | ||
4907 | 133 | else: | ||
4908 | 134 | subprocess.call(cmd) | ||
4909 | 135 | |||
4910 | 136 | |||
4911 | 137 | def add_source(source, key=None): | ||
4912 | 138 | if (source.startswith('ppa:') or | ||
4913 | 139 | source.startswith('http') or | ||
4914 | 140 | source.startswith('deb ') or | ||
4915 | 141 | source.startswith('cloud-archive:')): | ||
4916 | 142 | subprocess.check_call(['add-apt-repository', '--yes', source]) | ||
4917 | 143 | elif source.startswith('cloud:'): | ||
4918 | 144 | apt_install(filter_installed_packages(['ubuntu-cloud-keyring']), | ||
4919 | 145 | fatal=True) | ||
4920 | 146 | pocket = source.split(':')[-1] | ||
4921 | 147 | if pocket not in CLOUD_ARCHIVE_POCKETS: | ||
4922 | 148 | raise SourceConfigError( | ||
4923 | 149 | 'Unsupported cloud: source option %s' % | ||
4924 | 150 | pocket) | ||
4925 | 151 | actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket] | ||
4926 | 152 | with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt: | ||
4927 | 153 | apt.write(CLOUD_ARCHIVE.format(actual_pocket)) | ||
4928 | 154 | elif source == 'proposed': | ||
4929 | 155 | release = lsb_release()['DISTRIB_CODENAME'] | ||
4930 | 156 | with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt: | ||
4931 | 157 | apt.write(PROPOSED_POCKET.format(release)) | ||
4932 | 158 | if key: | ||
4933 | 159 | subprocess.check_call(['apt-key', 'adv', '--keyserver', | ||
4934 | 160 | 'keyserver.ubuntu.com', '--recv', | ||
4935 | 161 | key]) | ||
4938 | 162 | 49 | ||
4939 | 163 | 50 | ||
4941 | 164 | class SourceConfigError(Exception): | 51 | class GPGKeyError(Exception): |
4942 | 52 | """Exception occurs when a GPG key cannot be fetched or used. The message | ||
4943 | 53 | indicates what the problem is. | ||
4944 | 54 | """ | ||
4945 | 165 | pass | 55 | pass |
4946 | 166 | 56 | ||
4947 | 167 | 57 | ||
4948 | 58 | class BaseFetchHandler(object): | ||
4949 | 59 | |||
4950 | 60 | """Base class for FetchHandler implementations in fetch plugins""" | ||
4951 | 61 | |||
4952 | 62 | def can_handle(self, source): | ||
4953 | 63 | """Returns True if the source can be handled. Otherwise returns | ||
4954 | 64 | a string explaining why it cannot""" | ||
4955 | 65 | return "Wrong source type" | ||
4956 | 66 | |||
4957 | 67 | def install(self, source): | ||
4958 | 68 | """Try to download and unpack the source. Return the path to the | ||
4959 | 69 | unpacked files or raise UnhandledSource.""" | ||
4960 | 70 | raise UnhandledSource("Wrong source type {}".format(source)) | ||
4961 | 71 | |||
4962 | 72 | def parse_url(self, url): | ||
4963 | 73 | return urlparse(url) | ||
4964 | 74 | |||
4965 | 75 | def base_url(self, url): | ||
4966 | 76 | """Return url without querystring or fragment""" | ||
4967 | 77 | parts = list(self.parse_url(url)) | ||
4968 | 78 | parts[4:] = ['' for i in parts[4:]] | ||
4969 | 79 | return urlunparse(parts) | ||
4970 | 80 | |||
4971 | 81 | |||
4972 | 82 | __platform__ = get_platform() | ||
4973 | 83 | module = "charmhelpers.fetch.%s" % __platform__ | ||
4974 | 84 | fetch = importlib.import_module(module) | ||
4975 | 85 | |||
4976 | 86 | filter_installed_packages = fetch.filter_installed_packages | ||
4977 | 87 | install = fetch.apt_install | ||
4978 | 88 | upgrade = fetch.apt_upgrade | ||
4979 | 89 | update = _fetch_update = fetch.apt_update | ||
4980 | 90 | purge = fetch.apt_purge | ||
4981 | 91 | add_source = fetch.add_source | ||
4982 | 92 | |||
4983 | 93 | if __platform__ == "ubuntu": | ||
4984 | 94 | apt_cache = fetch.apt_cache | ||
4985 | 95 | apt_install = fetch.apt_install | ||
4986 | 96 | apt_update = fetch.apt_update | ||
4987 | 97 | apt_upgrade = fetch.apt_upgrade | ||
4988 | 98 | apt_purge = fetch.apt_purge | ||
4989 | 99 | apt_mark = fetch.apt_mark | ||
4990 | 100 | apt_hold = fetch.apt_hold | ||
4991 | 101 | apt_unhold = fetch.apt_unhold | ||
4992 | 102 | import_key = fetch.import_key | ||
4993 | 103 | get_upstream_version = fetch.get_upstream_version | ||
4994 | 104 | elif __platform__ == "centos": | ||
4995 | 105 | yum_search = fetch.yum_search | ||
4996 | 106 | |||
4997 | 107 | |||
4998 | 168 | def configure_sources(update=False, | 108 | def configure_sources(update=False, |
4999 | 169 | sources_var='install_sources', | 109 | sources_var='install_sources', |
5000 | 170 | keys_var='install_keys'): | 110 | keys_var='install_keys'): |
The diff has been truncated for viewing.
LGTM