Merge lp:~dbuliga/charms/trusty/nrpe/nrpe into lp:charms/trusty/nrpe
- Trusty Tahr (14.04)
- nrpe
- Merge into trunk
Status: | Needs review |
---|---|
Proposed branch: | lp:~dbuliga/charms/trusty/nrpe/nrpe |
Merge into: | lp:charms/trusty/nrpe |
Diff against target: |
2433 lines (+1349/-658) 17 files modified
hooks/centos.py (+29/-0) hooks/charmhelpers/__init__.py (+10/-0) hooks/charmhelpers/core/host.py (+81/-288) hooks/charmhelpers/core/host_factory/__init__.py (+492/-0) hooks/charmhelpers/core/host_factory/centos/__init__.py (+53/-0) hooks/charmhelpers/core/host_factory/ubuntu/__init__.py (+47/-0) hooks/charmhelpers/fetch/__init__.py (+66/-285) hooks/charmhelpers/fetch/bzrurl.py (+23/-32) hooks/charmhelpers/fetch/centos/__init__.py (+158/-0) hooks/charmhelpers/fetch/giturl.py (+24/-25) hooks/charmhelpers/fetch/ubuntu/__init__.py (+296/-0) hooks/nrpe_utils.py (+10/-12) hooks/services.py (+7/-6) hooks/ubuntu.py (+27/-0) templates/nrpe-centos.tmpl (+16/-0) tests/11-monitors-configurations (+5/-5) tests/13-monitors-config (+5/-5) |
To merge this branch: | bzr merge lp:~dbuliga/charms/trusty/nrpe/nrpe |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Adam Israel (community) | Needs Fixing | ||
Review via email: mp+288615@code.launchpad.net |
Commit message
Description of the change
This branch introduces a factory that, based on the platform, loads the proper functionality for that platform. It allows NRPE to be deployed as a subordinate charm both to a charm deployed on CentOS and to a charm deployed on Ubuntu.
A new parameter was added to the 'monitors' relation, which sends the platform of the subordinate charm. This helps, for instance, Nagios to set an icon and description for that charm based on the parameter.
A separate template is needed for CentOS because the pid_file has to be owned by the user nrpe on CentOS, instead of nagios as on Ubuntu.
- 41. By David Ames
-
[gnuoy, r=thedac] Add support for multiple charms to be related to nrpe via the nrpe, juju-info or local-monitors relation. This is useful when joining an additional local subordinate to this charm. See README changes.
- 42. By Denis Buliga
-
Updated nrpe to work on CentOS
Antonio Rosales (arosales) wrote : | # |
@Denis,
Thanks for your contribution.
Given this is a CentOS update, this may actually be better as a separate CentOS charm. Specifically, the metadata stating it is a CentOS charm so Juju can deploy it as such and it can be discoverable in the Charm Store.
It looks like there is some feedback on the tests, but it would be great to see this in a stand-alone CentOS charm.
-thanks,
Antonio
Unmerged revisions
- 42. By Denis Buliga
-
Updated nrpe to work on CentOS
Preview Diff
1 | === added file 'hooks/centos.py' | |||
2 | --- hooks/centos.py 1970-01-01 00:00:00 +0000 | |||
3 | +++ hooks/centos.py 2016-04-29 11:54:41 +0000 | |||
4 | @@ -0,0 +1,29 @@ | |||
5 | 1 | from charmhelpers.core import host | ||
6 | 2 | from charmhelpers.core import hookenv | ||
7 | 3 | from charmhelpers.core.services import helpers | ||
8 | 4 | |||
9 | 5 | |||
10 | 6 | def determine_packages(): | ||
11 | 7 | """ List of packages this charm needs installed """ | ||
12 | 8 | pkgs = [ | ||
13 | 9 | 'epel-release', | ||
14 | 10 | 'nagios', | ||
15 | 11 | 'nagios-plugins-nrpe', | ||
16 | 12 | 'nagios-plugins-all', | ||
17 | 13 | 'nrpe' | ||
18 | 14 | ] | ||
19 | 15 | if hookenv.config('export_nagios_definitions'): | ||
20 | 16 | pkgs.append('rsync') | ||
21 | 17 | return pkgs | ||
22 | 18 | |||
23 | 19 | |||
24 | 20 | def restart_nrpe(service_name): | ||
25 | 21 | """ Restart nrpe """ | ||
26 | 22 | host.service_restart('nrpe') | ||
27 | 23 | |||
28 | 24 | |||
29 | 25 | def render_nrpe_template(): | ||
30 | 26 | return helpers.render_template( | ||
31 | 27 | source='nrpe-centos.tmpl', | ||
32 | 28 | target='/etc/nagios/nrpe.cfg' | ||
33 | 29 | ) | ||
34 | 0 | 30 | ||
35 | === modified file 'hooks/charmhelpers/__init__.py' (properties changed: -x to +x) | |||
36 | --- hooks/charmhelpers/__init__.py 2015-03-23 09:45:10 +0000 | |||
37 | +++ hooks/charmhelpers/__init__.py 2016-04-29 11:54:41 +0000 | |||
38 | @@ -18,6 +18,7 @@ | |||
39 | 18 | # only standard libraries. | 18 | # only standard libraries. |
40 | 19 | import subprocess | 19 | import subprocess |
41 | 20 | import sys | 20 | import sys |
42 | 21 | import platform | ||
43 | 21 | 22 | ||
44 | 22 | try: | 23 | try: |
45 | 23 | import six # flake8: noqa | 24 | import six # flake8: noqa |
46 | @@ -36,3 +37,12 @@ | |||
47 | 36 | else: | 37 | else: |
48 | 37 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml']) | 38 | subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml']) |
49 | 38 | import yaml # flake8: noqa | 39 | import yaml # flake8: noqa |
50 | 40 | |||
51 | 41 | |||
52 | 42 | def get_platform(): | ||
53 | 43 | tuple_platform = platform.linux_distribution() | ||
54 | 44 | current_platform = tuple_platform[0] | ||
55 | 45 | if "Ubuntu" in current_platform: | ||
56 | 46 | return "ubuntu" | ||
57 | 47 | elif "CentOS" in current_platform: | ||
58 | 48 | return "centos" | ||
59 | 39 | 49 | ||
60 | === modified file 'hooks/charmhelpers/core/__init__.py' (properties changed: -x to +x) | |||
61 | === modified file 'hooks/charmhelpers/core/decorators.py' (properties changed: -x to +x) | |||
62 | === modified file 'hooks/charmhelpers/core/fstab.py' (properties changed: -x to +x) | |||
63 | === modified file 'hooks/charmhelpers/core/hookenv.py' (properties changed: -x to +x) | |||
64 | === modified file 'hooks/charmhelpers/core/host.py' (properties changed: -x to +x) | |||
65 | --- hooks/charmhelpers/core/host.py 2015-03-23 09:45:10 +0000 | |||
66 | +++ hooks/charmhelpers/core/host.py 2016-04-29 11:54:41 +0000 | |||
67 | @@ -21,236 +21,123 @@ | |||
68 | 21 | # Nick Moffitt <nick.moffitt@canonical.com> | 21 | # Nick Moffitt <nick.moffitt@canonical.com> |
69 | 22 | # Matthew Wedgwood <matthew.wedgwood@canonical.com> | 22 | # Matthew Wedgwood <matthew.wedgwood@canonical.com> |
70 | 23 | 23 | ||
71 | 24 | import os | ||
72 | 25 | import re | ||
73 | 26 | import pwd | ||
74 | 27 | import grp | ||
75 | 28 | import random | ||
76 | 29 | import string | ||
77 | 30 | import subprocess | ||
78 | 31 | import hashlib | ||
79 | 32 | from contextlib import contextmanager | 24 | from contextlib import contextmanager |
86 | 33 | from collections import OrderedDict | 25 | from charmhelpers.core.host_factory import Host |
87 | 34 | 26 | ||
88 | 35 | import six | 27 | host = Host() |
83 | 36 | |||
84 | 37 | from .hookenv import log | ||
85 | 38 | from .fstab import Fstab | ||
89 | 39 | 28 | ||
90 | 40 | 29 | ||
91 | 41 | def service_start(service_name): | 30 | def service_start(service_name): |
92 | 42 | """Start a system service""" | 31 | """Start a system service""" |
94 | 43 | return service('start', service_name) | 32 | return host.service_start(service_name) |
95 | 44 | 33 | ||
96 | 45 | 34 | ||
97 | 46 | def service_stop(service_name): | 35 | def service_stop(service_name): |
98 | 47 | """Stop a system service""" | 36 | """Stop a system service""" |
100 | 48 | return service('stop', service_name) | 37 | return host.service_stop(service_name) |
101 | 49 | 38 | ||
102 | 50 | 39 | ||
103 | 51 | def service_restart(service_name): | 40 | def service_restart(service_name): |
104 | 52 | """Restart a system service""" | 41 | """Restart a system service""" |
106 | 53 | return service('restart', service_name) | 42 | return host.service_restart(service_name) |
107 | 54 | 43 | ||
108 | 55 | 44 | ||
109 | 56 | def service_reload(service_name, restart_on_failure=False): | 45 | def service_reload(service_name, restart_on_failure=False): |
110 | 57 | """Reload a system service, optionally falling back to restart if | 46 | """Reload a system service, optionally falling back to restart if |
111 | 58 | reload fails""" | 47 | reload fails""" |
116 | 59 | service_result = service('reload', service_name) | 48 | return host.service_reload(service_name, restart_on_failure) |
113 | 60 | if not service_result and restart_on_failure: | ||
114 | 61 | service_result = service('restart', service_name) | ||
115 | 62 | return service_result | ||
117 | 63 | 49 | ||
118 | 64 | 50 | ||
119 | 65 | def service(action, service_name): | 51 | def service(action, service_name): |
120 | 66 | """Control a system service""" | 52 | """Control a system service""" |
123 | 67 | cmd = ['service', service_name, action] | 53 | return host.service(action, service_name) |
122 | 68 | return subprocess.call(cmd) == 0 | ||
124 | 69 | 54 | ||
125 | 70 | 55 | ||
126 | 71 | def service_running(service): | 56 | def service_running(service): |
139 | 72 | """Determine whether a system service is running""" | 57 | return host.service_running(service) |
128 | 73 | try: | ||
129 | 74 | output = subprocess.check_output( | ||
130 | 75 | ['service', service, 'status'], | ||
131 | 76 | stderr=subprocess.STDOUT).decode('UTF-8') | ||
132 | 77 | except subprocess.CalledProcessError: | ||
133 | 78 | return False | ||
134 | 79 | else: | ||
135 | 80 | if ("start/running" in output or "is running" in output): | ||
136 | 81 | return True | ||
137 | 82 | else: | ||
138 | 83 | return False | ||
140 | 84 | 58 | ||
141 | 85 | 59 | ||
142 | 86 | def service_available(service_name): | 60 | def service_available(service_name): |
143 | 87 | """Determine whether a system service is available""" | 61 | """Determine whether a system service is available""" |
174 | 88 | try: | 62 | return host.service_available(service_name) |
175 | 89 | subprocess.check_output( | 63 | |
176 | 90 | ['service', service_name, 'status'], | 64 | |
177 | 91 | stderr=subprocess.STDOUT).decode('UTF-8') | 65 | def adduser(username, password=None, shell='/bin/bash', system_user=False, |
178 | 92 | except subprocess.CalledProcessError as e: | 66 | primary_group=None, secondary_groups=None): |
179 | 93 | return 'unrecognized service' not in e.output | 67 | """ |
180 | 94 | else: | 68 | Add a user to the system. |
181 | 95 | return True | 69 | |
182 | 96 | 70 | Will log but otherwise succeed if the user already exists. | |
183 | 97 | 71 | ||
184 | 98 | def adduser(username, password=None, shell='/bin/bash', system_user=False): | 72 | :param str username: Username to create |
185 | 99 | """Add a user to the system""" | 73 | :param str password: Password for user; if ``None``, create a system user |
186 | 100 | try: | 74 | :param str shell: The default shell for the user |
187 | 101 | user_info = pwd.getpwnam(username) | 75 | :param bool system_user: Whether to create a login or system user |
188 | 102 | log('user {0} already exists!'.format(username)) | 76 | :param str primary_group: Primary group for user; defaults to their username |
189 | 103 | except KeyError: | 77 | :param list secondary_groups: Optional list of additional groups |
190 | 104 | log('creating user {0}'.format(username)) | 78 | |
191 | 105 | cmd = ['useradd'] | 79 | :returns: The password database entry struct, as returned by `pwd.getpwnam` |
192 | 106 | if system_user or password is None: | 80 | """ |
193 | 107 | cmd.append('--system') | 81 | return host.adduser(username, password, shell, system_user, |
194 | 108 | else: | 82 | primary_group, secondary_groups) |
195 | 109 | cmd.extend([ | 83 | |
196 | 110 | '--create-home', | 84 | |
197 | 111 | '--shell', shell, | 85 | def user_exists(username): |
198 | 112 | '--password', password, | 86 | """Check if a user exists""" |
199 | 113 | ]) | 87 | return host.user_exists(username) |
170 | 114 | cmd.append(username) | ||
171 | 115 | subprocess.check_call(cmd) | ||
172 | 116 | user_info = pwd.getpwnam(username) | ||
173 | 117 | return user_info | ||
200 | 118 | 88 | ||
201 | 119 | 89 | ||
202 | 120 | def add_group(group_name, system_group=False): | 90 | def add_group(group_name, system_group=False): |
220 | 121 | """Add a group to the system""" | 91 | return host.add_group(group_name, system_group) |
204 | 122 | try: | ||
205 | 123 | group_info = grp.getgrnam(group_name) | ||
206 | 124 | log('group {0} already exists!'.format(group_name)) | ||
207 | 125 | except KeyError: | ||
208 | 126 | log('creating group {0}'.format(group_name)) | ||
209 | 127 | cmd = ['addgroup'] | ||
210 | 128 | if system_group: | ||
211 | 129 | cmd.append('--system') | ||
212 | 130 | else: | ||
213 | 131 | cmd.extend([ | ||
214 | 132 | '--group', | ||
215 | 133 | ]) | ||
216 | 134 | cmd.append(group_name) | ||
217 | 135 | subprocess.check_call(cmd) | ||
218 | 136 | group_info = grp.getgrnam(group_name) | ||
219 | 137 | return group_info | ||
221 | 138 | 92 | ||
222 | 139 | 93 | ||
223 | 140 | def add_user_to_group(username, group): | 94 | def add_user_to_group(username, group): |
232 | 141 | """Add a user to a group""" | 95 | host.add_user_to_group(username, group) |
225 | 142 | cmd = [ | ||
226 | 143 | 'gpasswd', '-a', | ||
227 | 144 | username, | ||
228 | 145 | group | ||
229 | 146 | ] | ||
230 | 147 | log("Adding user {} to group {}".format(username, group)) | ||
231 | 148 | subprocess.check_call(cmd) | ||
233 | 149 | 96 | ||
234 | 150 | 97 | ||
235 | 151 | def rsync(from_path, to_path, flags='-r', options=None): | 98 | def rsync(from_path, to_path, flags='-r', options=None): |
236 | 152 | """Replicate the contents of a path""" | 99 | """Replicate the contents of a path""" |
244 | 153 | options = options or ['--delete', '--executability'] | 100 | return host.rsync(from_path, to_path, flags, options) |
238 | 154 | cmd = ['/usr/bin/rsync', flags] | ||
239 | 155 | cmd.extend(options) | ||
240 | 156 | cmd.append(from_path) | ||
241 | 157 | cmd.append(to_path) | ||
242 | 158 | log(" ".join(cmd)) | ||
243 | 159 | return subprocess.check_output(cmd).decode('UTF-8').strip() | ||
245 | 160 | 101 | ||
246 | 161 | 102 | ||
247 | 162 | def symlink(source, destination): | 103 | def symlink(source, destination): |
248 | 163 | """Create a symbolic link""" | 104 | """Create a symbolic link""" |
257 | 164 | log("Symlinking {} as {}".format(source, destination)) | 105 | host.symlink(source, destination) |
250 | 165 | cmd = [ | ||
251 | 166 | 'ln', | ||
252 | 167 | '-sf', | ||
253 | 168 | source, | ||
254 | 169 | destination, | ||
255 | 170 | ] | ||
256 | 171 | subprocess.check_call(cmd) | ||
258 | 172 | 106 | ||
259 | 173 | 107 | ||
260 | 174 | def mkdir(path, owner='root', group='root', perms=0o555, force=False): | 108 | def mkdir(path, owner='root', group='root', perms=0o555, force=False): |
261 | 175 | """Create a directory""" | 109 | """Create a directory""" |
277 | 176 | log("Making dir {} {}:{} {:o}".format(path, owner, group, | 110 | host.mkdir(path, owner, group, perms, force) |
263 | 177 | perms)) | ||
264 | 178 | uid = pwd.getpwnam(owner).pw_uid | ||
265 | 179 | gid = grp.getgrnam(group).gr_gid | ||
266 | 180 | realpath = os.path.abspath(path) | ||
267 | 181 | path_exists = os.path.exists(realpath) | ||
268 | 182 | if path_exists and force: | ||
269 | 183 | if not os.path.isdir(realpath): | ||
270 | 184 | log("Removing non-directory file {} prior to mkdir()".format(path)) | ||
271 | 185 | os.unlink(realpath) | ||
272 | 186 | os.makedirs(realpath, perms) | ||
273 | 187 | elif not path_exists: | ||
274 | 188 | os.makedirs(realpath, perms) | ||
275 | 189 | os.chown(realpath, uid, gid) | ||
276 | 190 | os.chmod(realpath, perms) | ||
278 | 191 | 111 | ||
279 | 192 | 112 | ||
280 | 193 | def write_file(path, content, owner='root', group='root', perms=0o444): | 113 | def write_file(path, content, owner='root', group='root', perms=0o444): |
281 | 194 | """Create or overwrite a file with the contents of a byte string.""" | 114 | """Create or overwrite a file with the contents of a byte string.""" |
289 | 195 | log("Writing file {} {}:{} {:o}".format(path, owner, group, perms)) | 115 | host.write_file(path, content, owner, group, perms) |
283 | 196 | uid = pwd.getpwnam(owner).pw_uid | ||
284 | 197 | gid = grp.getgrnam(group).gr_gid | ||
285 | 198 | with open(path, 'wb') as target: | ||
286 | 199 | os.fchown(target.fileno(), uid, gid) | ||
287 | 200 | os.fchmod(target.fileno(), perms) | ||
288 | 201 | target.write(content) | ||
290 | 202 | 116 | ||
291 | 203 | 117 | ||
292 | 204 | def fstab_remove(mp): | 118 | def fstab_remove(mp): |
296 | 205 | """Remove the given mountpoint entry from /etc/fstab | 119 | """Remove the given mountpoint entry from /etc/fstab""" |
297 | 206 | """ | 120 | return host.fstab_remove(mp) |
295 | 207 | return Fstab.remove_by_mountpoint(mp) | ||
298 | 208 | 121 | ||
299 | 209 | 122 | ||
300 | 210 | def fstab_add(dev, mp, fs, options=None): | 123 | def fstab_add(dev, mp, fs, options=None): |
304 | 211 | """Adds the given device entry to the /etc/fstab file | 124 | """Adds the given device entry to the /etc/fstab file""" |
305 | 212 | """ | 125 | return host.fstab_add(dev, mp, fs, options) |
303 | 213 | return Fstab.add(dev, mp, fs, options=options) | ||
306 | 214 | 126 | ||
307 | 215 | 127 | ||
308 | 216 | def mount(device, mountpoint, options=None, persist=False, filesystem="ext3"): | 128 | def mount(device, mountpoint, options=None, persist=False, filesystem="ext3"): |
309 | 217 | """Mount a filesystem at a particular mountpoint""" | 129 | """Mount a filesystem at a particular mountpoint""" |
323 | 218 | cmd_args = ['mount'] | 130 | return host.mount(device, mountpoint, options, persist, filesystem) |
311 | 219 | if options is not None: | ||
312 | 220 | cmd_args.extend(['-o', options]) | ||
313 | 221 | cmd_args.extend([device, mountpoint]) | ||
314 | 222 | try: | ||
315 | 223 | subprocess.check_output(cmd_args) | ||
316 | 224 | except subprocess.CalledProcessError as e: | ||
317 | 225 | log('Error mounting {} at {}\n{}'.format(device, mountpoint, e.output)) | ||
318 | 226 | return False | ||
319 | 227 | |||
320 | 228 | if persist: | ||
321 | 229 | return fstab_add(device, mountpoint, filesystem, options=options) | ||
322 | 230 | return True | ||
324 | 231 | 131 | ||
325 | 232 | 132 | ||
326 | 233 | def umount(mountpoint, persist=False): | 133 | def umount(mountpoint, persist=False): |
327 | 234 | """Unmount a filesystem""" | 134 | """Unmount a filesystem""" |
338 | 235 | cmd_args = ['umount', mountpoint] | 135 | return host.umount(mountpoint, persist) |
329 | 236 | try: | ||
330 | 237 | subprocess.check_output(cmd_args) | ||
331 | 238 | except subprocess.CalledProcessError as e: | ||
332 | 239 | log('Error unmounting {}\n{}'.format(mountpoint, e.output)) | ||
333 | 240 | return False | ||
334 | 241 | |||
335 | 242 | if persist: | ||
336 | 243 | return fstab_remove(mountpoint) | ||
337 | 244 | return True | ||
339 | 245 | 136 | ||
340 | 246 | 137 | ||
341 | 247 | def mounts(): | 138 | def mounts(): |
342 | 248 | """Get a list of all mounted volumes as [[mountpoint,device],[...]]""" | 139 | """Get a list of all mounted volumes as [[mountpoint,device],[...]]""" |
348 | 249 | with open('/proc/mounts') as f: | 140 | return host.mounts() |
344 | 250 | # [['/mount/point','/dev/path'],[...]] | ||
345 | 251 | system_mounts = [m[1::-1] for m in [l.strip().split() | ||
346 | 252 | for l in f.readlines()]] | ||
347 | 253 | return system_mounts | ||
349 | 254 | 141 | ||
350 | 255 | 142 | ||
351 | 256 | def file_hash(path, hash_type='md5'): | 143 | def file_hash(path, hash_type='md5'): |
352 | @@ -260,13 +147,7 @@ | |||
353 | 260 | :param str hash_type: Any hash alrgorithm supported by :mod:`hashlib`, | 147 | :param str hash_type: Any hash alrgorithm supported by :mod:`hashlib`, |
354 | 261 | such as md5, sha1, sha256, sha512, etc. | 148 | such as md5, sha1, sha256, sha512, etc. |
355 | 262 | """ | 149 | """ |
363 | 263 | if os.path.exists(path): | 150 | return host.file_hash(path, hash_type) |
357 | 264 | h = getattr(hashlib, hash_type)() | ||
358 | 265 | with open(path, 'rb') as source: | ||
359 | 266 | h.update(source.read()) | ||
360 | 267 | return h.hexdigest() | ||
361 | 268 | else: | ||
362 | 269 | return None | ||
364 | 270 | 151 | ||
365 | 271 | 152 | ||
366 | 272 | def check_hash(path, checksum, hash_type='md5'): | 153 | def check_hash(path, checksum, hash_type='md5'): |
367 | @@ -280,9 +161,7 @@ | |||
368 | 280 | :raises ChecksumError: If the file fails the checksum | 161 | :raises ChecksumError: If the file fails the checksum |
369 | 281 | 162 | ||
370 | 282 | """ | 163 | """ |
374 | 283 | actual_checksum = file_hash(path, hash_type) | 164 | host.check_hash(path, checksum, hash_type) |
372 | 284 | if checksum != actual_checksum: | ||
373 | 285 | raise ChecksumError("'%s' != '%s'" % (checksum, actual_checksum)) | ||
375 | 286 | 165 | ||
376 | 287 | 166 | ||
377 | 288 | class ChecksumError(ValueError): | 167 | class ChecksumError(ValueError): |
378 | @@ -296,154 +175,68 @@ | |||
379 | 296 | 175 | ||
380 | 297 | @restart_on_change({ | 176 | @restart_on_change({ |
381 | 298 | '/etc/ceph/ceph.conf': [ 'cinder-api', 'cinder-volume' ] | 177 | '/etc/ceph/ceph.conf': [ 'cinder-api', 'cinder-volume' ] |
382 | 178 | '/etc/apache/sites-enabled/*': [ 'apache2' ] | ||
383 | 299 | }) | 179 | }) |
385 | 300 | def ceph_client_changed(): | 180 | def config_changed(): |
386 | 301 | pass # your code here | 181 | pass # your code here |
387 | 302 | 182 | ||
388 | 303 | In this example, the cinder-api and cinder-volume services | 183 | In this example, the cinder-api and cinder-volume services |
389 | 304 | would be restarted if /etc/ceph/ceph.conf is changed by the | 184 | would be restarted if /etc/ceph/ceph.conf is changed by the |
391 | 305 | ceph_client_changed function. | 185 | ceph_client_changed function. The apache2 service would be |
392 | 186 | restarted if any file matching the pattern got changed, created | ||
393 | 187 | or removed. Standard wildcards are supported, see documentation | ||
394 | 188 | for the 'glob' module for more information. | ||
395 | 306 | """ | 189 | """ |
416 | 307 | def wrap(f): | 190 | return host.restart_on_change(restart_map, stopstart) |
397 | 308 | def wrapped_f(*args, **kwargs): | ||
398 | 309 | checksums = {} | ||
399 | 310 | for path in restart_map: | ||
400 | 311 | checksums[path] = file_hash(path) | ||
401 | 312 | f(*args, **kwargs) | ||
402 | 313 | restarts = [] | ||
403 | 314 | for path in restart_map: | ||
404 | 315 | if checksums[path] != file_hash(path): | ||
405 | 316 | restarts += restart_map[path] | ||
406 | 317 | services_list = list(OrderedDict.fromkeys(restarts)) | ||
407 | 318 | if not stopstart: | ||
408 | 319 | for service_name in services_list: | ||
409 | 320 | service('restart', service_name) | ||
410 | 321 | else: | ||
411 | 322 | for action in ['stop', 'start']: | ||
412 | 323 | for service_name in services_list: | ||
413 | 324 | service(action, service_name) | ||
414 | 325 | return wrapped_f | ||
415 | 326 | return wrap | ||
417 | 327 | 191 | ||
418 | 328 | 192 | ||
419 | 329 | def lsb_release(): | 193 | def lsb_release(): |
427 | 330 | """Return /etc/lsb-release in a dict""" | 194 | """Return /etc/os-release in a dict""" |
428 | 331 | d = {} | 195 | return host.lsb_release() |
422 | 332 | with open('/etc/lsb-release', 'r') as lsb: | ||
423 | 333 | for l in lsb: | ||
424 | 334 | k, v = l.split('=') | ||
425 | 335 | d[k.strip()] = v.strip() | ||
426 | 336 | return d | ||
429 | 337 | 196 | ||
430 | 338 | 197 | ||
431 | 339 | def pwgen(length=None): | 198 | def pwgen(length=None): |
432 | 340 | """Generate a random pasword.""" | 199 | """Generate a random pasword.""" |
448 | 341 | if length is None: | 200 | return host.pwgen(length) |
449 | 342 | # A random length is ok to use a weak PRNG | 201 | |
450 | 343 | length = random.choice(range(35, 45)) | 202 | |
451 | 344 | alphanumeric_chars = [ | 203 | def list_nics(nic_type=None): |
437 | 345 | l for l in (string.ascii_letters + string.digits) | ||
438 | 346 | if l not in 'l0QD1vAEIOUaeiou'] | ||
439 | 347 | # Use a crypto-friendly PRNG (e.g. /dev/urandom) for making the | ||
440 | 348 | # actual password | ||
441 | 349 | random_generator = random.SystemRandom() | ||
442 | 350 | random_chars = [ | ||
443 | 351 | random_generator.choice(alphanumeric_chars) for _ in range(length)] | ||
444 | 352 | return(''.join(random_chars)) | ||
445 | 353 | |||
446 | 354 | |||
447 | 355 | def list_nics(nic_type): | ||
452 | 356 | '''Return a list of nics of given type(s)''' | 204 | '''Return a list of nics of given type(s)''' |
472 | 357 | if isinstance(nic_type, six.string_types): | 205 | return host.list_nics(nic_type) |
454 | 358 | int_types = [nic_type] | ||
455 | 359 | else: | ||
456 | 360 | int_types = nic_type | ||
457 | 361 | interfaces = [] | ||
458 | 362 | for int_type in int_types: | ||
459 | 363 | cmd = ['ip', 'addr', 'show', 'label', int_type + '*'] | ||
460 | 364 | ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n') | ||
461 | 365 | ip_output = (line for line in ip_output if line) | ||
462 | 366 | for line in ip_output: | ||
463 | 367 | if line.split()[1].startswith(int_type): | ||
464 | 368 | matched = re.search('.*: (' + int_type + r'[0-9]+\.[0-9]+)@.*', line) | ||
465 | 369 | if matched: | ||
466 | 370 | interface = matched.groups()[0] | ||
467 | 371 | else: | ||
468 | 372 | interface = line.split()[1].replace(":", "") | ||
469 | 373 | interfaces.append(interface) | ||
470 | 374 | |||
471 | 375 | return interfaces | ||
473 | 376 | 206 | ||
474 | 377 | 207 | ||
475 | 378 | def set_nic_mtu(nic, mtu): | 208 | def set_nic_mtu(nic, mtu): |
476 | 379 | '''Set MTU on a network interface''' | 209 | '''Set MTU on a network interface''' |
479 | 380 | cmd = ['ip', 'link', 'set', nic, 'mtu', mtu] | 210 | host.set_nic_mtu(nic, mtu) |
478 | 381 | subprocess.check_call(cmd) | ||
480 | 382 | 211 | ||
481 | 383 | 212 | ||
482 | 384 | def get_nic_mtu(nic): | 213 | def get_nic_mtu(nic): |
491 | 385 | cmd = ['ip', 'addr', 'show', nic] | 214 | '''Get MTU of a network interface''' |
492 | 386 | ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n') | 215 | return host.get_nic_mtu(nic) |
485 | 387 | mtu = "" | ||
486 | 388 | for line in ip_output: | ||
487 | 389 | words = line.split() | ||
488 | 390 | if 'mtu' in words: | ||
489 | 391 | mtu = words[words.index("mtu") + 1] | ||
490 | 392 | return mtu | ||
493 | 393 | 216 | ||
494 | 394 | 217 | ||
495 | 395 | def get_nic_hwaddr(nic): | 218 | def get_nic_hwaddr(nic): |
503 | 396 | cmd = ['ip', '-o', '-0', 'addr', 'show', nic] | 219 | return host.get_nic_hwaddr(nic) |
497 | 397 | ip_output = subprocess.check_output(cmd).decode('UTF-8') | ||
498 | 398 | hwaddr = "" | ||
499 | 399 | words = ip_output.split() | ||
500 | 400 | if 'link/ether' in words: | ||
501 | 401 | hwaddr = words[words.index('link/ether') + 1] | ||
502 | 402 | return hwaddr | ||
504 | 403 | 220 | ||
505 | 404 | 221 | ||
506 | 405 | def cmp_pkgrevno(package, revno, pkgcache=None): | 222 | def cmp_pkgrevno(package, revno, pkgcache=None): |
523 | 406 | '''Compare supplied revno with the revno of the installed package | 223 | return host.cmp_pkgrevno(package, revno, pkgcache) |
508 | 407 | |||
509 | 408 | * 1 => Installed revno is greater than supplied arg | ||
510 | 409 | * 0 => Installed revno is the same as supplied arg | ||
511 | 410 | * -1 => Installed revno is less than supplied arg | ||
512 | 411 | |||
513 | 412 | This function imports apt_cache function from charmhelpers.fetch if | ||
514 | 413 | the pkgcache argument is None. Be sure to add charmhelpers.fetch if | ||
515 | 414 | you call this function, or pass an apt_pkg.Cache() instance. | ||
516 | 415 | ''' | ||
517 | 416 | import apt_pkg | ||
518 | 417 | if not pkgcache: | ||
519 | 418 | from charmhelpers.fetch import apt_cache | ||
520 | 419 | pkgcache = apt_cache() | ||
521 | 420 | pkg = pkgcache[package] | ||
522 | 421 | return apt_pkg.version_compare(pkg.current_ver.ver_str, revno) | ||
524 | 422 | 224 | ||
525 | 423 | 225 | ||
526 | 424 | @contextmanager | 226 | @contextmanager |
527 | 425 | def chdir(d): | 227 | def chdir(d): |
549 | 426 | cur = os.getcwd() | 228 | host.chdir(d) |
550 | 427 | try: | 229 | |
551 | 428 | yield os.chdir(d) | 230 | |
552 | 429 | finally: | 231 | def chownr(path, owner, group, follow_links=True, chowntopdir=False): |
553 | 430 | os.chdir(cur) | 232 | """ |
554 | 431 | 233 | Recursively change user and group ownership of files and directories | |
555 | 432 | 234 | in given path. Doesn't chown path itself by default, only its children. | |
556 | 433 | def chownr(path, owner, group, follow_links=True): | 235 | |
557 | 434 | uid = pwd.getpwnam(owner).pw_uid | 236 | :param bool follow_links: Also Chown links if True |
558 | 435 | gid = grp.getgrnam(group).gr_gid | 237 | :param bool chowntopdir: Also chown path itself if True |
559 | 436 | if follow_links: | 238 | """ |
560 | 437 | chown = os.chown | 239 | host.chownr(path, owner, group, follow_links, chowntopdir) |
540 | 438 | else: | ||
541 | 439 | chown = os.lchown | ||
542 | 440 | |||
543 | 441 | for root, dirs, files in os.walk(path): | ||
544 | 442 | for name in dirs + files: | ||
545 | 443 | full = os.path.join(root, name) | ||
546 | 444 | broken_symlink = os.path.lexists(full) and not os.path.exists(full) | ||
547 | 445 | if not broken_symlink: | ||
548 | 446 | chown(full, uid, gid) | ||
561 | 447 | 240 | ||
562 | 448 | 241 | ||
563 | 449 | def lchownr(path, owner, group): | 242 | def lchownr(path, owner, group): |
564 | 450 | 243 | ||
565 | === added directory 'hooks/charmhelpers/core/host_factory' | |||
566 | === added file 'hooks/charmhelpers/core/host_factory/__init__.py' | |||
567 | --- hooks/charmhelpers/core/host_factory/__init__.py 1970-01-01 00:00:00 +0000 | |||
568 | +++ hooks/charmhelpers/core/host_factory/__init__.py 2016-04-29 11:54:41 +0000 | |||
569 | @@ -0,0 +1,492 @@ | |||
570 | 1 | import os | ||
571 | 2 | import re | ||
572 | 3 | import pwd | ||
573 | 4 | import glob | ||
574 | 5 | import grp | ||
575 | 6 | import random | ||
576 | 7 | import string | ||
577 | 8 | import subprocess | ||
578 | 9 | import hashlib | ||
579 | 10 | import importlib | ||
580 | 11 | import six | ||
581 | 12 | |||
582 | 13 | from contextlib import contextmanager | ||
583 | 14 | from collections import OrderedDict | ||
584 | 15 | from ..hookenv import log | ||
585 | 16 | from ..fstab import Fstab | ||
586 | 17 | from charmhelpers import get_platform | ||
587 | 18 | |||
588 | 19 | SYSTEMD_SYSTEM = '/run/systemd/system' | ||
589 | 20 | |||
590 | 21 | |||
class HostBase(object):
    """Platform-independent host management helpers.

    Platform back-ends (``charmhelpers.core.host_factory.ubuntu`` /
    ``.centos``) subclass this and implement the abstract hooks
    ``_add_group``, ``_lsb_release`` and ``_cmp_pkgrevno``.
    """

    def service_start(self, service_name):
        """Start a system service."""
        return self.service('start', service_name)

    def service_stop(self, service_name):
        """Stop a system service."""
        return self.service('stop', service_name)

    def service_restart(self, service_name):
        """Restart a system service."""
        return self.service('restart', service_name)

    def service_reload(self, service_name, restart_on_failure=False):
        """Reload a system service, optionally falling back to restart if
        reload fails.

        :param str service_name: Service to reload.
        :param bool restart_on_failure: Try 'restart' when 'reload' fails.
        :returns: True if the action succeeded.
        """
        service_result = self.service('reload', service_name)
        if not service_result and restart_on_failure:
            service_result = self.service('restart', service_name)
        return service_result

    def service(self, action, service_name):
        """Control a system service.

        Uses ``systemctl`` on systemd hosts, the SysV/upstart ``service``
        wrapper otherwise.

        :returns: True when the command exited 0.
        """
        if self.init_is_systemd():
            cmd = ['systemctl', action, service_name]
        else:
            cmd = ['service', service_name, action]
        return subprocess.call(cmd) == 0

    def service_running(self, service_name):
        """Determine whether a system service is running."""
        if self.init_is_systemd():
            return self.service('is-active', service_name)
        else:
            try:
                output = subprocess.check_output(
                    ['service', service_name, 'status'],
                    stderr=subprocess.STDOUT).decode('UTF-8')
            except subprocess.CalledProcessError:
                return False
            else:
                # Upstart reports "start/running"; SysV init scripts
                # conventionally print "... is running".
                if ("start/running" in output or "is running" in output):
                    return True
                else:
                    return False

    def service_available(self, service_name):
        """Determine whether a system service is available."""
        if self.init_is_systemd():
            return self.service('is-enabled', service_name)
        try:
            subprocess.check_output(
                ['service', service_name, 'status'],
                stderr=subprocess.STDOUT).decode('UTF-8')
        except subprocess.CalledProcessError as e:
            return b'unrecognized service' not in e.output
        else:
            return True

    def init_is_systemd(self):
        """Return True if the host system uses systemd, False otherwise."""
        return os.path.isdir(SYSTEMD_SYSTEM)

    def adduser(self, username, password=None, shell='/bin/bash',
                system_user=False, primary_group=None, secondary_groups=None):
        """Add a user to the system.

        Will log but otherwise succeed if the user already exists.

        :param str username: Username to create
        :param str password: Password for user;
            if ``None``, create a system user
        :param str shell: The default shell for the user
        :param bool system_user: Whether to create a login or system user
        :param str primary_group: Primary group for user; defaults to username
        :param list secondary_groups: Optional list of additional groups

        :returns: The password database entry struct,
            as returned by `pwd.getpwnam`
        """
        try:
            user_info = pwd.getpwnam(username)
            log('user {0} already exists!'.format(username))
        except KeyError:
            log('creating user {0}'.format(username))
            cmd = ['useradd']
            if system_user or password is None:
                cmd.append('--system')
            else:
                cmd.extend([
                    '--create-home',
                    '--shell', shell,
                    '--password', password,
                ])
            if not primary_group:
                try:
                    grp.getgrnam(username)
                    primary_group = username  # avoid "group exists" error
                except KeyError:
                    pass
            if primary_group:
                cmd.extend(['-g', primary_group])
            if secondary_groups:
                cmd.extend(['-G', ','.join(secondary_groups)])
            cmd.append(username)
            subprocess.check_call(cmd)
            user_info = pwd.getpwnam(username)
        return user_info

    def _add_group(self, group_name, system_group=False):
        # Platform hook: implemented by the Ubuntu/CentOS subclasses.
        raise NotImplementedError()

    def add_group(self, group_name, system_group=False):
        """Create a group, logging (not failing) if it already exists.

        :returns: The group database entry, as returned by `grp.getgrnam`.
        """
        try:
            group_info = grp.getgrnam(group_name)
            log('group {0} already exists!'.format(group_name))
        except KeyError:
            log('creating group {0}'.format(group_name))
            self._add_group(group_name, system_group)
            group_info = grp.getgrnam(group_name)
        return group_info

    def add_user_to_group(self, username, group):
        """Add a user to a group"""
        cmd = ['gpasswd', '-a', username, group]
        log("Adding user {} to group {}".format(username, group))
        subprocess.check_call(cmd)

    def rsync(self, from_path, to_path, flags='-r', options=None):
        """Replicate the contents of a path via rsync.

        :returns: rsync's stdout, stripped.
        """
        options = options or ['--delete', '--executability']
        cmd = ['/usr/bin/rsync', flags]
        cmd.extend(options)
        cmd.append(from_path)
        cmd.append(to_path)
        log(" ".join(cmd))
        return subprocess.check_output(cmd).decode('UTF-8').strip()

    def symlink(self, source, destination):
        """Create a symbolic link (force-replacing any existing one)."""
        log("Symlinking {} as {}".format(source, destination))
        cmd = [
            'ln',
            '-sf',
            source,
            destination,
        ]
        subprocess.check_call(cmd)

    def mkdir(self, path, owner='root', group='root',
              perms=0o555, force=False):
        """Create a directory with the given ownership and permissions.

        :param bool force: If True and a non-directory file exists at
            ``path``, remove it first.
        """
        log("Making dir {} {}:{} {:o}".format(path, owner, group,
                                              perms))
        uid = pwd.getpwnam(owner).pw_uid
        gid = grp.getgrnam(group).gr_gid
        realpath = os.path.abspath(path)
        path_exists = os.path.exists(realpath)
        if path_exists and force:
            if not os.path.isdir(realpath):
                log("Removing non-directory file {}"
                    " prior to mkdir()".format(path))
                os.unlink(realpath)
                os.makedirs(realpath, perms)
        elif not path_exists:
            os.makedirs(realpath, perms)
        # chown/chmod explicitly: makedirs' mode is subject to umask.
        os.chown(realpath, uid, gid)
        os.chmod(realpath, perms)

    def write_file(self, path, content, owner='root',
                   group='root', perms=0o444):
        """Create or overwrite a file with the contents of a byte string.

        The file is opened in binary mode, so ``content`` must be bytes.
        Ownership and permissions are applied before the data is written.
        """
        log("Writing file {} {}:{} {:o}".format(path, owner, group, perms))
        uid = pwd.getpwnam(owner).pw_uid
        gid = grp.getgrnam(group).gr_gid
        with open(path, 'wb') as target:
            os.fchown(target.fileno(), uid, gid)
            os.fchmod(target.fileno(), perms)
            target.write(content)

    def fstab_remove(self, mp):
        """Remove the given mountpoint entry from /etc/fstab"""
        return Fstab.remove_by_mountpoint(mp)

    def fstab_add(self, dev, mp, fs, options=None):
        """Adds the given device entry to the /etc/fstab file"""
        return Fstab.add(dev, mp, fs, options=options)

    def mount(self, device, mountpoint, options=None,
              persist=False, filesystem="ext3"):
        """Mount a filesystem at a particular mountpoint.

        :param bool persist: Also record the mount in /etc/fstab.
        :returns: True on success, False if the mount command failed.
        """
        cmd_args = ['mount']
        if options is not None:
            cmd_args.extend(['-o', options])
        cmd_args.extend([device, mountpoint])
        try:
            subprocess.check_output(cmd_args)
        except subprocess.CalledProcessError as e:
            log('Error mounting {} at {}\n{}'.format(
                device, mountpoint, e.output))
            return False

        if persist:
            return self.fstab_add(device, mountpoint,
                                  filesystem, options=options)
        return True

    def umount(self, mountpoint, persist=False):
        """Unmount a filesystem.

        :param bool persist: Also remove the entry from /etc/fstab.
        :returns: True on success, False if the umount command failed.
        """
        cmd_args = ['umount', mountpoint]
        try:
            subprocess.check_output(cmd_args)
        except subprocess.CalledProcessError as e:
            log('Error unmounting {}\n{}'.format(mountpoint, e.output))
            return False

        if persist:
            return self.fstab_remove(mountpoint)
        return True

    def mounts(self):
        """Get a list of all mounted volumes as [[mountpoint,device],[...]]"""
        with open('/proc/mounts') as f:
            # /proc/mounts lines are "device mountpoint ..."; [1::-1]
            # reverses the first two fields to [mountpoint, device].
            system_mounts = [m[1::-1] for m in [l.strip().split()
                                                for l in f.readlines()]]
        return system_mounts

    def file_hash(self, path, hash_type='md5'):
        """Generate a hash checksum of the contents of 'path' or None if not found.

        :param str hash_type: Any hash algorithm supported by :mod:`hashlib`,
            such as md5, sha1, sha256, sha512, etc.
        """
        if os.path.exists(path):
            h = getattr(hashlib, hash_type)()
            with open(path, 'rb') as source:
                h.update(source.read())
            return h.hexdigest()
        else:
            return None

    def path_hash(self, path):
        """Generate a {filename: hash} dict for all files matching 'path'.

        Standard wildcards ('*', '?') are supported; see the 'glob'
        module documentation.  Returns None when nothing matches, so a
        created or removed file changes the value observed by
        restart_on_change().

        Restores the helper that restart_on_change() depends on; without
        it every wrapped hook raised AttributeError.
        """
        # Local import: glob is only needed here.
        import glob
        return {filename: self.file_hash(filename)
                for filename in glob.iglob(path)} or None

    def check_hash(self, path, checksum, hash_type='md5'):
        """Validate a file using a cryptographic checksum.

        :param str checksum: Value of the checksum used to validate the file.
        :param str hash_type: Hash algorithm used to generate `checksum`.
            Can be any hash algorithm supported by :mod:`hashlib`,
            such as md5, sha1, sha256, sha512, etc.
        :raises ChecksumError: If the file fails the checksum
        """
        actual_checksum = self.file_hash(path, hash_type)
        if checksum != actual_checksum:
            raise ChecksumError("'%s' != '%s'" % (checksum, actual_checksum))

    def restart_on_change(self, restart_map, stopstart=False):
        """Restart services based on configuration files changing

        This function is used a decorator, for example::

            @restart_on_change({
                '/etc/ceph/ceph.conf': [ 'cinder-api', 'cinder-volume' ]
                '/etc/apache/sites-enabled/*': [ 'apache2' ]
                })
            def config_changed():
                pass  # your code here

        In this example, the cinder-api and cinder-volume services
        would be restarted if /etc/ceph/ceph.conf is changed by the
        ceph_client_changed function. The apache2 service would be
        restarted if any file matching the pattern got changed, created
        or removed. Standard wildcards are supported, see documentation
        for the 'glob' module for more information.
        """
        def wrap(f):
            def wrapped_f(*args, **kwargs):
                # Snapshot the hashes before running the wrapped hook...
                checksums = {path: self.path_hash(path)
                             for path in restart_map}
                f(*args, **kwargs)
                # ...then restart every service whose file(s) changed,
                # de-duplicated while preserving declaration order.
                restarts = []
                for path in restart_map:
                    if self.path_hash(path) != checksums[path]:
                        restarts += restart_map[path]
                services_list = list(OrderedDict.fromkeys(restarts))
                if not stopstart:
                    for service_name in services_list:
                        self.service('restart', service_name)
                else:
                    for action in ['stop', 'start']:
                        for service_name in services_list:
                            self.service(action, service_name)
            return wrapped_f
        return wrap

    def lsb_release(self):
        """Return the platform's release information as a dict."""
        return self._lsb_release()

    def _lsb_release(self):
        # Platform hook: implemented by the Ubuntu/CentOS subclasses.
        raise NotImplementedError()

    def pwgen(self, length=None):
        """Generate a random password.

        :param int length: Desired length; a random length in [35, 44]
            when omitted.
        """
        if length is None:
            # A random length is ok to use a weak PRNG
            length = random.choice(range(35, 45))
        # Ambiguous characters (l/1, O/0, ...) and vowels are excluded
        # to avoid look-alikes and accidental words.
        alphanumeric_chars = [
            l for l in (string.ascii_letters + string.digits)
            if l not in 'l0QD1vAEIOUaeiou']
        # Use a crypto-friendly PRNG (e.g. /dev/urandom) for making the
        # actual password
        random_generator = random.SystemRandom()
        random_chars = [
            random_generator.choice(alphanumeric_chars) for _ in range(length)]
        return(''.join(random_chars))

    def list_nics(self, nic_type=None):
        """Return a list of nics of given type(s).

        :param nic_type: A prefix string (e.g. 'eth') or list of
            prefixes; all interfaces are returned when omitted.
        """
        if isinstance(nic_type, six.string_types):
            int_types = [nic_type]
        else:
            int_types = nic_type

        interfaces = []
        if nic_type:
            for int_type in int_types:
                cmd = ['ip', 'addr', 'show', 'label', int_type + '*']
                ip_output = subprocess.check_output(cmd).decode('UTF-8')
                ip_output = ip_output.split('\n')
                ip_output = (line for line in ip_output if line)
                for line in ip_output:
                    if line.split()[1].startswith(int_type):
                        # VLAN sub-interfaces appear as "ethX.Y@ethX";
                        # keep only the "ethX.Y" part.
                        matched = re.search('.*: (' + int_type +
                                            r'[0-9]+\.[0-9]+)@.*', line)
                        if matched:
                            iface = matched.groups()[0]
                        else:
                            iface = line.split()[1].replace(":", "")

                        if iface not in interfaces:
                            interfaces.append(iface)
        else:
            cmd = ['ip', 'a']
            ip_output = subprocess.check_output(
                cmd).decode('UTF-8').split('\n')
            ip_output = (line.strip() for line in ip_output if line)

            key = re.compile(r'^[0-9]+:\s+(.+):')
            for line in ip_output:
                matched = re.search(key, line)
                if matched:
                    iface = matched.group(1)
                    iface = iface.partition("@")[0]
                    if iface not in interfaces:
                        interfaces.append(iface)
        return interfaces

    def set_nic_mtu(self, nic, mtu):
        """Set the Maximum Transmission Unit (MTU) on a network interface."""
        cmd = ['ip', 'link', 'set', nic, 'mtu', mtu]
        subprocess.check_call(cmd)

    def get_nic_mtu(self, nic):
        """
        Return the Maximum Transmission Unit (MTU) for a network interface.
        """
        cmd = ['ip', 'addr', 'show', nic]
        ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n')
        mtu = ""
        for line in ip_output:
            words = line.split()
            if 'mtu' in words:
                # The MTU value follows the 'mtu' keyword in ip output.
                mtu = words[words.index("mtu") + 1]
        return mtu

    def get_nic_hwaddr(self, nic):
        """Return the Media Access Control (MAC) for a network interface."""
        cmd = ['ip', '-o', '-0', 'addr', 'show', nic]
        ip_output = subprocess.check_output(cmd).decode('UTF-8')
        hwaddr = ""
        words = ip_output.split()
        if 'link/ether' in words:
            # The MAC address follows the 'link/ether' token.
            hwaddr = words[words.index('link/ether') + 1]
        return hwaddr

    def cmp_pkgrevno(self, package, revno, pkgcache=None):
        """Compare supplied revno with the revno of the installed package

        *  1 => Installed revno is greater than supplied arg
        *  0 => Installed revno is the same as supplied arg
        * -1 => Installed revno is less than supplied arg

        This function imports apt_cache function from charmhelpers.fetch if
        the pkgcache argument is None. Be sure to add charmhelpers.fetch if
        you call this function, or pass an apt_pkg.Cache() instance.
        """
        return self._cmp_pkgrevno(package, revno, pkgcache)

    def _cmp_pkgrevno(self, package, revno, pkgcache=None):
        # Platform hook: implemented by the Ubuntu/CentOS subclasses.
        raise NotImplementedError()

    @contextmanager
    def chdir(self, directory):
        """
        Change the current working directory to a different directory for a
        code block and return the previous directory after the block exits.
        Useful to run commands from a specificed directory.

        :param str directory: The directory path to change to for this context.
        """
        cur = os.getcwd()
        try:
            yield os.chdir(directory)
        finally:
            os.chdir(cur)

    def chownr(self, path, owner, group, follow_links=True, chowntopdir=False):
        """Recursively change user and group ownership of files and directories
        in given path. Doesn't chown path itself by default, only its children.

        :param str path: The string path to start changing ownership.
        :param str owner: The owner string to use when looking up the uid.
        :param str group: The group string to use when looking up the gid.
        :param bool follow_links: Also Chown links if True
        :param bool chowntopdir: Also chown path itself if True
        """
        uid = pwd.getpwnam(owner).pw_uid
        gid = grp.getgrnam(group).gr_gid
        if follow_links:
            chown = os.chown
        else:
            chown = os.lchown

        if chowntopdir:
            broken_symlink = os.path.lexists(path) and not os.path.exists(path)
            if not broken_symlink:
                chown(path, uid, gid)
        for root, dirs, files in os.walk(path):
            for name in dirs + files:
                full = os.path.join(root, name)
                # Skip dangling symlinks: chown on them would raise.
                broken_symlink = os.path.lexists(
                    full
                ) and not os.path.exists(full)
                if not broken_symlink:
                    chown(full, uid, gid)

    def lchownr(self, path, owner, group):
        """
        Recursively change user and group ownership of files and directories
        in a given path, not following symbolic links. See the documentation
        for 'os.lchown' for more information.

        :param str path: The string path to start changing ownership.
        :param str owner: The owner string to use when looking up the uid.
        :param str group: The group string to use when looking up the gid.
        """
        self.chownr(path, owner, group, follow_links=False)
1050 | 481 | |||
class ChecksumError(ValueError):
    """Raised when a file's computed checksum does not match the expected value."""
1054 | 485 | |||
1055 | 486 | |||
# Platform factory: pick the concrete host implementation at import
# time.  get_platform() names a host_factory sub-package ('ubuntu' or
# 'centos'), each of which provides a Host subclass of HostBase.
module = "charmhelpers.core.host_factory.%s" % get_platform()
host = importlib.import_module(module)


class Host(host.Host):
    """Platform-appropriate Host implementation, re-exported for callers."""
    pass
1062 | 0 | 493 | ||
1063 | === added directory 'hooks/charmhelpers/core/host_factory/centos' | |||
1064 | === added file 'hooks/charmhelpers/core/host_factory/centos/__init__.py' | |||
1065 | --- hooks/charmhelpers/core/host_factory/centos/__init__.py 1970-01-01 00:00:00 +0000 | |||
1066 | +++ hooks/charmhelpers/core/host_factory/centos/__init__.py 2016-04-29 11:54:41 +0000 | |||
1067 | @@ -0,0 +1,53 @@ | |||
1068 | 1 | import subprocess | ||
1069 | 2 | import yum | ||
1070 | 3 | |||
1071 | 4 | from .. import HostBase | ||
1072 | 5 | |||
1073 | 6 | |||
class Host(HostBase):
    '''
    Implementation of HostBase for CentOS
    '''

    def _add_group(self, group_name, system_group=False):
        # groupadd -r allocates the GID from the system range.
        cmd = ['groupadd']
        if system_group:
            cmd.append('-r')
        cmd.append(group_name)
        subprocess.check_call(cmd)

    def _lsb_release(self):
        """Return /etc/os-release in a dict"""
        # NOTE(review): os-release values are usually double-quoted
        # (e.g. NAME="CentOS Linux"); the quotes are preserved in the
        # returned values here — confirm callers expect that.
        d = {}
        with open('/etc/os-release', 'r') as lsb:
            for l in lsb:
                # Skip blank or malformed lines that do not contain
                # exactly one '='.
                if len(l.split('=')) != 2:
                    continue
                k, v = l.split('=')
                d[k.strip()] = v.strip()
        return d

    def _cmp_pkgrevno(self, package, revno, pkgcache=None):
        """Compare supplied revno with the revno of the installed package

        *  1 => Installed revno is greater than supplied arg
        *  0 => Installed revno is the same as supplied arg
        * -1 => Installed revno is less than supplied arg

        When pkgcache is None the installed-package list is built via
        the yum Python API; otherwise pkgcache must be a dict-like
        mapping of package name to version string.
        """
        if not pkgcache:
            y = yum.YumBase()
            packages = y.doPackageLists()
            pck = {}
            for i in packages["installed"]:
                # NOTE(review): yum package objects normally expose
                # '.name' (lowercase); confirm '.Name' resolves here.
                pck[i.Name] = i.version
            pkgcache = pck
        pkg = pkgcache[package]
        # NOTE(review): lexical string comparison of version strings,
        # so e.g. '10' < '9'; an RPM-aware compare (rpm.labelCompare)
        # may be needed for multi-digit version components.
        if pkg > revno:
            return 1
        if pkg < revno:
            return -1
        return 0
1121 | 0 | 54 | ||
1122 | === added directory 'hooks/charmhelpers/core/host_factory/ubuntu' | |||
1123 | === added file 'hooks/charmhelpers/core/host_factory/ubuntu/__init__.py' | |||
1124 | --- hooks/charmhelpers/core/host_factory/ubuntu/__init__.py 1970-01-01 00:00:00 +0000 | |||
1125 | +++ hooks/charmhelpers/core/host_factory/ubuntu/__init__.py 2016-04-29 11:54:41 +0000 | |||
1126 | @@ -0,0 +1,47 @@ | |||
1127 | 1 | import subprocess | ||
1128 | 2 | |||
1129 | 3 | from .. import HostBase | ||
1130 | 4 | |||
1131 | 5 | |||
class Host(HostBase):
    '''
    Implementation of HostBase for Ubuntu
    '''

    def _add_group(self, group_name, system_group=False):
        """Create a group with addgroup.

        :param str group_name: Name of the group to create.
        :param bool system_group: Create a system group (GID from the
            system range) instead of a regular one.
        """
        cmd = ['addgroup']
        if system_group:
            cmd.append('--system')
        else:
            cmd.extend([
                '--group',
            ])
        cmd.append(group_name)
        subprocess.check_call(cmd)

    def _lsb_release(self):
        """Return /etc/lsb-release in a dict.

        Lines without a '=' separator (e.g. blank lines) are skipped
        instead of raising ValueError, and only the first '=' splits
        each line so values containing '=' survive intact.  This
        mirrors the defensive parsing of the CentOS implementation.
        """
        d = {}
        with open('/etc/lsb-release', 'r') as lsb:
            for l in lsb:
                k, sep, v = l.partition('=')
                if not sep:
                    continue
                d[k.strip()] = v.strip()
        return d

    def _cmp_pkgrevno(self, package, revno, pkgcache=None):
        """Compare supplied revno with the revno of the installed package

        *  1 => Installed revno is greater than supplied arg
        *  0 => Installed revno is the same as supplied arg
        * -1 => Installed revno is less than supplied arg

        This function imports apt_cache function from charmhelpers.fetch if
        the pkgcache argument is None. Be sure to add charmhelpers.fetch if
        you call this function, or pass an apt_pkg.Cache() instance.
        """
        import apt_pkg
        if not pkgcache:
            from charmhelpers.fetch import apt_cache
            pkgcache = apt_cache()
        pkg = pkgcache[package]
        return apt_pkg.version_compare(pkg.current_ver.ver_str, revno)
1174 | 0 | 48 | ||
1175 | === modified file 'hooks/charmhelpers/core/services/__init__.py' (properties changed: -x to +x) | |||
1176 | === modified file 'hooks/charmhelpers/core/services/base.py' (properties changed: -x to +x) | |||
1177 | === modified file 'hooks/charmhelpers/core/services/helpers.py' (properties changed: -x to +x) | |||
1178 | === modified file 'hooks/charmhelpers/core/strutils.py' (properties changed: -x to +x) | |||
1179 | === modified file 'hooks/charmhelpers/core/sysctl.py' (properties changed: -x to +x) | |||
1180 | === modified file 'hooks/charmhelpers/core/templating.py' (properties changed: -x to +x) | |||
1181 | === modified file 'hooks/charmhelpers/core/unitdata.py' (properties changed: -x to +x) | |||
1182 | === modified file 'hooks/charmhelpers/fetch/__init__.py' (properties changed: -x to +x) | |||
1183 | --- hooks/charmhelpers/fetch/__init__.py 2015-03-23 09:45:10 +0000 | |||
1184 | +++ hooks/charmhelpers/fetch/__init__.py 2016-04-29 11:54:41 +0000 | |||
1185 | @@ -14,84 +14,21 @@ | |||
1186 | 14 | # You should have received a copy of the GNU Lesser General Public License | 14 | # You should have received a copy of the GNU Lesser General Public License |
1187 | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. |
1188 | 16 | 16 | ||
1189 | 17 | import six | ||
1190 | 17 | import importlib | 18 | import importlib |
1193 | 18 | from tempfile import NamedTemporaryFile | 19 | |
1192 | 19 | import time | ||
1194 | 20 | from yaml import safe_load | 20 | from yaml import safe_load |
1200 | 21 | from charmhelpers.core.host import ( | 21 | from charmhelpers import get_platform |
1201 | 22 | lsb_release | 22 | from charmhelpers.core.hookenv import( |
1197 | 23 | ) | ||
1198 | 24 | import subprocess | ||
1199 | 25 | from charmhelpers.core.hookenv import ( | ||
1202 | 26 | config, | 23 | config, |
1204 | 27 | log, | 24 | log |
1205 | 28 | ) | 25 | ) |
1206 | 29 | import os | ||
1207 | 30 | |||
1208 | 31 | import six | ||
1209 | 32 | if six.PY3: | 26 | if six.PY3: |
1210 | 33 | from urllib.parse import urlparse, urlunparse | 27 | from urllib.parse import urlparse, urlunparse |
1211 | 34 | else: | 28 | else: |
1212 | 35 | from urlparse import urlparse, urlunparse | 29 | from urlparse import urlparse, urlunparse |
1213 | 36 | 30 | ||
1214 | 37 | 31 | ||
1215 | 38 | CLOUD_ARCHIVE = """# Ubuntu Cloud Archive | ||
1216 | 39 | deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main | ||
1217 | 40 | """ | ||
1218 | 41 | PROPOSED_POCKET = """# Proposed | ||
1219 | 42 | deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted | ||
1220 | 43 | """ | ||
1221 | 44 | CLOUD_ARCHIVE_POCKETS = { | ||
1222 | 45 | # Folsom | ||
1223 | 46 | 'folsom': 'precise-updates/folsom', | ||
1224 | 47 | 'precise-folsom': 'precise-updates/folsom', | ||
1225 | 48 | 'precise-folsom/updates': 'precise-updates/folsom', | ||
1226 | 49 | 'precise-updates/folsom': 'precise-updates/folsom', | ||
1227 | 50 | 'folsom/proposed': 'precise-proposed/folsom', | ||
1228 | 51 | 'precise-folsom/proposed': 'precise-proposed/folsom', | ||
1229 | 52 | 'precise-proposed/folsom': 'precise-proposed/folsom', | ||
1230 | 53 | # Grizzly | ||
1231 | 54 | 'grizzly': 'precise-updates/grizzly', | ||
1232 | 55 | 'precise-grizzly': 'precise-updates/grizzly', | ||
1233 | 56 | 'precise-grizzly/updates': 'precise-updates/grizzly', | ||
1234 | 57 | 'precise-updates/grizzly': 'precise-updates/grizzly', | ||
1235 | 58 | 'grizzly/proposed': 'precise-proposed/grizzly', | ||
1236 | 59 | 'precise-grizzly/proposed': 'precise-proposed/grizzly', | ||
1237 | 60 | 'precise-proposed/grizzly': 'precise-proposed/grizzly', | ||
1238 | 61 | # Havana | ||
1239 | 62 | 'havana': 'precise-updates/havana', | ||
1240 | 63 | 'precise-havana': 'precise-updates/havana', | ||
1241 | 64 | 'precise-havana/updates': 'precise-updates/havana', | ||
1242 | 65 | 'precise-updates/havana': 'precise-updates/havana', | ||
1243 | 66 | 'havana/proposed': 'precise-proposed/havana', | ||
1244 | 67 | 'precise-havana/proposed': 'precise-proposed/havana', | ||
1245 | 68 | 'precise-proposed/havana': 'precise-proposed/havana', | ||
1246 | 69 | # Icehouse | ||
1247 | 70 | 'icehouse': 'precise-updates/icehouse', | ||
1248 | 71 | 'precise-icehouse': 'precise-updates/icehouse', | ||
1249 | 72 | 'precise-icehouse/updates': 'precise-updates/icehouse', | ||
1250 | 73 | 'precise-updates/icehouse': 'precise-updates/icehouse', | ||
1251 | 74 | 'icehouse/proposed': 'precise-proposed/icehouse', | ||
1252 | 75 | 'precise-icehouse/proposed': 'precise-proposed/icehouse', | ||
1253 | 76 | 'precise-proposed/icehouse': 'precise-proposed/icehouse', | ||
1254 | 77 | # Juno | ||
1255 | 78 | 'juno': 'trusty-updates/juno', | ||
1256 | 79 | 'trusty-juno': 'trusty-updates/juno', | ||
1257 | 80 | 'trusty-juno/updates': 'trusty-updates/juno', | ||
1258 | 81 | 'trusty-updates/juno': 'trusty-updates/juno', | ||
1259 | 82 | 'juno/proposed': 'trusty-proposed/juno', | ||
1260 | 83 | 'trusty-juno/proposed': 'trusty-proposed/juno', | ||
1261 | 84 | 'trusty-proposed/juno': 'trusty-proposed/juno', | ||
1262 | 85 | # Kilo | ||
1263 | 86 | 'kilo': 'trusty-updates/kilo', | ||
1264 | 87 | 'trusty-kilo': 'trusty-updates/kilo', | ||
1265 | 88 | 'trusty-kilo/updates': 'trusty-updates/kilo', | ||
1266 | 89 | 'trusty-updates/kilo': 'trusty-updates/kilo', | ||
1267 | 90 | 'kilo/proposed': 'trusty-proposed/kilo', | ||
1268 | 91 | 'trusty-kilo/proposed': 'trusty-proposed/kilo', | ||
1269 | 92 | 'trusty-proposed/kilo': 'trusty-proposed/kilo', | ||
1270 | 93 | } | ||
1271 | 94 | |||
1272 | 95 | # The order of this list is very important. Handlers should be listed in from | 32 | # The order of this list is very important. Handlers should be listed in from |
1273 | 96 | # least- to most-specific URL matching. | 33 | # least- to most-specific URL matching. |
1274 | 97 | FETCH_HANDLERS = ( | 34 | FETCH_HANDLERS = ( |
1275 | @@ -100,10 +37,6 @@ | |||
1276 | 100 | 'charmhelpers.fetch.giturl.GitUrlFetchHandler', | 37 | 'charmhelpers.fetch.giturl.GitUrlFetchHandler', |
1277 | 101 | ) | 38 | ) |
1278 | 102 | 39 | ||
1279 | 103 | APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT. | ||
1280 | 104 | APT_NO_LOCK_RETRY_DELAY = 10 # Wait 10 seconds between apt lock checks. | ||
1281 | 105 | APT_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times. | ||
1282 | 106 | |||
1283 | 107 | 40 | ||
1284 | 108 | class SourceConfigError(Exception): | 41 | class SourceConfigError(Exception): |
1285 | 109 | pass | 42 | pass |
1286 | @@ -141,162 +74,38 @@ | |||
1287 | 141 | return urlunparse(parts) | 74 | return urlunparse(parts) |
1288 | 142 | 75 | ||
1289 | 143 | 76 | ||
1290 | 77 | module = "charmhelpers.fetch.%s" % get_platform() | ||
1291 | 78 | fetch = importlib.import_module(module) | ||
1292 | 79 | |||
1293 | 80 | |||
1294 | 144 | def filter_installed_packages(packages): | 81 | def filter_installed_packages(packages): |
1295 | 145 | """Returns a list of packages that require installation""" | 82 | """Returns a list of packages that require installation""" |
1320 | 146 | cache = apt_cache() | 83 | return fetch.filter_installed_packages(packages) |
1321 | 147 | _pkgs = [] | 84 | |
1322 | 148 | for package in packages: | 85 | |
1323 | 149 | try: | 86 | def install(packages, options=None, fatal=False): |
1300 | 150 | p = cache[package] | ||
1301 | 151 | p.current_ver or _pkgs.append(package) | ||
1302 | 152 | except KeyError: | ||
1303 | 153 | log('Package {} has no installation candidate.'.format(package), | ||
1304 | 154 | level='WARNING') | ||
1305 | 155 | _pkgs.append(package) | ||
1306 | 156 | return _pkgs | ||
1307 | 157 | |||
1308 | 158 | |||
1309 | 159 | def apt_cache(in_memory=True): | ||
1310 | 160 | """Build and return an apt cache""" | ||
1311 | 161 | import apt_pkg | ||
1312 | 162 | apt_pkg.init() | ||
1313 | 163 | if in_memory: | ||
1314 | 164 | apt_pkg.config.set("Dir::Cache::pkgcache", "") | ||
1315 | 165 | apt_pkg.config.set("Dir::Cache::srcpkgcache", "") | ||
1316 | 166 | return apt_pkg.Cache() | ||
1317 | 167 | |||
1318 | 168 | |||
1319 | 169 | def apt_install(packages, options=None, fatal=False): | ||
1324 | 170 | """Install one or more packages""" | 87 | """Install one or more packages""" |
1341 | 171 | if options is None: | 88 | fetch.install(packages, options, fatal) |
1342 | 172 | options = ['--option=Dpkg::Options::=--force-confold'] | 89 | |
1343 | 173 | 90 | ||
1344 | 174 | cmd = ['apt-get', '--assume-yes'] | 91 | def upgrade(options=None, fatal=False, dist=False): |
1329 | 175 | cmd.extend(options) | ||
1330 | 176 | cmd.append('install') | ||
1331 | 177 | if isinstance(packages, six.string_types): | ||
1332 | 178 | cmd.append(packages) | ||
1333 | 179 | else: | ||
1334 | 180 | cmd.extend(packages) | ||
1335 | 181 | log("Installing {} with options: {}".format(packages, | ||
1336 | 182 | options)) | ||
1337 | 183 | _run_apt_command(cmd, fatal) | ||
1338 | 184 | |||
1339 | 185 | |||
1340 | 186 | def apt_upgrade(options=None, fatal=False, dist=False): | ||
1345 | 187 | """Upgrade all packages""" | 92 | """Upgrade all packages""" |
1360 | 188 | if options is None: | 93 | fetch.upgrade(options, fatal, dist) |
1361 | 189 | options = ['--option=Dpkg::Options::=--force-confold'] | 94 | |
1362 | 190 | 95 | ||
1363 | 191 | cmd = ['apt-get', '--assume-yes'] | 96 | def update(fatal=False): |
1350 | 192 | cmd.extend(options) | ||
1351 | 193 | if dist: | ||
1352 | 194 | cmd.append('dist-upgrade') | ||
1353 | 195 | else: | ||
1354 | 196 | cmd.append('upgrade') | ||
1355 | 197 | log("Upgrading with options: {}".format(options)) | ||
1356 | 198 | _run_apt_command(cmd, fatal) | ||
1357 | 199 | |||
1358 | 200 | |||
1359 | 201 | def apt_update(fatal=False): | ||
1364 | 202 | """Update local apt cache""" | 97 | """Update local apt cache""" |
1370 | 203 | cmd = ['apt-get', 'update'] | 98 | fetch.update(fatal) |
1371 | 204 | _run_apt_command(cmd, fatal) | 99 | |
1372 | 205 | 100 | ||
1373 | 206 | 101 | def purge(packages, fatal=False): | |
1369 | 207 | def apt_purge(packages, fatal=False): | ||
1374 | 208 | """Purge one or more packages""" | 102 | """Purge one or more packages""" |
1399 | 209 | cmd = ['apt-get', '--assume-yes', 'purge'] | 103 | fetch.purge(packages, fatal) |
1400 | 210 | if isinstance(packages, six.string_types): | 104 | |
1401 | 211 | cmd.append(packages) | 105 | |
1402 | 212 | else: | 106 | # PPA only works with .DEB packed and not with .RPM |
1379 | 213 | cmd.extend(packages) | ||
1380 | 214 | log("Purging {}".format(packages)) | ||
1381 | 215 | _run_apt_command(cmd, fatal) | ||
1382 | 216 | |||
1383 | 217 | |||
1384 | 218 | def apt_hold(packages, fatal=False): | ||
1385 | 219 | """Hold one or more packages""" | ||
1386 | 220 | cmd = ['apt-mark', 'hold'] | ||
1387 | 221 | if isinstance(packages, six.string_types): | ||
1388 | 222 | cmd.append(packages) | ||
1389 | 223 | else: | ||
1390 | 224 | cmd.extend(packages) | ||
1391 | 225 | log("Holding {}".format(packages)) | ||
1392 | 226 | |||
1393 | 227 | if fatal: | ||
1394 | 228 | subprocess.check_call(cmd) | ||
1395 | 229 | else: | ||
1396 | 230 | subprocess.call(cmd) | ||
1397 | 231 | |||
1398 | 232 | |||
1403 | 233 | def add_source(source, key=None): | 107 | def add_source(source, key=None): |
1470 | 234 | """Add a package source to this system. | 108 | fetch.add_source(source, key) |
1405 | 235 | |||
1406 | 236 | @param source: a URL or sources.list entry, as supported by | ||
1407 | 237 | add-apt-repository(1). Examples:: | ||
1408 | 238 | |||
1409 | 239 | ppa:charmers/example | ||
1410 | 240 | deb https://stub:key@private.example.com/ubuntu trusty main | ||
1411 | 241 | |||
1412 | 242 | In addition: | ||
1413 | 243 | 'proposed:' may be used to enable the standard 'proposed' | ||
1414 | 244 | pocket for the release. | ||
1415 | 245 | 'cloud:' may be used to activate official cloud archive pockets, | ||
1416 | 246 | such as 'cloud:icehouse' | ||
1417 | 247 | 'distro' may be used as a noop | ||
1418 | 248 | |||
1419 | 249 | @param key: A key to be added to the system's APT keyring and used | ||
1420 | 250 | to verify the signatures on packages. Ideally, this should be an | ||
1421 | 251 | ASCII format GPG public key including the block headers. A GPG key | ||
1422 | 252 | id may also be used, but be aware that only insecure protocols are | ||
1423 | 253 | available to retrieve the actual public key from a public keyserver | ||
1424 | 254 | placing your Juju environment at risk. ppa and cloud archive keys | ||
1425 | 255 | are securely added automatically, so should not be provided. | ||
1426 | 256 | """ | ||
1427 | 257 | if source is None: | ||
1428 | 258 | log('Source is not present. Skipping') | ||
1429 | 259 | return | ||
1430 | 260 | |||
1431 | 261 | if (source.startswith('ppa:') or | ||
1432 | 262 | source.startswith('http') or | ||
1433 | 263 | source.startswith('deb ') or | ||
1434 | 264 | source.startswith('cloud-archive:')): | ||
1435 | 265 | subprocess.check_call(['add-apt-repository', '--yes', source]) | ||
1436 | 266 | elif source.startswith('cloud:'): | ||
1437 | 267 | apt_install(filter_installed_packages(['ubuntu-cloud-keyring']), | ||
1438 | 268 | fatal=True) | ||
1439 | 269 | pocket = source.split(':')[-1] | ||
1440 | 270 | if pocket not in CLOUD_ARCHIVE_POCKETS: | ||
1441 | 271 | raise SourceConfigError( | ||
1442 | 272 | 'Unsupported cloud: source option %s' % | ||
1443 | 273 | pocket) | ||
1444 | 274 | actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket] | ||
1445 | 275 | with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt: | ||
1446 | 276 | apt.write(CLOUD_ARCHIVE.format(actual_pocket)) | ||
1447 | 277 | elif source == 'proposed': | ||
1448 | 278 | release = lsb_release()['DISTRIB_CODENAME'] | ||
1449 | 279 | with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt: | ||
1450 | 280 | apt.write(PROPOSED_POCKET.format(release)) | ||
1451 | 281 | elif source == 'distro': | ||
1452 | 282 | pass | ||
1453 | 283 | else: | ||
1454 | 284 | log("Unknown source: {!r}".format(source)) | ||
1455 | 285 | |||
1456 | 286 | if key: | ||
1457 | 287 | if '-----BEGIN PGP PUBLIC KEY BLOCK-----' in key: | ||
1458 | 288 | with NamedTemporaryFile('w+') as key_file: | ||
1459 | 289 | key_file.write(key) | ||
1460 | 290 | key_file.flush() | ||
1461 | 291 | key_file.seek(0) | ||
1462 | 292 | subprocess.check_call(['apt-key', 'add', '-'], stdin=key_file) | ||
1463 | 293 | else: | ||
1464 | 294 | # Note that hkp: is in no way a secure protocol. Using a | ||
1465 | 295 | # GPG key id is pointless from a security POV unless you | ||
1466 | 296 | # absolutely trust your network and DNS. | ||
1467 | 297 | subprocess.check_call(['apt-key', 'adv', '--keyserver', | ||
1468 | 298 | 'hkp://keyserver.ubuntu.com:80', '--recv', | ||
1469 | 299 | key]) | ||
1471 | 300 | 109 | ||
1472 | 301 | 110 | ||
1473 | 302 | def configure_sources(update=False, | 111 | def configure_sources(update=False, |
1474 | @@ -338,7 +147,32 @@ | |||
1475 | 338 | for source, key in zip(sources, keys): | 147 | for source, key in zip(sources, keys): |
1476 | 339 | add_source(source, key) | 148 | add_source(source, key) |
1477 | 340 | if update: | 149 | if update: |
1479 | 341 | apt_update(fatal=True) | 150 | update(fatal=True) |
1480 | 151 | |||
1481 | 152 | |||
1482 | 153 | def install_from_config(config_var_name): | ||
1483 | 154 | charm_config = config() | ||
1484 | 155 | source = charm_config[config_var_name] | ||
1485 | 156 | return install_remote(source) | ||
1486 | 157 | |||
1487 | 158 | |||
1488 | 159 | def plugins(fetch_handlers=None): | ||
1489 | 160 | if not fetch_handlers: | ||
1490 | 161 | fetch_handlers = FETCH_HANDLERS | ||
1491 | 162 | plugin_list = [] | ||
1492 | 163 | for handler_name in fetch_handlers: | ||
1493 | 164 | package, classname = handler_name.rsplit('.', 1) | ||
1494 | 165 | try: | ||
1495 | 166 | handler_class = getattr( | ||
1496 | 167 | importlib.import_module(package), | ||
1497 | 168 | classname) | ||
1498 | 169 | plugin_list.append(handler_class()) | ||
1499 | 170 | except NotImplementedError: | ||
1500 | 171 | # Skip missing plugins so that they can be omitted from | ||
1501 | 172 | # installation if desired | ||
1502 | 173 | log("FetchHandler {} not found, skipping plugin".format( | ||
1503 | 174 | handler_name)) | ||
1504 | 175 | return plugin_list | ||
1505 | 342 | 176 | ||
1506 | 343 | 177 | ||
1507 | 344 | def install_remote(source, *args, **kwargs): | 178 | def install_remote(source, *args, **kwargs): |
1508 | @@ -370,70 +204,17 @@ | |||
1509 | 370 | for handler in handlers: | 204 | for handler in handlers: |
1510 | 371 | try: | 205 | try: |
1511 | 372 | installed_to = handler.install(source, *args, **kwargs) | 206 | installed_to = handler.install(source, *args, **kwargs) |
1514 | 373 | except UnhandledSource: | 207 | except UnhandledSource as e: |
1515 | 374 | pass | 208 | log('Install source attempt unsuccessful: {}'.format(e), |
1516 | 209 | level='WARNING') | ||
1517 | 375 | if not installed_to: | 210 | if not installed_to: |
1518 | 376 | raise UnhandledSource("No handler found for source {}".format(source)) | 211 | raise UnhandledSource("No handler found for source {}".format(source)) |
1519 | 377 | return installed_to | 212 | return installed_to |
1520 | 378 | 213 | ||
1582 | 379 | 214 | # Backwards compatibility | |
1583 | 380 | def install_from_config(config_var_name): | 215 | if get_platform() == "ubuntu": |
1584 | 381 | charm_config = config() | 216 | from charmhelpers.fetch.ubuntu import * |
1585 | 382 | source = charm_config[config_var_name] | 217 | apt_install = install |
1586 | 383 | return install_remote(source) | 218 | apt_update = update |
1587 | 384 | 219 | apt_upgrade = upgrade | |
1588 | 385 | 220 | apt_purge = purge | |
1528 | 386 | def plugins(fetch_handlers=None): | ||
1529 | 387 | if not fetch_handlers: | ||
1530 | 388 | fetch_handlers = FETCH_HANDLERS | ||
1531 | 389 | plugin_list = [] | ||
1532 | 390 | for handler_name in fetch_handlers: | ||
1533 | 391 | package, classname = handler_name.rsplit('.', 1) | ||
1534 | 392 | try: | ||
1535 | 393 | handler_class = getattr( | ||
1536 | 394 | importlib.import_module(package), | ||
1537 | 395 | classname) | ||
1538 | 396 | plugin_list.append(handler_class()) | ||
1539 | 397 | except (ImportError, AttributeError): | ||
1540 | 398 | # Skip missing plugins so that they can be omitted from | ||
1541 | 399 | # installation if desired | ||
1542 | 400 | log("FetchHandler {} not found, skipping plugin".format( | ||
1543 | 401 | handler_name)) | ||
1544 | 402 | return plugin_list | ||
1545 | 403 | |||
1546 | 404 | |||
1547 | 405 | def _run_apt_command(cmd, fatal=False): | ||
1548 | 406 | """ | ||
1549 | 407 | Run an APT command, checking output and retrying if the fatal flag is set | ||
1550 | 408 | to True. | ||
1551 | 409 | |||
1552 | 410 | :param: cmd: str: The apt command to run. | ||
1553 | 411 | :param: fatal: bool: Whether the command's output should be checked and | ||
1554 | 412 | retried. | ||
1555 | 413 | """ | ||
1556 | 414 | env = os.environ.copy() | ||
1557 | 415 | |||
1558 | 416 | if 'DEBIAN_FRONTEND' not in env: | ||
1559 | 417 | env['DEBIAN_FRONTEND'] = 'noninteractive' | ||
1560 | 418 | |||
1561 | 419 | if fatal: | ||
1562 | 420 | retry_count = 0 | ||
1563 | 421 | result = None | ||
1564 | 422 | |||
1565 | 423 | # If the command is considered "fatal", we need to retry if the apt | ||
1566 | 424 | # lock was not acquired. | ||
1567 | 425 | |||
1568 | 426 | while result is None or result == APT_NO_LOCK: | ||
1569 | 427 | try: | ||
1570 | 428 | result = subprocess.check_call(cmd, env=env) | ||
1571 | 429 | except subprocess.CalledProcessError as e: | ||
1572 | 430 | retry_count = retry_count + 1 | ||
1573 | 431 | if retry_count > APT_NO_LOCK_RETRY_COUNT: | ||
1574 | 432 | raise | ||
1575 | 433 | result = e.returncode | ||
1576 | 434 | log("Couldn't acquire DPKG lock. Will retry in {} seconds." | ||
1577 | 435 | "".format(APT_NO_LOCK_RETRY_DELAY)) | ||
1578 | 436 | time.sleep(APT_NO_LOCK_RETRY_DELAY) | ||
1579 | 437 | |||
1580 | 438 | else: | ||
1581 | 439 | subprocess.call(cmd, env=env) | ||
1589 | 440 | 221 | ||
1590 | === modified file 'hooks/charmhelpers/fetch/archiveurl.py' (properties changed: -x to +x) | |||
1591 | === modified file 'hooks/charmhelpers/fetch/bzrurl.py' (properties changed: -x to +x) | |||
1592 | --- hooks/charmhelpers/fetch/bzrurl.py 2015-03-23 09:45:10 +0000 | |||
1593 | +++ hooks/charmhelpers/fetch/bzrurl.py 2016-04-29 11:54:41 +0000 | |||
1594 | @@ -15,60 +15,51 @@ | |||
1595 | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. |
1596 | 16 | 16 | ||
1597 | 17 | import os | 17 | import os |
1598 | 18 | |||
1599 | 19 | from subprocess import check_call | ||
1600 | 18 | from charmhelpers.fetch import ( | 20 | from charmhelpers.fetch import ( |
1601 | 19 | BaseFetchHandler, | 21 | BaseFetchHandler, |
1603 | 20 | UnhandledSource | 22 | UnhandledSource, |
1604 | 23 | filter_installed_packages, | ||
1605 | 24 | install, | ||
1606 | 21 | ) | 25 | ) |
1607 | 22 | from charmhelpers.core.host import mkdir | 26 | from charmhelpers.core.host import mkdir |
1608 | 23 | 27 | ||
1609 | 24 | import six | ||
1610 | 25 | if six.PY3: | ||
1611 | 26 | raise ImportError('bzrlib does not support Python3') | ||
1612 | 27 | 28 | ||
1621 | 28 | try: | 29 | if filter_installed_packages(['bzr']) != []: |
1622 | 29 | from bzrlib.branch import Branch | 30 | install(['bzr']) |
1623 | 30 | from bzrlib import bzrdir, workingtree, errors | 31 | if filter_installed_packages(['bzr']) != []: |
1624 | 31 | except ImportError: | 32 | raise NotImplementedError('Unable to install bzr') |
1617 | 32 | from charmhelpers.fetch import apt_install | ||
1618 | 33 | apt_install("python-bzrlib") | ||
1619 | 34 | from bzrlib.branch import Branch | ||
1620 | 35 | from bzrlib import bzrdir, workingtree, errors | ||
1625 | 36 | 33 | ||
1626 | 37 | 34 | ||
1627 | 38 | class BzrUrlFetchHandler(BaseFetchHandler): | 35 | class BzrUrlFetchHandler(BaseFetchHandler): |
1628 | 39 | """Handler for bazaar branches via generic and lp URLs""" | 36 | """Handler for bazaar branches via generic and lp URLs""" |
1629 | 40 | def can_handle(self, source): | 37 | def can_handle(self, source): |
1630 | 41 | url_parts = self.parse_url(source) | 38 | url_parts = self.parse_url(source) |
1632 | 42 | if url_parts.scheme not in ('bzr+ssh', 'lp'): | 39 | if url_parts.scheme not in ('bzr+ssh', 'lp', ''): |
1633 | 43 | return False | 40 | return False |
1634 | 41 | elif not url_parts.scheme: | ||
1635 | 42 | return os.path.exists(os.path.join(source, '.bzr')) | ||
1636 | 44 | else: | 43 | else: |
1637 | 45 | return True | 44 | return True |
1638 | 46 | 45 | ||
1639 | 47 | def branch(self, source, dest): | 46 | def branch(self, source, dest): |
1640 | 48 | url_parts = self.parse_url(source) | ||
1641 | 49 | # If we use lp:branchname scheme we need to load plugins | ||
1642 | 50 | if not self.can_handle(source): | 47 | if not self.can_handle(source): |
1643 | 51 | raise UnhandledSource("Cannot handle {}".format(source)) | 48 | raise UnhandledSource("Cannot handle {}".format(source)) |
1658 | 52 | if url_parts.scheme == "lp": | 49 | if os.path.exists(dest): |
1659 | 53 | from bzrlib.plugin import load_plugins | 50 | check_call(['bzr', 'pull', '--overwrite', '-d', dest, source]) |
1660 | 54 | load_plugins() | 51 | else: |
1661 | 55 | try: | 52 | check_call(['bzr', 'branch', source, dest]) |
1648 | 56 | local_branch = bzrdir.BzrDir.create_branch_convenience(dest) | ||
1649 | 57 | except errors.AlreadyControlDirError: | ||
1650 | 58 | local_branch = Branch.open(dest) | ||
1651 | 59 | try: | ||
1652 | 60 | remote_branch = Branch.open(source) | ||
1653 | 61 | remote_branch.push(local_branch) | ||
1654 | 62 | tree = workingtree.WorkingTree.open(dest) | ||
1655 | 63 | tree.update() | ||
1656 | 64 | except Exception as e: | ||
1657 | 65 | raise e | ||
1662 | 66 | 53 | ||
1664 | 67 | def install(self, source): | 54 | def install(self, source, dest=None): |
1665 | 68 | url_parts = self.parse_url(source) | 55 | url_parts = self.parse_url(source) |
1666 | 69 | branch_name = url_parts.path.strip("/").split("/")[-1] | 56 | branch_name = url_parts.path.strip("/").split("/")[-1] |
1669 | 70 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", | 57 | if dest: |
1670 | 71 | branch_name) | 58 | dest_dir = os.path.join(dest, branch_name) |
1671 | 59 | else: | ||
1672 | 60 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", | ||
1673 | 61 | branch_name) | ||
1674 | 62 | |||
1675 | 72 | if not os.path.exists(dest_dir): | 63 | if not os.path.exists(dest_dir): |
1676 | 73 | mkdir(dest_dir, perms=0o755) | 64 | mkdir(dest_dir, perms=0o755) |
1677 | 74 | try: | 65 | try: |
1678 | 75 | 66 | ||
1679 | === added directory 'hooks/charmhelpers/fetch/centos' | |||
1680 | === added file 'hooks/charmhelpers/fetch/centos/__init__.py' | |||
1681 | --- hooks/charmhelpers/fetch/centos/__init__.py 1970-01-01 00:00:00 +0000 | |||
1682 | +++ hooks/charmhelpers/fetch/centos/__init__.py 2016-04-29 11:54:41 +0000 | |||
1683 | @@ -0,0 +1,158 @@ | |||
1684 | 1 | import subprocess | ||
1685 | 2 | import os | ||
1686 | 3 | import time | ||
1687 | 4 | import six | ||
1688 | 5 | import yum | ||
1689 | 6 | |||
1690 | 7 | from tempfile import NamedTemporaryFile | ||
1691 | 8 | from charmhelpers.core.hookenv import ( | ||
1692 | 9 | log, | ||
1693 | 10 | ) | ||
1694 | 11 | |||
1695 | 12 | YUM_NO_LOCK = 1 # The return code for "couldn't acquire lock" in YUM. | ||
1696 | 13 | YUM_NO_LOCK_RETRY_DELAY = 10 # Wait 10 seconds between yum lock checks. | ||
1697 | 14 | YUM_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times. | ||
1698 | 15 | |||
1699 | 16 | |||
1700 | 17 | def filter_installed_packages(packages): | ||
1701 | 18 | """Returns a list of packages that require installation""" | ||
1702 | 19 | yb = yum.YumBase() | ||
1703 | 20 | pkgs = [] | ||
1704 | 21 | for package in yb.doPackageLists().installed: | ||
1705 | 22 | pkgs.append(package.base_package_name) | ||
1706 | 23 | _pkgs = [] | ||
1707 | 24 | for package in packages: | ||
1708 | 25 | if package not in pkgs: | ||
1709 | 26 | _pkgs.append(package) | ||
1710 | 27 | return _pkgs | ||
1711 | 28 | |||
1712 | 29 | |||
1713 | 30 | def install(packages, options=None, fatal=False): | ||
1714 | 31 | """Install one or more packages""" | ||
1715 | 32 | cmd = ['yum', '--assumeyes'] | ||
1716 | 33 | if options is not None: | ||
1717 | 34 | cmd.extend(options) | ||
1718 | 35 | cmd.append('install') | ||
1719 | 36 | if isinstance(packages, six.string_types): | ||
1720 | 37 | cmd.append(packages) | ||
1721 | 38 | else: | ||
1722 | 39 | cmd.extend(packages) | ||
1723 | 40 | log("Installing {} with options: {}".format(packages, | ||
1724 | 41 | options)) | ||
1725 | 42 | _run_yum_command(cmd, fatal) | ||
1726 | 43 | |||
1727 | 44 | |||
1728 | 45 | def update(fatal=False): | ||
1729 | 46 | """Update local yum cache""" | ||
1730 | 47 | cmd = ['yum', 'update', '--assumeyes'] | ||
1731 | 48 | log("Update with fatal: {}".format(fatal)) | ||
1732 | 49 | _run_yum_command(cmd, fatal) | ||
1733 | 50 | |||
1734 | 51 | |||
1735 | 52 | def upgrade(options=None, fatal=False, dist=False): | ||
1736 | 53 | """Upgrade all packages""" | ||
1737 | 54 | cmd = ['yum', '--assumeyes'] | ||
1738 | 55 | if options is not None: | ||
1739 | 56 | cmd.extend(options) | ||
1740 | 57 | cmd.append('upgrade') | ||
1741 | 58 | log("Upgrading with options: {}".format(options)) | ||
1742 | 59 | _run_yum_command(cmd, fatal) | ||
1743 | 60 | |||
1744 | 61 | |||
1745 | 62 | def purge(packages, fatal=False): | ||
1746 | 63 | """Purge one or more packages""" | ||
1747 | 64 | cmd = ['yum', 'remove', '--assumeyes'] | ||
1748 | 65 | if isinstance(packages, six.string_types): | ||
1749 | 66 | cmd.append(packages) | ||
1750 | 67 | else: | ||
1751 | 68 | cmd.extend(packages) | ||
1752 | 69 | log("Purging {}".format(packages)) | ||
1753 | 70 | _run_yum_command(cmd, fatal) | ||
1754 | 71 | |||
1755 | 72 | |||
1756 | 73 | def yum_search(packages): | ||
1757 | 74 | """Search for a package""" | ||
1758 | 75 | output = {} | ||
1759 | 76 | cmd = ['yum', 'search'] | ||
1760 | 77 | if isinstance(packages, six.string_types): | ||
1761 | 78 | cmd.append(packages) | ||
1762 | 79 | else: | ||
1763 | 80 | cmd.extend(packages) | ||
1764 | 81 | log("Searching for {}".format(packages)) | ||
1765 | 82 | result = subprocess.check_output(cmd) | ||
1766 | 83 | for package in list(packages): | ||
1767 | 84 | if package not in result: | ||
1768 | 85 | output[package] = False | ||
1769 | 86 | else: | ||
1770 | 87 | output[package] = True | ||
1771 | 88 | return output | ||
1772 | 89 | |||
1773 | 90 | |||
1774 | 91 | def add_source(source, key=None): | ||
1775 | 92 | if source is None: | ||
1776 | 93 | log('Source is not present. Skipping') | ||
1777 | 94 | return | ||
1778 | 95 | |||
1779 | 96 | if source.startswith('http'): | ||
1780 | 97 | log("Add source: {!r}".format(source)) | ||
1781 | 98 | |||
1782 | 99 | found = False | ||
1783 | 100 | # search if already exists | ||
1784 | 101 | directory = '/etc/yum.repos.d/' | ||
1785 | 102 | for filename in os.listdir(directory): | ||
1786 | 103 | with open(directory+filename, 'r') as rpm_file: | ||
1787 | 104 | if source in rpm_file: | ||
1788 | 105 | found = True | ||
1789 | 106 | |||
1790 | 107 | if not found: | ||
1791 | 108 | # write in the charms.repo | ||
1792 | 109 | with open(directory+'Charms.repo', 'a') as rpm_file: | ||
1793 | 110 | rpm_file.write('[%s]\n' % source[7:].replace('/', '_')) | ||
1794 | 111 | rpm_file.write('name=%s\n' % source[7:]) | ||
1795 | 112 | rpm_file.write('baseurl=%s\n\n' % source) | ||
1796 | 113 | else: | ||
1797 | 114 | log("Unknown source: {!r}".format(source)) | ||
1798 | 115 | |||
1799 | 116 | if key: | ||
1800 | 117 | if '-----BEGIN PGP PUBLIC KEY BLOCK-----' in key: | ||
1801 | 118 | with NamedTemporaryFile('w+') as key_file: | ||
1802 | 119 | key_file.write(key) | ||
1803 | 120 | key_file.flush() | ||
1804 | 121 | key_file.seek(0) | ||
1805 | 122 | subprocess.check_call(['rpm', '--import', key_file]) | ||
1806 | 123 | else: | ||
1807 | 124 | subprocess.check_call(['rpm', '--import', key]) | ||
1808 | 125 | |||
1809 | 126 | |||
1810 | 127 | def _run_yum_command(cmd, fatal=False): | ||
1811 | 128 | """ | ||
1812 | 129 | Run an YUM command, checking output and retrying if the fatal flag is set | ||
1813 | 130 | to True. | ||
1814 | 131 | |||
1815 | 132 | :param: cmd: str: The yum command to run. | ||
1816 | 133 | :param: fatal: bool: Whether the command's output should be checked and | ||
1817 | 134 | retried. | ||
1818 | 135 | """ | ||
1819 | 136 | env = os.environ.copy() | ||
1820 | 137 | |||
1821 | 138 | if fatal: | ||
1822 | 139 | retry_count = 0 | ||
1823 | 140 | result = None | ||
1824 | 141 | |||
1825 | 142 | # If the command is considered "fatal", we need to retry if the yum | ||
1826 | 143 | # lock was not acquired. | ||
1827 | 144 | |||
1828 | 145 | while result is None or result == YUM_NO_LOCK: | ||
1829 | 146 | try: | ||
1830 | 147 | result = subprocess.check_call(cmd, env=env) | ||
1831 | 148 | except subprocess.CalledProcessError as e: | ||
1832 | 149 | retry_count = retry_count + 1 | ||
1833 | 150 | if retry_count > YUM_NO_LOCK_RETRY_COUNT: | ||
1834 | 151 | raise | ||
1835 | 152 | result = e.returncode | ||
1836 | 153 | log("Couldn't acquire YUM lock. Will retry in {} seconds." | ||
1837 | 154 | "".format(YUM_NO_LOCK_RETRY_DELAY)) | ||
1838 | 155 | time.sleep(YUM_NO_LOCK_RETRY_DELAY) | ||
1839 | 156 | |||
1840 | 157 | else: | ||
1841 | 158 | subprocess.call(cmd, env=env) | ||
1842 | 0 | 159 | ||
1843 | === modified file 'hooks/charmhelpers/fetch/giturl.py' (properties changed: -x to +x) | |||
1844 | --- hooks/charmhelpers/fetch/giturl.py 2015-03-23 09:45:10 +0000 | |||
1845 | +++ hooks/charmhelpers/fetch/giturl.py 2016-04-29 11:54:41 +0000 | |||
1846 | @@ -15,24 +15,18 @@ | |||
1847 | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. |
1848 | 16 | 16 | ||
1849 | 17 | import os | 17 | import os |
1850 | 18 | from subprocess import check_call, CalledProcessError | ||
1851 | 18 | from charmhelpers.fetch import ( | 19 | from charmhelpers.fetch import ( |
1852 | 19 | BaseFetchHandler, | 20 | BaseFetchHandler, |
1854 | 20 | UnhandledSource | 21 | UnhandledSource, |
1855 | 22 | filter_installed_packages, | ||
1856 | 23 | install, | ||
1857 | 21 | ) | 24 | ) |
1872 | 22 | from charmhelpers.core.host import mkdir | 25 | |
1873 | 23 | 26 | if filter_installed_packages(['git']) != []: | |
1874 | 24 | import six | 27 | install(['git']) |
1875 | 25 | if six.PY3: | 28 | if filter_installed_packages(['git']) != []: |
1876 | 26 | raise ImportError('GitPython does not support Python 3') | 29 | raise NotImplementedError('Unable to install git') |
1863 | 27 | |||
1864 | 28 | try: | ||
1865 | 29 | from git import Repo | ||
1866 | 30 | except ImportError: | ||
1867 | 31 | from charmhelpers.fetch import apt_install | ||
1868 | 32 | apt_install("python-git") | ||
1869 | 33 | from git import Repo | ||
1870 | 34 | |||
1871 | 35 | from git.exc import GitCommandError # noqa E402 | ||
1877 | 36 | 30 | ||
1878 | 37 | 31 | ||
1879 | 38 | class GitUrlFetchHandler(BaseFetchHandler): | 32 | class GitUrlFetchHandler(BaseFetchHandler): |
1880 | @@ -40,19 +34,26 @@ | |||
1881 | 40 | def can_handle(self, source): | 34 | def can_handle(self, source): |
1882 | 41 | url_parts = self.parse_url(source) | 35 | url_parts = self.parse_url(source) |
1883 | 42 | # TODO (mattyw) no support for ssh git@ yet | 36 | # TODO (mattyw) no support for ssh git@ yet |
1885 | 43 | if url_parts.scheme not in ('http', 'https', 'git'): | 37 | if url_parts.scheme not in ('http', 'https', 'git', ''): |
1886 | 44 | return False | 38 | return False |
1887 | 39 | elif not url_parts.scheme: | ||
1888 | 40 | return os.path.exists(os.path.join(source, '.git')) | ||
1889 | 45 | else: | 41 | else: |
1890 | 46 | return True | 42 | return True |
1891 | 47 | 43 | ||
1893 | 48 | def clone(self, source, dest, branch): | 44 | def clone(self, source, dest, branch="master", depth=None): |
1894 | 49 | if not self.can_handle(source): | 45 | if not self.can_handle(source): |
1895 | 50 | raise UnhandledSource("Cannot handle {}".format(source)) | 46 | raise UnhandledSource("Cannot handle {}".format(source)) |
1896 | 51 | 47 | ||
1899 | 52 | repo = Repo.clone_from(source, dest) | 48 | if os.path.exists(dest): |
1900 | 53 | repo.git.checkout(branch) | 49 | cmd = ['git', '-C', dest, 'pull', source, branch] |
1901 | 50 | else: | ||
1902 | 51 | cmd = ['git', 'clone', source, dest, '--branch', branch] | ||
1903 | 52 | if depth: | ||
1904 | 53 | cmd.extend(['--depth', depth]) | ||
1905 | 54 | check_call(cmd) | ||
1906 | 54 | 55 | ||
1908 | 55 | def install(self, source, branch="master", dest=None): | 56 | def install(self, source, branch="master", dest=None, depth=None): |
1909 | 56 | url_parts = self.parse_url(source) | 57 | url_parts = self.parse_url(source) |
1910 | 57 | branch_name = url_parts.path.strip("/").split("/")[-1] | 58 | branch_name = url_parts.path.strip("/").split("/")[-1] |
1911 | 58 | if dest: | 59 | if dest: |
1912 | @@ -60,12 +61,10 @@ | |||
1913 | 60 | else: | 61 | else: |
1914 | 61 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", | 62 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", |
1915 | 62 | branch_name) | 63 | branch_name) |
1916 | 63 | if not os.path.exists(dest_dir): | ||
1917 | 64 | mkdir(dest_dir, perms=0o755) | ||
1918 | 65 | try: | 64 | try: |
1922 | 66 | self.clone(source, dest_dir, branch) | 65 | self.clone(source, dest_dir, branch, depth) |
1923 | 67 | except GitCommandError as e: | 66 | except CalledProcessError as e: |
1924 | 68 | raise UnhandledSource(e.message) | 67 | raise UnhandledSource(e) |
1925 | 69 | except OSError as e: | 68 | except OSError as e: |
1926 | 70 | raise UnhandledSource(e.strerror) | 69 | raise UnhandledSource(e.strerror) |
1927 | 71 | return dest_dir | 70 | return dest_dir |
1928 | 72 | 71 | ||
1929 | === added directory 'hooks/charmhelpers/fetch/ubuntu' | |||
1930 | === added file 'hooks/charmhelpers/fetch/ubuntu/__init__.py' | |||
1931 | --- hooks/charmhelpers/fetch/ubuntu/__init__.py 1970-01-01 00:00:00 +0000 | |||
1932 | +++ hooks/charmhelpers/fetch/ubuntu/__init__.py 2016-04-29 11:54:41 +0000 | |||
1933 | @@ -0,0 +1,296 @@ | |||
1934 | 1 | import os | ||
1935 | 2 | import six | ||
1936 | 3 | import time | ||
1937 | 4 | import subprocess | ||
1938 | 5 | |||
1939 | 6 | from tempfile import NamedTemporaryFile | ||
1940 | 7 | from charmhelpers.core.host import ( | ||
1941 | 8 | lsb_release | ||
1942 | 9 | ) | ||
1943 | 10 | from charmhelpers.core.hookenv import ( | ||
1944 | 11 | log, | ||
1945 | 12 | ) | ||
1946 | 13 | from charmhelpers.fetch import ( | ||
1947 | 14 | SourceConfigError, | ||
1948 | 15 | ) | ||
1949 | 16 | |||
1950 | 17 | CLOUD_ARCHIVE = """# Ubuntu Cloud Archive | ||
1951 | 18 | deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main | ||
1952 | 19 | """ | ||
1953 | 20 | PROPOSED_POCKET = """# Proposed | ||
1954 | 21 | deb http://archive.ubuntu.com/ubuntu {}-proposed main universe multiverse restricted | ||
1955 | 22 | """ | ||
1956 | 23 | CLOUD_ARCHIVE_POCKETS = { | ||
1957 | 24 | # Folsom | ||
1958 | 25 | 'folsom': 'precise-updates/folsom', | ||
1959 | 26 | 'precise-folsom': 'precise-updates/folsom', | ||
1960 | 27 | 'precise-folsom/updates': 'precise-updates/folsom', | ||
1961 | 28 | 'precise-updates/folsom': 'precise-updates/folsom', | ||
1962 | 29 | 'folsom/proposed': 'precise-proposed/folsom', | ||
1963 | 30 | 'precise-folsom/proposed': 'precise-proposed/folsom', | ||
1964 | 31 | 'precise-proposed/folsom': 'precise-proposed/folsom', | ||
1965 | 32 | # Grizzly | ||
1966 | 33 | 'grizzly': 'precise-updates/grizzly', | ||
1967 | 34 | 'precise-grizzly': 'precise-updates/grizzly', | ||
1968 | 35 | 'precise-grizzly/updates': 'precise-updates/grizzly', | ||
1969 | 36 | 'precise-updates/grizzly': 'precise-updates/grizzly', | ||
1970 | 37 | 'grizzly/proposed': 'precise-proposed/grizzly', | ||
1971 | 38 | 'precise-grizzly/proposed': 'precise-proposed/grizzly', | ||
1972 | 39 | 'precise-proposed/grizzly': 'precise-proposed/grizzly', | ||
1973 | 40 | # Havana | ||
1974 | 41 | 'havana': 'precise-updates/havana', | ||
1975 | 42 | 'precise-havana': 'precise-updates/havana', | ||
1976 | 43 | 'precise-havana/updates': 'precise-updates/havana', | ||
1977 | 44 | 'precise-updates/havana': 'precise-updates/havana', | ||
1978 | 45 | 'havana/proposed': 'precise-proposed/havana', | ||
1979 | 46 | 'precise-havana/proposed': 'precise-proposed/havana', | ||
1980 | 47 | 'precise-proposed/havana': 'precise-proposed/havana', | ||
1981 | 48 | # Icehouse | ||
1982 | 49 | 'icehouse': 'precise-updates/icehouse', | ||
1983 | 50 | 'precise-icehouse': 'precise-updates/icehouse', | ||
1984 | 51 | 'precise-icehouse/updates': 'precise-updates/icehouse', | ||
1985 | 52 | 'precise-updates/icehouse': 'precise-updates/icehouse', | ||
1986 | 53 | 'icehouse/proposed': 'precise-proposed/icehouse', | ||
1987 | 54 | 'precise-icehouse/proposed': 'precise-proposed/icehouse', | ||
1988 | 55 | 'precise-proposed/icehouse': 'precise-proposed/icehouse', | ||
1989 | 56 | # Juno | ||
1990 | 57 | 'juno': 'trusty-updates/juno', | ||
1991 | 58 | 'trusty-juno': 'trusty-updates/juno', | ||
1992 | 59 | 'trusty-juno/updates': 'trusty-updates/juno', | ||
1993 | 60 | 'trusty-updates/juno': 'trusty-updates/juno', | ||
1994 | 61 | 'juno/proposed': 'trusty-proposed/juno', | ||
1995 | 62 | 'trusty-juno/proposed': 'trusty-proposed/juno', | ||
1996 | 63 | 'trusty-proposed/juno': 'trusty-proposed/juno', | ||
1997 | 64 | # Kilo | ||
1998 | 65 | 'kilo': 'trusty-updates/kilo', | ||
1999 | 66 | 'trusty-kilo': 'trusty-updates/kilo', | ||
2000 | 67 | 'trusty-kilo/updates': 'trusty-updates/kilo', | ||
2001 | 68 | 'trusty-updates/kilo': 'trusty-updates/kilo', | ||
2002 | 69 | 'kilo/proposed': 'trusty-proposed/kilo', | ||
2003 | 70 | 'trusty-kilo/proposed': 'trusty-proposed/kilo', | ||
2004 | 71 | 'trusty-proposed/kilo': 'trusty-proposed/kilo', | ||
2005 | 72 | # Liberty | ||
2006 | 73 | 'liberty': 'trusty-updates/liberty', | ||
2007 | 74 | 'trusty-liberty': 'trusty-updates/liberty', | ||
2008 | 75 | 'trusty-liberty/updates': 'trusty-updates/liberty', | ||
2009 | 76 | 'trusty-updates/liberty': 'trusty-updates/liberty', | ||
2010 | 77 | 'liberty/proposed': 'trusty-proposed/liberty', | ||
2011 | 78 | 'trusty-liberty/proposed': 'trusty-proposed/liberty', | ||
2012 | 79 | 'trusty-proposed/liberty': 'trusty-proposed/liberty', | ||
2013 | 80 | # Mitaka | ||
2014 | 81 | 'mitaka': 'trusty-updates/mitaka', | ||
2015 | 82 | 'trusty-mitaka': 'trusty-updates/mitaka', | ||
2016 | 83 | 'trusty-mitaka/updates': 'trusty-updates/mitaka', | ||
2017 | 84 | 'trusty-updates/mitaka': 'trusty-updates/mitaka', | ||
2018 | 85 | 'mitaka/proposed': 'trusty-proposed/mitaka', | ||
2019 | 86 | 'trusty-mitaka/proposed': 'trusty-proposed/mitaka', | ||
2020 | 87 | 'trusty-proposed/mitaka': 'trusty-proposed/mitaka', | ||
2021 | 88 | } | ||
2022 | 89 | |||
2023 | 90 | |||
2024 | 91 | APT_NO_LOCK = 100 # The return code for "couldn't acquire lock" in APT. | ||
2025 | 92 | APT_NO_LOCK_RETRY_DELAY = 10 # Wait 10 seconds between apt lock checks. | ||
2026 | 93 | APT_NO_LOCK_RETRY_COUNT = 30 # Retry to acquire the lock X times. | ||
2027 | 94 | |||
2028 | 95 | |||
2029 | 96 | def filter_installed_packages(packages): | ||
2030 | 97 | """Returns a list of packages that require installation""" | ||
2031 | 98 | temp_cache = apt_cache() | ||
2032 | 99 | _pkgs = [] | ||
2033 | 100 | for package in packages: | ||
2034 | 101 | try: | ||
2035 | 102 | p = temp_cache[package] | ||
2036 | 103 | p.current_ver or _pkgs.append(package) | ||
2037 | 104 | except KeyError: | ||
2038 | 105 | log('Package {} has no installation candidate.'.format(package), | ||
2039 | 106 | level='WARNING') | ||
2040 | 107 | _pkgs.append(package) | ||
2041 | 108 | return _pkgs | ||
2042 | 109 | |||
2043 | 110 | |||
2044 | 111 | def apt_cache(in_memory=True): | ||
2045 | 112 | """Build and return an apt cache""" | ||
2046 | 113 | from apt import apt_pkg | ||
2047 | 114 | apt_pkg.init() | ||
2048 | 115 | if in_memory: | ||
2049 | 116 | apt_pkg.config.set("Dir::Cache::pkgcache", "") | ||
2050 | 117 | apt_pkg.config.set("Dir::Cache::srcpkgcache", "") | ||
2051 | 118 | return apt_pkg.Cache() | ||
2052 | 119 | |||
2053 | 120 | |||
2054 | 121 | def install(packages, options=None, fatal=False): | ||
2055 | 122 | """Install one or more packages""" | ||
2056 | 123 | if options is None: | ||
2057 | 124 | options = ['--option=Dpkg::Options::=--force-confold'] | ||
2058 | 125 | |||
2059 | 126 | cmd = ['apt-get', '--assume-yes'] | ||
2060 | 127 | cmd.extend(options) | ||
2061 | 128 | cmd.append('install') | ||
2062 | 129 | if isinstance(packages, six.string_types): | ||
2063 | 130 | cmd.append(packages) | ||
2064 | 131 | else: | ||
2065 | 132 | cmd.extend(packages) | ||
2066 | 133 | log("Installing {} with options: {}".format(packages, | ||
2067 | 134 | options)) | ||
2068 | 135 | _run_apt_command(cmd, fatal) | ||
2069 | 136 | |||
2070 | 137 | |||
2071 | 138 | def upgrade(options=None, fatal=False, dist=False): | ||
2072 | 139 | """Upgrade all packages""" | ||
2073 | 140 | if options is None: | ||
2074 | 141 | options = ['--option=Dpkg::Options::=--force-confold'] | ||
2075 | 142 | |||
2076 | 143 | cmd = ['apt-get', '--assume-yes'] | ||
2077 | 144 | cmd.extend(options) | ||
2078 | 145 | if dist: | ||
2079 | 146 | cmd.append('dist-upgrade') | ||
2080 | 147 | else: | ||
2081 | 148 | cmd.append('upgrade') | ||
2082 | 149 | log("Upgrading with options: {}".format(options)) | ||
2083 | 150 | _run_apt_command(cmd, fatal) | ||
2084 | 151 | |||
2085 | 152 | |||
2086 | 153 | def update(fatal=False): | ||
2087 | 154 | """Update local apt cache""" | ||
2088 | 155 | cmd = ['apt-get', 'update'] | ||
2089 | 156 | _run_apt_command(cmd, fatal) | ||
2090 | 157 | |||
2091 | 158 | |||
2092 | 159 | def purge(packages, fatal=False): | ||
2093 | 160 | """Purge one or more packages""" | ||
2094 | 161 | cmd = ['apt-get', '--assume-yes', 'purge'] | ||
2095 | 162 | if isinstance(packages, six.string_types): | ||
2096 | 163 | cmd.append(packages) | ||
2097 | 164 | else: | ||
2098 | 165 | cmd.extend(packages) | ||
2099 | 166 | log("Purging {}".format(packages)) | ||
2100 | 167 | _run_apt_command(cmd, fatal) | ||
2101 | 168 | |||
2102 | 169 | |||
2103 | 170 | def apt_mark(packages, mark, fatal=False): | ||
2104 | 171 | """Flag one or more packages using apt-mark""" | ||
2105 | 172 | log("Marking {} as {}".format(packages, mark)) | ||
2106 | 173 | cmd = ['apt-mark', mark] | ||
2107 | 174 | if isinstance(packages, six.string_types): | ||
2108 | 175 | cmd.append(packages) | ||
2109 | 176 | else: | ||
2110 | 177 | cmd.extend(packages) | ||
2111 | 178 | |||
2112 | 179 | if fatal: | ||
2113 | 180 | subprocess.check_call(cmd, universal_newlines=True) | ||
2114 | 181 | else: | ||
2115 | 182 | subprocess.call(cmd, universal_newlines=True) | ||
2116 | 183 | |||
2117 | 184 | |||
2118 | 185 | def apt_hold(packages, fatal=False): | ||
2119 | 186 | return apt_mark(packages, 'hold', fatal=fatal) | ||
2120 | 187 | |||
2121 | 188 | |||
2122 | 189 | def apt_unhold(packages, fatal=False): | ||
2123 | 190 | return apt_mark(packages, 'unhold', fatal=fatal) | ||
2124 | 191 | |||
2125 | 192 | |||
2126 | 193 | def add_source(source, key=None): | ||
2127 | 194 | """Add a package source to this system. | ||
2128 | 195 | |||
2129 | 196 | @param source: a URL or sources.list entry, as supported by | ||
2130 | 197 | add-apt-repository(1). Examples:: | ||
2131 | 198 | |||
2132 | 199 | ppa:charmers/example | ||
2133 | 200 | deb https://stub:key@private.example.com/ubuntu trusty main | ||
2134 | 201 | |||
2135 | 202 | In addition: | ||
2136 | 203 | 'proposed:' may be used to enable the standard 'proposed' | ||
2137 | 204 | pocket for the release. | ||
2138 | 205 | 'cloud:' may be used to activate official cloud archive pockets, | ||
2139 | 206 | such as 'cloud:icehouse' | ||
2140 | 207 | 'distro' may be used as a noop | ||
2141 | 208 | |||
2142 | 209 | @param key: A key to be added to the system's APT keyring and used | ||
2143 | 210 | to verify the signatures on packages. Ideally, this should be an | ||
2144 | 211 | ASCII format GPG public key including the block headers. A GPG key | ||
2145 | 212 | id may also be used, but be aware that only insecure protocols are | ||
2146 | 213 | available to retrieve the actual public key from a public keyserver | ||
2147 | 214 | placing your Juju environment at risk. ppa and cloud archive keys | ||
2148 | 215 | are securely added automatically, so should not be provided. | ||
2149 | 216 | """ | ||
2150 | 217 | if source is None: | ||
2151 | 218 | log('Source is not present. Skipping') | ||
2152 | 219 | return | ||
2153 | 220 | |||
2154 | 221 | if (source.startswith('ppa:') or | ||
2155 | 222 | source.startswith('http') or | ||
2156 | 223 | source.startswith('deb ') or | ||
2157 | 224 | source.startswith('cloud-archive:')): | ||
2158 | 225 | subprocess.check_call(['add-apt-repository', '--yes', source]) | ||
2159 | 226 | elif source.startswith('cloud:'): | ||
2160 | 227 | install(filter_installed_packages(['ubuntu-cloud-keyring']), | ||
2161 | 228 | fatal=True) | ||
2162 | 229 | pocket = source.split(':')[-1] | ||
2163 | 230 | if pocket not in CLOUD_ARCHIVE_POCKETS: | ||
2164 | 231 | raise SourceConfigError( | ||
2165 | 232 | 'Unsupported cloud: source option %s' % | ||
2166 | 233 | pocket) | ||
2167 | 234 | actual_pocket = CLOUD_ARCHIVE_POCKETS[pocket] | ||
2168 | 235 | with open('/etc/apt/sources.list.d/cloud-archive.list', 'w') as apt: | ||
2169 | 236 | apt.write(CLOUD_ARCHIVE.format(actual_pocket)) | ||
2170 | 237 | elif source == 'proposed': | ||
2171 | 238 | release = lsb_release()['DISTRIB_CODENAME'] | ||
2172 | 239 | with open('/etc/apt/sources.list.d/proposed.list', 'w') as apt: | ||
2173 | 240 | apt.write(PROPOSED_POCKET.format(release)) | ||
2174 | 241 | elif source == 'distro': | ||
2175 | 242 | pass | ||
2176 | 243 | else: | ||
2177 | 244 | log("Unknown source: {!r}".format(source)) | ||
2178 | 245 | |||
2179 | 246 | if key: | ||
2180 | 247 | if '-----BEGIN PGP PUBLIC KEY BLOCK-----' in key: | ||
2181 | 248 | with NamedTemporaryFile('w+') as key_file: | ||
2182 | 249 | key_file.write(key) | ||
2183 | 250 | key_file.flush() | ||
2184 | 251 | key_file.seek(0) | ||
2185 | 252 | subprocess.check_call(['apt-key', 'add', '-'], stdin=key_file) | ||
2186 | 253 | else: | ||
2187 | 254 | # Note that hkp: is in no way a secure protocol. Using a | ||
2188 | 255 | # GPG key id is pointless from a security POV unless you | ||
2189 | 256 | # absolutely trust your network and DNS. | ||
2190 | 257 | subprocess.check_call(['apt-key', 'adv', '--keyserver', | ||
2191 | 258 | 'hkp://keyserver.ubuntu.com:80', '--recv', | ||
2192 | 259 | key]) | ||
2193 | 260 | |||
2194 | 261 | |||
2195 | 262 | def _run_apt_command(cmd, fatal=False): | ||
2196 | 263 | """ | ||
2197 | 264 | Run an APT command, checking output and retrying if the fatal flag is set | ||
2198 | 265 | to True. | ||
2199 | 266 | |||
2200 | 267 | :param: cmd: str: The apt command to run. | ||
2201 | 268 | :param: fatal: bool: Whether the command's output should be checked and | ||
2202 | 269 | retried. | ||
2203 | 270 | """ | ||
2204 | 271 | env = os.environ.copy() | ||
2205 | 272 | |||
2206 | 273 | if 'DEBIAN_FRONTEND' not in env: | ||
2207 | 274 | env['DEBIAN_FRONTEND'] = 'noninteractive' | ||
2208 | 275 | |||
2209 | 276 | if fatal: | ||
2210 | 277 | retry_count = 0 | ||
2211 | 278 | result = None | ||
2212 | 279 | |||
2213 | 280 | # If the command is considered "fatal", we need to retry if the apt | ||
2214 | 281 | # lock was not acquired. | ||
2215 | 282 | |||
2216 | 283 | while result is None or result == APT_NO_LOCK: | ||
2217 | 284 | try: | ||
2218 | 285 | result = subprocess.check_call(cmd, env=env) | ||
2219 | 286 | except subprocess.CalledProcessError as e: | ||
2220 | 287 | retry_count = retry_count + 1 | ||
2221 | 288 | if retry_count > APT_NO_LOCK_RETRY_COUNT: | ||
2222 | 289 | raise | ||
2223 | 290 | result = e.returncode | ||
2224 | 291 | log("Couldn't acquire DPKG lock. Will retry in {} seconds." | ||
2225 | 292 | "".format(APT_NO_LOCK_RETRY_DELAY)) | ||
2226 | 293 | time.sleep(APT_NO_LOCK_RETRY_DELAY) | ||
2227 | 294 | |||
2228 | 295 | else: | ||
2229 | 296 | subprocess.call(cmd, env=env) | ||
2230 | 0 | 297 | ||
2231 | === modified file 'hooks/nrpe_helpers.py' (properties changed: -x to +x) | |||
2232 | === modified file 'hooks/nrpe_utils.py' (properties changed: -x to +x) | |||
2233 | --- hooks/nrpe_utils.py 2016-01-18 22:54:35 +0000 | |||
2234 | +++ hooks/nrpe_utils.py 2016-04-29 11:54:41 +0000 | |||
2235 | @@ -1,14 +1,18 @@ | |||
2236 | 1 | import os | 1 | import os |
2237 | 2 | import shutil | 2 | import shutil |
2238 | 3 | import glob | 3 | import glob |
2239 | 4 | import importlib | ||
2240 | 4 | 5 | ||
2241 | 5 | from charmhelpers import fetch | 6 | from charmhelpers import fetch |
2242 | 6 | from charmhelpers.core import host | 7 | from charmhelpers.core import host |
2243 | 7 | from charmhelpers.core.templating import render | 8 | from charmhelpers.core.templating import render |
2244 | 8 | from charmhelpers.core import hookenv | 9 | from charmhelpers.core import hookenv |
2245 | 10 | from charmhelpers import get_platform | ||
2246 | 9 | 11 | ||
2247 | 10 | import nrpe_helpers | 12 | import nrpe_helpers |
2248 | 11 | 13 | ||
2249 | 14 | platform = importlib.import_module(get_platform()) | ||
2250 | 15 | |||
2251 | 12 | 16 | ||
2252 | 13 | def restart_rsync(service_name): | 17 | def restart_rsync(service_name): |
2253 | 14 | """ Restart rsync """ | 18 | """ Restart rsync """ |
2254 | @@ -17,25 +21,18 @@ | |||
2255 | 17 | 21 | ||
2256 | 18 | def restart_nrpe(service_name): | 22 | def restart_nrpe(service_name): |
2257 | 19 | """ Restart nrpe """ | 23 | """ Restart nrpe """ |
2259 | 20 | host.service_restart('nagios-nrpe-server') | 24 | platform.restart_nrpe(service_name) |
2260 | 21 | 25 | ||
2261 | 22 | 26 | ||
2262 | 23 | def determine_packages(): | 27 | def determine_packages(): |
2263 | 24 | """ List of packages this charm needs installed """ | 28 | """ List of packages this charm needs installed """ |
2272 | 25 | pkgs = [ | 29 | return platform.determine_packages() |
2265 | 26 | 'nagios-nrpe-server', | ||
2266 | 27 | 'nagios-plugins-basic', | ||
2267 | 28 | 'nagios-plugins-standard' | ||
2268 | 29 | ] | ||
2269 | 30 | if hookenv.config('export_nagios_definitions'): | ||
2270 | 31 | pkgs.append('rsync') | ||
2271 | 32 | return pkgs | ||
2273 | 33 | 30 | ||
2274 | 34 | 31 | ||
2275 | 35 | def install_packages(service_name): | 32 | def install_packages(service_name): |
2276 | 36 | """ Install packages """ | 33 | """ Install packages """ |
2279 | 37 | fetch.apt_update() | 34 | fetch.update() |
2280 | 38 | fetch.apt_install(determine_packages(), fatal=True) | 35 | fetch.install(determine_packages(), fatal=True) |
2281 | 39 | 36 | ||
2282 | 40 | 37 | ||
2283 | 41 | def remove_host_export_fragments(service_name): | 38 | def remove_host_export_fragments(service_name): |
2284 | @@ -139,5 +136,6 @@ | |||
2285 | 139 | for rid in hookenv.relation_ids('monitors'): | 136 | for rid in hookenv.relation_ids('monitors'): |
2286 | 140 | hookenv.relation_set( | 137 | hookenv.relation_set( |
2287 | 141 | relation_id=rid, | 138 | relation_id=rid, |
2289 | 142 | relation_settings=monitor_relation.provide_data() | 139 | relation_settings=monitor_relation.provide_data(), |
2290 | 140 | charm_platform=get_platform() | ||
2291 | 143 | ) | 141 | ) |
2292 | 144 | 142 | ||
2293 | === modified file 'hooks/services.py' (properties changed: -x to +x) | |||
2294 | --- hooks/services.py 2016-01-19 19:29:15 +0000 | |||
2295 | +++ hooks/services.py 2016-04-29 11:54:41 +0000 | |||
2296 | @@ -1,9 +1,13 @@ | |||
2297 | 1 | import nrpe_utils | ||
2298 | 2 | import nrpe_helpers | ||
2299 | 3 | import importlib | ||
2300 | 4 | |||
2301 | 1 | from charmhelpers.core import hookenv | 5 | from charmhelpers.core import hookenv |
2302 | 2 | from charmhelpers.core.services.base import ServiceManager | 6 | from charmhelpers.core.services.base import ServiceManager |
2303 | 3 | from charmhelpers.core.services import helpers | 7 | from charmhelpers.core.services import helpers |
2304 | 8 | from charmhelpers import get_platform | ||
2305 | 4 | 9 | ||
2308 | 5 | import nrpe_utils | 10 | platform = importlib.import_module(get_platform()) |
2307 | 6 | import nrpe_helpers | ||
2309 | 7 | 11 | ||
2310 | 8 | 12 | ||
2311 | 9 | def manage(): | 13 | def manage(): |
2312 | @@ -29,10 +33,7 @@ | |||
2313 | 29 | nrpe_utils.update_nrpe_external_master_relation, | 33 | nrpe_utils.update_nrpe_external_master_relation, |
2314 | 30 | nrpe_utils.update_monitor_relation, | 34 | nrpe_utils.update_monitor_relation, |
2315 | 31 | nrpe_utils.render_nrped_files, | 35 | nrpe_utils.render_nrped_files, |
2320 | 32 | helpers.render_template( | 36 | platform.render_nrpe_template(), |
2317 | 33 | source='nrpe.tmpl', | ||
2318 | 34 | target='/etc/nagios/nrpe.cfg' | ||
2319 | 35 | ), | ||
2321 | 36 | ], | 37 | ], |
2322 | 37 | 'provided_data': [nrpe_helpers.PrincipleRelation()], | 38 | 'provided_data': [nrpe_helpers.PrincipleRelation()], |
2323 | 38 | 'start': [nrpe_utils.restart_nrpe], | 39 | 'start': [nrpe_utils.restart_nrpe], |
2324 | 39 | 40 | ||
2325 | === added file 'hooks/ubuntu.py' | |||
2326 | --- hooks/ubuntu.py 1970-01-01 00:00:00 +0000 | |||
2327 | +++ hooks/ubuntu.py 2016-04-29 11:54:41 +0000 | |||
2328 | @@ -0,0 +1,27 @@ | |||
2329 | 1 | from charmhelpers.core import host | ||
2330 | 2 | from charmhelpers.core import hookenv | ||
2331 | 3 | from charmhelpers.core.services import helpers | ||
2332 | 4 | |||
2333 | 5 | |||
2334 | 6 | def determine_packages(): | ||
2335 | 7 | """ List of packages this charm needs installed """ | ||
2336 | 8 | pkgs = [ | ||
2337 | 9 | 'nagios-nrpe-server', | ||
2338 | 10 | 'nagios-plugins-basic', | ||
2339 | 11 | 'nagios-plugins-standard' | ||
2340 | 12 | ] | ||
2341 | 13 | if hookenv.config('export_nagios_definitions'): | ||
2342 | 14 | pkgs.append('rsync') | ||
2343 | 15 | return pkgs | ||
2344 | 16 | |||
2345 | 17 | |||
2346 | 18 | def restart_nrpe(service_name): | ||
2347 | 19 | """ Restart nrpe """ | ||
2348 | 20 | host.service_restart('nagios-nrpe-server') | ||
2349 | 21 | |||
2350 | 22 | |||
2351 | 23 | def render_nrpe_template(): | ||
2352 | 24 | return helpers.render_template( | ||
2353 | 25 | source='nrpe.tmpl', | ||
2354 | 26 | target='/etc/nagios/nrpe.cfg' | ||
2355 | 27 | ) | ||
2356 | 0 | 28 | ||
2357 | === added file 'templates/nrpe-centos.tmpl' | |||
2358 | --- templates/nrpe-centos.tmpl 1970-01-01 00:00:00 +0000 | |||
2359 | +++ templates/nrpe-centos.tmpl 2016-04-29 11:54:41 +0000 | |||
2360 | @@ -0,0 +1,16 @@ | |||
2361 | 1 | #-------------------------------------------------------- | ||
2362 | 2 | # This file is managed by Juju | ||
2363 | 3 | #-------------------------------------------------------- | ||
2364 | 4 | |||
2365 | 5 | server_port={{ server_port }} | ||
2366 | 6 | allowed_hosts={{ external_nagios_master }},{{ monitor_allowed_hosts }} | ||
2367 | 7 | nrpe_user=nrpe | ||
2368 | 8 | nrpe_group=nrpe | ||
2369 | 9 | dont_blame_nrpe=0 | ||
2370 | 10 | debug=0 | ||
2371 | 11 | command_timeout=60 | ||
2372 | 12 | pid_file=/var/run/nrpe/nrpe.pid | ||
2373 | 13 | |||
2374 | 14 | # All configuration snippets go into nrpe.d/ | ||
2375 | 15 | include_dir=/etc/nagios/nrpe.d/ | ||
2376 | 16 | |||
2377 | 0 | 17 | ||
2378 | === modified file 'tests/11-monitors-configurations' | |||
2379 | --- tests/11-monitors-configurations 2016-02-03 22:17:17 +0000 | |||
2380 | +++ tests/11-monitors-configurations 2016-04-29 11:54:41 +0000 | |||
2381 | @@ -37,11 +37,11 @@ | |||
2382 | 37 | # look for procrunning in nrpe config | 37 | # look for procrunning in nrpe config |
2383 | 38 | try: | 38 | try: |
2384 | 39 | mysql_unit.file_contents('/etc/nagios/nrpe.d/' | 39 | mysql_unit.file_contents('/etc/nagios/nrpe.d/' |
2386 | 40 | 'check_proc_mysqld_principle.cfg') | 40 | 'check_total_procs_testing.cfg') |
2387 | 41 | except IOError as e: | 41 | except IOError as e: |
2391 | 42 | amulet.raise_status(amulet.ERROR, | 42 | amulet.raise_status(amulet.FAIL, |
2392 | 43 | msg="procrunning config not found. Error:" + | 43 | msg="procrunning config not found. Error: {0}".format(e) |
2393 | 44 | e.args[1]) | 44 | ) |
2394 | 45 | 45 | ||
2395 | 46 | 46 | ||
2396 | 47 | def test_nagios_monitors_response(): | 47 | def test_nagios_monitors_response(): |
2397 | @@ -52,7 +52,7 @@ | |||
2398 | 52 | r = requests.get(host_url % nagios_unit.info['public-address'], | 52 | r = requests.get(host_url % nagios_unit.info['public-address'], |
2399 | 53 | auth=('nagiosadmin', nagpwd)) | 53 | auth=('nagiosadmin', nagpwd)) |
2400 | 54 | if not r.text.find('mysql-0-basic'): | 54 | if not r.text.find('mysql-0-basic'): |
2402 | 55 | amulet.raise_status(amulet.ERROR, | 55 | amulet.raise_status(amulet.FAIL, |
2403 | 56 | msg='Nagios is not monitoring the' + | 56 | msg='Nagios is not monitoring the' + |
2404 | 57 | ' hosts it supposed to.') | 57 | ' hosts it supposed to.') |
2405 | 58 | 58 | ||
2406 | 59 | 59 | ||
2407 | === modified file 'tests/13-monitors-config' | |||
2408 | --- tests/13-monitors-config 2016-02-03 22:17:17 +0000 | |||
2409 | +++ tests/13-monitors-config 2016-04-29 11:54:41 +0000 | |||
2410 | @@ -49,11 +49,11 @@ | |||
2411 | 49 | # look for procrunning in nrpe config | 49 | # look for procrunning in nrpe config |
2412 | 50 | try: | 50 | try: |
2413 | 51 | mysql_unit.file_contents('/etc/nagios/nrpe.d/' | 51 | mysql_unit.file_contents('/etc/nagios/nrpe.d/' |
2415 | 52 | 'check_proc_mysqld_principle.cfg') | 52 | 'check_total_procs_testing.cfg') |
2416 | 53 | except IOError as e: | 53 | except IOError as e: |
2420 | 54 | amulet.raise_status(amulet.ERROR, | 54 | amulet.raise_status(amulet.FAIL, |
2421 | 55 | msg="procrunning config not found. Error:" + | 55 | msg="procrunning config not found. Error: {0}".format(e) |
2422 | 56 | e.args[1]) | 56 | ) |
2423 | 57 | 57 | ||
2424 | 58 | 58 | ||
2425 | 59 | def test_nagios_monitors_response(): | 59 | def test_nagios_monitors_response(): |
2426 | @@ -64,7 +64,7 @@ | |||
2427 | 64 | r = requests.get(host_url % nagios_unit.info['public-address'], | 64 | r = requests.get(host_url % nagios_unit.info['public-address'], |
2428 | 65 | auth=('nagiosadmin', nagpwd)) | 65 | auth=('nagiosadmin', nagpwd)) |
2429 | 66 | if not r.text.find('processcount'): | 66 | if not r.text.find('processcount'): |
2431 | 67 | amulet.raise_status(amulet.ERROR, | 67 | amulet.raise_status(amulet.FAIL, |
2432 | 68 | msg='Nagios is not monitoring the' + | 68 | msg='Nagios is not monitoring the' + |
2433 | 69 | ' hosts it supposed to.') | 69 | ' hosts it supposed to.') |
2434 | 70 | 70 |
Hi Denis,
I've had a chance to review your merge proposal. I've run into a few errors running the amulet tests related to missing config files. Could you check the logs at the pastebin link below and fix the tests? Once they pass, I'll be happy to re-review and get this promulgated.
http://pastebin.ubuntu.com/17298099/