Merge lp:~hopem/charms/trusty/cinder/lp1518975 into lp:~openstack-charmers-archive/charms/trusty/cinder/next
- Trusty Tahr (14.04)
- lp1518975
- Merge into next
Proposed by
Edward Hope-Morley
Status: | Merged | ||||
---|---|---|---|---|---|
Merged at revision: | 145 | ||||
Proposed branch: | lp:~hopem/charms/trusty/cinder/lp1518975 | ||||
Merge into: | lp:~openstack-charmers-archive/charms/trusty/cinder/next | ||||
Diff against target: |
672 lines (+212/-97) 9 files modified
charm-helpers-hooks.yaml (+1/-1) hooks/charmhelpers/contrib/openstack/amulet/deployment.py (+3/-2) hooks/charmhelpers/contrib/openstack/context.py (+11/-7) hooks/charmhelpers/contrib/openstack/neutron.py (+18/-6) hooks/charmhelpers/contrib/openstack/utils.py (+108/-43) hooks/charmhelpers/contrib/python/packages.py (+22/-7) hooks/charmhelpers/core/host.py (+41/-26) hooks/charmhelpers/fetch/giturl.py (+5/-3) tests/charmhelpers/contrib/openstack/amulet/deployment.py (+3/-2) |
||||
To merge this branch: | bzr merge lp:~hopem/charms/trusty/cinder/lp1518975 | ||||
Related bugs: |
|
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
OpenStack Charmers | Pending | ||
Review via email: mp+285747@code.launchpad.net |
Commit message
Description of the change
To post a comment you must log in.
Revision history for this message
uosci-testing-bot (uosci-testing-bot) wrote : | # |
Revision history for this message
uosci-testing-bot (uosci-testing-bot) wrote : | # |
charm_lint_check #221 cinder-next for hopem mp285747
LINT OK: passed
- 146. By Edward Hope-Morley
-
charm-helpers sync
Revision history for this message
uosci-testing-bot (uosci-testing-bot) wrote : | # |
charm_lint_check #227 cinder-next for hopem mp285747
LINT OK: passed
Revision history for this message
uosci-testing-bot (uosci-testing-bot) wrote : | # |
charm_unit_test #212 cinder-next for hopem mp285747
UNIT OK: passed
Revision history for this message
uosci-testing-bot (uosci-testing-bot) wrote : | # |
charm_amulet_test #105 cinder-next for hopem mp285747
AMULET OK: passed
- 147. By Edward Hope-Morley
-
restore charm-helpers-hooks.yaml (no other changes since sync was null)
Revision history for this message
uosci-testing-bot (uosci-testing-bot) wrote : | # |
charm_lint_check #354 cinder-next for hopem mp285747
LINT OK: passed
Revision history for this message
uosci-testing-bot (uosci-testing-bot) wrote : | # |
charm_unit_test #277 cinder-next for hopem mp285747
UNIT OK: passed
Revision history for this message
uosci-testing-bot (uosci-testing-bot) wrote : | # |
charm_amulet_test #129 cinder-next for hopem mp285747
AMULET OK: passed
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'charm-helpers-hooks.yaml' |
2 | --- charm-helpers-hooks.yaml 2016-01-08 02:32:19 +0000 |
3 | +++ charm-helpers-hooks.yaml 2016-02-12 09:36:40 +0000 |
4 | @@ -1,4 +1,4 @@ |
5 | -branch: lp:charm-helpers |
6 | +branch: lp:charm-helpers |
7 | destination: hooks/charmhelpers |
8 | include: |
9 | - core |
10 | |
11 | === modified file 'hooks/charmhelpers/contrib/openstack/amulet/deployment.py' |
12 | --- hooks/charmhelpers/contrib/openstack/amulet/deployment.py 2016-01-04 21:26:14 +0000 |
13 | +++ hooks/charmhelpers/contrib/openstack/amulet/deployment.py 2016-02-12 09:36:40 +0000 |
14 | @@ -121,11 +121,12 @@ |
15 | |
16 | # Charms which should use the source config option |
17 | use_source = ['mysql', 'mongodb', 'rabbitmq-server', 'ceph', |
18 | - 'ceph-osd', 'ceph-radosgw'] |
19 | + 'ceph-osd', 'ceph-radosgw', 'ceph-mon'] |
20 | |
21 | # Charms which can not use openstack-origin, ie. many subordinates |
22 | no_origin = ['cinder-ceph', 'hacluster', 'neutron-openvswitch', 'nrpe', |
23 | - 'openvswitch-odl', 'neutron-api-odl', 'odl-controller'] |
24 | + 'openvswitch-odl', 'neutron-api-odl', 'odl-controller', |
25 | + 'cinder-backup'] |
26 | |
27 | if self.openstack: |
28 | for svc in services: |
29 | |
30 | === modified file 'hooks/charmhelpers/contrib/openstack/context.py' |
31 | --- hooks/charmhelpers/contrib/openstack/context.py 2016-01-08 02:37:16 +0000 |
32 | +++ hooks/charmhelpers/contrib/openstack/context.py 2016-02-12 09:36:40 +0000 |
33 | @@ -90,6 +90,12 @@ |
34 | from charmhelpers.contrib.openstack.utils import get_host_ip |
35 | from charmhelpers.core.unitdata import kv |
36 | |
37 | +try: |
38 | + import psutil |
39 | +except ImportError: |
40 | + apt_install('python-psutil', fatal=True) |
41 | + import psutil |
42 | + |
43 | CA_CERT_PATH = '/usr/local/share/ca-certificates/keystone_juju_ca_cert.crt' |
44 | ADDRESS_TYPES = ['admin', 'internal', 'public'] |
45 | |
46 | @@ -1258,13 +1264,11 @@ |
47 | |
48 | @property |
49 | def num_cpus(self): |
50 | - try: |
51 | - from psutil import NUM_CPUS |
52 | - except ImportError: |
53 | - apt_install('python-psutil', fatal=True) |
54 | - from psutil import NUM_CPUS |
55 | - |
56 | - return NUM_CPUS |
57 | + # NOTE: use cpu_count if present (16.04 support) |
58 | + if hasattr(psutil, 'cpu_count'): |
59 | + return psutil.cpu_count() |
60 | + else: |
61 | + return psutil.NUM_CPUS |
62 | |
63 | def __call__(self): |
64 | multiplier = config('worker-multiplier') or 0 |
65 | |
66 | === modified file 'hooks/charmhelpers/contrib/openstack/neutron.py' |
67 | --- hooks/charmhelpers/contrib/openstack/neutron.py 2016-01-04 21:26:14 +0000 |
68 | +++ hooks/charmhelpers/contrib/openstack/neutron.py 2016-02-12 09:36:40 +0000 |
69 | @@ -50,7 +50,7 @@ |
70 | if kernel_version() >= (3, 13): |
71 | return [] |
72 | else: |
73 | - return ['openvswitch-datapath-dkms'] |
74 | + return [headers_package(), 'openvswitch-datapath-dkms'] |
75 | |
76 | |
77 | # legacy |
78 | @@ -70,7 +70,7 @@ |
79 | relation_prefix='neutron', |
80 | ssl_dir=QUANTUM_CONF_DIR)], |
81 | 'services': ['quantum-plugin-openvswitch-agent'], |
82 | - 'packages': [[headers_package()] + determine_dkms_package(), |
83 | + 'packages': [determine_dkms_package(), |
84 | ['quantum-plugin-openvswitch-agent']], |
85 | 'server_packages': ['quantum-server', |
86 | 'quantum-plugin-openvswitch'], |
87 | @@ -111,7 +111,7 @@ |
88 | relation_prefix='neutron', |
89 | ssl_dir=NEUTRON_CONF_DIR)], |
90 | 'services': ['neutron-plugin-openvswitch-agent'], |
91 | - 'packages': [[headers_package()] + determine_dkms_package(), |
92 | + 'packages': [determine_dkms_package(), |
93 | ['neutron-plugin-openvswitch-agent']], |
94 | 'server_packages': ['neutron-server', |
95 | 'neutron-plugin-openvswitch'], |
96 | @@ -155,7 +155,7 @@ |
97 | relation_prefix='neutron', |
98 | ssl_dir=NEUTRON_CONF_DIR)], |
99 | 'services': [], |
100 | - 'packages': [[headers_package()] + determine_dkms_package(), |
101 | + 'packages': [determine_dkms_package(), |
102 | ['neutron-plugin-cisco']], |
103 | 'server_packages': ['neutron-server', |
104 | 'neutron-plugin-cisco'], |
105 | @@ -174,7 +174,7 @@ |
106 | 'neutron-dhcp-agent', |
107 | 'nova-api-metadata', |
108 | 'etcd'], |
109 | - 'packages': [[headers_package()] + determine_dkms_package(), |
110 | + 'packages': [determine_dkms_package(), |
111 | ['calico-compute', |
112 | 'bird', |
113 | 'neutron-dhcp-agent', |
114 | @@ -219,7 +219,7 @@ |
115 | relation_prefix='neutron', |
116 | ssl_dir=NEUTRON_CONF_DIR)], |
117 | 'services': [], |
118 | - 'packages': [[headers_package()] + determine_dkms_package()], |
119 | + 'packages': [determine_dkms_package()], |
120 | 'server_packages': ['neutron-server', |
121 | 'python-neutron-plugin-midonet'], |
122 | 'server_services': ['neutron-server'] |
123 | @@ -233,6 +233,18 @@ |
124 | 'neutron-plugin-ml2'] |
125 | # NOTE: patch in vmware renames nvp->nsx for icehouse onwards |
126 | plugins['nvp'] = plugins['nsx'] |
127 | + if release >= 'kilo': |
128 | + plugins['midonet']['driver'] = ( |
129 | + 'neutron.plugins.midonet.plugin.MidonetPluginV2') |
130 | + if release >= 'liberty': |
131 | + midonet_origin = config('midonet-origin') |
132 | + if midonet_origin is not None and midonet_origin[4:5] == '1': |
133 | + plugins['midonet']['driver'] = ( |
134 | + 'midonet.neutron.plugin_v1.MidonetPluginV2') |
135 | + plugins['midonet']['server_packages'].remove( |
136 | + 'python-neutron-plugin-midonet') |
137 | + plugins['midonet']['server_packages'].append( |
138 | + 'python-networking-midonet') |
139 | return plugins |
140 | |
141 | |
142 | |
143 | === modified file 'hooks/charmhelpers/contrib/openstack/utils.py' |
144 | --- hooks/charmhelpers/contrib/openstack/utils.py 2016-01-08 02:37:16 +0000 |
145 | +++ hooks/charmhelpers/contrib/openstack/utils.py 2016-02-12 09:36:40 +0000 |
146 | @@ -25,6 +25,7 @@ |
147 | import re |
148 | |
149 | import six |
150 | +import tempfile |
151 | import traceback |
152 | import uuid |
153 | import yaml |
154 | @@ -41,6 +42,7 @@ |
155 | config, |
156 | log as juju_log, |
157 | charm_dir, |
158 | + DEBUG, |
159 | INFO, |
160 | related_units, |
161 | relation_ids, |
162 | @@ -103,29 +105,28 @@ |
163 | ('2016.1', 'mitaka'), |
164 | ]) |
165 | |
166 | -# The ugly duckling |
167 | +# The ugly duckling - must list releases oldest to newest |
168 | SWIFT_CODENAMES = OrderedDict([ |
169 | - ('1.4.3', 'diablo'), |
170 | - ('1.4.8', 'essex'), |
171 | - ('1.7.4', 'folsom'), |
172 | - ('1.8.0', 'grizzly'), |
173 | - ('1.7.7', 'grizzly'), |
174 | - ('1.7.6', 'grizzly'), |
175 | - ('1.10.0', 'havana'), |
176 | - ('1.9.1', 'havana'), |
177 | - ('1.9.0', 'havana'), |
178 | - ('1.13.1', 'icehouse'), |
179 | - ('1.13.0', 'icehouse'), |
180 | - ('1.12.0', 'icehouse'), |
181 | - ('1.11.0', 'icehouse'), |
182 | - ('2.0.0', 'juno'), |
183 | - ('2.1.0', 'juno'), |
184 | - ('2.2.0', 'juno'), |
185 | - ('2.2.1', 'kilo'), |
186 | - ('2.2.2', 'kilo'), |
187 | - ('2.3.0', 'liberty'), |
188 | - ('2.4.0', 'liberty'), |
189 | - ('2.5.0', 'liberty'), |
190 | + ('diablo', |
191 | + ['1.4.3']), |
192 | + ('essex', |
193 | + ['1.4.8']), |
194 | + ('folsom', |
195 | + ['1.7.4']), |
196 | + ('grizzly', |
197 | + ['1.7.6', '1.7.7', '1.8.0']), |
198 | + ('havana', |
199 | + ['1.9.0', '1.9.1', '1.10.0']), |
200 | + ('icehouse', |
201 | + ['1.11.0', '1.12.0', '1.13.0', '1.13.1']), |
202 | + ('juno', |
203 | + ['2.0.0', '2.1.0', '2.2.0']), |
204 | + ('kilo', |
205 | + ['2.2.1', '2.2.2']), |
206 | + ('liberty', |
207 | + ['2.3.0', '2.4.0', '2.5.0']), |
208 | + ('mitaka', |
209 | + ['2.5.0']), |
210 | ]) |
211 | |
212 | # >= Liberty version->codename mapping |
213 | @@ -227,6 +228,33 @@ |
214 | error_out(e) |
215 | |
216 | |
217 | +def get_os_version_codename_swift(codename): |
218 | + '''Determine OpenStack version number of swift from codename.''' |
219 | + for k, v in six.iteritems(SWIFT_CODENAMES): |
220 | + if k == codename: |
221 | + return v[-1] |
222 | + e = 'Could not derive swift version for '\ |
223 | + 'codename: %s' % codename |
224 | + error_out(e) |
225 | + |
226 | + |
227 | +def get_swift_codename(version): |
228 | + '''Determine OpenStack codename that corresponds to swift version.''' |
229 | + codenames = [k for k, v in six.iteritems(SWIFT_CODENAMES) if version in v] |
230 | + if len(codenames) > 1: |
231 | + # If more than one release codename contains this version we determine |
232 | + # the actual codename based on the highest available install source. |
233 | + for codename in reversed(codenames): |
234 | + releases = UBUNTU_OPENSTACK_RELEASE |
235 | + release = [k for k, v in six.iteritems(releases) if codename in v] |
236 | + ret = subprocess.check_output(['apt-cache', 'policy', 'swift']) |
237 | + if codename in ret or release[0] in ret: |
238 | + return codename |
239 | + elif len(codenames) == 1: |
240 | + return codenames[0] |
241 | + return None |
242 | + |
243 | + |
244 | def get_os_codename_package(package, fatal=True): |
245 | '''Derive OpenStack release codename from an installed package.''' |
246 | import apt_pkg as apt |
247 | @@ -270,7 +298,7 @@ |
248 | # < Liberty co-ordinated project versions |
249 | try: |
250 | if 'swift' in pkg.name: |
251 | - return SWIFT_CODENAMES[vers] |
252 | + return get_swift_codename(vers) |
253 | else: |
254 | return OPENSTACK_CODENAMES[vers] |
255 | except KeyError: |
256 | @@ -289,12 +317,14 @@ |
257 | |
258 | if 'swift' in pkg: |
259 | vers_map = SWIFT_CODENAMES |
260 | + for cname, version in six.iteritems(vers_map): |
261 | + if cname == codename: |
262 | + return version[-1] |
263 | else: |
264 | vers_map = OPENSTACK_CODENAMES |
265 | - |
266 | - for version, cname in six.iteritems(vers_map): |
267 | - if cname == codename: |
268 | - return version |
269 | + for version, cname in six.iteritems(vers_map): |
270 | + if cname == codename: |
271 | + return version |
272 | # e = "Could not determine OpenStack version for package: %s" % pkg |
273 | # error_out(e) |
274 | |
275 | @@ -319,12 +349,42 @@ |
276 | |
277 | |
278 | def import_key(keyid): |
279 | - cmd = "apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 " \ |
280 | - "--recv-keys %s" % keyid |
281 | - try: |
282 | - subprocess.check_call(cmd.split(' ')) |
283 | - except subprocess.CalledProcessError: |
284 | - error_out("Error importing repo key %s" % keyid) |
285 | + key = keyid.strip() |
286 | + if (key.startswith('-----BEGIN PGP PUBLIC KEY BLOCK-----') and |
287 | + key.endswith('-----END PGP PUBLIC KEY BLOCK-----')): |
288 | + juju_log("PGP key found (looks like ASCII Armor format)", level=DEBUG) |
289 | + juju_log("Importing ASCII Armor PGP key", level=DEBUG) |
290 | + with tempfile.NamedTemporaryFile() as keyfile: |
291 | + with open(keyfile.name, 'w') as fd: |
292 | + fd.write(key) |
293 | + fd.write("\n") |
294 | + |
295 | + cmd = ['apt-key', 'add', keyfile.name] |
296 | + try: |
297 | + subprocess.check_call(cmd) |
298 | + except subprocess.CalledProcessError: |
299 | + error_out("Error importing PGP key '%s'" % key) |
300 | + else: |
301 | + juju_log("PGP key found (looks like Radix64 format)", level=DEBUG) |
302 | + juju_log("Importing PGP key from keyserver", level=DEBUG) |
303 | + cmd = ['apt-key', 'adv', '--keyserver', |
304 | + 'hkp://keyserver.ubuntu.com:80', '--recv-keys', key] |
305 | + try: |
306 | + subprocess.check_call(cmd) |
307 | + except subprocess.CalledProcessError: |
308 | + error_out("Error importing PGP key '%s'" % key) |
309 | + |
310 | + |
311 | +def get_source_and_pgp_key(input): |
312 | + """Look for a pgp key ID or ascii-armor key in the given input.""" |
313 | + index = input.strip() |
314 | + index = input.rfind('|') |
315 | + if index < 0: |
316 | + return input, None |
317 | + |
318 | + key = input[index + 1:].strip('|') |
319 | + source = input[:index] |
320 | + return source, key |
321 | |
322 | |
323 | def configure_installation_source(rel): |
324 | @@ -336,16 +396,16 @@ |
325 | with open('/etc/apt/sources.list.d/juju_deb.list', 'w') as f: |
326 | f.write(DISTRO_PROPOSED % ubuntu_rel) |
327 | elif rel[:4] == "ppa:": |
328 | - src = rel |
329 | + src, key = get_source_and_pgp_key(rel) |
330 | + if key: |
331 | + import_key(key) |
332 | + |
333 | subprocess.check_call(["add-apt-repository", "-y", src]) |
334 | elif rel[:3] == "deb": |
335 | - l = len(rel.split('|')) |
336 | - if l == 2: |
337 | - src, key = rel.split('|') |
338 | - juju_log("Importing PPA key from keyserver for %s" % src) |
339 | + src, key = get_source_and_pgp_key(rel) |
340 | + if key: |
341 | import_key(key) |
342 | - elif l == 1: |
343 | - src = rel |
344 | + |
345 | with open('/etc/apt/sources.list.d/juju_deb.list', 'w') as f: |
346 | f.write(src) |
347 | elif rel[:6] == 'cloud:': |
348 | @@ -460,11 +520,16 @@ |
349 | cur_vers = get_os_version_package(package) |
350 | if "swift" in package: |
351 | codename = get_os_codename_install_source(src) |
352 | - available_vers = get_os_version_codename(codename, SWIFT_CODENAMES) |
353 | + avail_vers = get_os_version_codename_swift(codename) |
354 | else: |
355 | - available_vers = get_os_version_install_source(src) |
356 | + avail_vers = get_os_version_install_source(src) |
357 | apt.init() |
358 | - return apt.version_compare(available_vers, cur_vers) == 1 |
359 | + if "swift" in package: |
360 | + major_cur_vers = cur_vers.split('.', 1)[0] |
361 | + major_avail_vers = avail_vers.split('.', 1)[0] |
362 | + major_diff = apt.version_compare(major_avail_vers, major_cur_vers) |
363 | + return avail_vers > cur_vers and (major_diff == 1 or major_diff == 0) |
364 | + return apt.version_compare(avail_vers, cur_vers) == 1 |
365 | |
366 | |
367 | def ensure_block_device(block_device): |
368 | |
369 | === modified file 'hooks/charmhelpers/contrib/python/packages.py' |
370 | --- hooks/charmhelpers/contrib/python/packages.py 2016-01-04 21:26:14 +0000 |
371 | +++ hooks/charmhelpers/contrib/python/packages.py 2016-02-12 09:36:40 +0000 |
372 | @@ -19,20 +19,35 @@ |
373 | |
374 | import os |
375 | import subprocess |
376 | +import sys |
377 | |
378 | from charmhelpers.fetch import apt_install, apt_update |
379 | from charmhelpers.core.hookenv import charm_dir, log |
380 | |
381 | -try: |
382 | - from pip import main as pip_execute |
383 | -except ImportError: |
384 | - apt_update() |
385 | - apt_install('python-pip') |
386 | - from pip import main as pip_execute |
387 | - |
388 | __author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" |
389 | |
390 | |
391 | +def pip_execute(*args, **kwargs): |
392 | + """Overriden pip_execute() to stop sys.path being changed. |
393 | + |
394 | + The act of importing main from the pip module seems to cause add wheels |
395 | + from the /usr/share/python-wheels which are installed by various tools. |
396 | + This function ensures that sys.path remains the same after the call is |
397 | + executed. |
398 | + """ |
399 | + try: |
400 | + _path = sys.path |
401 | + try: |
402 | + from pip import main as _pip_execute |
403 | + except ImportError: |
404 | + apt_update() |
405 | + apt_install('python-pip') |
406 | + from pip import main as _pip_execute |
407 | + _pip_execute(*args, **kwargs) |
408 | + finally: |
409 | + sys.path = _path |
410 | + |
411 | + |
412 | def parse_options(given, available): |
413 | """Given a set of options, check if available""" |
414 | for key, value in sorted(given.items()): |
415 | |
416 | === modified file 'hooks/charmhelpers/core/host.py' |
417 | --- hooks/charmhelpers/core/host.py 2016-01-08 02:37:16 +0000 |
418 | +++ hooks/charmhelpers/core/host.py 2016-02-12 09:36:40 +0000 |
419 | @@ -138,7 +138,8 @@ |
420 | except subprocess.CalledProcessError: |
421 | return False |
422 | else: |
423 | - if ("start/running" in output or "is running" in output): |
424 | + if ("start/running" in output or "is running" in output or |
425 | + "up and running" in output): |
426 | return True |
427 | else: |
428 | return False |
429 | @@ -160,13 +161,13 @@ |
430 | |
431 | |
432 | def init_is_systemd(): |
433 | + """Return True if the host system uses systemd, False otherwise.""" |
434 | return os.path.isdir(SYSTEMD_SYSTEM) |
435 | |
436 | |
437 | def adduser(username, password=None, shell='/bin/bash', system_user=False, |
438 | primary_group=None, secondary_groups=None): |
439 | - """ |
440 | - Add a user to the system. |
441 | + """Add a user to the system. |
442 | |
443 | Will log but otherwise succeed if the user already exists. |
444 | |
445 | @@ -174,7 +175,7 @@ |
446 | :param str password: Password for user; if ``None``, create a system user |
447 | :param str shell: The default shell for the user |
448 | :param bool system_user: Whether to create a login or system user |
449 | - :param str primary_group: Primary group for user; defaults to their username |
450 | + :param str primary_group: Primary group for user; defaults to username |
451 | :param list secondary_groups: Optional list of additional groups |
452 | |
453 | :returns: The password database entry struct, as returned by `pwd.getpwnam` |
454 | @@ -300,14 +301,12 @@ |
455 | |
456 | |
457 | def fstab_remove(mp): |
458 | - """Remove the given mountpoint entry from /etc/fstab |
459 | - """ |
460 | + """Remove the given mountpoint entry from /etc/fstab""" |
461 | return Fstab.remove_by_mountpoint(mp) |
462 | |
463 | |
464 | def fstab_add(dev, mp, fs, options=None): |
465 | - """Adds the given device entry to the /etc/fstab file |
466 | - """ |
467 | + """Adds the given device entry to the /etc/fstab file""" |
468 | return Fstab.add(dev, mp, fs, options=options) |
469 | |
470 | |
471 | @@ -363,8 +362,7 @@ |
472 | |
473 | |
474 | def file_hash(path, hash_type='md5'): |
475 | - """ |
476 | - Generate a hash checksum of the contents of 'path' or None if not found. |
477 | + """Generate a hash checksum of the contents of 'path' or None if not found. |
478 | |
479 | :param str hash_type: Any hash alrgorithm supported by :mod:`hashlib`, |
480 | such as md5, sha1, sha256, sha512, etc. |
481 | @@ -379,10 +377,9 @@ |
482 | |
483 | |
484 | def path_hash(path): |
485 | - """ |
486 | - Generate a hash checksum of all files matching 'path'. Standard wildcards |
487 | - like '*' and '?' are supported, see documentation for the 'glob' module for |
488 | - more information. |
489 | + """Generate a hash checksum of all files matching 'path'. Standard |
490 | + wildcards like '*' and '?' are supported, see documentation for the 'glob' |
491 | + module for more information. |
492 | |
493 | :return: dict: A { filename: hash } dictionary for all matched files. |
494 | Empty if none found. |
495 | @@ -394,8 +391,7 @@ |
496 | |
497 | |
498 | def check_hash(path, checksum, hash_type='md5'): |
499 | - """ |
500 | - Validate a file using a cryptographic checksum. |
501 | + """Validate a file using a cryptographic checksum. |
502 | |
503 | :param str checksum: Value of the checksum used to validate the file. |
504 | :param str hash_type: Hash algorithm used to generate `checksum`. |
505 | @@ -410,6 +406,7 @@ |
506 | |
507 | |
508 | class ChecksumError(ValueError): |
509 | + """A class derived from Value error to indicate the checksum failed.""" |
510 | pass |
511 | |
512 | |
513 | @@ -515,7 +512,7 @@ |
514 | |
515 | |
516 | def list_nics(nic_type=None): |
517 | - '''Return a list of nics of given type(s)''' |
518 | + """Return a list of nics of given type(s)""" |
519 | if isinstance(nic_type, six.string_types): |
520 | int_types = [nic_type] |
521 | else: |
522 | @@ -557,12 +554,13 @@ |
523 | |
524 | |
525 | def set_nic_mtu(nic, mtu): |
526 | - '''Set MTU on a network interface''' |
527 | + """Set the Maximum Transmission Unit (MTU) on a network interface.""" |
528 | cmd = ['ip', 'link', 'set', nic, 'mtu', mtu] |
529 | subprocess.check_call(cmd) |
530 | |
531 | |
532 | def get_nic_mtu(nic): |
533 | + """Return the Maximum Transmission Unit (MTU) for a network interface.""" |
534 | cmd = ['ip', 'addr', 'show', nic] |
535 | ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n') |
536 | mtu = "" |
537 | @@ -574,6 +572,7 @@ |
538 | |
539 | |
540 | def get_nic_hwaddr(nic): |
541 | + """Return the Media Access Control (MAC) for a network interface.""" |
542 | cmd = ['ip', '-o', '-0', 'addr', 'show', nic] |
543 | ip_output = subprocess.check_output(cmd).decode('UTF-8') |
544 | hwaddr = "" |
545 | @@ -584,7 +583,7 @@ |
546 | |
547 | |
548 | def cmp_pkgrevno(package, revno, pkgcache=None): |
549 | - '''Compare supplied revno with the revno of the installed package |
550 | + """Compare supplied revno with the revno of the installed package |
551 | |
552 | * 1 => Installed revno is greater than supplied arg |
553 | * 0 => Installed revno is the same as supplied arg |
554 | @@ -593,7 +592,7 @@ |
555 | This function imports apt_cache function from charmhelpers.fetch if |
556 | the pkgcache argument is None. Be sure to add charmhelpers.fetch if |
557 | you call this function, or pass an apt_pkg.Cache() instance. |
558 | - ''' |
559 | + """ |
560 | import apt_pkg |
561 | if not pkgcache: |
562 | from charmhelpers.fetch import apt_cache |
563 | @@ -603,19 +602,27 @@ |
564 | |
565 | |
566 | @contextmanager |
567 | -def chdir(d): |
568 | +def chdir(directory): |
569 | + """Change the current working directory to a different directory for a code |
570 | + block and return the previous directory after the block exits. Useful to |
571 | + run commands from a specificed directory. |
572 | + |
573 | + :param str directory: The directory path to change to for this context. |
574 | + """ |
575 | cur = os.getcwd() |
576 | try: |
577 | - yield os.chdir(d) |
578 | + yield os.chdir(directory) |
579 | finally: |
580 | os.chdir(cur) |
581 | |
582 | |
583 | def chownr(path, owner, group, follow_links=True, chowntopdir=False): |
584 | - """ |
585 | - Recursively change user and group ownership of files and directories |
586 | + """Recursively change user and group ownership of files and directories |
587 | in given path. Doesn't chown path itself by default, only its children. |
588 | |
589 | + :param str path: The string path to start changing ownership. |
590 | + :param str owner: The owner string to use when looking up the uid. |
591 | + :param str group: The group string to use when looking up the gid. |
592 | :param bool follow_links: Also Chown links if True |
593 | :param bool chowntopdir: Also chown path itself if True |
594 | """ |
595 | @@ -639,15 +646,23 @@ |
596 | |
597 | |
598 | def lchownr(path, owner, group): |
599 | + """Recursively change user and group ownership of files and directories |
600 | + in a given path, not following symbolic links. See the documentation for |
601 | + 'os.lchown' for more information. |
602 | + |
603 | + :param str path: The string path to start changing ownership. |
604 | + :param str owner: The owner string to use when looking up the uid. |
605 | + :param str group: The group string to use when looking up the gid. |
606 | + """ |
607 | chownr(path, owner, group, follow_links=False) |
608 | |
609 | |
610 | def get_total_ram(): |
611 | - '''The total amount of system RAM in bytes. |
612 | + """The total amount of system RAM in bytes. |
613 | |
614 | This is what is reported by the OS, and may be overcommitted when |
615 | there are multiple containers hosted on the same machine. |
616 | - ''' |
617 | + """ |
618 | with open('/proc/meminfo', 'r') as f: |
619 | for line in f.readlines(): |
620 | if line: |
621 | |
622 | === modified file 'hooks/charmhelpers/fetch/giturl.py' |
623 | --- hooks/charmhelpers/fetch/giturl.py 2016-01-08 02:37:16 +0000 |
624 | +++ hooks/charmhelpers/fetch/giturl.py 2016-02-12 09:36:40 +0000 |
625 | @@ -15,7 +15,7 @@ |
626 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. |
627 | |
628 | import os |
629 | -from subprocess import check_call |
630 | +from subprocess import check_call, CalledProcessError |
631 | from charmhelpers.fetch import ( |
632 | BaseFetchHandler, |
633 | UnhandledSource, |
634 | @@ -49,8 +49,8 @@ |
635 | cmd = ['git', '-C', dest, 'pull', source, branch] |
636 | else: |
637 | cmd = ['git', 'clone', source, dest, '--branch', branch] |
638 | - if depth: |
639 | - cmd.extend(['--depth', depth]) |
640 | + if depth: |
641 | + cmd.extend(['--depth', depth]) |
642 | check_call(cmd) |
643 | |
644 | def install(self, source, branch="master", dest=None, depth=None): |
645 | @@ -63,6 +63,8 @@ |
646 | branch_name) |
647 | try: |
648 | self.clone(source, dest_dir, branch, depth) |
649 | + except CalledProcessError as e: |
650 | + raise UnhandledSource(e) |
651 | except OSError as e: |
652 | raise UnhandledSource(e.strerror) |
653 | return dest_dir |
654 | |
655 | === modified file 'tests/charmhelpers/contrib/openstack/amulet/deployment.py' |
656 | --- tests/charmhelpers/contrib/openstack/amulet/deployment.py 2016-01-04 21:26:14 +0000 |
657 | +++ tests/charmhelpers/contrib/openstack/amulet/deployment.py 2016-02-12 09:36:40 +0000 |
658 | @@ -121,11 +121,12 @@ |
659 | |
660 | # Charms which should use the source config option |
661 | use_source = ['mysql', 'mongodb', 'rabbitmq-server', 'ceph', |
662 | - 'ceph-osd', 'ceph-radosgw'] |
663 | + 'ceph-osd', 'ceph-radosgw', 'ceph-mon'] |
664 | |
665 | # Charms which can not use openstack-origin, ie. many subordinates |
666 | no_origin = ['cinder-ceph', 'hacluster', 'neutron-openvswitch', 'nrpe', |
667 | - 'openvswitch-odl', 'neutron-api-odl', 'odl-controller'] |
668 | + 'openvswitch-odl', 'neutron-api-odl', 'odl-controller', |
669 | + 'cinder-backup'] |
670 | |
671 | if self.openstack: |
672 | for svc in services: |
charm_unit_test #202 cinder-next for hopem mp285747
UNIT OK: passed
Build: http://10.245.162.36:8080/job/charm_unit_test/202/