Merge lp:~hopem/charms/trusty/ceilometer-agent/lp1535062 into lp:~openstack-charmers-archive/charms/trusty/ceilometer-agent/next
- Trusty Tahr (14.04)
- lp1535062
- Merge into next
Proposed by
Edward Hope-Morley
Status: | Merged | ||||
---|---|---|---|---|---|
Merged at revision: | 76 | ||||
Proposed branch: | lp:~hopem/charms/trusty/ceilometer-agent/lp1535062 | ||||
Merge into: | lp:~openstack-charmers-archive/charms/trusty/ceilometer-agent/next | ||||
Diff against target: |
617 lines (+182/-112) 4 files modified
hooks/charmhelpers/contrib/openstack/context.py (+12/-2) hooks/charmhelpers/contrib/openstack/utils.py (+95/-63) hooks/charmhelpers/core/host.py (+73/-42) hooks/charmhelpers/fetch/giturl.py (+2/-5) |
||||
To merge this branch: | bzr merge lp:~hopem/charms/trusty/ceilometer-agent/lp1535062 | ||||
Related bugs: |
|
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
OpenStack Charmers | Pending | ||
Review via email: mp+282881@code.launchpad.net |
Commit message
Description of the change
To post a comment you must log in.
Revision history for this message
uosci-testing-bot (uosci-testing-bot) wrote : | # |
Revision history for this message
uosci-testing-bot (uosci-testing-bot) wrote : | # |
charm_lint_check #17514 ceilometer-agent-next for hopem mp282881
LINT OK: passed
Build: http://
Revision history for this message
uosci-testing-bot (uosci-testing-bot) wrote : | # |
charm_amulet_test #8875 ceilometer-agent-next for hopem mp282881
AMULET OK: passed
Build: http://
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'hooks/charmhelpers/contrib/openstack/context.py' |
2 | --- hooks/charmhelpers/contrib/openstack/context.py 2016-01-04 21:25:19 +0000 |
3 | +++ hooks/charmhelpers/contrib/openstack/context.py 2016-01-17 21:20:50 +0000 |
4 | @@ -57,6 +57,7 @@ |
5 | get_nic_hwaddr, |
6 | mkdir, |
7 | write_file, |
8 | + pwgen, |
9 | ) |
10 | from charmhelpers.contrib.hahelpers.cluster import ( |
11 | determine_apache_port, |
12 | @@ -87,6 +88,8 @@ |
13 | is_bridge_member, |
14 | ) |
15 | from charmhelpers.contrib.openstack.utils import get_host_ip |
16 | +from charmhelpers.core.unitdata import kv |
17 | + |
18 | CA_CERT_PATH = '/usr/local/share/ca-certificates/keystone_juju_ca_cert.crt' |
19 | ADDRESS_TYPES = ['admin', 'internal', 'public'] |
20 | |
21 | @@ -636,11 +639,18 @@ |
22 | ctxt['ipv6'] = True |
23 | ctxt['local_host'] = 'ip6-localhost' |
24 | ctxt['haproxy_host'] = '::' |
25 | - ctxt['stat_port'] = ':::8888' |
26 | else: |
27 | ctxt['local_host'] = '127.0.0.1' |
28 | ctxt['haproxy_host'] = '0.0.0.0' |
29 | - ctxt['stat_port'] = ':8888' |
30 | + |
31 | + ctxt['stat_port'] = '8888' |
32 | + |
33 | + db = kv() |
34 | + ctxt['stat_password'] = db.get('stat-password') |
35 | + if not ctxt['stat_password']: |
36 | + ctxt['stat_password'] = db.set('stat-password', |
37 | + pwgen(32)) |
38 | + db.flush() |
39 | |
40 | for frontend in cluster_hosts: |
41 | if (len(cluster_hosts[frontend]['backends']) > 1 or |
42 | |
43 | === modified file 'hooks/charmhelpers/contrib/openstack/utils.py' |
44 | --- hooks/charmhelpers/contrib/openstack/utils.py 2016-01-04 21:25:19 +0000 |
45 | +++ hooks/charmhelpers/contrib/openstack/utils.py 2016-01-17 21:20:50 +0000 |
46 | @@ -103,68 +103,67 @@ |
47 | ('2016.1', 'mitaka'), |
48 | ]) |
49 | |
50 | -# The ugly duckling |
51 | +# The ugly duckling - must list releases oldest to newest |
52 | SWIFT_CODENAMES = OrderedDict([ |
53 | - ('1.4.3', 'diablo'), |
54 | - ('1.4.8', 'essex'), |
55 | - ('1.7.4', 'folsom'), |
56 | - ('1.8.0', 'grizzly'), |
57 | - ('1.7.7', 'grizzly'), |
58 | - ('1.7.6', 'grizzly'), |
59 | - ('1.10.0', 'havana'), |
60 | - ('1.9.1', 'havana'), |
61 | - ('1.9.0', 'havana'), |
62 | - ('1.13.1', 'icehouse'), |
63 | - ('1.13.0', 'icehouse'), |
64 | - ('1.12.0', 'icehouse'), |
65 | - ('1.11.0', 'icehouse'), |
66 | - ('2.0.0', 'juno'), |
67 | - ('2.1.0', 'juno'), |
68 | - ('2.2.0', 'juno'), |
69 | - ('2.2.1', 'kilo'), |
70 | - ('2.2.2', 'kilo'), |
71 | - ('2.3.0', 'liberty'), |
72 | - ('2.4.0', 'liberty'), |
73 | - ('2.5.0', 'liberty'), |
74 | + ('diablo', |
75 | + ['1.4.3']), |
76 | + ('essex', |
77 | + ['1.4.8']), |
78 | + ('folsom', |
79 | + ['1.7.4']), |
80 | + ('grizzly', |
81 | + ['1.7.6', '1.7.7', '1.8.0']), |
82 | + ('havana', |
83 | + ['1.9.0', '1.9.1', '1.10.0']), |
84 | + ('icehouse', |
85 | + ['1.11.0', '1.12.0', '1.13.0', '1.13.1']), |
86 | + ('juno', |
87 | + ['2.0.0', '2.1.0', '2.2.0']), |
88 | + ('kilo', |
89 | + ['2.2.1', '2.2.2']), |
90 | + ('liberty', |
91 | + ['2.3.0', '2.4.0', '2.5.0']), |
92 | + ('mitaka', |
93 | + ['2.5.0']), |
94 | ]) |
95 | |
96 | # >= Liberty version->codename mapping |
97 | PACKAGE_CODENAMES = { |
98 | 'nova-common': OrderedDict([ |
99 | - ('12.0.0', 'liberty'), |
100 | - ('13.0.0', 'mitaka'), |
101 | + ('12.0', 'liberty'), |
102 | + ('13.0', 'mitaka'), |
103 | ]), |
104 | 'neutron-common': OrderedDict([ |
105 | - ('7.0.0', 'liberty'), |
106 | - ('8.0.0', 'mitaka'), |
107 | + ('7.0', 'liberty'), |
108 | + ('8.0', 'mitaka'), |
109 | ]), |
110 | 'cinder-common': OrderedDict([ |
111 | - ('7.0.0', 'liberty'), |
112 | - ('8.0.0', 'mitaka'), |
113 | + ('7.0', 'liberty'), |
114 | + ('8.0', 'mitaka'), |
115 | ]), |
116 | 'keystone': OrderedDict([ |
117 | - ('8.0.0', 'liberty'), |
118 | - ('9.0.0', 'mitaka'), |
119 | + ('8.0', 'liberty'), |
120 | + ('9.0', 'mitaka'), |
121 | ]), |
122 | 'horizon-common': OrderedDict([ |
123 | - ('8.0.0', 'liberty'), |
124 | - ('9.0.0', 'mitaka'), |
125 | + ('8.0', 'liberty'), |
126 | + ('9.0', 'mitaka'), |
127 | ]), |
128 | 'ceilometer-common': OrderedDict([ |
129 | - ('5.0.0', 'liberty'), |
130 | - ('6.0.0', 'mitaka'), |
131 | + ('5.0', 'liberty'), |
132 | + ('6.0', 'mitaka'), |
133 | ]), |
134 | 'heat-common': OrderedDict([ |
135 | - ('5.0.0', 'liberty'), |
136 | - ('6.0.0', 'mitaka'), |
137 | + ('5.0', 'liberty'), |
138 | + ('6.0', 'mitaka'), |
139 | ]), |
140 | 'glance-common': OrderedDict([ |
141 | - ('11.0.0', 'liberty'), |
142 | - ('12.0.0', 'mitaka'), |
143 | + ('11.0', 'liberty'), |
144 | + ('12.0', 'mitaka'), |
145 | ]), |
146 | 'openstack-dashboard': OrderedDict([ |
147 | - ('8.0.0', 'liberty'), |
148 | - ('9.0.0', 'mitaka'), |
149 | + ('8.0', 'liberty'), |
150 | + ('9.0', 'mitaka'), |
151 | ]), |
152 | } |
153 | |
154 | @@ -227,6 +226,33 @@ |
155 | error_out(e) |
156 | |
157 | |
158 | +def get_os_version_codename_swift(codename): |
159 | + '''Determine OpenStack version number of swift from codename.''' |
160 | + for k, v in six.iteritems(SWIFT_CODENAMES): |
161 | + if k == codename: |
162 | + return v[-1] |
163 | + e = 'Could not derive swift version for '\ |
164 | + 'codename: %s' % codename |
165 | + error_out(e) |
166 | + |
167 | + |
168 | +def get_swift_codename(version): |
169 | + '''Determine OpenStack codename that corresponds to swift version.''' |
170 | + codenames = [k for k, v in six.iteritems(SWIFT_CODENAMES) if version in v] |
171 | + if len(codenames) > 1: |
172 | + # If more than one release codename contains this version we determine |
173 | + # the actual codename based on the highest available install source. |
174 | + for codename in reversed(codenames): |
175 | + releases = UBUNTU_OPENSTACK_RELEASE |
176 | + release = [k for k, v in six.iteritems(releases) if codename in v] |
177 | + ret = subprocess.check_output(['apt-cache', 'policy', 'swift']) |
178 | + if codename in ret or release[0] in ret: |
179 | + return codename |
180 | + elif len(codenames) == 1: |
181 | + return codenames[0] |
182 | + return None |
183 | + |
184 | + |
185 | def get_os_codename_package(package, fatal=True): |
186 | '''Derive OpenStack release codename from an installed package.''' |
187 | import apt_pkg as apt |
188 | @@ -251,7 +277,14 @@ |
189 | error_out(e) |
190 | |
191 | vers = apt.upstream_version(pkg.current_ver.ver_str) |
192 | - match = re.match('^(\d+)\.(\d+)\.(\d+)', vers) |
193 | + if 'swift' in pkg.name: |
194 | + # Fully x.y.z match for swift versions |
195 | + match = re.match('^(\d+)\.(\d+)\.(\d+)', vers) |
196 | + else: |
197 | + # x.y match only for 20XX.X |
198 | + # and ignore patch level for other packages |
199 | + match = re.match('^(\d+)\.(\d+)', vers) |
200 | + |
201 | if match: |
202 | vers = match.group(0) |
203 | |
204 | @@ -263,13 +296,8 @@ |
205 | # < Liberty co-ordinated project versions |
206 | try: |
207 | if 'swift' in pkg.name: |
208 | - swift_vers = vers[:5] |
209 | - if swift_vers not in SWIFT_CODENAMES: |
210 | - # Deal with 1.10.0 upward |
211 | - swift_vers = vers[:6] |
212 | - return SWIFT_CODENAMES[swift_vers] |
213 | + return get_swift_codename(vers) |
214 | else: |
215 | - vers = vers[:6] |
216 | return OPENSTACK_CODENAMES[vers] |
217 | except KeyError: |
218 | if not fatal: |
219 | @@ -287,12 +315,14 @@ |
220 | |
221 | if 'swift' in pkg: |
222 | vers_map = SWIFT_CODENAMES |
223 | + for cname, version in six.iteritems(vers_map): |
224 | + if cname == codename: |
225 | + return version[-1] |
226 | else: |
227 | vers_map = OPENSTACK_CODENAMES |
228 | - |
229 | - for version, cname in six.iteritems(vers_map): |
230 | - if cname == codename: |
231 | - return version |
232 | + for version, cname in six.iteritems(vers_map): |
233 | + if cname == codename: |
234 | + return version |
235 | # e = "Could not determine OpenStack version for package: %s" % pkg |
236 | # error_out(e) |
237 | |
238 | @@ -458,11 +488,16 @@ |
239 | cur_vers = get_os_version_package(package) |
240 | if "swift" in package: |
241 | codename = get_os_codename_install_source(src) |
242 | - available_vers = get_os_version_codename(codename, SWIFT_CODENAMES) |
243 | + avail_vers = get_os_version_codename_swift(codename) |
244 | else: |
245 | - available_vers = get_os_version_install_source(src) |
246 | + avail_vers = get_os_version_install_source(src) |
247 | apt.init() |
248 | - return apt.version_compare(available_vers, cur_vers) == 1 |
249 | + if "swift" in package: |
250 | + major_cur_vers = cur_vers.split('.', 1)[0] |
251 | + major_avail_vers = avail_vers.split('.', 1)[0] |
252 | + major_diff = apt.version_compare(major_avail_vers, major_cur_vers) |
253 | + return avail_vers > cur_vers and (major_diff == 1 or major_diff == 0) |
254 | + return apt.version_compare(avail_vers, cur_vers) == 1 |
255 | |
256 | |
257 | def ensure_block_device(block_device): |
258 | @@ -591,7 +626,7 @@ |
259 | return yaml.load(projects_yaml) |
260 | |
261 | |
262 | -def git_clone_and_install(projects_yaml, core_project, depth=1): |
263 | +def git_clone_and_install(projects_yaml, core_project): |
264 | """ |
265 | Clone/install all specified OpenStack repositories. |
266 | |
267 | @@ -641,6 +676,9 @@ |
268 | for p in projects['repositories']: |
269 | repo = p['repository'] |
270 | branch = p['branch'] |
271 | + depth = '1' |
272 | + if 'depth' in p.keys(): |
273 | + depth = p['depth'] |
274 | if p['name'] == 'requirements': |
275 | repo_dir = _git_clone_and_install_single(repo, branch, depth, |
276 | parent_dir, http_proxy, |
277 | @@ -685,19 +723,13 @@ |
278 | """ |
279 | Clone and install a single git repository. |
280 | """ |
281 | - dest_dir = os.path.join(parent_dir, os.path.basename(repo)) |
282 | - |
283 | if not os.path.exists(parent_dir): |
284 | juju_log('Directory already exists at {}. ' |
285 | 'No need to create directory.'.format(parent_dir)) |
286 | os.mkdir(parent_dir) |
287 | |
288 | - if not os.path.exists(dest_dir): |
289 | - juju_log('Cloning git repo: {}, branch: {}'.format(repo, branch)) |
290 | - repo_dir = install_remote(repo, dest=parent_dir, branch=branch, |
291 | - depth=depth) |
292 | - else: |
293 | - repo_dir = dest_dir |
294 | + juju_log('Cloning git repo: {}, branch: {}'.format(repo, branch)) |
295 | + repo_dir = install_remote(repo, dest=parent_dir, branch=branch, depth=depth) |
296 | |
297 | venv = os.path.join(parent_dir, 'venv') |
298 | |
299 | |
300 | === modified file 'hooks/charmhelpers/core/host.py' |
301 | --- hooks/charmhelpers/core/host.py 2016-01-04 21:25:19 +0000 |
302 | +++ hooks/charmhelpers/core/host.py 2016-01-17 21:20:50 +0000 |
303 | @@ -72,7 +72,9 @@ |
304 | stopped = service_stop(service_name) |
305 | upstart_file = os.path.join(init_dir, "{}.conf".format(service_name)) |
306 | sysv_file = os.path.join(initd_dir, service_name) |
307 | - if os.path.exists(upstart_file): |
308 | + if init_is_systemd(): |
309 | + service('disable', service_name) |
310 | + elif os.path.exists(upstart_file): |
311 | override_path = os.path.join( |
312 | init_dir, '{}.override'.format(service_name)) |
313 | with open(override_path, 'w') as fh: |
314 | @@ -80,9 +82,9 @@ |
315 | elif os.path.exists(sysv_file): |
316 | subprocess.check_call(["update-rc.d", service_name, "disable"]) |
317 | else: |
318 | - # XXX: Support SystemD too |
319 | raise ValueError( |
320 | - "Unable to detect {0} as either Upstart {1} or SysV {2}".format( |
321 | + "Unable to detect {0} as SystemD, Upstart {1} or" |
322 | + " SysV {2}".format( |
323 | service_name, upstart_file, sysv_file)) |
324 | return stopped |
325 | |
326 | @@ -94,7 +96,9 @@ |
327 | Reenable starting again at boot. Start the service""" |
328 | upstart_file = os.path.join(init_dir, "{}.conf".format(service_name)) |
329 | sysv_file = os.path.join(initd_dir, service_name) |
330 | - if os.path.exists(upstart_file): |
331 | + if init_is_systemd(): |
332 | + service('enable', service_name) |
333 | + elif os.path.exists(upstart_file): |
334 | override_path = os.path.join( |
335 | init_dir, '{}.override'.format(service_name)) |
336 | if os.path.exists(override_path): |
337 | @@ -102,9 +106,9 @@ |
338 | elif os.path.exists(sysv_file): |
339 | subprocess.check_call(["update-rc.d", service_name, "enable"]) |
340 | else: |
341 | - # XXX: Support SystemD too |
342 | raise ValueError( |
343 | - "Unable to detect {0} as either Upstart {1} or SysV {2}".format( |
344 | + "Unable to detect {0} as SystemD, Upstart {1} or" |
345 | + " SysV {2}".format( |
346 | service_name, upstart_file, sysv_file)) |
347 | |
348 | started = service_running(service_name) |
349 | @@ -115,23 +119,29 @@ |
350 | |
351 | def service(action, service_name): |
352 | """Control a system service""" |
353 | - cmd = ['service', service_name, action] |
354 | + if init_is_systemd(): |
355 | + cmd = ['systemctl', action, service_name] |
356 | + else: |
357 | + cmd = ['service', service_name, action] |
358 | return subprocess.call(cmd) == 0 |
359 | |
360 | |
361 | -def service_running(service): |
362 | +def service_running(service_name): |
363 | """Determine whether a system service is running""" |
364 | - try: |
365 | - output = subprocess.check_output( |
366 | - ['service', service, 'status'], |
367 | - stderr=subprocess.STDOUT).decode('UTF-8') |
368 | - except subprocess.CalledProcessError: |
369 | - return False |
370 | + if init_is_systemd(): |
371 | + return service('is-active', service_name) |
372 | else: |
373 | - if ("start/running" in output or "is running" in output): |
374 | - return True |
375 | - else: |
376 | + try: |
377 | + output = subprocess.check_output( |
378 | + ['service', service_name, 'status'], |
379 | + stderr=subprocess.STDOUT).decode('UTF-8') |
380 | + except subprocess.CalledProcessError: |
381 | return False |
382 | + else: |
383 | + if ("start/running" in output or "is running" in output): |
384 | + return True |
385 | + else: |
386 | + return False |
387 | |
388 | |
389 | def service_available(service_name): |
390 | @@ -146,10 +156,17 @@ |
391 | return True |
392 | |
393 | |
394 | +SYSTEMD_SYSTEM = '/run/systemd/system' |
395 | + |
396 | + |
397 | +def init_is_systemd(): |
398 | + """Return True if the host system uses systemd, False otherwise.""" |
399 | + return os.path.isdir(SYSTEMD_SYSTEM) |
400 | + |
401 | + |
402 | def adduser(username, password=None, shell='/bin/bash', system_user=False, |
403 | primary_group=None, secondary_groups=None): |
404 | - """ |
405 | - Add a user to the system. |
406 | + """Add a user to the system. |
407 | |
408 | Will log but otherwise succeed if the user already exists. |
409 | |
410 | @@ -157,7 +174,7 @@ |
411 | :param str password: Password for user; if ``None``, create a system user |
412 | :param str shell: The default shell for the user |
413 | :param bool system_user: Whether to create a login or system user |
414 | - :param str primary_group: Primary group for user; defaults to their username |
415 | + :param str primary_group: Primary group for user; defaults to username |
416 | :param list secondary_groups: Optional list of additional groups |
417 | |
418 | :returns: The password database entry struct, as returned by `pwd.getpwnam` |
419 | @@ -283,14 +300,12 @@ |
420 | |
421 | |
422 | def fstab_remove(mp): |
423 | - """Remove the given mountpoint entry from /etc/fstab |
424 | - """ |
425 | + """Remove the given mountpoint entry from /etc/fstab""" |
426 | return Fstab.remove_by_mountpoint(mp) |
427 | |
428 | |
429 | def fstab_add(dev, mp, fs, options=None): |
430 | - """Adds the given device entry to the /etc/fstab file |
431 | - """ |
432 | + """Adds the given device entry to the /etc/fstab file""" |
433 | return Fstab.add(dev, mp, fs, options=options) |
434 | |
435 | |
436 | @@ -346,8 +361,7 @@ |
437 | |
438 | |
439 | def file_hash(path, hash_type='md5'): |
440 | - """ |
441 | - Generate a hash checksum of the contents of 'path' or None if not found. |
442 | + """Generate a hash checksum of the contents of 'path' or None if not found. |
443 | |
444 | :param str hash_type: Any hash alrgorithm supported by :mod:`hashlib`, |
445 | such as md5, sha1, sha256, sha512, etc. |
446 | @@ -362,10 +376,9 @@ |
447 | |
448 | |
449 | def path_hash(path): |
450 | - """ |
451 | - Generate a hash checksum of all files matching 'path'. Standard wildcards |
452 | - like '*' and '?' are supported, see documentation for the 'glob' module for |
453 | - more information. |
454 | + """Generate a hash checksum of all files matching 'path'. Standard |
455 | + wildcards like '*' and '?' are supported, see documentation for the 'glob' |
456 | + module for more information. |
457 | |
458 | :return: dict: A { filename: hash } dictionary for all matched files. |
459 | Empty if none found. |
460 | @@ -377,8 +390,7 @@ |
461 | |
462 | |
463 | def check_hash(path, checksum, hash_type='md5'): |
464 | - """ |
465 | - Validate a file using a cryptographic checksum. |
466 | + """Validate a file using a cryptographic checksum. |
467 | |
468 | :param str checksum: Value of the checksum used to validate the file. |
469 | :param str hash_type: Hash algorithm used to generate `checksum`. |
470 | @@ -393,6 +405,7 @@ |
471 | |
472 | |
473 | class ChecksumError(ValueError): |
474 | + """A class derived from Value error to indicate the checksum failed.""" |
475 | pass |
476 | |
477 | |
478 | @@ -498,7 +511,7 @@ |
479 | |
480 | |
481 | def list_nics(nic_type=None): |
482 | - '''Return a list of nics of given type(s)''' |
483 | + """Return a list of nics of given type(s)""" |
484 | if isinstance(nic_type, six.string_types): |
485 | int_types = [nic_type] |
486 | else: |
487 | @@ -540,12 +553,13 @@ |
488 | |
489 | |
490 | def set_nic_mtu(nic, mtu): |
491 | - '''Set MTU on a network interface''' |
492 | + """Set the Maximum Transmission Unit (MTU) on a network interface.""" |
493 | cmd = ['ip', 'link', 'set', nic, 'mtu', mtu] |
494 | subprocess.check_call(cmd) |
495 | |
496 | |
497 | def get_nic_mtu(nic): |
498 | + """Return the Maximum Transmission Unit (MTU) for a network interface.""" |
499 | cmd = ['ip', 'addr', 'show', nic] |
500 | ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n') |
501 | mtu = "" |
502 | @@ -557,6 +571,7 @@ |
503 | |
504 | |
505 | def get_nic_hwaddr(nic): |
506 | + """Return the Media Access Control (MAC) for a network interface.""" |
507 | cmd = ['ip', '-o', '-0', 'addr', 'show', nic] |
508 | ip_output = subprocess.check_output(cmd).decode('UTF-8') |
509 | hwaddr = "" |
510 | @@ -567,7 +582,7 @@ |
511 | |
512 | |
513 | def cmp_pkgrevno(package, revno, pkgcache=None): |
514 | - '''Compare supplied revno with the revno of the installed package |
515 | + """Compare supplied revno with the revno of the installed package |
516 | |
517 | * 1 => Installed revno is greater than supplied arg |
518 | * 0 => Installed revno is the same as supplied arg |
519 | @@ -576,7 +591,7 @@ |
520 | This function imports apt_cache function from charmhelpers.fetch if |
521 | the pkgcache argument is None. Be sure to add charmhelpers.fetch if |
522 | you call this function, or pass an apt_pkg.Cache() instance. |
523 | - ''' |
524 | + """ |
525 | import apt_pkg |
526 | if not pkgcache: |
527 | from charmhelpers.fetch import apt_cache |
528 | @@ -586,19 +601,27 @@ |
529 | |
530 | |
531 | @contextmanager |
532 | -def chdir(d): |
533 | +def chdir(directory): |
534 | + """Change the current working directory to a different directory for a code |
535 | + block and return the previous directory after the block exits. Useful to |
536 | + run commands from a specificed directory. |
537 | + |
538 | + :param str directory: The directory path to change to for this context. |
539 | + """ |
540 | cur = os.getcwd() |
541 | try: |
542 | - yield os.chdir(d) |
543 | + yield os.chdir(directory) |
544 | finally: |
545 | os.chdir(cur) |
546 | |
547 | |
548 | def chownr(path, owner, group, follow_links=True, chowntopdir=False): |
549 | - """ |
550 | - Recursively change user and group ownership of files and directories |
551 | + """Recursively change user and group ownership of files and directories |
552 | in given path. Doesn't chown path itself by default, only its children. |
553 | |
554 | + :param str path: The string path to start changing ownership. |
555 | + :param str owner: The owner string to use when looking up the uid. |
556 | + :param str group: The group string to use when looking up the gid. |
557 | :param bool follow_links: Also Chown links if True |
558 | :param bool chowntopdir: Also chown path itself if True |
559 | """ |
560 | @@ -622,15 +645,23 @@ |
561 | |
562 | |
563 | def lchownr(path, owner, group): |
564 | + """Recursively change user and group ownership of files and directories |
565 | + in a given path, not following symbolic links. See the documentation for |
566 | + 'os.lchown' for more information. |
567 | + |
568 | + :param str path: The string path to start changing ownership. |
569 | + :param str owner: The owner string to use when looking up the uid. |
570 | + :param str group: The group string to use when looking up the gid. |
571 | + """ |
572 | chownr(path, owner, group, follow_links=False) |
573 | |
574 | |
575 | def get_total_ram(): |
576 | - '''The total amount of system RAM in bytes. |
577 | + """The total amount of system RAM in bytes. |
578 | |
579 | This is what is reported by the OS, and may be overcommitted when |
580 | there are multiple containers hosted on the same machine. |
581 | - ''' |
582 | + """ |
583 | with open('/proc/meminfo', 'r') as f: |
584 | for line in f.readlines(): |
585 | if line: |
586 | |
587 | === modified file 'hooks/charmhelpers/fetch/giturl.py' |
588 | --- hooks/charmhelpers/fetch/giturl.py 2016-01-04 21:25:19 +0000 |
589 | +++ hooks/charmhelpers/fetch/giturl.py 2016-01-17 21:20:50 +0000 |
590 | @@ -22,7 +22,6 @@ |
591 | filter_installed_packages, |
592 | apt_install, |
593 | ) |
594 | -from charmhelpers.core.host import mkdir |
595 | |
596 | if filter_installed_packages(['git']) != []: |
597 | apt_install(['git']) |
598 | @@ -50,8 +49,8 @@ |
599 | cmd = ['git', '-C', dest, 'pull', source, branch] |
600 | else: |
601 | cmd = ['git', 'clone', source, dest, '--branch', branch] |
602 | - if depth: |
603 | - cmd.extend(['--depth', depth]) |
604 | + if depth: |
605 | + cmd.extend(['--depth', depth]) |
606 | check_call(cmd) |
607 | |
608 | def install(self, source, branch="master", dest=None, depth=None): |
609 | @@ -62,8 +61,6 @@ |
610 | else: |
611 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", |
612 | branch_name) |
613 | - if not os.path.exists(dest_dir): |
614 | - mkdir(dest_dir, perms=0o755) |
615 | try: |
616 | self.clone(source, dest_dir, branch, depth) |
617 | except OSError as e: |
charm_unit_test #16361 ceilometer-agent-next for hopem mp282881
UNIT OK: passed
Build: http://10.245.162.77:8080/job/charm_unit_test/16361/