Merge lp:~hopem/charms/trusty/cinder-ceph/lp1535062 into lp:~openstack-charmers-archive/charms/trusty/cinder-ceph/next
- Trusty Tahr (14.04)
- lp1535062
- Merge into next
Proposed by
Edward Hope-Morley
Status: Merged
Merged at revision: 52
Proposed branch: lp:~hopem/charms/trusty/cinder-ceph/lp1535062
Merge into: lp:~openstack-charmers-archive/charms/trusty/cinder-ceph/next
Diff against target: 638 lines (+185/-114), 5 files modified:
- hooks/charmhelpers/contrib/openstack/context.py (+12/-2)
- hooks/charmhelpers/contrib/openstack/templates/haproxy.cfg (+3/-2)
- hooks/charmhelpers/contrib/openstack/utils.py (+95/-63)
- hooks/charmhelpers/core/host.py (+73/-42)
- hooks/charmhelpers/fetch/giturl.py (+2/-5)
To merge this branch: bzr merge lp:~hopem/charms/trusty/cinder-ceph/lp1535062
Related bugs: |
|
Reviewer: OpenStack Charmers — Review type: (unspecified) — Date requested: (unspecified) — Status: Pending
Review via email:
|
Commit message
Description of the change
To post a comment you must log in.
Revision history for this message
![](/+icing/build/overlay/assets/skins/sam/images/close.gif)
uosci-testing-bot (uosci-testing-bot) wrote : | # |
Revision history for this message
![](/+icing/build/overlay/assets/skins/sam/images/close.gif)
uosci-testing-bot (uosci-testing-bot) wrote : | # |
charm_unit_test #16362 cinder-ceph-next for hopem mp282880
UNIT OK: passed
Revision history for this message
![](/+icing/build/overlay/assets/skins/sam/images/close.gif)
uosci-testing-bot (uosci-testing-bot) wrote : | # |
charm_amulet_test #8874 cinder-ceph-next for hopem mp282880
AMULET OK: passed
Build: http://
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'hooks/charmhelpers/contrib/openstack/context.py' | |||
2 | --- hooks/charmhelpers/contrib/openstack/context.py 2016-01-04 21:26:40 +0000 | |||
3 | +++ hooks/charmhelpers/contrib/openstack/context.py 2016-01-17 21:20:37 +0000 | |||
4 | @@ -57,6 +57,7 @@ | |||
5 | 57 | get_nic_hwaddr, | 57 | get_nic_hwaddr, |
6 | 58 | mkdir, | 58 | mkdir, |
7 | 59 | write_file, | 59 | write_file, |
8 | 60 | pwgen, | ||
9 | 60 | ) | 61 | ) |
10 | 61 | from charmhelpers.contrib.hahelpers.cluster import ( | 62 | from charmhelpers.contrib.hahelpers.cluster import ( |
11 | 62 | determine_apache_port, | 63 | determine_apache_port, |
12 | @@ -87,6 +88,8 @@ | |||
13 | 87 | is_bridge_member, | 88 | is_bridge_member, |
14 | 88 | ) | 89 | ) |
15 | 89 | from charmhelpers.contrib.openstack.utils import get_host_ip | 90 | from charmhelpers.contrib.openstack.utils import get_host_ip |
16 | 91 | from charmhelpers.core.unitdata import kv | ||
17 | 92 | |||
18 | 90 | CA_CERT_PATH = '/usr/local/share/ca-certificates/keystone_juju_ca_cert.crt' | 93 | CA_CERT_PATH = '/usr/local/share/ca-certificates/keystone_juju_ca_cert.crt' |
19 | 91 | ADDRESS_TYPES = ['admin', 'internal', 'public'] | 94 | ADDRESS_TYPES = ['admin', 'internal', 'public'] |
20 | 92 | 95 | ||
21 | @@ -636,11 +639,18 @@ | |||
22 | 636 | ctxt['ipv6'] = True | 639 | ctxt['ipv6'] = True |
23 | 637 | ctxt['local_host'] = 'ip6-localhost' | 640 | ctxt['local_host'] = 'ip6-localhost' |
24 | 638 | ctxt['haproxy_host'] = '::' | 641 | ctxt['haproxy_host'] = '::' |
25 | 639 | ctxt['stat_port'] = ':::8888' | ||
26 | 640 | else: | 642 | else: |
27 | 641 | ctxt['local_host'] = '127.0.0.1' | 643 | ctxt['local_host'] = '127.0.0.1' |
28 | 642 | ctxt['haproxy_host'] = '0.0.0.0' | 644 | ctxt['haproxy_host'] = '0.0.0.0' |
30 | 643 | ctxt['stat_port'] = ':8888' | 645 | |
31 | 646 | ctxt['stat_port'] = '8888' | ||
32 | 647 | |||
33 | 648 | db = kv() | ||
34 | 649 | ctxt['stat_password'] = db.get('stat-password') | ||
35 | 650 | if not ctxt['stat_password']: | ||
36 | 651 | ctxt['stat_password'] = db.set('stat-password', | ||
37 | 652 | pwgen(32)) | ||
38 | 653 | db.flush() | ||
39 | 644 | 654 | ||
40 | 645 | for frontend in cluster_hosts: | 655 | for frontend in cluster_hosts: |
41 | 646 | if (len(cluster_hosts[frontend]['backends']) > 1 or | 656 | if (len(cluster_hosts[frontend]['backends']) > 1 or |
42 | 647 | 657 | ||
43 | === modified file 'hooks/charmhelpers/contrib/openstack/templates/haproxy.cfg' | |||
44 | --- hooks/charmhelpers/contrib/openstack/templates/haproxy.cfg 2016-01-04 21:26:40 +0000 | |||
45 | +++ hooks/charmhelpers/contrib/openstack/templates/haproxy.cfg 2016-01-17 21:20:37 +0000 | |||
46 | @@ -33,13 +33,14 @@ | |||
47 | 33 | timeout server 30000 | 33 | timeout server 30000 |
48 | 34 | {%- endif %} | 34 | {%- endif %} |
49 | 35 | 35 | ||
51 | 36 | listen stats {{ stat_port }} | 36 | listen stats |
52 | 37 | bind {{ local_host }}:{{ stat_port }} | ||
53 | 37 | mode http | 38 | mode http |
54 | 38 | stats enable | 39 | stats enable |
55 | 39 | stats hide-version | 40 | stats hide-version |
56 | 40 | stats realm Haproxy\ Statistics | 41 | stats realm Haproxy\ Statistics |
57 | 41 | stats uri / | 42 | stats uri / |
59 | 42 | stats auth admin:password | 43 | stats auth admin:{{ stat_password }} |
60 | 43 | 44 | ||
61 | 44 | {% if frontends -%} | 45 | {% if frontends -%} |
62 | 45 | {% for service, ports in service_ports.items() -%} | 46 | {% for service, ports in service_ports.items() -%} |
63 | 46 | 47 | ||
64 | === modified file 'hooks/charmhelpers/contrib/openstack/utils.py' | |||
65 | --- hooks/charmhelpers/contrib/openstack/utils.py 2016-01-04 21:26:40 +0000 | |||
66 | +++ hooks/charmhelpers/contrib/openstack/utils.py 2016-01-17 21:20:37 +0000 | |||
67 | @@ -103,68 +103,67 @@ | |||
68 | 103 | ('2016.1', 'mitaka'), | 103 | ('2016.1', 'mitaka'), |
69 | 104 | ]) | 104 | ]) |
70 | 105 | 105 | ||
72 | 106 | # The ugly duckling | 106 | # The ugly duckling - must list releases oldest to newest |
73 | 107 | SWIFT_CODENAMES = OrderedDict([ | 107 | SWIFT_CODENAMES = OrderedDict([ |
95 | 108 | ('1.4.3', 'diablo'), | 108 | ('diablo', |
96 | 109 | ('1.4.8', 'essex'), | 109 | ['1.4.3']), |
97 | 110 | ('1.7.4', 'folsom'), | 110 | ('essex', |
98 | 111 | ('1.8.0', 'grizzly'), | 111 | ['1.4.8']), |
99 | 112 | ('1.7.7', 'grizzly'), | 112 | ('folsom', |
100 | 113 | ('1.7.6', 'grizzly'), | 113 | ['1.7.4']), |
101 | 114 | ('1.10.0', 'havana'), | 114 | ('grizzly', |
102 | 115 | ('1.9.1', 'havana'), | 115 | ['1.7.6', '1.7.7', '1.8.0']), |
103 | 116 | ('1.9.0', 'havana'), | 116 | ('havana', |
104 | 117 | ('1.13.1', 'icehouse'), | 117 | ['1.9.0', '1.9.1', '1.10.0']), |
105 | 118 | ('1.13.0', 'icehouse'), | 118 | ('icehouse', |
106 | 119 | ('1.12.0', 'icehouse'), | 119 | ['1.11.0', '1.12.0', '1.13.0', '1.13.1']), |
107 | 120 | ('1.11.0', 'icehouse'), | 120 | ('juno', |
108 | 121 | ('2.0.0', 'juno'), | 121 | ['2.0.0', '2.1.0', '2.2.0']), |
109 | 122 | ('2.1.0', 'juno'), | 122 | ('kilo', |
110 | 123 | ('2.2.0', 'juno'), | 123 | ['2.2.1', '2.2.2']), |
111 | 124 | ('2.2.1', 'kilo'), | 124 | ('liberty', |
112 | 125 | ('2.2.2', 'kilo'), | 125 | ['2.3.0', '2.4.0', '2.5.0']), |
113 | 126 | ('2.3.0', 'liberty'), | 126 | ('mitaka', |
114 | 127 | ('2.4.0', 'liberty'), | 127 | ['2.5.0']), |
94 | 128 | ('2.5.0', 'liberty'), | ||
115 | 129 | ]) | 128 | ]) |
116 | 130 | 129 | ||
117 | 131 | # >= Liberty version->codename mapping | 130 | # >= Liberty version->codename mapping |
118 | 132 | PACKAGE_CODENAMES = { | 131 | PACKAGE_CODENAMES = { |
119 | 133 | 'nova-common': OrderedDict([ | 132 | 'nova-common': OrderedDict([ |
122 | 134 | ('12.0.0', 'liberty'), | 133 | ('12.0', 'liberty'), |
123 | 135 | ('13.0.0', 'mitaka'), | 134 | ('13.0', 'mitaka'), |
124 | 136 | ]), | 135 | ]), |
125 | 137 | 'neutron-common': OrderedDict([ | 136 | 'neutron-common': OrderedDict([ |
128 | 138 | ('7.0.0', 'liberty'), | 137 | ('7.0', 'liberty'), |
129 | 139 | ('8.0.0', 'mitaka'), | 138 | ('8.0', 'mitaka'), |
130 | 140 | ]), | 139 | ]), |
131 | 141 | 'cinder-common': OrderedDict([ | 140 | 'cinder-common': OrderedDict([ |
134 | 142 | ('7.0.0', 'liberty'), | 141 | ('7.0', 'liberty'), |
135 | 143 | ('8.0.0', 'mitaka'), | 142 | ('8.0', 'mitaka'), |
136 | 144 | ]), | 143 | ]), |
137 | 145 | 'keystone': OrderedDict([ | 144 | 'keystone': OrderedDict([ |
140 | 146 | ('8.0.0', 'liberty'), | 145 | ('8.0', 'liberty'), |
141 | 147 | ('9.0.0', 'mitaka'), | 146 | ('9.0', 'mitaka'), |
142 | 148 | ]), | 147 | ]), |
143 | 149 | 'horizon-common': OrderedDict([ | 148 | 'horizon-common': OrderedDict([ |
146 | 150 | ('8.0.0', 'liberty'), | 149 | ('8.0', 'liberty'), |
147 | 151 | ('9.0.0', 'mitaka'), | 150 | ('9.0', 'mitaka'), |
148 | 152 | ]), | 151 | ]), |
149 | 153 | 'ceilometer-common': OrderedDict([ | 152 | 'ceilometer-common': OrderedDict([ |
152 | 154 | ('5.0.0', 'liberty'), | 153 | ('5.0', 'liberty'), |
153 | 155 | ('6.0.0', 'mitaka'), | 154 | ('6.0', 'mitaka'), |
154 | 156 | ]), | 155 | ]), |
155 | 157 | 'heat-common': OrderedDict([ | 156 | 'heat-common': OrderedDict([ |
158 | 158 | ('5.0.0', 'liberty'), | 157 | ('5.0', 'liberty'), |
159 | 159 | ('6.0.0', 'mitaka'), | 158 | ('6.0', 'mitaka'), |
160 | 160 | ]), | 159 | ]), |
161 | 161 | 'glance-common': OrderedDict([ | 160 | 'glance-common': OrderedDict([ |
164 | 162 | ('11.0.0', 'liberty'), | 161 | ('11.0', 'liberty'), |
165 | 163 | ('12.0.0', 'mitaka'), | 162 | ('12.0', 'mitaka'), |
166 | 164 | ]), | 163 | ]), |
167 | 165 | 'openstack-dashboard': OrderedDict([ | 164 | 'openstack-dashboard': OrderedDict([ |
170 | 166 | ('8.0.0', 'liberty'), | 165 | ('8.0', 'liberty'), |
171 | 167 | ('9.0.0', 'mitaka'), | 166 | ('9.0', 'mitaka'), |
172 | 168 | ]), | 167 | ]), |
173 | 169 | } | 168 | } |
174 | 170 | 169 | ||
175 | @@ -227,6 +226,33 @@ | |||
176 | 227 | error_out(e) | 226 | error_out(e) |
177 | 228 | 227 | ||
178 | 229 | 228 | ||
179 | 229 | def get_os_version_codename_swift(codename): | ||
180 | 230 | '''Determine OpenStack version number of swift from codename.''' | ||
181 | 231 | for k, v in six.iteritems(SWIFT_CODENAMES): | ||
182 | 232 | if k == codename: | ||
183 | 233 | return v[-1] | ||
184 | 234 | e = 'Could not derive swift version for '\ | ||
185 | 235 | 'codename: %s' % codename | ||
186 | 236 | error_out(e) | ||
187 | 237 | |||
188 | 238 | |||
189 | 239 | def get_swift_codename(version): | ||
190 | 240 | '''Determine OpenStack codename that corresponds to swift version.''' | ||
191 | 241 | codenames = [k for k, v in six.iteritems(SWIFT_CODENAMES) if version in v] | ||
192 | 242 | if len(codenames) > 1: | ||
193 | 243 | # If more than one release codename contains this version we determine | ||
194 | 244 | # the actual codename based on the highest available install source. | ||
195 | 245 | for codename in reversed(codenames): | ||
196 | 246 | releases = UBUNTU_OPENSTACK_RELEASE | ||
197 | 247 | release = [k for k, v in six.iteritems(releases) if codename in v] | ||
198 | 248 | ret = subprocess.check_output(['apt-cache', 'policy', 'swift']) | ||
199 | 249 | if codename in ret or release[0] in ret: | ||
200 | 250 | return codename | ||
201 | 251 | elif len(codenames) == 1: | ||
202 | 252 | return codenames[0] | ||
203 | 253 | return None | ||
204 | 254 | |||
205 | 255 | |||
206 | 230 | def get_os_codename_package(package, fatal=True): | 256 | def get_os_codename_package(package, fatal=True): |
207 | 231 | '''Derive OpenStack release codename from an installed package.''' | 257 | '''Derive OpenStack release codename from an installed package.''' |
208 | 232 | import apt_pkg as apt | 258 | import apt_pkg as apt |
209 | @@ -251,7 +277,14 @@ | |||
210 | 251 | error_out(e) | 277 | error_out(e) |
211 | 252 | 278 | ||
212 | 253 | vers = apt.upstream_version(pkg.current_ver.ver_str) | 279 | vers = apt.upstream_version(pkg.current_ver.ver_str) |
214 | 254 | match = re.match('^(\d+)\.(\d+)\.(\d+)', vers) | 280 | if 'swift' in pkg.name: |
215 | 281 | # Fully x.y.z match for swift versions | ||
216 | 282 | match = re.match('^(\d+)\.(\d+)\.(\d+)', vers) | ||
217 | 283 | else: | ||
218 | 284 | # x.y match only for 20XX.X | ||
219 | 285 | # and ignore patch level for other packages | ||
220 | 286 | match = re.match('^(\d+)\.(\d+)', vers) | ||
221 | 287 | |||
222 | 255 | if match: | 288 | if match: |
223 | 256 | vers = match.group(0) | 289 | vers = match.group(0) |
224 | 257 | 290 | ||
225 | @@ -263,13 +296,8 @@ | |||
226 | 263 | # < Liberty co-ordinated project versions | 296 | # < Liberty co-ordinated project versions |
227 | 264 | try: | 297 | try: |
228 | 265 | if 'swift' in pkg.name: | 298 | if 'swift' in pkg.name: |
234 | 266 | swift_vers = vers[:5] | 299 | return get_swift_codename(vers) |
230 | 267 | if swift_vers not in SWIFT_CODENAMES: | ||
231 | 268 | # Deal with 1.10.0 upward | ||
232 | 269 | swift_vers = vers[:6] | ||
233 | 270 | return SWIFT_CODENAMES[swift_vers] | ||
235 | 271 | else: | 300 | else: |
236 | 272 | vers = vers[:6] | ||
237 | 273 | return OPENSTACK_CODENAMES[vers] | 301 | return OPENSTACK_CODENAMES[vers] |
238 | 274 | except KeyError: | 302 | except KeyError: |
239 | 275 | if not fatal: | 303 | if not fatal: |
240 | @@ -287,12 +315,14 @@ | |||
241 | 287 | 315 | ||
242 | 288 | if 'swift' in pkg: | 316 | if 'swift' in pkg: |
243 | 289 | vers_map = SWIFT_CODENAMES | 317 | vers_map = SWIFT_CODENAMES |
244 | 318 | for cname, version in six.iteritems(vers_map): | ||
245 | 319 | if cname == codename: | ||
246 | 320 | return version[-1] | ||
247 | 290 | else: | 321 | else: |
248 | 291 | vers_map = OPENSTACK_CODENAMES | 322 | vers_map = OPENSTACK_CODENAMES |
253 | 292 | 323 | for version, cname in six.iteritems(vers_map): | |
254 | 293 | for version, cname in six.iteritems(vers_map): | 324 | if cname == codename: |
255 | 294 | if cname == codename: | 325 | return version |
252 | 295 | return version | ||
256 | 296 | # e = "Could not determine OpenStack version for package: %s" % pkg | 326 | # e = "Could not determine OpenStack version for package: %s" % pkg |
257 | 297 | # error_out(e) | 327 | # error_out(e) |
258 | 298 | 328 | ||
259 | @@ -458,11 +488,16 @@ | |||
260 | 458 | cur_vers = get_os_version_package(package) | 488 | cur_vers = get_os_version_package(package) |
261 | 459 | if "swift" in package: | 489 | if "swift" in package: |
262 | 460 | codename = get_os_codename_install_source(src) | 490 | codename = get_os_codename_install_source(src) |
264 | 461 | available_vers = get_os_version_codename(codename, SWIFT_CODENAMES) | 491 | avail_vers = get_os_version_codename_swift(codename) |
265 | 462 | else: | 492 | else: |
267 | 463 | available_vers = get_os_version_install_source(src) | 493 | avail_vers = get_os_version_install_source(src) |
268 | 464 | apt.init() | 494 | apt.init() |
270 | 465 | return apt.version_compare(available_vers, cur_vers) == 1 | 495 | if "swift" in package: |
271 | 496 | major_cur_vers = cur_vers.split('.', 1)[0] | ||
272 | 497 | major_avail_vers = avail_vers.split('.', 1)[0] | ||
273 | 498 | major_diff = apt.version_compare(major_avail_vers, major_cur_vers) | ||
274 | 499 | return avail_vers > cur_vers and (major_diff == 1 or major_diff == 0) | ||
275 | 500 | return apt.version_compare(avail_vers, cur_vers) == 1 | ||
276 | 466 | 501 | ||
277 | 467 | 502 | ||
278 | 468 | def ensure_block_device(block_device): | 503 | def ensure_block_device(block_device): |
279 | @@ -591,7 +626,7 @@ | |||
280 | 591 | return yaml.load(projects_yaml) | 626 | return yaml.load(projects_yaml) |
281 | 592 | 627 | ||
282 | 593 | 628 | ||
284 | 594 | def git_clone_and_install(projects_yaml, core_project, depth=1): | 629 | def git_clone_and_install(projects_yaml, core_project): |
285 | 595 | """ | 630 | """ |
286 | 596 | Clone/install all specified OpenStack repositories. | 631 | Clone/install all specified OpenStack repositories. |
287 | 597 | 632 | ||
288 | @@ -641,6 +676,9 @@ | |||
289 | 641 | for p in projects['repositories']: | 676 | for p in projects['repositories']: |
290 | 642 | repo = p['repository'] | 677 | repo = p['repository'] |
291 | 643 | branch = p['branch'] | 678 | branch = p['branch'] |
292 | 679 | depth = '1' | ||
293 | 680 | if 'depth' in p.keys(): | ||
294 | 681 | depth = p['depth'] | ||
295 | 644 | if p['name'] == 'requirements': | 682 | if p['name'] == 'requirements': |
296 | 645 | repo_dir = _git_clone_and_install_single(repo, branch, depth, | 683 | repo_dir = _git_clone_and_install_single(repo, branch, depth, |
297 | 646 | parent_dir, http_proxy, | 684 | parent_dir, http_proxy, |
298 | @@ -685,19 +723,13 @@ | |||
299 | 685 | """ | 723 | """ |
300 | 686 | Clone and install a single git repository. | 724 | Clone and install a single git repository. |
301 | 687 | """ | 725 | """ |
302 | 688 | dest_dir = os.path.join(parent_dir, os.path.basename(repo)) | ||
303 | 689 | |||
304 | 690 | if not os.path.exists(parent_dir): | 726 | if not os.path.exists(parent_dir): |
305 | 691 | juju_log('Directory already exists at {}. ' | 727 | juju_log('Directory already exists at {}. ' |
306 | 692 | 'No need to create directory.'.format(parent_dir)) | 728 | 'No need to create directory.'.format(parent_dir)) |
307 | 693 | os.mkdir(parent_dir) | 729 | os.mkdir(parent_dir) |
308 | 694 | 730 | ||
315 | 695 | if not os.path.exists(dest_dir): | 731 | juju_log('Cloning git repo: {}, branch: {}'.format(repo, branch)) |
316 | 696 | juju_log('Cloning git repo: {}, branch: {}'.format(repo, branch)) | 732 | repo_dir = install_remote(repo, dest=parent_dir, branch=branch, depth=depth) |
311 | 697 | repo_dir = install_remote(repo, dest=parent_dir, branch=branch, | ||
312 | 698 | depth=depth) | ||
313 | 699 | else: | ||
314 | 700 | repo_dir = dest_dir | ||
317 | 701 | 733 | ||
318 | 702 | venv = os.path.join(parent_dir, 'venv') | 734 | venv = os.path.join(parent_dir, 'venv') |
319 | 703 | 735 | ||
320 | 704 | 736 | ||
321 | === modified file 'hooks/charmhelpers/core/host.py' | |||
322 | --- hooks/charmhelpers/core/host.py 2016-01-04 21:26:40 +0000 | |||
323 | +++ hooks/charmhelpers/core/host.py 2016-01-17 21:20:37 +0000 | |||
324 | @@ -72,7 +72,9 @@ | |||
325 | 72 | stopped = service_stop(service_name) | 72 | stopped = service_stop(service_name) |
326 | 73 | upstart_file = os.path.join(init_dir, "{}.conf".format(service_name)) | 73 | upstart_file = os.path.join(init_dir, "{}.conf".format(service_name)) |
327 | 74 | sysv_file = os.path.join(initd_dir, service_name) | 74 | sysv_file = os.path.join(initd_dir, service_name) |
329 | 75 | if os.path.exists(upstart_file): | 75 | if init_is_systemd(): |
330 | 76 | service('disable', service_name) | ||
331 | 77 | elif os.path.exists(upstart_file): | ||
332 | 76 | override_path = os.path.join( | 78 | override_path = os.path.join( |
333 | 77 | init_dir, '{}.override'.format(service_name)) | 79 | init_dir, '{}.override'.format(service_name)) |
334 | 78 | with open(override_path, 'w') as fh: | 80 | with open(override_path, 'w') as fh: |
335 | @@ -80,9 +82,9 @@ | |||
336 | 80 | elif os.path.exists(sysv_file): | 82 | elif os.path.exists(sysv_file): |
337 | 81 | subprocess.check_call(["update-rc.d", service_name, "disable"]) | 83 | subprocess.check_call(["update-rc.d", service_name, "disable"]) |
338 | 82 | else: | 84 | else: |
339 | 83 | # XXX: Support SystemD too | ||
340 | 84 | raise ValueError( | 85 | raise ValueError( |
342 | 85 | "Unable to detect {0} as either Upstart {1} or SysV {2}".format( | 86 | "Unable to detect {0} as SystemD, Upstart {1} or" |
343 | 87 | " SysV {2}".format( | ||
344 | 86 | service_name, upstart_file, sysv_file)) | 88 | service_name, upstart_file, sysv_file)) |
345 | 87 | return stopped | 89 | return stopped |
346 | 88 | 90 | ||
347 | @@ -94,7 +96,9 @@ | |||
348 | 94 | Reenable starting again at boot. Start the service""" | 96 | Reenable starting again at boot. Start the service""" |
349 | 95 | upstart_file = os.path.join(init_dir, "{}.conf".format(service_name)) | 97 | upstart_file = os.path.join(init_dir, "{}.conf".format(service_name)) |
350 | 96 | sysv_file = os.path.join(initd_dir, service_name) | 98 | sysv_file = os.path.join(initd_dir, service_name) |
352 | 97 | if os.path.exists(upstart_file): | 99 | if init_is_systemd(): |
353 | 100 | service('enable', service_name) | ||
354 | 101 | elif os.path.exists(upstart_file): | ||
355 | 98 | override_path = os.path.join( | 102 | override_path = os.path.join( |
356 | 99 | init_dir, '{}.override'.format(service_name)) | 103 | init_dir, '{}.override'.format(service_name)) |
357 | 100 | if os.path.exists(override_path): | 104 | if os.path.exists(override_path): |
358 | @@ -102,9 +106,9 @@ | |||
359 | 102 | elif os.path.exists(sysv_file): | 106 | elif os.path.exists(sysv_file): |
360 | 103 | subprocess.check_call(["update-rc.d", service_name, "enable"]) | 107 | subprocess.check_call(["update-rc.d", service_name, "enable"]) |
361 | 104 | else: | 108 | else: |
362 | 105 | # XXX: Support SystemD too | ||
363 | 106 | raise ValueError( | 109 | raise ValueError( |
365 | 107 | "Unable to detect {0} as either Upstart {1} or SysV {2}".format( | 110 | "Unable to detect {0} as SystemD, Upstart {1} or" |
366 | 111 | " SysV {2}".format( | ||
367 | 108 | service_name, upstart_file, sysv_file)) | 112 | service_name, upstart_file, sysv_file)) |
368 | 109 | 113 | ||
369 | 110 | started = service_running(service_name) | 114 | started = service_running(service_name) |
370 | @@ -115,23 +119,29 @@ | |||
371 | 115 | 119 | ||
372 | 116 | def service(action, service_name): | 120 | def service(action, service_name): |
373 | 117 | """Control a system service""" | 121 | """Control a system service""" |
375 | 118 | cmd = ['service', service_name, action] | 122 | if init_is_systemd(): |
376 | 123 | cmd = ['systemctl', action, service_name] | ||
377 | 124 | else: | ||
378 | 125 | cmd = ['service', service_name, action] | ||
379 | 119 | return subprocess.call(cmd) == 0 | 126 | return subprocess.call(cmd) == 0 |
380 | 120 | 127 | ||
381 | 121 | 128 | ||
383 | 122 | def service_running(service): | 129 | def service_running(service_name): |
384 | 123 | """Determine whether a system service is running""" | 130 | """Determine whether a system service is running""" |
391 | 124 | try: | 131 | if init_is_systemd(): |
392 | 125 | output = subprocess.check_output( | 132 | return service('is-active', service_name) |
387 | 126 | ['service', service, 'status'], | ||
388 | 127 | stderr=subprocess.STDOUT).decode('UTF-8') | ||
389 | 128 | except subprocess.CalledProcessError: | ||
390 | 129 | return False | ||
393 | 130 | else: | 133 | else: |
397 | 131 | if ("start/running" in output or "is running" in output): | 134 | try: |
398 | 132 | return True | 135 | output = subprocess.check_output( |
399 | 133 | else: | 136 | ['service', service_name, 'status'], |
400 | 137 | stderr=subprocess.STDOUT).decode('UTF-8') | ||
401 | 138 | except subprocess.CalledProcessError: | ||
402 | 134 | return False | 139 | return False |
403 | 140 | else: | ||
404 | 141 | if ("start/running" in output or "is running" in output): | ||
405 | 142 | return True | ||
406 | 143 | else: | ||
407 | 144 | return False | ||
408 | 135 | 145 | ||
409 | 136 | 146 | ||
410 | 137 | def service_available(service_name): | 147 | def service_available(service_name): |
411 | @@ -146,10 +156,17 @@ | |||
412 | 146 | return True | 156 | return True |
413 | 147 | 157 | ||
414 | 148 | 158 | ||
415 | 159 | SYSTEMD_SYSTEM = '/run/systemd/system' | ||
416 | 160 | |||
417 | 161 | |||
418 | 162 | def init_is_systemd(): | ||
419 | 163 | """Return True if the host system uses systemd, False otherwise.""" | ||
420 | 164 | return os.path.isdir(SYSTEMD_SYSTEM) | ||
421 | 165 | |||
422 | 166 | |||
423 | 149 | def adduser(username, password=None, shell='/bin/bash', system_user=False, | 167 | def adduser(username, password=None, shell='/bin/bash', system_user=False, |
424 | 150 | primary_group=None, secondary_groups=None): | 168 | primary_group=None, secondary_groups=None): |
427 | 151 | """ | 169 | """Add a user to the system. |
426 | 152 | Add a user to the system. | ||
428 | 153 | 170 | ||
429 | 154 | Will log but otherwise succeed if the user already exists. | 171 | Will log but otherwise succeed if the user already exists. |
430 | 155 | 172 | ||
431 | @@ -157,7 +174,7 @@ | |||
432 | 157 | :param str password: Password for user; if ``None``, create a system user | 174 | :param str password: Password for user; if ``None``, create a system user |
433 | 158 | :param str shell: The default shell for the user | 175 | :param str shell: The default shell for the user |
434 | 159 | :param bool system_user: Whether to create a login or system user | 176 | :param bool system_user: Whether to create a login or system user |
436 | 160 | :param str primary_group: Primary group for user; defaults to their username | 177 | :param str primary_group: Primary group for user; defaults to username |
437 | 161 | :param list secondary_groups: Optional list of additional groups | 178 | :param list secondary_groups: Optional list of additional groups |
438 | 162 | 179 | ||
439 | 163 | :returns: The password database entry struct, as returned by `pwd.getpwnam` | 180 | :returns: The password database entry struct, as returned by `pwd.getpwnam` |
440 | @@ -283,14 +300,12 @@ | |||
441 | 283 | 300 | ||
442 | 284 | 301 | ||
443 | 285 | def fstab_remove(mp): | 302 | def fstab_remove(mp): |
446 | 286 | """Remove the given mountpoint entry from /etc/fstab | 303 | """Remove the given mountpoint entry from /etc/fstab""" |
445 | 287 | """ | ||
447 | 288 | return Fstab.remove_by_mountpoint(mp) | 304 | return Fstab.remove_by_mountpoint(mp) |
448 | 289 | 305 | ||
449 | 290 | 306 | ||
450 | 291 | def fstab_add(dev, mp, fs, options=None): | 307 | def fstab_add(dev, mp, fs, options=None): |
453 | 292 | """Adds the given device entry to the /etc/fstab file | 308 | """Adds the given device entry to the /etc/fstab file""" |
452 | 293 | """ | ||
454 | 294 | return Fstab.add(dev, mp, fs, options=options) | 309 | return Fstab.add(dev, mp, fs, options=options) |
455 | 295 | 310 | ||
456 | 296 | 311 | ||
457 | @@ -346,8 +361,7 @@ | |||
458 | 346 | 361 | ||
459 | 347 | 362 | ||
460 | 348 | def file_hash(path, hash_type='md5'): | 363 | def file_hash(path, hash_type='md5'): |
463 | 349 | """ | 364 | """Generate a hash checksum of the contents of 'path' or None if not found. |
462 | 350 | Generate a hash checksum of the contents of 'path' or None if not found. | ||
464 | 351 | 365 | ||
465 | 352 | :param str hash_type: Any hash alrgorithm supported by :mod:`hashlib`, | 366 | :param str hash_type: Any hash alrgorithm supported by :mod:`hashlib`, |
466 | 353 | such as md5, sha1, sha256, sha512, etc. | 367 | such as md5, sha1, sha256, sha512, etc. |
467 | @@ -362,10 +376,9 @@ | |||
468 | 362 | 376 | ||
469 | 363 | 377 | ||
470 | 364 | def path_hash(path): | 378 | def path_hash(path): |
475 | 365 | """ | 379 | """Generate a hash checksum of all files matching 'path'. Standard |
476 | 366 | Generate a hash checksum of all files matching 'path'. Standard wildcards | 380 | wildcards like '*' and '?' are supported, see documentation for the 'glob' |
477 | 367 | like '*' and '?' are supported, see documentation for the 'glob' module for | 381 | module for more information. |
474 | 368 | more information. | ||
478 | 369 | 382 | ||
479 | 370 | :return: dict: A { filename: hash } dictionary for all matched files. | 383 | :return: dict: A { filename: hash } dictionary for all matched files. |
480 | 371 | Empty if none found. | 384 | Empty if none found. |
481 | @@ -377,8 +390,7 @@ | |||
482 | 377 | 390 | ||
483 | 378 | 391 | ||
484 | 379 | def check_hash(path, checksum, hash_type='md5'): | 392 | def check_hash(path, checksum, hash_type='md5'): |
487 | 380 | """ | 393 | """Validate a file using a cryptographic checksum. |
486 | 381 | Validate a file using a cryptographic checksum. | ||
488 | 382 | 394 | ||
489 | 383 | :param str checksum: Value of the checksum used to validate the file. | 395 | :param str checksum: Value of the checksum used to validate the file. |
490 | 384 | :param str hash_type: Hash algorithm used to generate `checksum`. | 396 | :param str hash_type: Hash algorithm used to generate `checksum`. |
491 | @@ -393,6 +405,7 @@ | |||
492 | 393 | 405 | ||
493 | 394 | 406 | ||
494 | 395 | class ChecksumError(ValueError): | 407 | class ChecksumError(ValueError): |
495 | 408 | """A class derived from Value error to indicate the checksum failed.""" | ||
496 | 396 | pass | 409 | pass |
497 | 397 | 410 | ||
498 | 398 | 411 | ||
499 | @@ -498,7 +511,7 @@ | |||
500 | 498 | 511 | ||
501 | 499 | 512 | ||
502 | 500 | def list_nics(nic_type=None): | 513 | def list_nics(nic_type=None): |
504 | 501 | '''Return a list of nics of given type(s)''' | 514 | """Return a list of nics of given type(s)""" |
505 | 502 | if isinstance(nic_type, six.string_types): | 515 | if isinstance(nic_type, six.string_types): |
506 | 503 | int_types = [nic_type] | 516 | int_types = [nic_type] |
507 | 504 | else: | 517 | else: |
508 | @@ -540,12 +553,13 @@ | |||
509 | 540 | 553 | ||
510 | 541 | 554 | ||
511 | 542 | def set_nic_mtu(nic, mtu): | 555 | def set_nic_mtu(nic, mtu): |
513 | 543 | '''Set MTU on a network interface''' | 556 | """Set the Maximum Transmission Unit (MTU) on a network interface.""" |
514 | 544 | cmd = ['ip', 'link', 'set', nic, 'mtu', mtu] | 557 | cmd = ['ip', 'link', 'set', nic, 'mtu', mtu] |
515 | 545 | subprocess.check_call(cmd) | 558 | subprocess.check_call(cmd) |
516 | 546 | 559 | ||
517 | 547 | 560 | ||
518 | 548 | def get_nic_mtu(nic): | 561 | def get_nic_mtu(nic): |
519 | 562 | """Return the Maximum Transmission Unit (MTU) for a network interface.""" | ||
520 | 549 | cmd = ['ip', 'addr', 'show', nic] | 563 | cmd = ['ip', 'addr', 'show', nic] |
521 | 550 | ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n') | 564 | ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n') |
522 | 551 | mtu = "" | 565 | mtu = "" |
523 | @@ -557,6 +571,7 @@ | |||
524 | 557 | 571 | ||
525 | 558 | 572 | ||
526 | 559 | def get_nic_hwaddr(nic): | 573 | def get_nic_hwaddr(nic): |
527 | 574 | """Return the Media Access Control (MAC) for a network interface.""" | ||
528 | 560 | cmd = ['ip', '-o', '-0', 'addr', 'show', nic] | 575 | cmd = ['ip', '-o', '-0', 'addr', 'show', nic] |
529 | 561 | ip_output = subprocess.check_output(cmd).decode('UTF-8') | 576 | ip_output = subprocess.check_output(cmd).decode('UTF-8') |
530 | 562 | hwaddr = "" | 577 | hwaddr = "" |
531 | @@ -567,7 +582,7 @@ | |||
532 | 567 | 582 | ||
533 | 568 | 583 | ||
534 | 569 | def cmp_pkgrevno(package, revno, pkgcache=None): | 584 | def cmp_pkgrevno(package, revno, pkgcache=None): |
536 | 570 | '''Compare supplied revno with the revno of the installed package | 585 | """Compare supplied revno with the revno of the installed package |
537 | 571 | 586 | ||
538 | 572 | * 1 => Installed revno is greater than supplied arg | 587 | * 1 => Installed revno is greater than supplied arg |
539 | 573 | * 0 => Installed revno is the same as supplied arg | 588 | * 0 => Installed revno is the same as supplied arg |
540 | @@ -576,7 +591,7 @@ | |||
541 | 576 | This function imports apt_cache function from charmhelpers.fetch if | 591 | This function imports apt_cache function from charmhelpers.fetch if |
542 | 577 | the pkgcache argument is None. Be sure to add charmhelpers.fetch if | 592 | the pkgcache argument is None. Be sure to add charmhelpers.fetch if |
543 | 578 | you call this function, or pass an apt_pkg.Cache() instance. | 593 | you call this function, or pass an apt_pkg.Cache() instance. |
545 | 579 | ''' | 594 | """ |
546 | 580 | import apt_pkg | 595 | import apt_pkg |
547 | 581 | if not pkgcache: | 596 | if not pkgcache: |
548 | 582 | from charmhelpers.fetch import apt_cache | 597 | from charmhelpers.fetch import apt_cache |
549 | @@ -586,19 +601,27 @@ | |||
550 | 586 | 601 | ||
551 | 587 | 602 | ||
552 | 588 | @contextmanager | 603 | @contextmanager |
554 | 589 | def chdir(d): | 604 | def chdir(directory): |
555 | 605 | """Change the current working directory to a different directory for a code | ||
556 | 606 | block and return the previous directory after the block exits. Useful to | ||
557 | 607 | run commands from a specificed directory. | ||
558 | 608 | |||
559 | 609 | :param str directory: The directory path to change to for this context. | ||
560 | 610 | """ | ||
561 | 590 | cur = os.getcwd() | 611 | cur = os.getcwd() |
562 | 591 | try: | 612 | try: |
564 | 592 | yield os.chdir(d) | 613 | yield os.chdir(directory) |
565 | 593 | finally: | 614 | finally: |
566 | 594 | os.chdir(cur) | 615 | os.chdir(cur) |
567 | 595 | 616 | ||
568 | 596 | 617 | ||
569 | 597 | def chownr(path, owner, group, follow_links=True, chowntopdir=False): | 618 | def chownr(path, owner, group, follow_links=True, chowntopdir=False): |
572 | 598 | """ | 619 | """Recursively change user and group ownership of files and directories |
571 | 599 | Recursively change user and group ownership of files and directories | ||
573 | 600 | in given path. Doesn't chown path itself by default, only its children. | 620 | in given path. Doesn't chown path itself by default, only its children. |
574 | 601 | 621 | ||
575 | 622 | :param str path: The string path to start changing ownership. | ||
576 | 623 | :param str owner: The owner string to use when looking up the uid. | ||
577 | 624 | :param str group: The group string to use when looking up the gid. | ||
578 | 602 | :param bool follow_links: Also Chown links if True | 625 | :param bool follow_links: Also Chown links if True |
579 | 603 | :param bool chowntopdir: Also chown path itself if True | 626 | :param bool chowntopdir: Also chown path itself if True |
580 | 604 | """ | 627 | """ |
581 | @@ -622,15 +645,23 @@ | |||
582 | 622 | 645 | ||
583 | 623 | 646 | ||
584 | 624 | def lchownr(path, owner, group): | 647 | def lchownr(path, owner, group): |
585 | 648 | """Recursively change user and group ownership of files and directories | ||
586 | 649 | in a given path, not following symbolic links. See the documentation for | ||
587 | 650 | 'os.lchown' for more information. | ||
588 | 651 | |||
589 | 652 | :param str path: The string path to start changing ownership. | ||
590 | 653 | :param str owner: The owner string to use when looking up the uid. | ||
591 | 654 | :param str group: The group string to use when looking up the gid. | ||
592 | 655 | """ | ||
593 | 625 | chownr(path, owner, group, follow_links=False) | 656 | chownr(path, owner, group, follow_links=False) |
594 | 626 | 657 | ||
595 | 627 | 658 | ||
596 | 628 | def get_total_ram(): | 659 | def get_total_ram(): |
598 | 629 | '''The total amount of system RAM in bytes. | 660 | """The total amount of system RAM in bytes. |
599 | 630 | 661 | ||
600 | 631 | This is what is reported by the OS, and may be overcommitted when | 662 | This is what is reported by the OS, and may be overcommitted when |
601 | 632 | there are multiple containers hosted on the same machine. | 663 | there are multiple containers hosted on the same machine. |
603 | 633 | ''' | 664 | """ |
604 | 634 | with open('/proc/meminfo', 'r') as f: | 665 | with open('/proc/meminfo', 'r') as f: |
605 | 635 | for line in f.readlines(): | 666 | for line in f.readlines(): |
606 | 636 | if line: | 667 | if line: |
607 | 637 | 668 | ||
608 | === modified file 'hooks/charmhelpers/fetch/giturl.py' | |||
609 | --- hooks/charmhelpers/fetch/giturl.py 2016-01-04 21:26:40 +0000 | |||
610 | +++ hooks/charmhelpers/fetch/giturl.py 2016-01-17 21:20:37 +0000 | |||
611 | @@ -22,7 +22,6 @@ | |||
612 | 22 | filter_installed_packages, | 22 | filter_installed_packages, |
613 | 23 | apt_install, | 23 | apt_install, |
614 | 24 | ) | 24 | ) |
615 | 25 | from charmhelpers.core.host import mkdir | ||
616 | 26 | 25 | ||
617 | 27 | if filter_installed_packages(['git']) != []: | 26 | if filter_installed_packages(['git']) != []: |
618 | 28 | apt_install(['git']) | 27 | apt_install(['git']) |
619 | @@ -50,8 +49,8 @@ | |||
620 | 50 | cmd = ['git', '-C', dest, 'pull', source, branch] | 49 | cmd = ['git', '-C', dest, 'pull', source, branch] |
621 | 51 | else: | 50 | else: |
622 | 52 | cmd = ['git', 'clone', source, dest, '--branch', branch] | 51 | cmd = ['git', 'clone', source, dest, '--branch', branch] |
625 | 53 | if depth: | 52 | if depth: |
626 | 54 | cmd.extend(['--depth', depth]) | 53 | cmd.extend(['--depth', depth]) |
627 | 55 | check_call(cmd) | 54 | check_call(cmd) |
628 | 56 | 55 | ||
629 | 57 | def install(self, source, branch="master", dest=None, depth=None): | 56 | def install(self, source, branch="master", dest=None, depth=None): |
630 | @@ -62,8 +61,6 @@ | |||
631 | 62 | else: | 61 | else: |
632 | 63 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", | 62 | dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", |
633 | 64 | branch_name) | 63 | branch_name) |
634 | 65 | if not os.path.exists(dest_dir): | ||
635 | 66 | mkdir(dest_dir, perms=0o755) | ||
636 | 67 | try: | 64 | try: |
637 | 68 | self.clone(source, dest_dir, branch, depth) | 65 | self.clone(source, dest_dir, branch, depth) |
638 | 69 | except OSError as e: | 66 | except OSError as e: |
charm_lint_check #17513 cinder-ceph-next for hopem mp282880
LINT OK: passed
Build: http:// 10.245. 162.77: 8080/job/ charm_lint_ check/17513/