Merge lp:~1chb1n/charms/trusty/nova-cell/next-amulet-debug-and-makefile into lp:~openstack-charmers/charms/trusty/nova-cell/next
- Trusty Tahr (14.04)
- next-amulet-debug-and-makefile
- Merge into next
Status: | Merged |
---|---|
Merged at revision: | 65 |
Proposed branch: | lp:~1chb1n/charms/trusty/nova-cell/next-amulet-debug-and-makefile |
Merge into: | lp:~openstack-charmers/charms/trusty/nova-cell/next |
Diff against target: |
2271 lines (+1460/-208) 20 files modified
hooks/charmhelpers/contrib/network/ip.py (+84/-1) hooks/charmhelpers/contrib/openstack/amulet/deployment.py (+34/-5) hooks/charmhelpers/contrib/openstack/context.py (+289/-15) hooks/charmhelpers/contrib/openstack/files/__init__.py (+18/-0) hooks/charmhelpers/contrib/openstack/ip.py (+37/-0) hooks/charmhelpers/contrib/openstack/neutron.py (+83/-0) hooks/charmhelpers/contrib/openstack/utils.py (+142/-141) hooks/charmhelpers/contrib/python/packages.py (+2/-2) hooks/charmhelpers/core/fstab.py (+4/-4) hooks/charmhelpers/core/hookenv.py (+40/-1) hooks/charmhelpers/core/host.py (+10/-6) hooks/charmhelpers/core/services/helpers.py (+12/-4) hooks/charmhelpers/core/strutils.py (+42/-0) hooks/charmhelpers/core/sysctl.py (+13/-7) hooks/charmhelpers/core/templating.py (+3/-3) hooks/charmhelpers/core/unitdata.py (+477/-0) hooks/charmhelpers/fetch/archiveurl.py (+10/-10) hooks/charmhelpers/fetch/giturl.py (+1/-1) tests/charmhelpers/contrib/amulet/utils.py (+125/-3) tests/charmhelpers/contrib/openstack/amulet/deployment.py (+34/-5) |
To merge this branch: | bzr merge lp:~1chb1n/charms/trusty/nova-cell/next-amulet-debug-and-makefile |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
OpenStack Charmers | Pending | ||
Review via email:
|
Commit message
Description of the change
auto sync charmhelpers
![](/+icing/build/overlay/assets/skins/sam/images/close.gif)
uosci-testing-bot (uosci-testing-bot) wrote : | # |
![](/+icing/build/overlay/assets/skins/sam/images/close.gif)
uosci-testing-bot (uosci-testing-bot) wrote : | # |
charm_unit_test #3314 nova-cell-next for 1chb1n mp256591
UNIT OK: passed
![](/+icing/build/overlay/assets/skins/sam/images/close.gif)
uosci-testing-bot (uosci-testing-bot) wrote : | # |
charm_amulet_test #3281 nova-cell-next for 1chb1n mp256591
AMULET FAIL: amulet-test failed
AMULET Results (max last 2 lines):
make: *** [test] Error 1
ERROR:root:Make target returned non-zero.
Full amulet test output: http://
Build: http://
![](/+icing/build/overlay/assets/skins/sam/images/close.gif)
uosci-testing-bot (uosci-testing-bot) wrote : | # |
charm_lint_check #3550 nova-cell-next for 1chb1n mp256591
LINT OK: passed
![](/+icing/build/overlay/assets/skins/sam/images/close.gif)
uosci-testing-bot (uosci-testing-bot) wrote : | # |
charm_lint_check #3560 nova-cell-next for 1chb1n mp256591
LINT OK: passed
![](/+icing/build/overlay/assets/skins/sam/images/close.gif)
uosci-testing-bot (uosci-testing-bot) wrote : | # |
charm_unit_test #3348 nova-cell-next for 1chb1n mp256591
UNIT OK: passed
![](/+icing/build/overlay/assets/skins/sam/images/close.gif)
uosci-testing-bot (uosci-testing-bot) wrote : | # |
charm_amulet_test #3317 nova-cell-next for 1chb1n mp256591
AMULET FAIL: amulet-test failed
AMULET Results (max last 2 lines):
make: *** [test] Error 1
ERROR:root:Make target returned non-zero.
Full amulet test output: http://
Build: http://
![](/+icing/build/overlay/assets/skins/sam/images/close.gif)
Ryan Beisner (1chb1n) wrote : | # |
The amulet failure is actually due to missing / no tests in this charm.
00:01:08.349 juju-test CRITICAL: No tests were found
Preview Diff
1 | === modified file 'hooks/charmhelpers/contrib/network/ip.py' | |||
2 | --- hooks/charmhelpers/contrib/network/ip.py 2015-01-29 13:02:55 +0000 | |||
3 | +++ hooks/charmhelpers/contrib/network/ip.py 2015-04-16 21:56:47 +0000 | |||
4 | @@ -17,13 +17,16 @@ | |||
5 | 17 | import glob | 17 | import glob |
6 | 18 | import re | 18 | import re |
7 | 19 | import subprocess | 19 | import subprocess |
8 | 20 | import six | ||
9 | 21 | import socket | ||
10 | 20 | 22 | ||
11 | 21 | from functools import partial | 23 | from functools import partial |
12 | 22 | 24 | ||
13 | 23 | from charmhelpers.core.hookenv import unit_get | 25 | from charmhelpers.core.hookenv import unit_get |
14 | 24 | from charmhelpers.fetch import apt_install | 26 | from charmhelpers.fetch import apt_install |
15 | 25 | from charmhelpers.core.hookenv import ( | 27 | from charmhelpers.core.hookenv import ( |
17 | 26 | log | 28 | log, |
18 | 29 | WARNING, | ||
19 | 27 | ) | 30 | ) |
20 | 28 | 31 | ||
21 | 29 | try: | 32 | try: |
22 | @@ -365,3 +368,83 @@ | |||
23 | 365 | return True | 368 | return True |
24 | 366 | 369 | ||
25 | 367 | return False | 370 | return False |
26 | 371 | |||
27 | 372 | |||
28 | 373 | def is_ip(address): | ||
29 | 374 | """ | ||
30 | 375 | Returns True if address is a valid IP address. | ||
31 | 376 | """ | ||
32 | 377 | try: | ||
33 | 378 | # Test to see if already an IPv4 address | ||
34 | 379 | socket.inet_aton(address) | ||
35 | 380 | return True | ||
36 | 381 | except socket.error: | ||
37 | 382 | return False | ||
38 | 383 | |||
39 | 384 | |||
40 | 385 | def ns_query(address): | ||
41 | 386 | try: | ||
42 | 387 | import dns.resolver | ||
43 | 388 | except ImportError: | ||
44 | 389 | apt_install('python-dnspython') | ||
45 | 390 | import dns.resolver | ||
46 | 391 | |||
47 | 392 | if isinstance(address, dns.name.Name): | ||
48 | 393 | rtype = 'PTR' | ||
49 | 394 | elif isinstance(address, six.string_types): | ||
50 | 395 | rtype = 'A' | ||
51 | 396 | else: | ||
52 | 397 | return None | ||
53 | 398 | |||
54 | 399 | answers = dns.resolver.query(address, rtype) | ||
55 | 400 | if answers: | ||
56 | 401 | return str(answers[0]) | ||
57 | 402 | return None | ||
58 | 403 | |||
59 | 404 | |||
60 | 405 | def get_host_ip(hostname, fallback=None): | ||
61 | 406 | """ | ||
62 | 407 | Resolves the IP for a given hostname, or returns | ||
63 | 408 | the input if it is already an IP. | ||
64 | 409 | """ | ||
65 | 410 | if is_ip(hostname): | ||
66 | 411 | return hostname | ||
67 | 412 | |||
68 | 413 | ip_addr = ns_query(hostname) | ||
69 | 414 | if not ip_addr: | ||
70 | 415 | try: | ||
71 | 416 | ip_addr = socket.gethostbyname(hostname) | ||
72 | 417 | except: | ||
73 | 418 | log("Failed to resolve hostname '%s'" % (hostname), | ||
74 | 419 | level=WARNING) | ||
75 | 420 | return fallback | ||
76 | 421 | return ip_addr | ||
77 | 422 | |||
78 | 423 | |||
79 | 424 | def get_hostname(address, fqdn=True): | ||
80 | 425 | """ | ||
81 | 426 | Resolves hostname for given IP, or returns the input | ||
82 | 427 | if it is already a hostname. | ||
83 | 428 | """ | ||
84 | 429 | if is_ip(address): | ||
85 | 430 | try: | ||
86 | 431 | import dns.reversename | ||
87 | 432 | except ImportError: | ||
88 | 433 | apt_install("python-dnspython") | ||
89 | 434 | import dns.reversename | ||
90 | 435 | |||
91 | 436 | rev = dns.reversename.from_address(address) | ||
92 | 437 | result = ns_query(rev) | ||
93 | 438 | if not result: | ||
94 | 439 | return None | ||
95 | 440 | else: | ||
96 | 441 | result = address | ||
97 | 442 | |||
98 | 443 | if fqdn: | ||
99 | 444 | # strip trailing . | ||
100 | 445 | if result.endswith('.'): | ||
101 | 446 | return result[:-1] | ||
102 | 447 | else: | ||
103 | 448 | return result | ||
104 | 449 | else: | ||
105 | 450 | return result.split('.')[0] | ||
106 | 368 | 451 | ||
107 | === modified file 'hooks/charmhelpers/contrib/openstack/amulet/deployment.py' | |||
108 | --- hooks/charmhelpers/contrib/openstack/amulet/deployment.py 2015-01-29 13:02:55 +0000 | |||
109 | +++ hooks/charmhelpers/contrib/openstack/amulet/deployment.py 2015-04-16 21:56:47 +0000 | |||
110 | @@ -15,6 +15,7 @@ | |||
111 | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. |
112 | 16 | 16 | ||
113 | 17 | import six | 17 | import six |
114 | 18 | from collections import OrderedDict | ||
115 | 18 | from charmhelpers.contrib.amulet.deployment import ( | 19 | from charmhelpers.contrib.amulet.deployment import ( |
116 | 19 | AmuletDeployment | 20 | AmuletDeployment |
117 | 20 | ) | 21 | ) |
118 | @@ -43,7 +44,7 @@ | |||
119 | 43 | Determine if the local branch being tested is derived from its | 44 | Determine if the local branch being tested is derived from its |
120 | 44 | stable or next (dev) branch, and based on this, use the corresonding | 45 | stable or next (dev) branch, and based on this, use the corresonding |
121 | 45 | stable or next branches for the other_services.""" | 46 | stable or next branches for the other_services.""" |
123 | 46 | base_charms = ['mysql', 'mongodb', 'rabbitmq-server'] | 47 | base_charms = ['mysql', 'mongodb'] |
124 | 47 | 48 | ||
125 | 48 | if self.stable: | 49 | if self.stable: |
126 | 49 | for svc in other_services: | 50 | for svc in other_services: |
127 | @@ -71,16 +72,19 @@ | |||
128 | 71 | services.append(this_service) | 72 | services.append(this_service) |
129 | 72 | use_source = ['mysql', 'mongodb', 'rabbitmq-server', 'ceph', | 73 | use_source = ['mysql', 'mongodb', 'rabbitmq-server', 'ceph', |
130 | 73 | 'ceph-osd', 'ceph-radosgw'] | 74 | 'ceph-osd', 'ceph-radosgw'] |
131 | 75 | # Openstack subordinate charms do not expose an origin option as that | ||
132 | 76 | # is controlled by the principle | ||
133 | 77 | ignore = ['neutron-openvswitch'] | ||
134 | 74 | 78 | ||
135 | 75 | if self.openstack: | 79 | if self.openstack: |
136 | 76 | for svc in services: | 80 | for svc in services: |
138 | 77 | if svc['name'] not in use_source: | 81 | if svc['name'] not in use_source + ignore: |
139 | 78 | config = {'openstack-origin': self.openstack} | 82 | config = {'openstack-origin': self.openstack} |
140 | 79 | self.d.configure(svc['name'], config) | 83 | self.d.configure(svc['name'], config) |
141 | 80 | 84 | ||
142 | 81 | if self.source: | 85 | if self.source: |
143 | 82 | for svc in services: | 86 | for svc in services: |
145 | 83 | if svc['name'] in use_source: | 87 | if svc['name'] in use_source and svc['name'] not in ignore: |
146 | 84 | config = {'source': self.source} | 88 | config = {'source': self.source} |
147 | 85 | self.d.configure(svc['name'], config) | 89 | self.d.configure(svc['name'], config) |
148 | 86 | 90 | ||
149 | @@ -97,12 +101,37 @@ | |||
150 | 97 | """ | 101 | """ |
151 | 98 | (self.precise_essex, self.precise_folsom, self.precise_grizzly, | 102 | (self.precise_essex, self.precise_folsom, self.precise_grizzly, |
152 | 99 | self.precise_havana, self.precise_icehouse, | 103 | self.precise_havana, self.precise_icehouse, |
154 | 100 | self.trusty_icehouse) = range(6) | 104 | self.trusty_icehouse, self.trusty_juno, self.trusty_kilo, |
155 | 105 | self.utopic_juno, self.vivid_kilo) = range(10) | ||
156 | 101 | releases = { | 106 | releases = { |
157 | 102 | ('precise', None): self.precise_essex, | 107 | ('precise', None): self.precise_essex, |
158 | 103 | ('precise', 'cloud:precise-folsom'): self.precise_folsom, | 108 | ('precise', 'cloud:precise-folsom'): self.precise_folsom, |
159 | 104 | ('precise', 'cloud:precise-grizzly'): self.precise_grizzly, | 109 | ('precise', 'cloud:precise-grizzly'): self.precise_grizzly, |
160 | 105 | ('precise', 'cloud:precise-havana'): self.precise_havana, | 110 | ('precise', 'cloud:precise-havana'): self.precise_havana, |
161 | 106 | ('precise', 'cloud:precise-icehouse'): self.precise_icehouse, | 111 | ('precise', 'cloud:precise-icehouse'): self.precise_icehouse, |
163 | 107 | ('trusty', None): self.trusty_icehouse} | 112 | ('trusty', None): self.trusty_icehouse, |
164 | 113 | ('trusty', 'cloud:trusty-juno'): self.trusty_juno, | ||
165 | 114 | ('trusty', 'cloud:trusty-kilo'): self.trusty_kilo, | ||
166 | 115 | ('utopic', None): self.utopic_juno, | ||
167 | 116 | ('vivid', None): self.vivid_kilo} | ||
168 | 108 | return releases[(self.series, self.openstack)] | 117 | return releases[(self.series, self.openstack)] |
169 | 118 | |||
170 | 119 | def _get_openstack_release_string(self): | ||
171 | 120 | """Get openstack release string. | ||
172 | 121 | |||
173 | 122 | Return a string representing the openstack release. | ||
174 | 123 | """ | ||
175 | 124 | releases = OrderedDict([ | ||
176 | 125 | ('precise', 'essex'), | ||
177 | 126 | ('quantal', 'folsom'), | ||
178 | 127 | ('raring', 'grizzly'), | ||
179 | 128 | ('saucy', 'havana'), | ||
180 | 129 | ('trusty', 'icehouse'), | ||
181 | 130 | ('utopic', 'juno'), | ||
182 | 131 | ('vivid', 'kilo'), | ||
183 | 132 | ]) | ||
184 | 133 | if self.openstack: | ||
185 | 134 | os_origin = self.openstack.split(':')[1] | ||
186 | 135 | return os_origin.split('%s-' % self.series)[1].split('/')[0] | ||
187 | 136 | else: | ||
188 | 137 | return releases[self.series] | ||
189 | 109 | 138 | ||
190 | === modified file 'hooks/charmhelpers/contrib/openstack/context.py' | |||
191 | --- hooks/charmhelpers/contrib/openstack/context.py 2015-01-29 13:02:55 +0000 | |||
192 | +++ hooks/charmhelpers/contrib/openstack/context.py 2015-04-16 21:56:47 +0000 | |||
193 | @@ -16,11 +16,13 @@ | |||
194 | 16 | 16 | ||
195 | 17 | import json | 17 | import json |
196 | 18 | import os | 18 | import os |
197 | 19 | import re | ||
198 | 19 | import time | 20 | import time |
199 | 20 | from base64 import b64decode | 21 | from base64 import b64decode |
200 | 21 | from subprocess import check_call | 22 | from subprocess import check_call |
201 | 22 | 23 | ||
202 | 23 | import six | 24 | import six |
203 | 25 | import yaml | ||
204 | 24 | 26 | ||
205 | 25 | from charmhelpers.fetch import ( | 27 | from charmhelpers.fetch import ( |
206 | 26 | apt_install, | 28 | apt_install, |
207 | @@ -45,8 +47,11 @@ | |||
208 | 45 | ) | 47 | ) |
209 | 46 | 48 | ||
210 | 47 | from charmhelpers.core.sysctl import create as sysctl_create | 49 | from charmhelpers.core.sysctl import create as sysctl_create |
211 | 50 | from charmhelpers.core.strutils import bool_from_string | ||
212 | 48 | 51 | ||
213 | 49 | from charmhelpers.core.host import ( | 52 | from charmhelpers.core.host import ( |
214 | 53 | list_nics, | ||
215 | 54 | get_nic_hwaddr, | ||
216 | 50 | mkdir, | 55 | mkdir, |
217 | 51 | write_file, | 56 | write_file, |
218 | 52 | ) | 57 | ) |
219 | @@ -63,16 +68,22 @@ | |||
220 | 63 | ) | 68 | ) |
221 | 64 | from charmhelpers.contrib.openstack.neutron import ( | 69 | from charmhelpers.contrib.openstack.neutron import ( |
222 | 65 | neutron_plugin_attribute, | 70 | neutron_plugin_attribute, |
223 | 71 | parse_data_port_mappings, | ||
224 | 72 | ) | ||
225 | 73 | from charmhelpers.contrib.openstack.ip import ( | ||
226 | 74 | resolve_address, | ||
227 | 75 | INTERNAL, | ||
228 | 66 | ) | 76 | ) |
229 | 67 | from charmhelpers.contrib.network.ip import ( | 77 | from charmhelpers.contrib.network.ip import ( |
230 | 68 | get_address_in_network, | 78 | get_address_in_network, |
231 | 79 | get_ipv4_addr, | ||
232 | 69 | get_ipv6_addr, | 80 | get_ipv6_addr, |
233 | 70 | get_netmask_for_address, | 81 | get_netmask_for_address, |
234 | 71 | format_ipv6_addr, | 82 | format_ipv6_addr, |
235 | 72 | is_address_in_network, | 83 | is_address_in_network, |
236 | 84 | is_bridge_member, | ||
237 | 73 | ) | 85 | ) |
238 | 74 | from charmhelpers.contrib.openstack.utils import get_host_ip | 86 | from charmhelpers.contrib.openstack.utils import get_host_ip |
239 | 75 | |||
240 | 76 | CA_CERT_PATH = '/usr/local/share/ca-certificates/keystone_juju_ca_cert.crt' | 87 | CA_CERT_PATH = '/usr/local/share/ca-certificates/keystone_juju_ca_cert.crt' |
241 | 77 | ADDRESS_TYPES = ['admin', 'internal', 'public'] | 88 | ADDRESS_TYPES = ['admin', 'internal', 'public'] |
242 | 78 | 89 | ||
243 | @@ -104,9 +115,41 @@ | |||
244 | 104 | def config_flags_parser(config_flags): | 115 | def config_flags_parser(config_flags): |
245 | 105 | """Parses config flags string into dict. | 116 | """Parses config flags string into dict. |
246 | 106 | 117 | ||
247 | 118 | This parsing method supports a few different formats for the config | ||
248 | 119 | flag values to be parsed: | ||
249 | 120 | |||
250 | 121 | 1. A string in the simple format of key=value pairs, with the possibility | ||
251 | 122 | of specifying multiple key value pairs within the same string. For | ||
252 | 123 | example, a string in the format of 'key1=value1, key2=value2' will | ||
253 | 124 | return a dict of: | ||
254 | 125 | {'key1': 'value1', | ||
255 | 126 | 'key2': 'value2'}. | ||
256 | 127 | |||
257 | 128 | 2. A string in the above format, but supporting a comma-delimited list | ||
258 | 129 | of values for the same key. For example, a string in the format of | ||
259 | 130 | 'key1=value1, key2=value3,value4,value5' will return a dict of: | ||
260 | 131 | {'key1', 'value1', | ||
261 | 132 | 'key2', 'value2,value3,value4'} | ||
262 | 133 | |||
263 | 134 | 3. A string containing a colon character (:) prior to an equal | ||
264 | 135 | character (=) will be treated as yaml and parsed as such. This can be | ||
265 | 136 | used to specify more complex key value pairs. For example, | ||
266 | 137 | a string in the format of 'key1: subkey1=value1, subkey2=value2' will | ||
267 | 138 | return a dict of: | ||
268 | 139 | {'key1': 'subkey1=value1, subkey2=value2'} | ||
269 | 140 | |||
270 | 107 | The provided config_flags string may be a list of comma-separated values | 141 | The provided config_flags string may be a list of comma-separated values |
271 | 108 | which themselves may be comma-separated list of values. | 142 | which themselves may be comma-separated list of values. |
272 | 109 | """ | 143 | """ |
273 | 144 | # If we find a colon before an equals sign then treat it as yaml. | ||
274 | 145 | # Note: limit it to finding the colon first since this indicates assignment | ||
275 | 146 | # for inline yaml. | ||
276 | 147 | colon = config_flags.find(':') | ||
277 | 148 | equals = config_flags.find('=') | ||
278 | 149 | if colon > 0: | ||
279 | 150 | if colon < equals or equals < 0: | ||
280 | 151 | return yaml.safe_load(config_flags) | ||
281 | 152 | |||
282 | 110 | if config_flags.find('==') >= 0: | 153 | if config_flags.find('==') >= 0: |
283 | 111 | log("config_flags is not in expected format (key=value)", level=ERROR) | 154 | log("config_flags is not in expected format (key=value)", level=ERROR) |
284 | 112 | raise OSContextError | 155 | raise OSContextError |
285 | @@ -191,7 +234,7 @@ | |||
286 | 191 | unit=local_unit()) | 234 | unit=local_unit()) |
287 | 192 | if set_hostname != access_hostname: | 235 | if set_hostname != access_hostname: |
288 | 193 | relation_set(relation_settings={hostname_key: access_hostname}) | 236 | relation_set(relation_settings={hostname_key: access_hostname}) |
290 | 194 | return ctxt # Defer any further hook execution for now.... | 237 | return None # Defer any further hook execution for now.... |
291 | 195 | 238 | ||
292 | 196 | password_setting = 'password' | 239 | password_setting = 'password' |
293 | 197 | if self.relation_prefix: | 240 | if self.relation_prefix: |
294 | @@ -277,12 +320,29 @@ | |||
295 | 277 | 320 | ||
296 | 278 | 321 | ||
297 | 279 | class IdentityServiceContext(OSContextGenerator): | 322 | class IdentityServiceContext(OSContextGenerator): |
299 | 280 | interfaces = ['identity-service'] | 323 | |
300 | 324 | def __init__(self, service=None, service_user=None, rel_name='identity-service'): | ||
301 | 325 | self.service = service | ||
302 | 326 | self.service_user = service_user | ||
303 | 327 | self.rel_name = rel_name | ||
304 | 328 | self.interfaces = [self.rel_name] | ||
305 | 281 | 329 | ||
306 | 282 | def __call__(self): | 330 | def __call__(self): |
308 | 283 | log('Generating template context for identity-service', level=DEBUG) | 331 | log('Generating template context for ' + self.rel_name, level=DEBUG) |
309 | 284 | ctxt = {} | 332 | ctxt = {} |
311 | 285 | for rid in relation_ids('identity-service'): | 333 | |
312 | 334 | if self.service and self.service_user: | ||
313 | 335 | # This is required for pki token signing if we don't want /tmp to | ||
314 | 336 | # be used. | ||
315 | 337 | cachedir = '/var/cache/%s' % (self.service) | ||
316 | 338 | if not os.path.isdir(cachedir): | ||
317 | 339 | log("Creating service cache dir %s" % (cachedir), level=DEBUG) | ||
318 | 340 | mkdir(path=cachedir, owner=self.service_user, | ||
319 | 341 | group=self.service_user, perms=0o700) | ||
320 | 342 | |||
321 | 343 | ctxt['signing_dir'] = cachedir | ||
322 | 344 | |||
323 | 345 | for rid in relation_ids(self.rel_name): | ||
324 | 286 | for unit in related_units(rid): | 346 | for unit in related_units(rid): |
325 | 287 | rdata = relation_get(rid=rid, unit=unit) | 347 | rdata = relation_get(rid=rid, unit=unit) |
326 | 288 | serv_host = rdata.get('service_host') | 348 | serv_host = rdata.get('service_host') |
327 | @@ -291,15 +351,16 @@ | |||
328 | 291 | auth_host = format_ipv6_addr(auth_host) or auth_host | 351 | auth_host = format_ipv6_addr(auth_host) or auth_host |
329 | 292 | svc_protocol = rdata.get('service_protocol') or 'http' | 352 | svc_protocol = rdata.get('service_protocol') or 'http' |
330 | 293 | auth_protocol = rdata.get('auth_protocol') or 'http' | 353 | auth_protocol = rdata.get('auth_protocol') or 'http' |
340 | 294 | ctxt = {'service_port': rdata.get('service_port'), | 354 | ctxt.update({'service_port': rdata.get('service_port'), |
341 | 295 | 'service_host': serv_host, | 355 | 'service_host': serv_host, |
342 | 296 | 'auth_host': auth_host, | 356 | 'auth_host': auth_host, |
343 | 297 | 'auth_port': rdata.get('auth_port'), | 357 | 'auth_port': rdata.get('auth_port'), |
344 | 298 | 'admin_tenant_name': rdata.get('service_tenant'), | 358 | 'admin_tenant_name': rdata.get('service_tenant'), |
345 | 299 | 'admin_user': rdata.get('service_username'), | 359 | 'admin_user': rdata.get('service_username'), |
346 | 300 | 'admin_password': rdata.get('service_password'), | 360 | 'admin_password': rdata.get('service_password'), |
347 | 301 | 'service_protocol': svc_protocol, | 361 | 'service_protocol': svc_protocol, |
348 | 302 | 'auth_protocol': auth_protocol} | 362 | 'auth_protocol': auth_protocol}) |
349 | 363 | |||
350 | 303 | if context_complete(ctxt): | 364 | if context_complete(ctxt): |
351 | 304 | # NOTE(jamespage) this is required for >= icehouse | 365 | # NOTE(jamespage) this is required for >= icehouse |
352 | 305 | # so a missing value just indicates keystone needs | 366 | # so a missing value just indicates keystone needs |
353 | @@ -398,6 +459,11 @@ | |||
354 | 398 | 459 | ||
355 | 399 | ctxt['rabbitmq_hosts'] = ','.join(sorted(rabbitmq_hosts)) | 460 | ctxt['rabbitmq_hosts'] = ','.join(sorted(rabbitmq_hosts)) |
356 | 400 | 461 | ||
357 | 462 | oslo_messaging_flags = conf.get('oslo-messaging-flags', None) | ||
358 | 463 | if oslo_messaging_flags: | ||
359 | 464 | ctxt['oslo_messaging_flags'] = config_flags_parser( | ||
360 | 465 | oslo_messaging_flags) | ||
361 | 466 | |||
362 | 401 | if not context_complete(ctxt): | 467 | if not context_complete(ctxt): |
363 | 402 | return {} | 468 | return {} |
364 | 403 | 469 | ||
365 | @@ -677,7 +743,14 @@ | |||
366 | 677 | 'endpoints': [], | 743 | 'endpoints': [], |
367 | 678 | 'ext_ports': []} | 744 | 'ext_ports': []} |
368 | 679 | 745 | ||
370 | 680 | for cn in self.canonical_names(): | 746 | cns = self.canonical_names() |
371 | 747 | if cns: | ||
372 | 748 | for cn in cns: | ||
373 | 749 | self.configure_cert(cn) | ||
374 | 750 | else: | ||
375 | 751 | # Expect cert/key provided in config (currently assumed that ca | ||
376 | 752 | # uses ip for cn) | ||
377 | 753 | cn = resolve_address(endpoint_type=INTERNAL) | ||
378 | 681 | self.configure_cert(cn) | 754 | self.configure_cert(cn) |
379 | 682 | 755 | ||
380 | 683 | addresses = self.get_network_addresses() | 756 | addresses = self.get_network_addresses() |
381 | @@ -740,6 +813,19 @@ | |||
382 | 740 | 813 | ||
383 | 741 | return ovs_ctxt | 814 | return ovs_ctxt |
384 | 742 | 815 | ||
385 | 816 | def nuage_ctxt(self): | ||
386 | 817 | driver = neutron_plugin_attribute(self.plugin, 'driver', | ||
387 | 818 | self.network_manager) | ||
388 | 819 | config = neutron_plugin_attribute(self.plugin, 'config', | ||
389 | 820 | self.network_manager) | ||
390 | 821 | nuage_ctxt = {'core_plugin': driver, | ||
391 | 822 | 'neutron_plugin': 'vsp', | ||
392 | 823 | 'neutron_security_groups': self.neutron_security_groups, | ||
393 | 824 | 'local_ip': unit_private_ip(), | ||
394 | 825 | 'config': config} | ||
395 | 826 | |||
396 | 827 | return nuage_ctxt | ||
397 | 828 | |||
398 | 743 | def nvp_ctxt(self): | 829 | def nvp_ctxt(self): |
399 | 744 | driver = neutron_plugin_attribute(self.plugin, 'driver', | 830 | driver = neutron_plugin_attribute(self.plugin, 'driver', |
400 | 745 | self.network_manager) | 831 | self.network_manager) |
401 | @@ -823,6 +909,8 @@ | |||
402 | 823 | ctxt.update(self.n1kv_ctxt()) | 909 | ctxt.update(self.n1kv_ctxt()) |
403 | 824 | elif self.plugin == 'Calico': | 910 | elif self.plugin == 'Calico': |
404 | 825 | ctxt.update(self.calico_ctxt()) | 911 | ctxt.update(self.calico_ctxt()) |
405 | 912 | elif self.plugin == 'vsp': | ||
406 | 913 | ctxt.update(self.nuage_ctxt()) | ||
407 | 826 | 914 | ||
408 | 827 | alchemy_flags = config('neutron-alchemy-flags') | 915 | alchemy_flags = config('neutron-alchemy-flags') |
409 | 828 | if alchemy_flags: | 916 | if alchemy_flags: |
410 | @@ -833,6 +921,48 @@ | |||
411 | 833 | return ctxt | 921 | return ctxt |
412 | 834 | 922 | ||
413 | 835 | 923 | ||
414 | 924 | class NeutronPortContext(OSContextGenerator): | ||
415 | 925 | NIC_PREFIXES = ['eth', 'bond'] | ||
416 | 926 | |||
417 | 927 | def resolve_ports(self, ports): | ||
418 | 928 | """Resolve NICs not yet bound to bridge(s) | ||
419 | 929 | |||
420 | 930 | If hwaddress provided then returns resolved hwaddress otherwise NIC. | ||
421 | 931 | """ | ||
422 | 932 | if not ports: | ||
423 | 933 | return None | ||
424 | 934 | |||
425 | 935 | hwaddr_to_nic = {} | ||
426 | 936 | hwaddr_to_ip = {} | ||
427 | 937 | for nic in list_nics(self.NIC_PREFIXES): | ||
428 | 938 | hwaddr = get_nic_hwaddr(nic) | ||
429 | 939 | hwaddr_to_nic[hwaddr] = nic | ||
430 | 940 | addresses = get_ipv4_addr(nic, fatal=False) | ||
431 | 941 | addresses += get_ipv6_addr(iface=nic, fatal=False) | ||
432 | 942 | hwaddr_to_ip[hwaddr] = addresses | ||
433 | 943 | |||
434 | 944 | resolved = [] | ||
435 | 945 | mac_regex = re.compile(r'([0-9A-F]{2}[:-]){5}([0-9A-F]{2})', re.I) | ||
436 | 946 | for entry in ports: | ||
437 | 947 | if re.match(mac_regex, entry): | ||
438 | 948 | # NIC is in known NICs and does NOT have an IP address | ||
439 | 949 | if entry in hwaddr_to_nic and not hwaddr_to_ip[entry]: | ||
440 | 950 | # If the nic is part of a bridge then don't use it | ||
441 | 951 | if is_bridge_member(hwaddr_to_nic[entry]): | ||
442 | 952 | continue | ||
443 | 953 | |||
444 | 954 | # Entry is a MAC address for a valid interface that doesn't | ||
445 | 955 | # have an IP address assigned yet. | ||
446 | 956 | resolved.append(hwaddr_to_nic[entry]) | ||
447 | 957 | else: | ||
448 | 958 | # If the passed entry is not a MAC address, assume it's a valid | ||
449 | 959 | # interface, and that the user put it there on purpose (we can | ||
450 | 960 | # trust it to be the real external network). | ||
451 | 961 | resolved.append(entry) | ||
452 | 962 | |||
453 | 963 | return resolved | ||
454 | 964 | |||
455 | 965 | |||
456 | 836 | class OSConfigFlagContext(OSContextGenerator): | 966 | class OSConfigFlagContext(OSContextGenerator): |
457 | 837 | """Provides support for user-defined config flags. | 967 | """Provides support for user-defined config flags. |
458 | 838 | 968 | ||
459 | @@ -1021,6 +1151,8 @@ | |||
460 | 1021 | for unit in related_units(rid): | 1151 | for unit in related_units(rid): |
461 | 1022 | ctxt['zmq_nonce'] = relation_get('nonce', unit, rid) | 1152 | ctxt['zmq_nonce'] = relation_get('nonce', unit, rid) |
462 | 1023 | ctxt['zmq_host'] = relation_get('host', unit, rid) | 1153 | ctxt['zmq_host'] = relation_get('host', unit, rid) |
463 | 1154 | ctxt['zmq_redis_address'] = relation_get( | ||
464 | 1155 | 'zmq_redis_address', unit, rid) | ||
465 | 1024 | 1156 | ||
466 | 1025 | return ctxt | 1157 | return ctxt |
467 | 1026 | 1158 | ||
468 | @@ -1052,3 +1184,145 @@ | |||
469 | 1052 | sysctl_create(sysctl_dict, | 1184 | sysctl_create(sysctl_dict, |
470 | 1053 | '/etc/sysctl.d/50-{0}.conf'.format(charm_name())) | 1185 | '/etc/sysctl.d/50-{0}.conf'.format(charm_name())) |
471 | 1054 | return {'sysctl': sysctl_dict} | 1186 | return {'sysctl': sysctl_dict} |
472 | 1187 | |||
473 | 1188 | |||
474 | 1189 | class NeutronAPIContext(OSContextGenerator): | ||
475 | 1190 | ''' | ||
476 | 1191 | Inspects current neutron-plugin-api relation for neutron settings. Return | ||
477 | 1192 | defaults if it is not present. | ||
478 | 1193 | ''' | ||
479 | 1194 | interfaces = ['neutron-plugin-api'] | ||
480 | 1195 | |||
481 | 1196 | def __call__(self): | ||
482 | 1197 | self.neutron_defaults = { | ||
483 | 1198 | 'l2_population': { | ||
484 | 1199 | 'rel_key': 'l2-population', | ||
485 | 1200 | 'default': False, | ||
486 | 1201 | }, | ||
487 | 1202 | 'overlay_network_type': { | ||
488 | 1203 | 'rel_key': 'overlay-network-type', | ||
489 | 1204 | 'default': 'gre', | ||
490 | 1205 | }, | ||
491 | 1206 | 'neutron_security_groups': { | ||
492 | 1207 | 'rel_key': 'neutron-security-groups', | ||
493 | 1208 | 'default': False, | ||
494 | 1209 | }, | ||
495 | 1210 | 'network_device_mtu': { | ||
496 | 1211 | 'rel_key': 'network-device-mtu', | ||
497 | 1212 | 'default': None, | ||
498 | 1213 | }, | ||
499 | 1214 | 'enable_dvr': { | ||
500 | 1215 | 'rel_key': 'enable-dvr', | ||
501 | 1216 | 'default': False, | ||
502 | 1217 | }, | ||
503 | 1218 | 'enable_l3ha': { | ||
504 | 1219 | 'rel_key': 'enable-l3ha', | ||
505 | 1220 | 'default': False, | ||
506 | 1221 | }, | ||
507 | 1222 | } | ||
508 | 1223 | ctxt = self.get_neutron_options({}) | ||
509 | 1224 | for rid in relation_ids('neutron-plugin-api'): | ||
510 | 1225 | for unit in related_units(rid): | ||
511 | 1226 | rdata = relation_get(rid=rid, unit=unit) | ||
512 | 1227 | if 'l2-population' in rdata: | ||
513 | 1228 | ctxt.update(self.get_neutron_options(rdata)) | ||
514 | 1229 | |||
515 | 1230 | return ctxt | ||
516 | 1231 | |||
517 | 1232 | def get_neutron_options(self, rdata): | ||
518 | 1233 | settings = {} | ||
519 | 1234 | for nkey in self.neutron_defaults.keys(): | ||
520 | 1235 | defv = self.neutron_defaults[nkey]['default'] | ||
521 | 1236 | rkey = self.neutron_defaults[nkey]['rel_key'] | ||
522 | 1237 | if rkey in rdata.keys(): | ||
523 | 1238 | if type(defv) is bool: | ||
524 | 1239 | settings[nkey] = bool_from_string(rdata[rkey]) | ||
525 | 1240 | else: | ||
526 | 1241 | settings[nkey] = rdata[rkey] | ||
527 | 1242 | else: | ||
528 | 1243 | settings[nkey] = defv | ||
529 | 1244 | return settings | ||
530 | 1245 | |||
531 | 1246 | |||
532 | 1247 | class ExternalPortContext(NeutronPortContext): | ||
533 | 1248 | |||
534 | 1249 | def __call__(self): | ||
535 | 1250 | ctxt = {} | ||
536 | 1251 | ports = config('ext-port') | ||
537 | 1252 | if ports: | ||
538 | 1253 | ports = [p.strip() for p in ports.split()] | ||
539 | 1254 | ports = self.resolve_ports(ports) | ||
540 | 1255 | if ports: | ||
541 | 1256 | ctxt = {"ext_port": ports[0]} | ||
542 | 1257 | napi_settings = NeutronAPIContext()() | ||
543 | 1258 | mtu = napi_settings.get('network_device_mtu') | ||
544 | 1259 | if mtu: | ||
545 | 1260 | ctxt['ext_port_mtu'] = mtu | ||
546 | 1261 | |||
547 | 1262 | return ctxt | ||
548 | 1263 | |||
549 | 1264 | |||
550 | 1265 | class DataPortContext(NeutronPortContext): | ||
551 | 1266 | |||
552 | 1267 | def __call__(self): | ||
553 | 1268 | ports = config('data-port') | ||
554 | 1269 | if ports: | ||
555 | 1270 | portmap = parse_data_port_mappings(ports) | ||
556 | 1271 | ports = portmap.values() | ||
557 | 1272 | resolved = self.resolve_ports(ports) | ||
558 | 1273 | normalized = {get_nic_hwaddr(port): port for port in resolved | ||
559 | 1274 | if port not in ports} | ||
560 | 1275 | normalized.update({port: port for port in resolved | ||
561 | 1276 | if port in ports}) | ||
562 | 1277 | if resolved: | ||
563 | 1278 | return {bridge: normalized[port] for bridge, port in | ||
564 | 1279 | six.iteritems(portmap) if port in normalized.keys()} | ||
565 | 1280 | |||
566 | 1281 | return None | ||
567 | 1282 | |||
568 | 1283 | |||
569 | 1284 | class PhyNICMTUContext(DataPortContext): | ||
570 | 1285 | |||
571 | 1286 | def __call__(self): | ||
572 | 1287 | ctxt = {} | ||
573 | 1288 | mappings = super(PhyNICMTUContext, self).__call__() | ||
574 | 1289 | if mappings and mappings.values(): | ||
575 | 1290 | ports = mappings.values() | ||
576 | 1291 | napi_settings = NeutronAPIContext()() | ||
577 | 1292 | mtu = napi_settings.get('network_device_mtu') | ||
578 | 1293 | if mtu: | ||
579 | 1294 | ctxt["devs"] = '\\n'.join(ports) | ||
580 | 1295 | ctxt['mtu'] = mtu | ||
581 | 1296 | |||
582 | 1297 | return ctxt | ||
583 | 1298 | |||
584 | 1299 | |||
585 | 1300 | class NetworkServiceContext(OSContextGenerator): | ||
586 | 1301 | |||
587 | 1302 | def __init__(self, rel_name='quantum-network-service'): | ||
588 | 1303 | self.rel_name = rel_name | ||
589 | 1304 | self.interfaces = [rel_name] | ||
590 | 1305 | |||
591 | 1306 | def __call__(self): | ||
592 | 1307 | for rid in relation_ids(self.rel_name): | ||
593 | 1308 | for unit in related_units(rid): | ||
594 | 1309 | rdata = relation_get(rid=rid, unit=unit) | ||
595 | 1310 | ctxt = { | ||
596 | 1311 | 'keystone_host': rdata.get('keystone_host'), | ||
597 | 1312 | 'service_port': rdata.get('service_port'), | ||
598 | 1313 | 'auth_port': rdata.get('auth_port'), | ||
599 | 1314 | 'service_tenant': rdata.get('service_tenant'), | ||
600 | 1315 | 'service_username': rdata.get('service_username'), | ||
601 | 1316 | 'service_password': rdata.get('service_password'), | ||
602 | 1317 | 'quantum_host': rdata.get('quantum_host'), | ||
603 | 1318 | 'quantum_port': rdata.get('quantum_port'), | ||
604 | 1319 | 'quantum_url': rdata.get('quantum_url'), | ||
605 | 1320 | 'region': rdata.get('region'), | ||
606 | 1321 | 'service_protocol': | ||
607 | 1322 | rdata.get('service_protocol') or 'http', | ||
608 | 1323 | 'auth_protocol': | ||
609 | 1324 | rdata.get('auth_protocol') or 'http', | ||
610 | 1325 | } | ||
611 | 1326 | if context_complete(ctxt): | ||
612 | 1327 | return ctxt | ||
613 | 1328 | return {} | ||
614 | 1055 | 1329 | ||
615 | === added directory 'hooks/charmhelpers/contrib/openstack/files' | |||
616 | === added file 'hooks/charmhelpers/contrib/openstack/files/__init__.py' | |||
617 | --- hooks/charmhelpers/contrib/openstack/files/__init__.py 1970-01-01 00:00:00 +0000 | |||
618 | +++ hooks/charmhelpers/contrib/openstack/files/__init__.py 2015-04-16 21:56:47 +0000 | |||
619 | @@ -0,0 +1,18 @@ | |||
620 | 1 | # Copyright 2014-2015 Canonical Limited. | ||
621 | 2 | # | ||
622 | 3 | # This file is part of charm-helpers. | ||
623 | 4 | # | ||
624 | 5 | # charm-helpers is free software: you can redistribute it and/or modify | ||
625 | 6 | # it under the terms of the GNU Lesser General Public License version 3 as | ||
626 | 7 | # published by the Free Software Foundation. | ||
627 | 8 | # | ||
628 | 9 | # charm-helpers is distributed in the hope that it will be useful, | ||
629 | 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
630 | 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
631 | 12 | # GNU Lesser General Public License for more details. | ||
632 | 13 | # | ||
633 | 14 | # You should have received a copy of the GNU Lesser General Public License | ||
634 | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | ||
635 | 16 | |||
636 | 17 | # dummy __init__.py to fool syncer into thinking this is a syncable python | ||
637 | 18 | # module | ||
638 | 0 | 19 | ||
639 | === modified file 'hooks/charmhelpers/contrib/openstack/ip.py' | |||
640 | --- hooks/charmhelpers/contrib/openstack/ip.py 2015-01-29 13:02:55 +0000 | |||
641 | +++ hooks/charmhelpers/contrib/openstack/ip.py 2015-04-16 21:56:47 +0000 | |||
642 | @@ -26,6 +26,8 @@ | |||
643 | 26 | ) | 26 | ) |
644 | 27 | from charmhelpers.contrib.hahelpers.cluster import is_clustered | 27 | from charmhelpers.contrib.hahelpers.cluster import is_clustered |
645 | 28 | 28 | ||
646 | 29 | from functools import partial | ||
647 | 30 | |||
648 | 29 | PUBLIC = 'public' | 31 | PUBLIC = 'public' |
649 | 30 | INTERNAL = 'int' | 32 | INTERNAL = 'int' |
650 | 31 | ADMIN = 'admin' | 33 | ADMIN = 'admin' |
651 | @@ -107,3 +109,38 @@ | |||
652 | 107 | "clustered=%s)" % (net_type, clustered)) | 109 | "clustered=%s)" % (net_type, clustered)) |
653 | 108 | 110 | ||
654 | 109 | return resolved_address | 111 | return resolved_address |
655 | 112 | |||
656 | 113 | |||
657 | 114 | def endpoint_url(configs, url_template, port, endpoint_type=PUBLIC, | ||
658 | 115 | override=None): | ||
659 | 116 | """Returns the correct endpoint URL to advertise to Keystone. | ||
660 | 117 | |||
661 | 118 | This method provides the correct endpoint URL which should be advertised to | ||
662 | 119 | the keystone charm for endpoint creation. This method allows for the url to | ||
663 | 120 | be overridden to force a keystone endpoint to have specific URL for any of | ||
664 | 121 | the defined scopes (admin, internal, public). | ||
665 | 122 | |||
666 | 123 | :param configs: OSTemplateRenderer config templating object to inspect | ||
667 | 124 | for a complete https context. | ||
668 | 125 | :param url_template: str format string for creating the url template. Only | ||
669 | 126 | two values will be passed - the scheme+hostname | ||
670 | 127 | returned by the canonical_url and the port. | ||
671 | 128 | :param endpoint_type: str endpoint type to resolve. | ||
672 | 129 | :param override: str the name of the config option which overrides the | ||
673 | 130 | endpoint URL defined by the charm itself. None will | ||
674 | 131 | disable any overrides (default). | ||
675 | 132 | """ | ||
676 | 133 | if override: | ||
677 | 134 | # Return any user-defined overrides for the keystone endpoint URL. | ||
678 | 135 | user_value = config(override) | ||
679 | 136 | if user_value: | ||
680 | 137 | return user_value.strip() | ||
681 | 138 | |||
682 | 139 | return url_template % (canonical_url(configs, endpoint_type), port) | ||
683 | 140 | |||
684 | 141 | |||
685 | 142 | public_endpoint = partial(endpoint_url, endpoint_type=PUBLIC) | ||
686 | 143 | |||
687 | 144 | internal_endpoint = partial(endpoint_url, endpoint_type=INTERNAL) | ||
688 | 145 | |||
689 | 146 | admin_endpoint = partial(endpoint_url, endpoint_type=ADMIN) | ||
690 | 110 | 147 | ||
691 | === modified file 'hooks/charmhelpers/contrib/openstack/neutron.py' | |||
692 | --- hooks/charmhelpers/contrib/openstack/neutron.py 2015-01-29 13:02:55 +0000 | |||
693 | +++ hooks/charmhelpers/contrib/openstack/neutron.py 2015-04-16 21:56:47 +0000 | |||
694 | @@ -16,6 +16,7 @@ | |||
695 | 16 | 16 | ||
696 | 17 | # Various utilies for dealing with Neutron and the renaming from Quantum. | 17 | # Various utilies for dealing with Neutron and the renaming from Quantum. |
697 | 18 | 18 | ||
698 | 19 | import six | ||
699 | 19 | from subprocess import check_output | 20 | from subprocess import check_output |
700 | 20 | 21 | ||
701 | 21 | from charmhelpers.core.hookenv import ( | 22 | from charmhelpers.core.hookenv import ( |
702 | @@ -179,6 +180,19 @@ | |||
703 | 179 | 'nova-api-metadata']], | 180 | 'nova-api-metadata']], |
704 | 180 | 'server_packages': ['neutron-server', 'calico-control'], | 181 | 'server_packages': ['neutron-server', 'calico-control'], |
705 | 181 | 'server_services': ['neutron-server'] | 182 | 'server_services': ['neutron-server'] |
706 | 183 | }, | ||
707 | 184 | 'vsp': { | ||
708 | 185 | 'config': '/etc/neutron/plugins/nuage/nuage_plugin.ini', | ||
709 | 186 | 'driver': 'neutron.plugins.nuage.plugin.NuagePlugin', | ||
710 | 187 | 'contexts': [ | ||
711 | 188 | context.SharedDBContext(user=config('neutron-database-user'), | ||
712 | 189 | database=config('neutron-database'), | ||
713 | 190 | relation_prefix='neutron', | ||
714 | 191 | ssl_dir=NEUTRON_CONF_DIR)], | ||
715 | 192 | 'services': [], | ||
716 | 193 | 'packages': [], | ||
717 | 194 | 'server_packages': ['neutron-server', 'neutron-plugin-nuage'], | ||
718 | 195 | 'server_services': ['neutron-server'] | ||
719 | 182 | } | 196 | } |
720 | 183 | } | 197 | } |
721 | 184 | if release >= 'icehouse': | 198 | if release >= 'icehouse': |
722 | @@ -237,3 +251,72 @@ | |||
723 | 237 | else: | 251 | else: |
724 | 238 | # ensure accurate naming for all releases post-H | 252 | # ensure accurate naming for all releases post-H |
725 | 239 | return 'neutron' | 253 | return 'neutron' |
726 | 254 | |||
727 | 255 | |||
728 | 256 | def parse_mappings(mappings): | ||
729 | 257 | parsed = {} | ||
730 | 258 | if mappings: | ||
731 | 259 | mappings = mappings.split(' ') | ||
732 | 260 | for m in mappings: | ||
733 | 261 | p = m.partition(':') | ||
734 | 262 | if p[1] == ':': | ||
735 | 263 | parsed[p[0].strip()] = p[2].strip() | ||
736 | 264 | |||
737 | 265 | return parsed | ||
738 | 266 | |||
739 | 267 | |||
740 | 268 | def parse_bridge_mappings(mappings): | ||
741 | 269 | """Parse bridge mappings. | ||
742 | 270 | |||
743 | 271 | Mappings must be a space-delimited list of provider:bridge mappings. | ||
744 | 272 | |||
745 | 273 | Returns dict of the form {provider:bridge}. | ||
746 | 274 | """ | ||
747 | 275 | return parse_mappings(mappings) | ||
748 | 276 | |||
749 | 277 | |||
750 | 278 | def parse_data_port_mappings(mappings, default_bridge='br-data'): | ||
751 | 279 | """Parse data port mappings. | ||
752 | 280 | |||
753 | 281 | Mappings must be a space-delimited list of bridge:port mappings. | ||
754 | 282 | |||
755 | 283 | Returns dict of the form {bridge:port}. | ||
756 | 284 | """ | ||
757 | 285 | _mappings = parse_mappings(mappings) | ||
758 | 286 | if not _mappings: | ||
759 | 287 | if not mappings: | ||
760 | 288 | return {} | ||
761 | 289 | |||
762 | 290 | # For backwards-compatibility we need to support port-only provided in | ||
763 | 291 | # config. | ||
764 | 292 | _mappings = {default_bridge: mappings.split(' ')[0]} | ||
765 | 293 | |||
766 | 294 | bridges = _mappings.keys() | ||
767 | 295 | ports = _mappings.values() | ||
768 | 296 | if len(set(bridges)) != len(bridges): | ||
769 | 297 | raise Exception("It is not allowed to have more than one port " | ||
770 | 298 | "configured on the same bridge") | ||
771 | 299 | |||
772 | 300 | if len(set(ports)) != len(ports): | ||
773 | 301 | raise Exception("It is not allowed to have the same port configured " | ||
774 | 302 | "on more than one bridge") | ||
775 | 303 | |||
776 | 304 | return _mappings | ||
777 | 305 | |||
778 | 306 | |||
779 | 307 | def parse_vlan_range_mappings(mappings): | ||
780 | 308 | """Parse vlan range mappings. | ||
781 | 309 | |||
782 | 310 | Mappings must be a space-delimited list of provider:start:end mappings. | ||
783 | 311 | |||
784 | 312 | Returns dict of the form {provider: (start, end)}. | ||
785 | 313 | """ | ||
786 | 314 | _mappings = parse_mappings(mappings) | ||
787 | 315 | if not _mappings: | ||
788 | 316 | return {} | ||
789 | 317 | |||
790 | 318 | mappings = {} | ||
791 | 319 | for p, r in six.iteritems(_mappings): | ||
792 | 320 | mappings[p] = tuple(r.split(':')) | ||
793 | 321 | |||
794 | 322 | return mappings | ||
795 | 240 | 323 | ||
796 | === modified file 'hooks/charmhelpers/contrib/openstack/utils.py' | |||
797 | --- hooks/charmhelpers/contrib/openstack/utils.py 2015-01-29 13:02:55 +0000 | |||
798 | +++ hooks/charmhelpers/contrib/openstack/utils.py 2015-04-16 21:56:47 +0000 | |||
799 | @@ -23,12 +23,17 @@ | |||
800 | 23 | import subprocess | 23 | import subprocess |
801 | 24 | import json | 24 | import json |
802 | 25 | import os | 25 | import os |
803 | 26 | import socket | ||
804 | 27 | import sys | 26 | import sys |
805 | 28 | 27 | ||
806 | 29 | import six | 28 | import six |
807 | 30 | import yaml | 29 | import yaml |
808 | 31 | 30 | ||
809 | 31 | from charmhelpers.contrib.network import ip | ||
810 | 32 | |||
811 | 33 | from charmhelpers.core import ( | ||
812 | 34 | unitdata, | ||
813 | 35 | ) | ||
814 | 36 | |||
815 | 32 | from charmhelpers.core.hookenv import ( | 37 | from charmhelpers.core.hookenv import ( |
816 | 33 | config, | 38 | config, |
817 | 34 | log as juju_log, | 39 | log as juju_log, |
818 | @@ -103,6 +108,7 @@ | |||
819 | 103 | ('2.1.0', 'juno'), | 108 | ('2.1.0', 'juno'), |
820 | 104 | ('2.2.0', 'juno'), | 109 | ('2.2.0', 'juno'), |
821 | 105 | ('2.2.1', 'kilo'), | 110 | ('2.2.1', 'kilo'), |
822 | 111 | ('2.2.2', 'kilo'), | ||
823 | 106 | ]) | 112 | ]) |
824 | 107 | 113 | ||
825 | 108 | DEFAULT_LOOPBACK_SIZE = '5G' | 114 | DEFAULT_LOOPBACK_SIZE = '5G' |
826 | @@ -328,6 +334,21 @@ | |||
827 | 328 | error_out("Invalid openstack-release specified: %s" % rel) | 334 | error_out("Invalid openstack-release specified: %s" % rel) |
828 | 329 | 335 | ||
829 | 330 | 336 | ||
830 | 337 | def config_value_changed(option): | ||
831 | 338 | """ | ||
832 | 339 | Determine if config value changed since last call to this function. | ||
833 | 340 | """ | ||
834 | 341 | hook_data = unitdata.HookData() | ||
835 | 342 | with hook_data(): | ||
836 | 343 | db = unitdata.kv() | ||
837 | 344 | current = config(option) | ||
838 | 345 | saved = db.get(option) | ||
839 | 346 | db.set(option, current) | ||
840 | 347 | if saved is None: | ||
841 | 348 | return False | ||
842 | 349 | return current != saved | ||
843 | 350 | |||
844 | 351 | |||
845 | 331 | def save_script_rc(script_path="scripts/scriptrc", **env_vars): | 352 | def save_script_rc(script_path="scripts/scriptrc", **env_vars): |
846 | 332 | """ | 353 | """ |
847 | 333 | Write an rc file in the charm-delivered directory containing | 354 | Write an rc file in the charm-delivered directory containing |
848 | @@ -420,77 +441,10 @@ | |||
849 | 420 | else: | 441 | else: |
850 | 421 | zap_disk(block_device) | 442 | zap_disk(block_device) |
851 | 422 | 443 | ||
923 | 423 | 444 | is_ip = ip.is_ip | |
924 | 424 | def is_ip(address): | 445 | ns_query = ip.ns_query |
925 | 425 | """ | 446 | get_host_ip = ip.get_host_ip |
926 | 426 | Returns True if address is a valid IP address. | 447 | get_hostname = ip.get_hostname |
856 | 427 | """ | ||
857 | 428 | try: | ||
858 | 429 | # Test to see if already an IPv4 address | ||
859 | 430 | socket.inet_aton(address) | ||
860 | 431 | return True | ||
861 | 432 | except socket.error: | ||
862 | 433 | return False | ||
863 | 434 | |||
864 | 435 | |||
865 | 436 | def ns_query(address): | ||
866 | 437 | try: | ||
867 | 438 | import dns.resolver | ||
868 | 439 | except ImportError: | ||
869 | 440 | apt_install('python-dnspython') | ||
870 | 441 | import dns.resolver | ||
871 | 442 | |||
872 | 443 | if isinstance(address, dns.name.Name): | ||
873 | 444 | rtype = 'PTR' | ||
874 | 445 | elif isinstance(address, six.string_types): | ||
875 | 446 | rtype = 'A' | ||
876 | 447 | else: | ||
877 | 448 | return None | ||
878 | 449 | |||
879 | 450 | answers = dns.resolver.query(address, rtype) | ||
880 | 451 | if answers: | ||
881 | 452 | return str(answers[0]) | ||
882 | 453 | return None | ||
883 | 454 | |||
884 | 455 | |||
885 | 456 | def get_host_ip(hostname): | ||
886 | 457 | """ | ||
887 | 458 | Resolves the IP for a given hostname, or returns | ||
888 | 459 | the input if it is already an IP. | ||
889 | 460 | """ | ||
890 | 461 | if is_ip(hostname): | ||
891 | 462 | return hostname | ||
892 | 463 | |||
893 | 464 | return ns_query(hostname) | ||
894 | 465 | |||
895 | 466 | |||
896 | 467 | def get_hostname(address, fqdn=True): | ||
897 | 468 | """ | ||
898 | 469 | Resolves hostname for given IP, or returns the input | ||
899 | 470 | if it is already a hostname. | ||
900 | 471 | """ | ||
901 | 472 | if is_ip(address): | ||
902 | 473 | try: | ||
903 | 474 | import dns.reversename | ||
904 | 475 | except ImportError: | ||
905 | 476 | apt_install('python-dnspython') | ||
906 | 477 | import dns.reversename | ||
907 | 478 | |||
908 | 479 | rev = dns.reversename.from_address(address) | ||
909 | 480 | result = ns_query(rev) | ||
910 | 481 | if not result: | ||
911 | 482 | return None | ||
912 | 483 | else: | ||
913 | 484 | result = address | ||
914 | 485 | |||
915 | 486 | if fqdn: | ||
916 | 487 | # strip trailing . | ||
917 | 488 | if result.endswith('.'): | ||
918 | 489 | return result[:-1] | ||
919 | 490 | else: | ||
920 | 491 | return result | ||
921 | 492 | else: | ||
922 | 493 | return result.split('.')[0] | ||
927 | 494 | 448 | ||
928 | 495 | 449 | ||
929 | 496 | def get_matchmaker_map(mm_file='/etc/oslo/matchmaker_ring.json'): | 450 | def get_matchmaker_map(mm_file='/etc/oslo/matchmaker_ring.json'): |
930 | @@ -534,82 +488,106 @@ | |||
931 | 534 | 488 | ||
932 | 535 | 489 | ||
933 | 536 | def git_install_requested(): | 490 | def git_install_requested(): |
936 | 537 | """Returns true if openstack-origin-git is specified.""" | 491 | """ |
937 | 538 | return config('openstack-origin-git') != "None" | 492 | Returns true if openstack-origin-git is specified. |
938 | 493 | """ | ||
939 | 494 | return config('openstack-origin-git') is not None | ||
940 | 539 | 495 | ||
941 | 540 | 496 | ||
942 | 541 | requirements_dir = None | 497 | requirements_dir = None |
943 | 542 | 498 | ||
944 | 543 | 499 | ||
947 | 544 | def git_clone_and_install(file_name, core_project): | 500 | def git_clone_and_install(projects_yaml, core_project): |
948 | 545 | """Clone/install all OpenStack repos specified in yaml config file.""" | 501 | """ |
949 | 502 | Clone/install all specified OpenStack repositories. | ||
950 | 503 | |||
951 | 504 | The expected format of projects_yaml is: | ||
952 | 505 | repositories: | ||
953 | 506 | - {name: keystone, | ||
954 | 507 | repository: 'git://git.openstack.org/openstack/keystone.git', | ||
955 | 508 | branch: 'stable/icehouse'} | ||
956 | 509 | - {name: requirements, | ||
957 | 510 | repository: 'git://git.openstack.org/openstack/requirements.git', | ||
958 | 511 | branch: 'stable/icehouse'} | ||
959 | 512 | directory: /mnt/openstack-git | ||
960 | 513 | http_proxy: http://squid.internal:3128 | ||
961 | 514 | https_proxy: https://squid.internal:3128 | ||
962 | 515 | |||
963 | 516 | The directory, http_proxy, and https_proxy keys are optional. | ||
964 | 517 | """ | ||
965 | 546 | global requirements_dir | 518 | global requirements_dir |
966 | 519 | parent_dir = '/mnt/openstack-git' | ||
967 | 547 | 520 | ||
969 | 548 | if file_name == "None": | 521 | if not projects_yaml: |
970 | 549 | return | 522 | return |
971 | 550 | 523 | ||
1030 | 551 | yaml_file = os.path.join(charm_dir(), file_name) | 524 | projects = yaml.load(projects_yaml) |
1031 | 552 | 525 | _git_validate_projects_yaml(projects, core_project) | |
1032 | 553 | # clone/install the requirements project first | 526 | |
1033 | 554 | installed = _git_clone_and_install_subset(yaml_file, | 527 | old_environ = dict(os.environ) |
1034 | 555 | whitelist=['requirements']) | 528 | |
1035 | 556 | if 'requirements' not in installed: | 529 | if 'http_proxy' in projects.keys(): |
1036 | 557 | error_out('requirements git repository must be specified') | 530 | os.environ['http_proxy'] = projects['http_proxy'] |
1037 | 558 | 531 | if 'https_proxy' in projects.keys(): | |
1038 | 559 | # clone/install all other projects except requirements and the core project | 532 | os.environ['https_proxy'] = projects['https_proxy'] |
1039 | 560 | blacklist = ['requirements', core_project] | 533 | |
1040 | 561 | _git_clone_and_install_subset(yaml_file, blacklist=blacklist, | 534 | if 'directory' in projects.keys(): |
1041 | 562 | update_requirements=True) | 535 | parent_dir = projects['directory'] |
1042 | 563 | 536 | ||
1043 | 564 | # clone/install the core project | 537 | for p in projects['repositories']: |
1044 | 565 | whitelist = [core_project] | 538 | repo = p['repository'] |
1045 | 566 | installed = _git_clone_and_install_subset(yaml_file, whitelist=whitelist, | 539 | branch = p['branch'] |
1046 | 567 | update_requirements=True) | 540 | if p['name'] == 'requirements': |
1047 | 568 | if core_project not in installed: | 541 | repo_dir = _git_clone_and_install_single(repo, branch, parent_dir, |
1048 | 569 | error_out('{} git repository must be specified'.format(core_project)) | 542 | update_requirements=False) |
1049 | 570 | 543 | requirements_dir = repo_dir | |
1050 | 571 | 544 | else: | |
1051 | 572 | def _git_clone_and_install_subset(yaml_file, whitelist=[], blacklist=[], | 545 | repo_dir = _git_clone_and_install_single(repo, branch, parent_dir, |
1052 | 573 | update_requirements=False): | 546 | update_requirements=True) |
1053 | 574 | """Clone/install subset of OpenStack repos specified in yaml config file.""" | 547 | |
1054 | 575 | global requirements_dir | 548 | os.environ = old_environ |
1055 | 576 | installed = [] | 549 | |
1056 | 577 | 550 | ||
1057 | 578 | with open(yaml_file, 'r') as fd: | 551 | def _git_validate_projects_yaml(projects, core_project): |
1058 | 579 | projects = yaml.load(fd) | 552 | """ |
1059 | 580 | for proj, val in projects.items(): | 553 | Validate the projects yaml. |
1060 | 581 | # The project subset is chosen based on the following 3 rules: | 554 | """ |
1061 | 582 | # 1) If project is in blacklist, we don't clone/install it, period. | 555 | _git_ensure_key_exists('repositories', projects) |
1062 | 583 | # 2) If whitelist is empty, we clone/install everything else. | 556 | |
1063 | 584 | # 3) If whitelist is not empty, we clone/install everything in the | 557 | for project in projects['repositories']: |
1064 | 585 | # whitelist. | 558 | _git_ensure_key_exists('name', project.keys()) |
1065 | 586 | if proj in blacklist: | 559 | _git_ensure_key_exists('repository', project.keys()) |
1066 | 587 | continue | 560 | _git_ensure_key_exists('branch', project.keys()) |
1067 | 588 | if whitelist and proj not in whitelist: | 561 | |
1068 | 589 | continue | 562 | if projects['repositories'][0]['name'] != 'requirements': |
1069 | 590 | repo = val['repository'] | 563 | error_out('{} git repo must be specified first'.format('requirements')) |
1070 | 591 | branch = val['branch'] | 564 | |
1071 | 592 | repo_dir = _git_clone_and_install_single(repo, branch, | 565 | if projects['repositories'][-1]['name'] != core_project: |
1072 | 593 | update_requirements) | 566 | error_out('{} git repo must be specified last'.format(core_project)) |
1073 | 594 | if proj == 'requirements': | 567 | |
1074 | 595 | requirements_dir = repo_dir | 568 | |
1075 | 596 | installed.append(proj) | 569 | def _git_ensure_key_exists(key, keys): |
1076 | 597 | return installed | 570 | """ |
1077 | 598 | 571 | Ensure that key exists in keys. | |
1078 | 599 | 572 | """ | |
1079 | 600 | def _git_clone_and_install_single(repo, branch, update_requirements=False): | 573 | if key not in keys: |
1080 | 601 | """Clone and install a single git repository.""" | 574 | error_out('openstack-origin-git key \'{}\' is missing'.format(key)) |
1081 | 602 | dest_parent_dir = "/mnt/openstack-git/" | 575 | |
1082 | 603 | dest_dir = os.path.join(dest_parent_dir, os.path.basename(repo)) | 576 | |
1083 | 604 | 577 | def _git_clone_and_install_single(repo, branch, parent_dir, update_requirements): | |
1084 | 605 | if not os.path.exists(dest_parent_dir): | 578 | """ |
1085 | 606 | juju_log('Host dir not mounted at {}. ' | 579 | Clone and install a single git repository. |
1086 | 607 | 'Creating directory there instead.'.format(dest_parent_dir)) | 580 | """ |
1087 | 608 | os.mkdir(dest_parent_dir) | 581 | dest_dir = os.path.join(parent_dir, os.path.basename(repo)) |
1088 | 582 | |||
1089 | 583 | if not os.path.exists(parent_dir): | ||
1090 | 584 | juju_log('Directory already exists at {}. ' | ||
1091 | 585 | 'No need to create directory.'.format(parent_dir)) | ||
1092 | 586 | os.mkdir(parent_dir) | ||
1093 | 609 | 587 | ||
1094 | 610 | if not os.path.exists(dest_dir): | 588 | if not os.path.exists(dest_dir): |
1095 | 611 | juju_log('Cloning git repo: {}, branch: {}'.format(repo, branch)) | 589 | juju_log('Cloning git repo: {}, branch: {}'.format(repo, branch)) |
1097 | 612 | repo_dir = install_remote(repo, dest=dest_parent_dir, branch=branch) | 590 | repo_dir = install_remote(repo, dest=parent_dir, branch=branch) |
1098 | 613 | else: | 591 | else: |
1099 | 614 | repo_dir = dest_dir | 592 | repo_dir = dest_dir |
1100 | 615 | 593 | ||
1101 | @@ -626,16 +604,39 @@ | |||
1102 | 626 | 604 | ||
1103 | 627 | 605 | ||
1104 | 628 | def _git_update_requirements(package_dir, reqs_dir): | 606 | def _git_update_requirements(package_dir, reqs_dir): |
1106 | 629 | """Update from global requirements. | 607 | """ |
1107 | 608 | Update from global requirements. | ||
1108 | 630 | 609 | ||
1111 | 631 | Update an OpenStack git directory's requirements.txt and | 610 | Update an OpenStack git directory's requirements.txt and |
1112 | 632 | test-requirements.txt from global-requirements.txt.""" | 611 | test-requirements.txt from global-requirements.txt. |
1113 | 612 | """ | ||
1114 | 633 | orig_dir = os.getcwd() | 613 | orig_dir = os.getcwd() |
1115 | 634 | os.chdir(reqs_dir) | 614 | os.chdir(reqs_dir) |
1117 | 635 | cmd = "python update.py {}".format(package_dir) | 615 | cmd = ['python', 'update.py', package_dir] |
1118 | 636 | try: | 616 | try: |
1120 | 637 | subprocess.check_call(cmd.split(' ')) | 617 | subprocess.check_call(cmd) |
1121 | 638 | except subprocess.CalledProcessError: | 618 | except subprocess.CalledProcessError: |
1122 | 639 | package = os.path.basename(package_dir) | 619 | package = os.path.basename(package_dir) |
1123 | 640 | error_out("Error updating {} from global-requirements.txt".format(package)) | 620 | error_out("Error updating {} from global-requirements.txt".format(package)) |
1124 | 641 | os.chdir(orig_dir) | 621 | os.chdir(orig_dir) |
1125 | 622 | |||
1126 | 623 | |||
1127 | 624 | def git_src_dir(projects_yaml, project): | ||
1128 | 625 | """ | ||
1129 | 626 | Return the directory where the specified project's source is located. | ||
1130 | 627 | """ | ||
1131 | 628 | parent_dir = '/mnt/openstack-git' | ||
1132 | 629 | |||
1133 | 630 | if not projects_yaml: | ||
1134 | 631 | return | ||
1135 | 632 | |||
1136 | 633 | projects = yaml.load(projects_yaml) | ||
1137 | 634 | |||
1138 | 635 | if 'directory' in projects.keys(): | ||
1139 | 636 | parent_dir = projects['directory'] | ||
1140 | 637 | |||
1141 | 638 | for p in projects['repositories']: | ||
1142 | 639 | if p['name'] == project: | ||
1143 | 640 | return os.path.join(parent_dir, os.path.basename(p['repository'])) | ||
1144 | 641 | |||
1145 | 642 | return None | ||
1146 | 642 | 643 | ||
1147 | === modified file 'hooks/charmhelpers/contrib/python/packages.py' | |||
1148 | --- hooks/charmhelpers/contrib/python/packages.py 2015-01-29 13:15:20 +0000 | |||
1149 | +++ hooks/charmhelpers/contrib/python/packages.py 2015-04-16 21:56:47 +0000 | |||
1150 | @@ -17,8 +17,6 @@ | |||
1151 | 17 | # You should have received a copy of the GNU Lesser General Public License | 17 | # You should have received a copy of the GNU Lesser General Public License |
1152 | 18 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | 18 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. |
1153 | 19 | 19 | ||
1154 | 20 | __author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" | ||
1155 | 21 | |||
1156 | 22 | from charmhelpers.fetch import apt_install, apt_update | 20 | from charmhelpers.fetch import apt_install, apt_update |
1157 | 23 | from charmhelpers.core.hookenv import log | 21 | from charmhelpers.core.hookenv import log |
1158 | 24 | 22 | ||
1159 | @@ -29,6 +27,8 @@ | |||
1160 | 29 | apt_install('python-pip') | 27 | apt_install('python-pip') |
1161 | 30 | from pip import main as pip_execute | 28 | from pip import main as pip_execute |
1162 | 31 | 29 | ||
1163 | 30 | __author__ = "Jorge Niedbalski <jorge.niedbalski@canonical.com>" | ||
1164 | 31 | |||
1165 | 32 | 32 | ||
1166 | 33 | def parse_options(given, available): | 33 | def parse_options(given, available): |
1167 | 34 | """Given a set of options, check if available""" | 34 | """Given a set of options, check if available""" |
1168 | 35 | 35 | ||
1169 | === modified file 'hooks/charmhelpers/core/fstab.py' | |||
1170 | --- hooks/charmhelpers/core/fstab.py 2015-01-29 13:02:55 +0000 | |||
1171 | +++ hooks/charmhelpers/core/fstab.py 2015-04-16 21:56:47 +0000 | |||
1172 | @@ -17,11 +17,11 @@ | |||
1173 | 17 | # You should have received a copy of the GNU Lesser General Public License | 17 | # You should have received a copy of the GNU Lesser General Public License |
1174 | 18 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | 18 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. |
1175 | 19 | 19 | ||
1176 | 20 | __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>' | ||
1177 | 21 | |||
1178 | 22 | import io | 20 | import io |
1179 | 23 | import os | 21 | import os |
1180 | 24 | 22 | ||
1181 | 23 | __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>' | ||
1182 | 24 | |||
1183 | 25 | 25 | ||
1184 | 26 | class Fstab(io.FileIO): | 26 | class Fstab(io.FileIO): |
1185 | 27 | """This class extends file in order to implement a file reader/writer | 27 | """This class extends file in order to implement a file reader/writer |
1186 | @@ -77,7 +77,7 @@ | |||
1187 | 77 | for line in self.readlines(): | 77 | for line in self.readlines(): |
1188 | 78 | line = line.decode('us-ascii') | 78 | line = line.decode('us-ascii') |
1189 | 79 | try: | 79 | try: |
1191 | 80 | if line.strip() and not line.startswith("#"): | 80 | if line.strip() and not line.strip().startswith("#"): |
1192 | 81 | yield self._hydrate_entry(line) | 81 | yield self._hydrate_entry(line) |
1193 | 82 | except ValueError: | 82 | except ValueError: |
1194 | 83 | pass | 83 | pass |
1195 | @@ -104,7 +104,7 @@ | |||
1196 | 104 | 104 | ||
1197 | 105 | found = False | 105 | found = False |
1198 | 106 | for index, line in enumerate(lines): | 106 | for index, line in enumerate(lines): |
1200 | 107 | if not line.startswith("#"): | 107 | if line.strip() and not line.strip().startswith("#"): |
1201 | 108 | if self._hydrate_entry(line) == entry: | 108 | if self._hydrate_entry(line) == entry: |
1202 | 109 | found = True | 109 | found = True |
1203 | 110 | break | 110 | break |
1204 | 111 | 111 | ||
1205 | === modified file 'hooks/charmhelpers/core/hookenv.py' | |||
1206 | --- hooks/charmhelpers/core/hookenv.py 2015-01-29 13:02:55 +0000 | |||
1207 | +++ hooks/charmhelpers/core/hookenv.py 2015-04-16 21:56:47 +0000 | |||
1208 | @@ -20,11 +20,13 @@ | |||
1209 | 20 | # Authors: | 20 | # Authors: |
1210 | 21 | # Charm Helpers Developers <juju@lists.ubuntu.com> | 21 | # Charm Helpers Developers <juju@lists.ubuntu.com> |
1211 | 22 | 22 | ||
1212 | 23 | from __future__ import print_function | ||
1213 | 23 | import os | 24 | import os |
1214 | 24 | import json | 25 | import json |
1215 | 25 | import yaml | 26 | import yaml |
1216 | 26 | import subprocess | 27 | import subprocess |
1217 | 27 | import sys | 28 | import sys |
1218 | 29 | import errno | ||
1219 | 28 | from subprocess import CalledProcessError | 30 | from subprocess import CalledProcessError |
1220 | 29 | 31 | ||
1221 | 30 | import six | 32 | import six |
1222 | @@ -87,7 +89,18 @@ | |||
1223 | 87 | if not isinstance(message, six.string_types): | 89 | if not isinstance(message, six.string_types): |
1224 | 88 | message = repr(message) | 90 | message = repr(message) |
1225 | 89 | command += [message] | 91 | command += [message] |
1227 | 90 | subprocess.call(command) | 92 | # Missing juju-log should not cause failures in unit tests |
1228 | 93 | # Send log output to stderr | ||
1229 | 94 | try: | ||
1230 | 95 | subprocess.call(command) | ||
1231 | 96 | except OSError as e: | ||
1232 | 97 | if e.errno == errno.ENOENT: | ||
1233 | 98 | if level: | ||
1234 | 99 | message = "{}: {}".format(level, message) | ||
1235 | 100 | message = "juju-log: {}".format(message) | ||
1236 | 101 | print(message, file=sys.stderr) | ||
1237 | 102 | else: | ||
1238 | 103 | raise | ||
1239 | 91 | 104 | ||
1240 | 92 | 105 | ||
1241 | 93 | class Serializable(UserDict): | 106 | class Serializable(UserDict): |
1242 | @@ -566,3 +579,29 @@ | |||
1243 | 566 | def charm_dir(): | 579 | def charm_dir(): |
1244 | 567 | """Return the root directory of the current charm""" | 580 | """Return the root directory of the current charm""" |
1245 | 568 | return os.environ.get('CHARM_DIR') | 581 | return os.environ.get('CHARM_DIR') |
1246 | 582 | |||
1247 | 583 | |||
1248 | 584 | @cached | ||
1249 | 585 | def action_get(key=None): | ||
1250 | 586 | """Gets the value of an action parameter, or all key/value param pairs""" | ||
1251 | 587 | cmd = ['action-get'] | ||
1252 | 588 | if key is not None: | ||
1253 | 589 | cmd.append(key) | ||
1254 | 590 | cmd.append('--format=json') | ||
1255 | 591 | action_data = json.loads(subprocess.check_output(cmd).decode('UTF-8')) | ||
1256 | 592 | return action_data | ||
1257 | 593 | |||
1258 | 594 | |||
1259 | 595 | def action_set(values): | ||
1260 | 596 | """Sets the values to be returned after the action finishes""" | ||
1261 | 597 | cmd = ['action-set'] | ||
1262 | 598 | for k, v in list(values.items()): | ||
1263 | 599 | cmd.append('{}={}'.format(k, v)) | ||
1264 | 600 | subprocess.check_call(cmd) | ||
1265 | 601 | |||
1266 | 602 | |||
1267 | 603 | def action_fail(message): | ||
1268 | 604 | """Sets the action status to failed and sets the error message. | ||
1269 | 605 | |||
1270 | 606 | The results set by action_set are preserved.""" | ||
1271 | 607 | subprocess.check_call(['action-fail', message]) | ||
1272 | 569 | 608 | ||
1273 | === modified file 'hooks/charmhelpers/core/host.py' | |||
1274 | --- hooks/charmhelpers/core/host.py 2015-01-29 13:02:55 +0000 | |||
1275 | +++ hooks/charmhelpers/core/host.py 2015-04-16 21:56:47 +0000 | |||
1276 | @@ -191,11 +191,11 @@ | |||
1277 | 191 | 191 | ||
1278 | 192 | 192 | ||
1279 | 193 | def write_file(path, content, owner='root', group='root', perms=0o444): | 193 | def write_file(path, content, owner='root', group='root', perms=0o444): |
1281 | 194 | """Create or overwrite a file with the contents of a string""" | 194 | """Create or overwrite a file with the contents of a byte string.""" |
1282 | 195 | log("Writing file {} {}:{} {:o}".format(path, owner, group, perms)) | 195 | log("Writing file {} {}:{} {:o}".format(path, owner, group, perms)) |
1283 | 196 | uid = pwd.getpwnam(owner).pw_uid | 196 | uid = pwd.getpwnam(owner).pw_uid |
1284 | 197 | gid = grp.getgrnam(group).gr_gid | 197 | gid = grp.getgrnam(group).gr_gid |
1286 | 198 | with open(path, 'w') as target: | 198 | with open(path, 'wb') as target: |
1287 | 199 | os.fchown(target.fileno(), uid, gid) | 199 | os.fchown(target.fileno(), uid, gid) |
1288 | 200 | os.fchmod(target.fileno(), perms) | 200 | os.fchmod(target.fileno(), perms) |
1289 | 201 | target.write(content) | 201 | target.write(content) |
1290 | @@ -305,11 +305,11 @@ | |||
1291 | 305 | ceph_client_changed function. | 305 | ceph_client_changed function. |
1292 | 306 | """ | 306 | """ |
1293 | 307 | def wrap(f): | 307 | def wrap(f): |
1295 | 308 | def wrapped_f(*args): | 308 | def wrapped_f(*args, **kwargs): |
1296 | 309 | checksums = {} | 309 | checksums = {} |
1297 | 310 | for path in restart_map: | 310 | for path in restart_map: |
1298 | 311 | checksums[path] = file_hash(path) | 311 | checksums[path] = file_hash(path) |
1300 | 312 | f(*args) | 312 | f(*args, **kwargs) |
1301 | 313 | restarts = [] | 313 | restarts = [] |
1302 | 314 | for path in restart_map: | 314 | for path in restart_map: |
1303 | 315 | if checksums[path] != file_hash(path): | 315 | if checksums[path] != file_hash(path): |
1304 | @@ -339,12 +339,16 @@ | |||
1305 | 339 | def pwgen(length=None): | 339 | def pwgen(length=None): |
1306 | 340 | """Generate a random password.""" | 340 | """Generate a random password.""" |
1307 | 341 | if length is None: | 341 | if length is None: |
1308 | 342 | # A random length is ok to use a weak PRNG | ||
1309 | 342 | length = random.choice(range(35, 45)) | 343 | length = random.choice(range(35, 45)) |
1310 | 343 | alphanumeric_chars = [ | 344 | alphanumeric_chars = [ |
1311 | 344 | l for l in (string.ascii_letters + string.digits) | 345 | l for l in (string.ascii_letters + string.digits) |
1312 | 345 | if l not in 'l0QD1vAEIOUaeiou'] | 346 | if l not in 'l0QD1vAEIOUaeiou'] |
1313 | 347 | # Use a crypto-friendly PRNG (e.g. /dev/urandom) for making the | ||
1314 | 348 | # actual password | ||
1315 | 349 | random_generator = random.SystemRandom() | ||
1316 | 346 | random_chars = [ | 350 | random_chars = [ |
1318 | 347 | random.choice(alphanumeric_chars) for _ in range(length)] | 351 | random_generator.choice(alphanumeric_chars) for _ in range(length)] |
1319 | 348 | return(''.join(random_chars)) | 352 | return(''.join(random_chars)) |
1320 | 349 | 353 | ||
1321 | 350 | 354 | ||
1322 | @@ -361,7 +365,7 @@ | |||
1323 | 361 | ip_output = (line for line in ip_output if line) | 365 | ip_output = (line for line in ip_output if line) |
1324 | 362 | for line in ip_output: | 366 | for line in ip_output: |
1325 | 363 | if line.split()[1].startswith(int_type): | 367 | if line.split()[1].startswith(int_type): |
1327 | 364 | matched = re.search('.*: (bond[0-9]+\.[0-9]+)@.*', line) | 368 | matched = re.search('.*: (' + int_type + r'[0-9]+\.[0-9]+)@.*', line) |
1328 | 365 | if matched: | 369 | if matched: |
1329 | 366 | interface = matched.groups()[0] | 370 | interface = matched.groups()[0] |
1330 | 367 | else: | 371 | else: |
1331 | 368 | 372 | ||
1332 | === modified file 'hooks/charmhelpers/core/services/helpers.py' | |||
1333 | --- hooks/charmhelpers/core/services/helpers.py 2015-01-29 13:02:55 +0000 | |||
1334 | +++ hooks/charmhelpers/core/services/helpers.py 2015-04-16 21:56:47 +0000 | |||
1335 | @@ -45,12 +45,14 @@ | |||
1336 | 45 | """ | 45 | """ |
1337 | 46 | name = None | 46 | name = None |
1338 | 47 | interface = None | 47 | interface = None |
1339 | 48 | required_keys = [] | ||
1340 | 49 | 48 | ||
1341 | 50 | def __init__(self, name=None, additional_required_keys=None): | 49 | def __init__(self, name=None, additional_required_keys=None): |
1342 | 50 | if not hasattr(self, 'required_keys'): | ||
1343 | 51 | self.required_keys = [] | ||
1344 | 52 | |||
1345 | 51 | if name is not None: | 53 | if name is not None: |
1346 | 52 | self.name = name | 54 | self.name = name |
1348 | 53 | if additional_required_keys is not None: | 55 | if additional_required_keys: |
1349 | 54 | self.required_keys.extend(additional_required_keys) | 56 | self.required_keys.extend(additional_required_keys) |
1350 | 55 | self.get_data() | 57 | self.get_data() |
1351 | 56 | 58 | ||
1352 | @@ -134,7 +136,10 @@ | |||
1353 | 134 | """ | 136 | """ |
1354 | 135 | name = 'db' | 137 | name = 'db' |
1355 | 136 | interface = 'mysql' | 138 | interface = 'mysql' |
1357 | 137 | required_keys = ['host', 'user', 'password', 'database'] | 139 | |
1358 | 140 | def __init__(self, *args, **kwargs): | ||
1359 | 141 | self.required_keys = ['host', 'user', 'password', 'database'] | ||
1360 | 142 | RelationContext.__init__(self, *args, **kwargs) | ||
1361 | 138 | 143 | ||
1362 | 139 | 144 | ||
1363 | 140 | class HttpRelation(RelationContext): | 145 | class HttpRelation(RelationContext): |
1364 | @@ -146,7 +151,10 @@ | |||
1365 | 146 | """ | 151 | """ |
1366 | 147 | name = 'website' | 152 | name = 'website' |
1367 | 148 | interface = 'http' | 153 | interface = 'http' |
1369 | 149 | required_keys = ['host', 'port'] | 154 | |
1370 | 155 | def __init__(self, *args, **kwargs): | ||
1371 | 156 | self.required_keys = ['host', 'port'] | ||
1372 | 157 | RelationContext.__init__(self, *args, **kwargs) | ||
1373 | 150 | 158 | ||
1374 | 151 | def provide_data(self): | 159 | def provide_data(self): |
1375 | 152 | return { | 160 | return { |
1376 | 153 | 161 | ||
1377 | === added file 'hooks/charmhelpers/core/strutils.py' | |||
1378 | --- hooks/charmhelpers/core/strutils.py 1970-01-01 00:00:00 +0000 | |||
1379 | +++ hooks/charmhelpers/core/strutils.py 2015-04-16 21:56:47 +0000 | |||
1380 | @@ -0,0 +1,42 @@ | |||
1381 | 1 | #!/usr/bin/env python | ||
1382 | 2 | # -*- coding: utf-8 -*- | ||
1383 | 3 | |||
1384 | 4 | # Copyright 2014-2015 Canonical Limited. | ||
1385 | 5 | # | ||
1386 | 6 | # This file is part of charm-helpers. | ||
1387 | 7 | # | ||
1388 | 8 | # charm-helpers is free software: you can redistribute it and/or modify | ||
1389 | 9 | # it under the terms of the GNU Lesser General Public License version 3 as | ||
1390 | 10 | # published by the Free Software Foundation. | ||
1391 | 11 | # | ||
1392 | 12 | # charm-helpers is distributed in the hope that it will be useful, | ||
1393 | 13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
1394 | 14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
1395 | 15 | # GNU Lesser General Public License for more details. | ||
1396 | 16 | # | ||
1397 | 17 | # You should have received a copy of the GNU Lesser General Public License | ||
1398 | 18 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | ||
1399 | 19 | |||
1400 | 20 | import six | ||
1401 | 21 | |||
1402 | 22 | |||
1403 | 23 | def bool_from_string(value): | ||
1404 | 24 | """Interpret string value as boolean. | ||
1405 | 25 | |||
1406 | 26 | Returns True if value translates to True otherwise False. | ||
1407 | 27 | """ | ||
1408 | 28 | if isinstance(value, six.string_types): | ||
1409 | 29 | value = six.text_type(value) | ||
1410 | 30 | else: | ||
1411 | 31 | msg = "Unable to interpret non-string value '%s' as boolean" % (value) | ||
1412 | 32 | raise ValueError(msg) | ||
1413 | 33 | |||
1414 | 34 | value = value.strip().lower() | ||
1415 | 35 | |||
1416 | 36 | if value in ['y', 'yes', 'true', 't', 'on']: | ||
1417 | 37 | return True | ||
1418 | 38 | elif value in ['n', 'no', 'false', 'f', 'off']: | ||
1419 | 39 | return False | ||
1420 | 40 | |||
1421 | 41 | msg = "Unable to interpret string value '%s' as boolean" % (value) | ||
1422 | 42 | raise ValueError(msg) | ||
1423 | 0 | 43 | ||
1424 | === modified file 'hooks/charmhelpers/core/sysctl.py' | |||
1425 | --- hooks/charmhelpers/core/sysctl.py 2015-01-29 13:02:55 +0000 | |||
1426 | +++ hooks/charmhelpers/core/sysctl.py 2015-04-16 21:56:47 +0000 | |||
1427 | @@ -17,8 +17,6 @@ | |||
1428 | 17 | # You should have received a copy of the GNU Lesser General Public License | 17 | # You should have received a copy of the GNU Lesser General Public License |
1429 | 18 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | 18 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. |
1430 | 19 | 19 | ||
1431 | 20 | __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>' | ||
1432 | 21 | |||
1433 | 22 | import yaml | 20 | import yaml |
1434 | 23 | 21 | ||
1435 | 24 | from subprocess import check_call | 22 | from subprocess import check_call |
1436 | @@ -26,25 +24,33 @@ | |||
1437 | 26 | from charmhelpers.core.hookenv import ( | 24 | from charmhelpers.core.hookenv import ( |
1438 | 27 | log, | 25 | log, |
1439 | 28 | DEBUG, | 26 | DEBUG, |
1440 | 27 | ERROR, | ||
1441 | 29 | ) | 28 | ) |
1442 | 30 | 29 | ||
1443 | 30 | __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>' | ||
1444 | 31 | |||
1445 | 31 | 32 | ||
1446 | 32 | def create(sysctl_dict, sysctl_file): | 33 | def create(sysctl_dict, sysctl_file): |
1447 | 33 | """Creates a sysctl.conf file from a YAML associative array | 34 | """Creates a sysctl.conf file from a YAML associative array |
1448 | 34 | 35 | ||
1451 | 35 | :param sysctl_dict: a dict of sysctl options eg { 'kernel.max_pid': 1337 } | 36 | :param sysctl_dict: a YAML-formatted string of sysctl options eg "{ 'kernel.max_pid': 1337 }" |
1452 | 36 | :type sysctl_dict: dict | 37 | :type sysctl_dict: str |
1453 | 37 | :param sysctl_file: path to the sysctl file to be saved | 38 | :param sysctl_file: path to the sysctl file to be saved |
1454 | 38 | :type sysctl_file: str or unicode | 39 | :type sysctl_file: str or unicode |
1455 | 39 | :returns: None | 40 | :returns: None |
1456 | 40 | """ | 41 | """ |
1458 | 41 | sysctl_dict = yaml.load(sysctl_dict) | 42 | try: |
1459 | 43 | sysctl_dict_parsed = yaml.safe_load(sysctl_dict) | ||
1460 | 44 | except yaml.YAMLError: | ||
1461 | 45 | log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict), | ||
1462 | 46 | level=ERROR) | ||
1463 | 47 | return | ||
1464 | 42 | 48 | ||
1465 | 43 | with open(sysctl_file, "w") as fd: | 49 | with open(sysctl_file, "w") as fd: |
1467 | 44 | for key, value in sysctl_dict.items(): | 50 | for key, value in sysctl_dict_parsed.items(): |
1468 | 45 | fd.write("{}={}\n".format(key, value)) | 51 | fd.write("{}={}\n".format(key, value)) |
1469 | 46 | 52 | ||
1471 | 47 | log("Updating sysctl_file: %s values: %s" % (sysctl_file, sysctl_dict), | 53 | log("Updating sysctl_file: %s values: %s" % (sysctl_file, sysctl_dict_parsed), |
1472 | 48 | level=DEBUG) | 54 | level=DEBUG) |
1473 | 49 | 55 | ||
1474 | 50 | check_call(["sysctl", "-p", sysctl_file]) | 56 | check_call(["sysctl", "-p", sysctl_file]) |
1475 | 51 | 57 | ||
1476 | === modified file 'hooks/charmhelpers/core/templating.py' | |||
1477 | --- hooks/charmhelpers/core/templating.py 2015-01-29 13:02:55 +0000 | |||
1478 | +++ hooks/charmhelpers/core/templating.py 2015-04-16 21:56:47 +0000 | |||
1479 | @@ -21,7 +21,7 @@ | |||
1480 | 21 | 21 | ||
1481 | 22 | 22 | ||
1482 | 23 | def render(source, target, context, owner='root', group='root', | 23 | def render(source, target, context, owner='root', group='root', |
1484 | 24 | perms=0o444, templates_dir=None): | 24 | perms=0o444, templates_dir=None, encoding='UTF-8'): |
1485 | 25 | """ | 25 | """ |
1486 | 26 | Render a template. | 26 | Render a template. |
1487 | 27 | 27 | ||
1488 | @@ -64,5 +64,5 @@ | |||
1489 | 64 | level=hookenv.ERROR) | 64 | level=hookenv.ERROR) |
1490 | 65 | raise e | 65 | raise e |
1491 | 66 | content = template.render(context) | 66 | content = template.render(context) |
1494 | 67 | host.mkdir(os.path.dirname(target), owner, group) | 67 | host.mkdir(os.path.dirname(target), owner, group, perms=0o755) |
1495 | 68 | host.write_file(target, content, owner, group, perms) | 68 | host.write_file(target, content.encode(encoding), owner, group, perms) |
1496 | 69 | 69 | ||
1497 | === added file 'hooks/charmhelpers/core/unitdata.py' | |||
1498 | --- hooks/charmhelpers/core/unitdata.py 1970-01-01 00:00:00 +0000 | |||
1499 | +++ hooks/charmhelpers/core/unitdata.py 2015-04-16 21:56:47 +0000 | |||
1500 | @@ -0,0 +1,477 @@ | |||
1501 | 1 | #!/usr/bin/env python | ||
1502 | 2 | # -*- coding: utf-8 -*- | ||
1503 | 3 | # | ||
1504 | 4 | # Copyright 2014-2015 Canonical Limited. | ||
1505 | 5 | # | ||
1506 | 6 | # This file is part of charm-helpers. | ||
1507 | 7 | # | ||
1508 | 8 | # charm-helpers is free software: you can redistribute it and/or modify | ||
1509 | 9 | # it under the terms of the GNU Lesser General Public License version 3 as | ||
1510 | 10 | # published by the Free Software Foundation. | ||
1511 | 11 | # | ||
1512 | 12 | # charm-helpers is distributed in the hope that it will be useful, | ||
1513 | 13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
1514 | 14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
1515 | 15 | # GNU Lesser General Public License for more details. | ||
1516 | 16 | # | ||
1517 | 17 | # You should have received a copy of the GNU Lesser General Public License | ||
1518 | 18 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | ||
1519 | 19 | # | ||
1520 | 20 | # | ||
1521 | 21 | # Authors: | ||
1522 | 22 | # Kapil Thangavelu <kapil.foss@gmail.com> | ||
1523 | 23 | # | ||
1524 | 24 | """ | ||
1525 | 25 | Intro | ||
1526 | 26 | ----- | ||
1527 | 27 | |||
1528 | 28 | A simple way to store state in units. This provides a key value | ||
1529 | 29 | storage with support for versioned, transactional operation, | ||
1530 | 30 | and can calculate deltas from previous values to simplify unit logic | ||
1531 | 31 | when processing changes. | ||
1532 | 32 | |||
1533 | 33 | |||
1534 | 34 | Hook Integration | ||
1535 | 35 | ---------------- | ||
1536 | 36 | |||
1537 | 37 | There are several extant frameworks for hook execution, including | ||
1538 | 38 | |||
1539 | 39 | - charmhelpers.core.hookenv.Hooks | ||
1540 | 40 | - charmhelpers.core.services.ServiceManager | ||
1541 | 41 | |||
1542 | 42 | The storage classes are framework agnostic, one simple integration is | ||
1543 | 43 | via the HookData contextmanager. It will record the current hook | ||
1544 | 44 | execution environment (including relation data, config data, etc.), | ||
1545 | 45 | setup a transaction and allow easy access to the changes from | ||
1546 | 46 | previously seen values. One consequence of the integration is the | ||
1547 | 47 | reservation of particular keys ('rels', 'unit', 'env', 'config', | ||
1548 | 48 | 'charm_revisions') for their respective values. | ||
1549 | 49 | |||
1550 | 50 | Here's a fully worked integration example using hookenv.Hooks:: | ||
1551 | 51 | |||
1552 | 52 | from charmhelpers.core import hookenv, unitdata | ||
1553 | 53 | |||
1554 | 54 | hook_data = unitdata.HookData() | ||
1555 | 55 | db = unitdata.kv() | ||
1556 | 56 | hooks = hookenv.Hooks() | ||
1557 | 57 | |||
1558 | 58 | @hooks.hook | ||
1559 | 59 | def config_changed(): | ||
1560 | 60 | # Print all changes to configuration from previously seen | ||
1561 | 61 | # values. | ||
1562 | 62 | for changed, (prev, cur) in hook_data.conf.items(): | ||
1563 | 63 | print('config changed', changed, | ||
1564 | 64 | 'previous value', prev, | ||
1565 | 65 | 'current value', cur) | ||
1566 | 66 | |||
1567 | 67 | # Get some unit-specific bookkeeping | ||
1568 | 68 | if not db.get('pkg_key'): | ||
1569 | 69 | key = urllib.urlopen('https://example.com/pkg_key').read() | ||
1570 | 70 | db.set('pkg_key', key) | ||
1571 | 71 | |||
1572 | 72 | # Directly access all charm config as a mapping. | ||
1573 | 73 | conf = db.getrange('config', True) | ||
1574 | 74 | |||
1575 | 75 | # Directly access all relation data as a mapping | ||
1576 | 76 | rels = db.getrange('rels', True) | ||
1577 | 77 | |||
1578 | 78 | if __name__ == '__main__': | ||
1579 | 79 | with hook_data(): | ||
1580 | 80 | hook.execute() | ||
1581 | 81 | |||
1582 | 82 | |||
1583 | 83 | A more basic integration is via the hook_scope context manager which simply | ||
1584 | 84 | manages transaction scope (and records hook name, and timestamp):: | ||
1585 | 85 | |||
1586 | 86 | >>> from unitdata import kv | ||
1587 | 87 | >>> db = kv() | ||
1588 | 88 | >>> with db.hook_scope('install'): | ||
1589 | 89 | ... # do work, in transactional scope. | ||
1590 | 90 | ... db.set('x', 1) | ||
1591 | 91 | >>> db.get('x') | ||
1592 | 92 | 1 | ||
1593 | 93 | |||
1594 | 94 | |||
1595 | 95 | Usage | ||
1596 | 96 | ----- | ||
1597 | 97 | |||
1598 | 98 | Values are automatically json de/serialized to preserve basic typing | ||
1599 | 99 | and complex data struct capabilities (dicts, lists, ints, booleans, etc). | ||
1600 | 100 | |||
1601 | 101 | Individual values can be manipulated via get/set:: | ||
1602 | 102 | |||
1603 | 103 | >>> kv.set('y', True) | ||
1604 | 104 | >>> kv.get('y') | ||
1605 | 105 | True | ||
1606 | 106 | |||
1607 | 107 | # We can set complex values (dicts, lists) as a single key. | ||
1608 | 108 | >>> kv.set('config', {'a': 1, 'b': True}) | ||
1609 | 109 | |||
1610 | 110 | # Also supports returning dictionaries as a record which | ||
1611 | 111 | # provides attribute access. | ||
1612 | 112 | >>> config = kv.get('config', record=True) | ||
1613 | 113 | >>> config.b | ||
1614 | 114 | True | ||
1615 | 115 | |||
1616 | 116 | |||
1617 | 117 | Groups of keys can be manipulated with update/getrange:: | ||
1618 | 118 | |||
1619 | 119 | >>> kv.update({'z': 1, 'y': 2}, prefix="gui.") | ||
1620 | 120 | >>> kv.getrange('gui.', strip=True) | ||
1621 | 121 | {'z': 1, 'y': 2} | ||
1622 | 122 | |||
1623 | 123 | When updating values, its very helpful to understand which values | ||
1624 | 124 | have actually changed and how have they changed. The storage | ||
1625 | 125 | provides a delta method to provide for this:: | ||
1626 | 126 | |||
1627 | 127 | >>> data = {'debug': True, 'option': 2} | ||
1628 | 128 | >>> delta = kv.delta(data, 'config.') | ||
1629 | 129 | >>> delta.debug.previous | ||
1630 | 130 | None | ||
1631 | 131 | >>> delta.debug.current | ||
1632 | 132 | True | ||
1633 | 133 | >>> delta | ||
1634 | 134 | {'debug': (None, True), 'option': (None, 2)} | ||
1635 | 135 | |||
1636 | 136 | Note the delta method does not persist the actual change, it needs to | ||
1637 | 137 | be explicitly saved via 'update' method:: | ||
1638 | 138 | |||
1639 | 139 | >>> kv.update(data, 'config.') | ||
1640 | 140 | |||
1641 | 141 | Values modified in the context of a hook scope retain historical values | ||
1642 | 142 | associated to the hookname. | ||
1643 | 143 | |||
1644 | 144 | >>> with db.hook_scope('config-changed'): | ||
1645 | 145 | ... db.set('x', 42) | ||
1646 | 146 | >>> db.gethistory('x') | ||
1647 | 147 | [(1, u'x', 1, u'install', u'2015-01-21T16:49:30.038372'), | ||
1648 | 148 | (2, u'x', 42, u'config-changed', u'2015-01-21T16:49:30.038786')] | ||
1649 | 149 | |||
1650 | 150 | """ | ||
1651 | 151 | |||
1652 | 152 | import collections | ||
1653 | 153 | import contextlib | ||
1654 | 154 | import datetime | ||
1655 | 155 | import json | ||
1656 | 156 | import os | ||
1657 | 157 | import pprint | ||
1658 | 158 | import sqlite3 | ||
1659 | 159 | import sys | ||
1660 | 160 | |||
1661 | 161 | __author__ = 'Kapil Thangavelu <kapil.foss@gmail.com>' | ||
1662 | 162 | |||
1663 | 163 | |||
1664 | 164 | class Storage(object): | ||
1665 | 165 | """Simple key value database for local unit state within charms. | ||
1666 | 166 | |||
1667 | 167 | Modifications are automatically committed at hook exit. That's | ||
1668 | 168 | currently regardless of exit code. | ||
1669 | 169 | |||
1670 | 170 | To support dicts, lists, integer, floats, and booleans values | ||
1671 | 171 | are automatically json encoded/decoded. | ||
1672 | 172 | """ | ||
1673 | 173 | def __init__(self, path=None): | ||
1674 | 174 | self.db_path = path | ||
1675 | 175 | if path is None: | ||
1676 | 176 | self.db_path = os.path.join( | ||
1677 | 177 | os.environ.get('CHARM_DIR', ''), '.unit-state.db') | ||
1678 | 178 | self.conn = sqlite3.connect('%s' % self.db_path) | ||
1679 | 179 | self.cursor = self.conn.cursor() | ||
1680 | 180 | self.revision = None | ||
1681 | 181 | self._closed = False | ||
1682 | 182 | self._init() | ||
1683 | 183 | |||
1684 | 184 | def close(self): | ||
1685 | 185 | if self._closed: | ||
1686 | 186 | return | ||
1687 | 187 | self.flush(False) | ||
1688 | 188 | self.cursor.close() | ||
1689 | 189 | self.conn.close() | ||
1690 | 190 | self._closed = True | ||
1691 | 191 | |||
1692 | 192 | def _scoped_query(self, stmt, params=None): | ||
1693 | 193 | if params is None: | ||
1694 | 194 | params = [] | ||
1695 | 195 | return stmt, params | ||
1696 | 196 | |||
1697 | 197 | def get(self, key, default=None, record=False): | ||
1698 | 198 | self.cursor.execute( | ||
1699 | 199 | *self._scoped_query( | ||
1700 | 200 | 'select data from kv where key=?', [key])) | ||
1701 | 201 | result = self.cursor.fetchone() | ||
1702 | 202 | if not result: | ||
1703 | 203 | return default | ||
1704 | 204 | if record: | ||
1705 | 205 | return Record(json.loads(result[0])) | ||
1706 | 206 | return json.loads(result[0]) | ||
1707 | 207 | |||
1708 | 208 | def getrange(self, key_prefix, strip=False): | ||
1709 | 209 | stmt = "select key, data from kv where key like '%s%%'" % key_prefix | ||
1710 | 210 | self.cursor.execute(*self._scoped_query(stmt)) | ||
1711 | 211 | result = self.cursor.fetchall() | ||
1712 | 212 | |||
1713 | 213 | if not result: | ||
1714 | 214 | return None | ||
1715 | 215 | if not strip: | ||
1716 | 216 | key_prefix = '' | ||
1717 | 217 | return dict([ | ||
1718 | 218 | (k[len(key_prefix):], json.loads(v)) for k, v in result]) | ||
1719 | 219 | |||
1720 | 220 | def update(self, mapping, prefix=""): | ||
1721 | 221 | for k, v in mapping.items(): | ||
1722 | 222 | self.set("%s%s" % (prefix, k), v) | ||
1723 | 223 | |||
1724 | 224 | def unset(self, key): | ||
1725 | 225 | self.cursor.execute('delete from kv where key=?', [key]) | ||
1726 | 226 | if self.revision and self.cursor.rowcount: | ||
1727 | 227 | self.cursor.execute( | ||
1728 | 228 | 'insert into kv_revisions values (?, ?, ?)', | ||
1729 | 229 | [key, self.revision, json.dumps('DELETED')]) | ||
1730 | 230 | |||
1731 | 231 | def set(self, key, value): | ||
1732 | 232 | serialized = json.dumps(value) | ||
1733 | 233 | |||
1734 | 234 | self.cursor.execute( | ||
1735 | 235 | 'select data from kv where key=?', [key]) | ||
1736 | 236 | exists = self.cursor.fetchone() | ||
1737 | 237 | |||
1738 | 238 | # Skip mutations to the same value | ||
1739 | 239 | if exists: | ||
1740 | 240 | if exists[0] == serialized: | ||
1741 | 241 | return value | ||
1742 | 242 | |||
1743 | 243 | if not exists: | ||
1744 | 244 | self.cursor.execute( | ||
1745 | 245 | 'insert into kv (key, data) values (?, ?)', | ||
1746 | 246 | (key, serialized)) | ||
1747 | 247 | else: | ||
1748 | 248 | self.cursor.execute(''' | ||
1749 | 249 | update kv | ||
1750 | 250 | set data = ? | ||
1751 | 251 | where key = ?''', [serialized, key]) | ||
1752 | 252 | |||
1753 | 253 | # Save | ||
1754 | 254 | if not self.revision: | ||
1755 | 255 | return value | ||
1756 | 256 | |||
1757 | 257 | self.cursor.execute( | ||
1758 | 258 | 'select 1 from kv_revisions where key=? and revision=?', | ||
1759 | 259 | [key, self.revision]) | ||
1760 | 260 | exists = self.cursor.fetchone() | ||
1761 | 261 | |||
1762 | 262 | if not exists: | ||
1763 | 263 | self.cursor.execute( | ||
1764 | 264 | '''insert into kv_revisions ( | ||
1765 | 265 | revision, key, data) values (?, ?, ?)''', | ||
1766 | 266 | (self.revision, key, serialized)) | ||
1767 | 267 | else: | ||
1768 | 268 | self.cursor.execute( | ||
1769 | 269 | ''' | ||
1770 | 270 | update kv_revisions | ||
1771 | 271 | set data = ? | ||
1772 | 272 | where key = ? | ||
1773 | 273 | and revision = ?''', | ||
1774 | 274 | [serialized, key, self.revision]) | ||
1775 | 275 | |||
1776 | 276 | return value | ||
1777 | 277 | |||
1778 | 278 | def delta(self, mapping, prefix): | ||
1779 | 279 | """ | ||
1780 | 280 | return a delta containing values that have changed. | ||
1781 | 281 | """ | ||
1782 | 282 | previous = self.getrange(prefix, strip=True) | ||
1783 | 283 | if not previous: | ||
1784 | 284 | pk = set() | ||
1785 | 285 | else: | ||
1786 | 286 | pk = set(previous.keys()) | ||
1787 | 287 | ck = set(mapping.keys()) | ||
1788 | 288 | delta = DeltaSet() | ||
1789 | 289 | |||
1790 | 290 | # added | ||
1791 | 291 | for k in ck.difference(pk): | ||
1792 | 292 | delta[k] = Delta(None, mapping[k]) | ||
1793 | 293 | |||
1794 | 294 | # removed | ||
1795 | 295 | for k in pk.difference(ck): | ||
1796 | 296 | delta[k] = Delta(previous[k], None) | ||
1797 | 297 | |||
1798 | 298 | # changed | ||
1799 | 299 | for k in pk.intersection(ck): | ||
1800 | 300 | c = mapping[k] | ||
1801 | 301 | p = previous[k] | ||
1802 | 302 | if c != p: | ||
1803 | 303 | delta[k] = Delta(p, c) | ||
1804 | 304 | |||
1805 | 305 | return delta | ||
1806 | 306 | |||
1807 | 307 | @contextlib.contextmanager | ||
1808 | 308 | def hook_scope(self, name=""): | ||
1809 | 309 | """Scope all future interactions to the current hook execution | ||
1810 | 310 | revision.""" | ||
1811 | 311 | assert not self.revision | ||
1812 | 312 | self.cursor.execute( | ||
1813 | 313 | 'insert into hooks (hook, date) values (?, ?)', | ||
1814 | 314 | (name or sys.argv[0], | ||
1815 | 315 | datetime.datetime.utcnow().isoformat())) | ||
1816 | 316 | self.revision = self.cursor.lastrowid | ||
1817 | 317 | try: | ||
1818 | 318 | yield self.revision | ||
1819 | 319 | self.revision = None | ||
1820 | 320 | except: | ||
1821 | 321 | self.flush(False) | ||
1822 | 322 | self.revision = None | ||
1823 | 323 | raise | ||
1824 | 324 | else: | ||
1825 | 325 | self.flush() | ||
1826 | 326 | |||
1827 | 327 | def flush(self, save=True): | ||
1828 | 328 | if save: | ||
1829 | 329 | self.conn.commit() | ||
1830 | 330 | elif self._closed: | ||
1831 | 331 | return | ||
1832 | 332 | else: | ||
1833 | 333 | self.conn.rollback() | ||
1834 | 334 | |||
1835 | 335 | def _init(self): | ||
1836 | 336 | self.cursor.execute(''' | ||
1837 | 337 | create table if not exists kv ( | ||
1838 | 338 | key text, | ||
1839 | 339 | data text, | ||
1840 | 340 | primary key (key) | ||
1841 | 341 | )''') | ||
1842 | 342 | self.cursor.execute(''' | ||
1843 | 343 | create table if not exists kv_revisions ( | ||
1844 | 344 | key text, | ||
1845 | 345 | revision integer, | ||
1846 | 346 | data text, | ||
1847 | 347 | primary key (key, revision) | ||
1848 | 348 | )''') | ||
1849 | 349 | self.cursor.execute(''' | ||
1850 | 350 | create table if not exists hooks ( | ||
1851 | 351 | version integer primary key autoincrement, | ||
1852 | 352 | hook text, | ||
1853 | 353 | date text | ||
1854 | 354 | )''') | ||
1855 | 355 | self.conn.commit() | ||
1856 | 356 | |||
1857 | 357 | def gethistory(self, key, deserialize=False): | ||
1858 | 358 | self.cursor.execute( | ||
1859 | 359 | ''' | ||
1860 | 360 | select kv.revision, kv.key, kv.data, h.hook, h.date | ||
1861 | 361 | from kv_revisions kv, | ||
1862 | 362 | hooks h | ||
1863 | 363 | where kv.key=? | ||
1864 | 364 | and kv.revision = h.version | ||
1865 | 365 | ''', [key]) | ||
1866 | 366 | if deserialize is False: | ||
1867 | 367 | return self.cursor.fetchall() | ||
1868 | 368 | return map(_parse_history, self.cursor.fetchall()) | ||
1869 | 369 | |||
1870 | 370 | def debug(self, fh=sys.stderr): | ||
1871 | 371 | self.cursor.execute('select * from kv') | ||
1872 | 372 | pprint.pprint(self.cursor.fetchall(), stream=fh) | ||
1873 | 373 | self.cursor.execute('select * from kv_revisions') | ||
1874 | 374 | pprint.pprint(self.cursor.fetchall(), stream=fh) | ||
1875 | 375 | |||
1876 | 376 | |||
1877 | 377 | def _parse_history(d): | ||
1878 | 378 | return (d[0], d[1], json.loads(d[2]), d[3], | ||
1879 | 379 | datetime.datetime.strptime(d[-1], "%Y-%m-%dT%H:%M:%S.%f")) | ||
1880 | 380 | |||
1881 | 381 | |||
1882 | 382 | class HookData(object): | ||
1883 | 383 | """Simple integration for existing hook exec frameworks. | ||
1884 | 384 | |||
1885 | 385 | Records all unit information, and stores deltas for processing | ||
1886 | 386 | by the hook. | ||
1887 | 387 | |||
1888 | 388 | Sample:: | ||
1889 | 389 | |||
1890 | 390 | from charmhelpers.core import hookenv, unitdata | ||
1891 | 391 | |||
1892 | 392 | changes = unitdata.HookData() | ||
1893 | 393 | db = unitdata.kv() | ||
1894 | 394 | hooks = hookenv.Hooks() | ||
1895 | 395 | |||
1896 | 396 | @hooks.hook | ||
1897 | 397 | def config_changed(): | ||
1898 | 398 | # View all changes to configuration | ||
1899 | 399 | for changed, (prev, cur) in changes.conf.items(): | ||
1900 | 400 | print('config changed', changed, | ||
1901 | 401 | 'previous value', prev, | ||
1902 | 402 | 'current value', cur) | ||
1903 | 403 | |||
1904 | 404 | # Get some unit-specific bookkeeping | ||
1905 | 405 | if not db.get('pkg_key'): | ||
1906 | 406 | key = urllib.urlopen('https://example.com/pkg_key').read() | ||
1907 | 407 | db.set('pkg_key', key) | ||
1908 | 408 | |||
1909 | 409 | if __name__ == '__main__': | ||
1910 | 410 | with changes(): | ||
1911 | 411 | hook.execute() | ||
1912 | 412 | |||
1913 | 413 | """ | ||
1914 | 414 | def __init__(self): | ||
1915 | 415 | self.kv = kv() | ||
1916 | 416 | self.conf = None | ||
1917 | 417 | self.rels = None | ||
1918 | 418 | |||
1919 | 419 | @contextlib.contextmanager | ||
1920 | 420 | def __call__(self): | ||
1921 | 421 | from charmhelpers.core import hookenv | ||
1922 | 422 | hook_name = hookenv.hook_name() | ||
1923 | 423 | |||
1924 | 424 | with self.kv.hook_scope(hook_name): | ||
1925 | 425 | self._record_charm_version(hookenv.charm_dir()) | ||
1926 | 426 | delta_config, delta_relation = self._record_hook(hookenv) | ||
1927 | 427 | yield self.kv, delta_config, delta_relation | ||
1928 | 428 | |||
1929 | 429 | def _record_charm_version(self, charm_dir): | ||
1930 | 430 | # Record revisions.. charm revisions are meaningless | ||
1931 | 431 | # to charm authors as they don't control the revision. | ||
1932 | 432 | # so logic dependent on revision is not particularly | ||
1933 | 433 | # useful, however it is useful for debugging analysis. | ||
1934 | 434 | charm_rev = open( | ||
1935 | 435 | os.path.join(charm_dir, 'revision')).read().strip() | ||
1936 | 436 | charm_rev = charm_rev or '0' | ||
1937 | 437 | revs = self.kv.get('charm_revisions', []) | ||
1938 | 438 | if charm_rev not in revs: | ||
1939 | 439 | revs.append(charm_rev.strip() or '0') | ||
1940 | 440 | self.kv.set('charm_revisions', revs) | ||
1941 | 441 | |||
1942 | 442 | def _record_hook(self, hookenv): | ||
1943 | 443 | data = hookenv.execution_environment() | ||
1944 | 444 | self.conf = conf_delta = self.kv.delta(data['conf'], 'config') | ||
1945 | 445 | self.rels = rels_delta = self.kv.delta(data['rels'], 'rels') | ||
1946 | 446 | self.kv.set('env', dict(data['env'])) | ||
1947 | 447 | self.kv.set('unit', data['unit']) | ||
1948 | 448 | self.kv.set('relid', data.get('relid')) | ||
1949 | 449 | return conf_delta, rels_delta | ||
1950 | 450 | |||
1951 | 451 | |||
1952 | 452 | class Record(dict): | ||
1953 | 453 | |||
1954 | 454 | __slots__ = () | ||
1955 | 455 | |||
1956 | 456 | def __getattr__(self, k): | ||
1957 | 457 | if k in self: | ||
1958 | 458 | return self[k] | ||
1959 | 459 | raise AttributeError(k) | ||
1960 | 460 | |||
1961 | 461 | |||
1962 | 462 | class DeltaSet(Record): | ||
1963 | 463 | |||
1964 | 464 | __slots__ = () | ||
1965 | 465 | |||
1966 | 466 | |||
1967 | 467 | Delta = collections.namedtuple('Delta', ['previous', 'current']) | ||
1968 | 468 | |||
1969 | 469 | |||
1970 | 470 | _KV = None | ||
1971 | 471 | |||
1972 | 472 | |||
1973 | 473 | def kv(): | ||
1974 | 474 | global _KV | ||
1975 | 475 | if _KV is None: | ||
1976 | 476 | _KV = Storage() | ||
1977 | 477 | return _KV | ||
1978 | 0 | 478 | ||
1979 | === modified file 'hooks/charmhelpers/fetch/archiveurl.py' | |||
1980 | --- hooks/charmhelpers/fetch/archiveurl.py 2015-01-29 13:02:55 +0000 | |||
1981 | +++ hooks/charmhelpers/fetch/archiveurl.py 2015-04-16 21:56:47 +0000 | |||
1982 | @@ -18,6 +18,16 @@ | |||
1983 | 18 | import hashlib | 18 | import hashlib |
1984 | 19 | import re | 19 | import re |
1985 | 20 | 20 | ||
1986 | 21 | from charmhelpers.fetch import ( | ||
1987 | 22 | BaseFetchHandler, | ||
1988 | 23 | UnhandledSource | ||
1989 | 24 | ) | ||
1990 | 25 | from charmhelpers.payload.archive import ( | ||
1991 | 26 | get_archive_handler, | ||
1992 | 27 | extract, | ||
1993 | 28 | ) | ||
1994 | 29 | from charmhelpers.core.host import mkdir, check_hash | ||
1995 | 30 | |||
1996 | 21 | import six | 31 | import six |
1997 | 22 | if six.PY3: | 32 | if six.PY3: |
1998 | 23 | from urllib.request import ( | 33 | from urllib.request import ( |
1999 | @@ -35,16 +45,6 @@ | |||
2000 | 35 | ) | 45 | ) |
2001 | 36 | from urlparse import urlparse, urlunparse, parse_qs | 46 | from urlparse import urlparse, urlunparse, parse_qs |
2002 | 37 | 47 | ||
2003 | 38 | from charmhelpers.fetch import ( | ||
2004 | 39 | BaseFetchHandler, | ||
2005 | 40 | UnhandledSource | ||
2006 | 41 | ) | ||
2007 | 42 | from charmhelpers.payload.archive import ( | ||
2008 | 43 | get_archive_handler, | ||
2009 | 44 | extract, | ||
2010 | 45 | ) | ||
2011 | 46 | from charmhelpers.core.host import mkdir, check_hash | ||
2012 | 47 | |||
2013 | 48 | 48 | ||
2014 | 49 | def splituser(host): | 49 | def splituser(host): |
2015 | 50 | '''urllib.splituser(), but six's support of this seems broken''' | 50 | '''urllib.splituser(), but six's support of this seems broken''' |
2016 | 51 | 51 | ||
2017 | === modified file 'hooks/charmhelpers/fetch/giturl.py' | |||
2018 | --- hooks/charmhelpers/fetch/giturl.py 2015-01-29 13:02:55 +0000 | |||
2019 | +++ hooks/charmhelpers/fetch/giturl.py 2015-04-16 21:56:47 +0000 | |||
2020 | @@ -32,7 +32,7 @@ | |||
2021 | 32 | apt_install("python-git") | 32 | apt_install("python-git") |
2022 | 33 | from git import Repo | 33 | from git import Repo |
2023 | 34 | 34 | ||
2025 | 35 | from git.exc import GitCommandError | 35 | from git.exc import GitCommandError # noqa E402 |
2026 | 36 | 36 | ||
2027 | 37 | 37 | ||
2028 | 38 | class GitUrlFetchHandler(BaseFetchHandler): | 38 | class GitUrlFetchHandler(BaseFetchHandler): |
2029 | 39 | 39 | ||
2030 | === modified file 'tests/charmhelpers/contrib/amulet/utils.py' | |||
2031 | --- tests/charmhelpers/contrib/amulet/utils.py 2015-01-29 13:02:55 +0000 | |||
2032 | +++ tests/charmhelpers/contrib/amulet/utils.py 2015-04-16 21:56:47 +0000 | |||
2033 | @@ -118,6 +118,9 @@ | |||
2034 | 118 | longs, or can be a function that evaluate a variable and returns a | 118 | longs, or can be a function that evaluate a variable and returns a |
2035 | 119 | bool. | 119 | bool. |
2036 | 120 | """ | 120 | """ |
2037 | 121 | self.log.debug('actual: {}'.format(repr(actual))) | ||
2038 | 122 | self.log.debug('expected: {}'.format(repr(expected))) | ||
2039 | 123 | |||
2040 | 121 | for k, v in six.iteritems(expected): | 124 | for k, v in six.iteritems(expected): |
2041 | 122 | if k in actual: | 125 | if k in actual: |
2042 | 123 | if (isinstance(v, six.string_types) or | 126 | if (isinstance(v, six.string_types) or |
2043 | @@ -134,7 +137,6 @@ | |||
2044 | 134 | def validate_relation_data(self, sentry_unit, relation, expected): | 137 | def validate_relation_data(self, sentry_unit, relation, expected): |
2045 | 135 | """Validate actual relation data based on expected relation data.""" | 138 | """Validate actual relation data based on expected relation data.""" |
2046 | 136 | actual = sentry_unit.relation(relation[0], relation[1]) | 139 | actual = sentry_unit.relation(relation[0], relation[1]) |
2047 | 137 | self.log.debug('actual: {}'.format(repr(actual))) | ||
2048 | 138 | return self._validate_dict_data(expected, actual) | 140 | return self._validate_dict_data(expected, actual) |
2049 | 139 | 141 | ||
2050 | 140 | def _validate_list_data(self, expected, actual): | 142 | def _validate_list_data(self, expected, actual): |
2051 | @@ -169,8 +171,13 @@ | |||
2052 | 169 | cmd = 'pgrep -o -f {}'.format(service) | 171 | cmd = 'pgrep -o -f {}'.format(service) |
2053 | 170 | else: | 172 | else: |
2054 | 171 | cmd = 'pgrep -o {}'.format(service) | 173 | cmd = 'pgrep -o {}'.format(service) |
2057 | 172 | proc_dir = '/proc/{}'.format(sentry_unit.run(cmd)[0].strip()) | 174 | cmd = cmd + ' | grep -v pgrep || exit 0' |
2058 | 173 | return self._get_dir_mtime(sentry_unit, proc_dir) | 175 | cmd_out = sentry_unit.run(cmd) |
2059 | 176 | self.log.debug('CMDout: ' + str(cmd_out)) | ||
2060 | 177 | if cmd_out[0]: | ||
2061 | 178 | self.log.debug('Pid for %s %s' % (service, str(cmd_out[0]))) | ||
2062 | 179 | proc_dir = '/proc/{}'.format(cmd_out[0].strip()) | ||
2063 | 180 | return self._get_dir_mtime(sentry_unit, proc_dir) | ||
2064 | 174 | 181 | ||
2065 | 175 | def service_restarted(self, sentry_unit, service, filename, | 182 | def service_restarted(self, sentry_unit, service, filename, |
2066 | 176 | pgrep_full=False, sleep_time=20): | 183 | pgrep_full=False, sleep_time=20): |
2067 | @@ -187,6 +194,121 @@ | |||
2068 | 187 | else: | 194 | else: |
2069 | 188 | return False | 195 | return False |
2070 | 189 | 196 | ||
2071 | 197 | def service_restarted_since(self, sentry_unit, mtime, service, | ||
2072 | 198 | pgrep_full=False, sleep_time=20, | ||
2073 | 199 | retry_count=2): | ||
2074 | 200 | """Check if service has been started after a given time. | ||
2075 | 201 | |||
2076 | 202 | Args: | ||
2077 | 203 | sentry_unit (sentry): The sentry unit to check for the service on | ||
2078 | 204 | mtime (float): The epoch time to check against | ||
2079 | 205 | service (string): service name to look for in process table | ||
2080 | 206 | pgrep_full (boolean): Use full command line search mode with pgrep | ||
2081 | 207 | sleep_time (int): Seconds to sleep before looking for process | ||
2082 | 208 | retry_count (int): If service is not found, how many times to retry | ||
2083 | 209 | |||
2084 | 210 | Returns: | ||
2085 | 211 | bool: True if service found and its start time is newer than mtime, | ||
2086 | 212 | False if service is older than mtime or if service was | ||
2087 | 213 | not found. | ||
2088 | 214 | """ | ||
2089 | 215 | self.log.debug('Checking %s restarted since %s' % (service, mtime)) | ||
2090 | 216 | time.sleep(sleep_time) | ||
2091 | 217 | proc_start_time = self._get_proc_start_time(sentry_unit, service, | ||
2092 | 218 | pgrep_full) | ||
2093 | 219 | while retry_count > 0 and not proc_start_time: | ||
2094 | 220 | self.log.debug('No pid file found for service %s, will retry %i ' | ||
2095 | 221 | 'more times' % (service, retry_count)) | ||
2096 | 222 | time.sleep(30) | ||
2097 | 223 | proc_start_time = self._get_proc_start_time(sentry_unit, service, | ||
2098 | 224 | pgrep_full) | ||
2099 | 225 | retry_count = retry_count - 1 | ||
2100 | 226 | |||
2101 | 227 | if not proc_start_time: | ||
2102 | 228 | self.log.warn('No proc start time found, assuming service did ' | ||
2103 | 229 | 'not start') | ||
2104 | 230 | return False | ||
2105 | 231 | if proc_start_time >= mtime: | ||
2106 | 232 | self.log.debug('proc start time is newer than provided mtime' | ||
2107 | 233 | '(%s >= %s)' % (proc_start_time, mtime)) | ||
2108 | 234 | return True | ||
2109 | 235 | else: | ||
2110 | 236 | self.log.warn('proc start time (%s) is older than provided mtime ' | ||
2111 | 237 | '(%s), service did not restart' % (proc_start_time, | ||
2112 | 238 | mtime)) | ||
2113 | 239 | return False | ||
2114 | 240 | |||
2115 | 241 | def config_updated_since(self, sentry_unit, filename, mtime, | ||
2116 | 242 | sleep_time=20): | ||
2117 | 243 | """Check if file was modified after a given time. | ||
2118 | 244 | |||
2119 | 245 | Args: | ||
2120 | 246 | sentry_unit (sentry): The sentry unit to check the file mtime on | ||
2121 | 247 | filename (string): The file to check mtime of | ||
2122 | 248 | mtime (float): The epoch time to check against | ||
2123 | 249 | sleep_time (int): Seconds to sleep before looking for process | ||
2124 | 250 | |||
2125 | 251 | Returns: | ||
2126 | 252 | bool: True if file was modified more recently than mtime, False if | ||
2127 | 253 | file was modified before mtime, | ||
2128 | 254 | """ | ||
2129 | 255 | self.log.debug('Checking %s updated since %s' % (filename, mtime)) | ||
2130 | 256 | time.sleep(sleep_time) | ||
2131 | 257 | file_mtime = self._get_file_mtime(sentry_unit, filename) | ||
2132 | 258 | if file_mtime >= mtime: | ||
2133 | 259 | self.log.debug('File mtime is newer than provided mtime ' | ||
2134 | 260 | '(%s >= %s)' % (file_mtime, mtime)) | ||
2135 | 261 | return True | ||
2136 | 262 | else: | ||
2137 | 263 | self.log.warn('File mtime %s is older than provided mtime %s' | ||
2138 | 264 | % (file_mtime, mtime)) | ||
2139 | 265 | return False | ||
2140 | 266 | |||
2141 | 267 | def validate_service_config_changed(self, sentry_unit, mtime, service, | ||
2142 | 268 | filename, pgrep_full=False, | ||
2143 | 269 | sleep_time=20, retry_count=2): | ||
2144 | 270 | """Check service and file were updated after mtime | ||
2145 | 271 | |||
2146 | 272 | Args: | ||
2147 | 273 | sentry_unit (sentry): The sentry unit to check for the service on | ||
2148 | 274 | mtime (float): The epoch time to check against | ||
2149 | 275 | service (string): service name to look for in process table | ||
2150 | 276 | filename (string): The file to check mtime of | ||
2151 | 277 | pgrep_full (boolean): Use full command line search mode with pgrep | ||
2152 | 278 | sleep_time (int): Seconds to sleep before looking for process | ||
2153 | 279 | retry_count (int): If service is not found, how many times to retry | ||
2154 | 280 | |||
2155 | 281 | Typical Usage: | ||
2156 | 282 | u = OpenStackAmuletUtils(ERROR) | ||
2157 | 283 | ... | ||
2158 | 284 | mtime = u.get_sentry_time(self.cinder_sentry) | ||
2159 | 285 | self.d.configure('cinder', {'verbose': 'True', 'debug': 'True'}) | ||
2160 | 286 | if not u.validate_service_config_changed(self.cinder_sentry, | ||
2161 | 287 | mtime, | ||
2162 | 288 | 'cinder-api', | ||
2163 | 289 | '/etc/cinder/cinder.conf') | ||
2164 | 290 | amulet.raise_status(amulet.FAIL, msg='update failed') | ||
2165 | 291 | Returns: | ||
2166 | 292 | bool: True if both service and file where updated/restarted after | ||
2167 | 293 | mtime, False if service is older than mtime or if service was | ||
2168 | 294 | not found or if filename was modified before mtime. | ||
2169 | 295 | """ | ||
2170 | 296 | self.log.debug('Checking %s restarted since %s' % (service, mtime)) | ||
2171 | 297 | time.sleep(sleep_time) | ||
2172 | 298 | service_restart = self.service_restarted_since(sentry_unit, mtime, | ||
2173 | 299 | service, | ||
2174 | 300 | pgrep_full=pgrep_full, | ||
2175 | 301 | sleep_time=0, | ||
2176 | 302 | retry_count=retry_count) | ||
2177 | 303 | config_update = self.config_updated_since(sentry_unit, filename, mtime, | ||
2178 | 304 | sleep_time=0) | ||
2179 | 305 | return service_restart and config_update | ||
2180 | 306 | |||
2181 | 307 | def get_sentry_time(self, sentry_unit): | ||
2182 | 308 | """Return current epoch time on a sentry""" | ||
2183 | 309 | cmd = "date +'%s'" | ||
2184 | 310 | return float(sentry_unit.run(cmd)[0]) | ||
2185 | 311 | |||
2186 | 190 | def relation_error(self, name, data): | 312 | def relation_error(self, name, data): |
2187 | 191 | return 'unexpected relation data in {} - {}'.format(name, data) | 313 | return 'unexpected relation data in {} - {}'.format(name, data) |
2188 | 192 | 314 | ||
2189 | 193 | 315 | ||
2190 | === modified file 'tests/charmhelpers/contrib/openstack/amulet/deployment.py' | |||
2191 | --- tests/charmhelpers/contrib/openstack/amulet/deployment.py 2015-01-29 13:02:55 +0000 | |||
2192 | +++ tests/charmhelpers/contrib/openstack/amulet/deployment.py 2015-04-16 21:56:47 +0000 | |||
2193 | @@ -15,6 +15,7 @@ | |||
2194 | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. |
2195 | 16 | 16 | ||
2196 | 17 | import six | 17 | import six |
2197 | 18 | from collections import OrderedDict | ||
2198 | 18 | from charmhelpers.contrib.amulet.deployment import ( | 19 | from charmhelpers.contrib.amulet.deployment import ( |
2199 | 19 | AmuletDeployment | 20 | AmuletDeployment |
2200 | 20 | ) | 21 | ) |
2201 | @@ -43,7 +44,7 @@ | |||
2202 | 43 | Determine if the local branch being tested is derived from its | 44 | Determine if the local branch being tested is derived from its |
2203 | 44 | stable or next (dev) branch, and based on this, use the corresponding | 45 | stable or next (dev) branch, and based on this, use the corresponding |
2204 | 45 | stable or next branches for the other_services.""" | 46 | stable or next branches for the other_services.""" |
2206 | 46 | base_charms = ['mysql', 'mongodb', 'rabbitmq-server'] | 47 | base_charms = ['mysql', 'mongodb'] |
2207 | 47 | 48 | ||
2208 | 48 | if self.stable: | 49 | if self.stable: |
2209 | 49 | for svc in other_services: | 50 | for svc in other_services: |
2210 | @@ -71,16 +72,19 @@ | |||
2211 | 71 | services.append(this_service) | 72 | services.append(this_service) |
2212 | 72 | use_source = ['mysql', 'mongodb', 'rabbitmq-server', 'ceph', | 73 | use_source = ['mysql', 'mongodb', 'rabbitmq-server', 'ceph', |
2213 | 73 | 'ceph-osd', 'ceph-radosgw'] | 74 | 'ceph-osd', 'ceph-radosgw'] |
2214 | 75 | # Openstack subordinate charms do not expose an origin option as that | ||
2215 | 76 | # is controlled by the principle | ||
2216 | 77 | ignore = ['neutron-openvswitch'] | ||
2217 | 74 | 78 | ||
2218 | 75 | if self.openstack: | 79 | if self.openstack: |
2219 | 76 | for svc in services: | 80 | for svc in services: |
2221 | 77 | if svc['name'] not in use_source: | 81 | if svc['name'] not in use_source + ignore: |
2222 | 78 | config = {'openstack-origin': self.openstack} | 82 | config = {'openstack-origin': self.openstack} |
2223 | 79 | self.d.configure(svc['name'], config) | 83 | self.d.configure(svc['name'], config) |
2224 | 80 | 84 | ||
2225 | 81 | if self.source: | 85 | if self.source: |
2226 | 82 | for svc in services: | 86 | for svc in services: |
2228 | 83 | if svc['name'] in use_source: | 87 | if svc['name'] in use_source and svc['name'] not in ignore: |
2229 | 84 | config = {'source': self.source} | 88 | config = {'source': self.source} |
2230 | 85 | self.d.configure(svc['name'], config) | 89 | self.d.configure(svc['name'], config) |
2231 | 86 | 90 | ||
2232 | @@ -97,12 +101,37 @@ | |||
2233 | 97 | """ | 101 | """ |
2234 | 98 | (self.precise_essex, self.precise_folsom, self.precise_grizzly, | 102 | (self.precise_essex, self.precise_folsom, self.precise_grizzly, |
2235 | 99 | self.precise_havana, self.precise_icehouse, | 103 | self.precise_havana, self.precise_icehouse, |
2237 | 100 | self.trusty_icehouse) = range(6) | 104 | self.trusty_icehouse, self.trusty_juno, self.trusty_kilo, |
2238 | 105 | self.utopic_juno, self.vivid_kilo) = range(10) | ||
2239 | 101 | releases = { | 106 | releases = { |
2240 | 102 | ('precise', None): self.precise_essex, | 107 | ('precise', None): self.precise_essex, |
2241 | 103 | ('precise', 'cloud:precise-folsom'): self.precise_folsom, | 108 | ('precise', 'cloud:precise-folsom'): self.precise_folsom, |
2242 | 104 | ('precise', 'cloud:precise-grizzly'): self.precise_grizzly, | 109 | ('precise', 'cloud:precise-grizzly'): self.precise_grizzly, |
2243 | 105 | ('precise', 'cloud:precise-havana'): self.precise_havana, | 110 | ('precise', 'cloud:precise-havana'): self.precise_havana, |
2244 | 106 | ('precise', 'cloud:precise-icehouse'): self.precise_icehouse, | 111 | ('precise', 'cloud:precise-icehouse'): self.precise_icehouse, |
2246 | 107 | ('trusty', None): self.trusty_icehouse} | 112 | ('trusty', None): self.trusty_icehouse, |
2247 | 113 | ('trusty', 'cloud:trusty-juno'): self.trusty_juno, | ||
2248 | 114 | ('trusty', 'cloud:trusty-kilo'): self.trusty_kilo, | ||
2249 | 115 | ('utopic', None): self.utopic_juno, | ||
2250 | 116 | ('vivid', None): self.vivid_kilo} | ||
2251 | 108 | return releases[(self.series, self.openstack)] | 117 | return releases[(self.series, self.openstack)] |
2252 | 118 | |||
2253 | 119 | def _get_openstack_release_string(self): | ||
2254 | 120 | """Get openstack release string. | ||
2255 | 121 | |||
2256 | 122 | Return a string representing the openstack release. | ||
2257 | 123 | """ | ||
2258 | 124 | releases = OrderedDict([ | ||
2259 | 125 | ('precise', 'essex'), | ||
2260 | 126 | ('quantal', 'folsom'), | ||
2261 | 127 | ('raring', 'grizzly'), | ||
2262 | 128 | ('saucy', 'havana'), | ||
2263 | 129 | ('trusty', 'icehouse'), | ||
2264 | 130 | ('utopic', 'juno'), | ||
2265 | 131 | ('vivid', 'kilo'), | ||
2266 | 132 | ]) | ||
2267 | 133 | if self.openstack: | ||
2268 | 134 | os_origin = self.openstack.split(':')[1] | ||
2269 | 135 | return os_origin.split('%s-' % self.series)[1].split('/')[0] | ||
2270 | 136 | else: | ||
2271 | 137 | return releases[self.series] |
charm_lint_check #3526 nova-cell-next for 1chb1n mp256591
LINT OK: passed
Build: http://10.245.162.77:8080/job/charm_lint_check/3526/