Merge lp:~le-charmers/charms/trusty/keystone/leadership-election into lp:~openstack-charmers-archive/charms/trusty/keystone/next
- Trusty Tahr (14.04)
- leadership-election
- Merge into next
Proposed by
Edward Hope-Morley
Status: | Merged |
---|---|
Merged at revision: | 151 |
Proposed branch: | lp:~le-charmers/charms/trusty/keystone/leadership-election |
Merge into: | lp:~openstack-charmers-archive/charms/trusty/keystone/next |
Diff against target: |
1683 lines (+597/-235) 17 files modified
charm-helpers-tests.yaml (+1/-1)
hooks/charmhelpers/contrib/hahelpers/cluster.py (+37/-2)
hooks/charmhelpers/contrib/openstack/neutron.py (+10/-5)
hooks/charmhelpers/contrib/openstack/templates/section-zeromq (+0/-14)
hooks/charmhelpers/contrib/openstack/utils.py (+65/-18)
hooks/charmhelpers/contrib/peerstorage/__init__.py (+123/-3)
hooks/charmhelpers/contrib/python/packages.py (+28/-5)
hooks/charmhelpers/contrib/unison/__init__.py (+5/-4)
hooks/charmhelpers/core/hookenv.py (+147/-10)
hooks/charmhelpers/core/host.py (+1/-1)
hooks/charmhelpers/core/services/base.py (+32/-11)
hooks/charmhelpers/fetch/__init__.py (+1/-1)
hooks/charmhelpers/fetch/giturl.py (+7/-5)
hooks/keystone_hooks.py (+16/-12)
hooks/keystone_ssl.py (+3/-27)
hooks/keystone_utils.py (+88/-65)
unit_tests/test_keystone_hooks.py (+33/-51)
To merge this branch: | bzr merge lp:~le-charmers/charms/trusty/keystone/leadership-election |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
OpenStack Charmers | Pending | ||
Review via email:
|
Commit message
Description of the change
To post a comment you must log in.
- 158. By Edward Hope-Morley
-
synced /next
- 159. By Edward Hope-Morley
-
make sync
- 160. By Liam Young
-
Merged trunk in + LE charmhelper sync
- 161. By Liam Young
-
Resync le charm helpers
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'charm-helpers-tests.yaml' | |||
2 | --- charm-helpers-tests.yaml 2015-05-13 09:42:17 +0000 | |||
3 | +++ charm-helpers-tests.yaml 2015-06-04 08:44:44 +0000 | |||
4 | @@ -1,4 +1,4 @@ | |||
6 | 1 | branch: lp:charm-helpers | 1 | branch: lp:charm-helpers |
7 | 2 | destination: tests/charmhelpers | 2 | destination: tests/charmhelpers |
8 | 3 | include: | 3 | include: |
9 | 4 | - contrib.amulet | 4 | - contrib.amulet |
10 | 5 | 5 | ||
11 | === modified file 'hooks/charmhelpers/contrib/hahelpers/cluster.py' | |||
12 | --- hooks/charmhelpers/contrib/hahelpers/cluster.py 2015-03-18 18:59:03 +0000 | |||
13 | +++ hooks/charmhelpers/contrib/hahelpers/cluster.py 2015-06-04 08:44:44 +0000 | |||
14 | @@ -44,6 +44,7 @@ | |||
15 | 44 | ERROR, | 44 | ERROR, |
16 | 45 | WARNING, | 45 | WARNING, |
17 | 46 | unit_get, | 46 | unit_get, |
18 | 47 | is_leader as juju_is_leader | ||
19 | 47 | ) | 48 | ) |
20 | 48 | from charmhelpers.core.decorators import ( | 49 | from charmhelpers.core.decorators import ( |
21 | 49 | retry_on_exception, | 50 | retry_on_exception, |
22 | @@ -52,6 +53,8 @@ | |||
23 | 52 | bool_from_string, | 53 | bool_from_string, |
24 | 53 | ) | 54 | ) |
25 | 54 | 55 | ||
26 | 56 | DC_RESOURCE_NAME = 'DC' | ||
27 | 57 | |||
28 | 55 | 58 | ||
29 | 56 | class HAIncompleteConfig(Exception): | 59 | class HAIncompleteConfig(Exception): |
30 | 57 | pass | 60 | pass |
31 | @@ -66,12 +69,21 @@ | |||
32 | 66 | Returns True if the charm executing this is the elected cluster leader. | 69 | Returns True if the charm executing this is the elected cluster leader. |
33 | 67 | 70 | ||
34 | 68 | It relies on two mechanisms to determine leadership: | 71 | It relies on two mechanisms to determine leadership: |
36 | 69 | 1. If the charm is part of a corosync cluster, call corosync to | 72 | 1. If juju is sufficiently new and leadership election is supported, |
37 | 73 | the is_leader command will be used. | ||
38 | 74 | 2. If the charm is part of a corosync cluster, call corosync to | ||
39 | 70 | determine leadership. | 75 | determine leadership. |
41 | 71 | 2. If the charm is not part of a corosync cluster, the leader is | 76 | 3. If the charm is not part of a corosync cluster, the leader is |
42 | 72 | determined as being "the alive unit with the lowest unit number". In | 77 | determined as being "the alive unit with the lowest unit number". In |
43 | 73 | other words, the oldest surviving unit. | 78 | other words, the oldest surviving unit. |
44 | 74 | """ | 79 | """ |
45 | 80 | try: | ||
46 | 81 | return juju_is_leader() | ||
47 | 82 | except NotImplementedError: | ||
48 | 83 | log('Juju leadership election feature not enabled' | ||
49 | 84 | ', using fallback support', | ||
50 | 85 | level=WARNING) | ||
51 | 86 | |||
52 | 75 | if is_clustered(): | 87 | if is_clustered(): |
53 | 76 | if not is_crm_leader(resource): | 88 | if not is_crm_leader(resource): |
54 | 77 | log('Deferring action to CRM leader.', level=INFO) | 89 | log('Deferring action to CRM leader.', level=INFO) |
55 | @@ -95,6 +107,27 @@ | |||
56 | 95 | return False | 107 | return False |
57 | 96 | 108 | ||
58 | 97 | 109 | ||
59 | 110 | def is_crm_dc(): | ||
60 | 111 | """ | ||
61 | 112 | Determine leadership by querying the pacemaker Designated Controller | ||
62 | 113 | """ | ||
63 | 114 | cmd = ['crm', 'status'] | ||
64 | 115 | try: | ||
65 | 116 | status = subprocess.check_output(cmd, stderr=subprocess.STDOUT) | ||
66 | 117 | if not isinstance(status, six.text_type): | ||
67 | 118 | status = six.text_type(status, "utf-8") | ||
68 | 119 | except subprocess.CalledProcessError: | ||
69 | 120 | return False | ||
70 | 121 | current_dc = '' | ||
71 | 122 | for line in status.split('\n'): | ||
72 | 123 | if line.startswith('Current DC'): | ||
73 | 124 | # Current DC: juju-lytrusty-machine-2 (168108163) - partition with quorum | ||
74 | 125 | current_dc = line.split(':')[1].split()[0] | ||
75 | 126 | if current_dc == get_unit_hostname(): | ||
76 | 127 | return True | ||
77 | 128 | return False | ||
78 | 129 | |||
79 | 130 | |||
80 | 98 | @retry_on_exception(5, base_delay=2, exc_type=CRMResourceNotFound) | 131 | @retry_on_exception(5, base_delay=2, exc_type=CRMResourceNotFound) |
81 | 99 | def is_crm_leader(resource, retry=False): | 132 | def is_crm_leader(resource, retry=False): |
82 | 100 | """ | 133 | """ |
83 | @@ -104,6 +137,8 @@ | |||
84 | 104 | We allow this operation to be retried to avoid the possibility of getting a | 137 | We allow this operation to be retried to avoid the possibility of getting a |
85 | 105 | false negative. See LP #1396246 for more info. | 138 | false negative. See LP #1396246 for more info. |
86 | 106 | """ | 139 | """ |
87 | 140 | if resource == DC_RESOURCE_NAME: | ||
88 | 141 | return is_crm_dc() | ||
89 | 107 | cmd = ['crm', 'resource', 'show', resource] | 142 | cmd = ['crm', 'resource', 'show', resource] |
90 | 108 | try: | 143 | try: |
91 | 109 | status = subprocess.check_output(cmd, stderr=subprocess.STDOUT) | 144 | status = subprocess.check_output(cmd, stderr=subprocess.STDOUT) |
92 | 110 | 145 | ||
93 | === modified file 'hooks/charmhelpers/contrib/openstack/neutron.py' | |||
94 | --- hooks/charmhelpers/contrib/openstack/neutron.py 2015-04-16 19:55:16 +0000 | |||
95 | +++ hooks/charmhelpers/contrib/openstack/neutron.py 2015-06-04 08:44:44 +0000 | |||
96 | @@ -256,11 +256,14 @@ | |||
97 | 256 | def parse_mappings(mappings): | 256 | def parse_mappings(mappings): |
98 | 257 | parsed = {} | 257 | parsed = {} |
99 | 258 | if mappings: | 258 | if mappings: |
101 | 259 | mappings = mappings.split(' ') | 259 | mappings = mappings.split() |
102 | 260 | for m in mappings: | 260 | for m in mappings: |
103 | 261 | p = m.partition(':') | 261 | p = m.partition(':') |
106 | 262 | if p[1] == ':': | 262 | key = p[0].strip() |
107 | 263 | parsed[p[0].strip()] = p[2].strip() | 263 | if p[1]: |
108 | 264 | parsed[key] = p[2].strip() | ||
109 | 265 | else: | ||
110 | 266 | parsed[key] = '' | ||
111 | 264 | 267 | ||
112 | 265 | return parsed | 268 | return parsed |
113 | 266 | 269 | ||
114 | @@ -283,13 +286,13 @@ | |||
115 | 283 | Returns dict of the form {bridge:port}. | 286 | Returns dict of the form {bridge:port}. |
116 | 284 | """ | 287 | """ |
117 | 285 | _mappings = parse_mappings(mappings) | 288 | _mappings = parse_mappings(mappings) |
119 | 286 | if not _mappings: | 289 | if not _mappings or list(_mappings.values()) == ['']: |
120 | 287 | if not mappings: | 290 | if not mappings: |
121 | 288 | return {} | 291 | return {} |
122 | 289 | 292 | ||
123 | 290 | # For backwards-compatibility we need to support port-only provided in | 293 | # For backwards-compatibility we need to support port-only provided in |
124 | 291 | # config. | 294 | # config. |
126 | 292 | _mappings = {default_bridge: mappings.split(' ')[0]} | 295 | _mappings = {default_bridge: mappings.split()[0]} |
127 | 293 | 296 | ||
128 | 294 | bridges = _mappings.keys() | 297 | bridges = _mappings.keys() |
129 | 295 | ports = _mappings.values() | 298 | ports = _mappings.values() |
130 | @@ -309,6 +312,8 @@ | |||
131 | 309 | 312 | ||
132 | 310 | Mappings must be a space-delimited list of provider:start:end mappings. | 313 | Mappings must be a space-delimited list of provider:start:end mappings. |
133 | 311 | 314 | ||
134 | 315 | The start:end range is optional and may be omitted. | ||
135 | 316 | |||
136 | 312 | Returns dict of the form {provider: (start, end)}. | 317 | Returns dict of the form {provider: (start, end)}. |
137 | 313 | """ | 318 | """ |
138 | 314 | _mappings = parse_mappings(mappings) | 319 | _mappings = parse_mappings(mappings) |
139 | 315 | 320 | ||
140 | === added file 'hooks/charmhelpers/contrib/openstack/templates/section-zeromq' | |||
141 | --- hooks/charmhelpers/contrib/openstack/templates/section-zeromq 1970-01-01 00:00:00 +0000 | |||
142 | +++ hooks/charmhelpers/contrib/openstack/templates/section-zeromq 2015-06-04 08:44:44 +0000 | |||
143 | @@ -0,0 +1,14 @@ | |||
144 | 1 | {% if zmq_host -%} | ||
145 | 2 | # ZeroMQ configuration (restart-nonce: {{ zmq_nonce }}) | ||
146 | 3 | rpc_backend = zmq | ||
147 | 4 | rpc_zmq_host = {{ zmq_host }} | ||
148 | 5 | {% if zmq_redis_address -%} | ||
149 | 6 | rpc_zmq_matchmaker = redis | ||
150 | 7 | matchmaker_heartbeat_freq = 15 | ||
151 | 8 | matchmaker_heartbeat_ttl = 30 | ||
152 | 9 | [matchmaker_redis] | ||
153 | 10 | host = {{ zmq_redis_address }} | ||
154 | 11 | {% else -%} | ||
155 | 12 | rpc_zmq_matchmaker = ring | ||
156 | 13 | {% endif -%} | ||
157 | 14 | {% endif -%} | ||
158 | 0 | 15 | ||
159 | === removed file 'hooks/charmhelpers/contrib/openstack/templates/section-zeromq' | |||
160 | --- hooks/charmhelpers/contrib/openstack/templates/section-zeromq 2015-04-09 02:17:36 +0000 | |||
161 | +++ hooks/charmhelpers/contrib/openstack/templates/section-zeromq 1970-01-01 00:00:00 +0000 | |||
162 | @@ -1,14 +0,0 @@ | |||
163 | 1 | {% if zmq_host -%} | ||
164 | 2 | # ZeroMQ configuration (restart-nonce: {{ zmq_nonce }}) | ||
165 | 3 | rpc_backend = zmq | ||
166 | 4 | rpc_zmq_host = {{ zmq_host }} | ||
167 | 5 | {% if zmq_redis_address -%} | ||
168 | 6 | rpc_zmq_matchmaker = redis | ||
169 | 7 | matchmaker_heartbeat_freq = 15 | ||
170 | 8 | matchmaker_heartbeat_ttl = 30 | ||
171 | 9 | [matchmaker_redis] | ||
172 | 10 | host = {{ zmq_redis_address }} | ||
173 | 11 | {% else -%} | ||
174 | 12 | rpc_zmq_matchmaker = ring | ||
175 | 13 | {% endif -%} | ||
176 | 14 | {% endif -%} | ||
177 | 15 | 0 | ||
178 | === modified file 'hooks/charmhelpers/contrib/openstack/utils.py' | |||
179 | --- hooks/charmhelpers/contrib/openstack/utils.py 2015-04-16 14:09:47 +0000 | |||
180 | +++ hooks/charmhelpers/contrib/openstack/utils.py 2015-06-04 08:44:44 +0000 | |||
181 | @@ -53,9 +53,13 @@ | |||
182 | 53 | get_ipv6_addr | 53 | get_ipv6_addr |
183 | 54 | ) | 54 | ) |
184 | 55 | 55 | ||
185 | 56 | from charmhelpers.contrib.python.packages import ( | ||
186 | 57 | pip_create_virtualenv, | ||
187 | 58 | pip_install, | ||
188 | 59 | ) | ||
189 | 60 | |||
190 | 56 | from charmhelpers.core.host import lsb_release, mounts, umount | 61 | from charmhelpers.core.host import lsb_release, mounts, umount |
191 | 57 | from charmhelpers.fetch import apt_install, apt_cache, install_remote | 62 | from charmhelpers.fetch import apt_install, apt_cache, install_remote |
192 | 58 | from charmhelpers.contrib.python.packages import pip_install | ||
193 | 59 | from charmhelpers.contrib.storage.linux.utils import is_block_device, zap_disk | 63 | from charmhelpers.contrib.storage.linux.utils import is_block_device, zap_disk |
194 | 60 | from charmhelpers.contrib.storage.linux.loopback import ensure_loopback_device | 64 | from charmhelpers.contrib.storage.linux.loopback import ensure_loopback_device |
195 | 61 | 65 | ||
196 | @@ -497,7 +501,17 @@ | |||
197 | 497 | requirements_dir = None | 501 | requirements_dir = None |
198 | 498 | 502 | ||
199 | 499 | 503 | ||
201 | 500 | def git_clone_and_install(projects_yaml, core_project): | 504 | def _git_yaml_load(projects_yaml): |
202 | 505 | """ | ||
203 | 506 | Load the specified yaml into a dictionary. | ||
204 | 507 | """ | ||
205 | 508 | if not projects_yaml: | ||
206 | 509 | return None | ||
207 | 510 | |||
208 | 511 | return yaml.load(projects_yaml) | ||
209 | 512 | |||
210 | 513 | |||
211 | 514 | def git_clone_and_install(projects_yaml, core_project, depth=1): | ||
212 | 501 | """ | 515 | """ |
213 | 502 | Clone/install all specified OpenStack repositories. | 516 | Clone/install all specified OpenStack repositories. |
214 | 503 | 517 | ||
215 | @@ -510,23 +524,22 @@ | |||
216 | 510 | repository: 'git://git.openstack.org/openstack/requirements.git', | 524 | repository: 'git://git.openstack.org/openstack/requirements.git', |
217 | 511 | branch: 'stable/icehouse'} | 525 | branch: 'stable/icehouse'} |
218 | 512 | directory: /mnt/openstack-git | 526 | directory: /mnt/openstack-git |
221 | 513 | http_proxy: http://squid.internal:3128 | 527 | http_proxy: squid-proxy-url |
222 | 514 | https_proxy: https://squid.internal:3128 | 528 | https_proxy: squid-proxy-url |
223 | 515 | 529 | ||
224 | 516 | The directory, http_proxy, and https_proxy keys are optional. | 530 | The directory, http_proxy, and https_proxy keys are optional. |
225 | 517 | """ | 531 | """ |
226 | 518 | global requirements_dir | 532 | global requirements_dir |
227 | 519 | parent_dir = '/mnt/openstack-git' | 533 | parent_dir = '/mnt/openstack-git' |
233 | 520 | 534 | http_proxy = None | |
234 | 521 | if not projects_yaml: | 535 | |
235 | 522 | return | 536 | projects = _git_yaml_load(projects_yaml) |
231 | 523 | |||
232 | 524 | projects = yaml.load(projects_yaml) | ||
236 | 525 | _git_validate_projects_yaml(projects, core_project) | 537 | _git_validate_projects_yaml(projects, core_project) |
237 | 526 | 538 | ||
238 | 527 | old_environ = dict(os.environ) | 539 | old_environ = dict(os.environ) |
239 | 528 | 540 | ||
240 | 529 | if 'http_proxy' in projects.keys(): | 541 | if 'http_proxy' in projects.keys(): |
241 | 542 | http_proxy = projects['http_proxy'] | ||
242 | 530 | os.environ['http_proxy'] = projects['http_proxy'] | 543 | os.environ['http_proxy'] = projects['http_proxy'] |
243 | 531 | if 'https_proxy' in projects.keys(): | 544 | if 'https_proxy' in projects.keys(): |
244 | 532 | os.environ['https_proxy'] = projects['https_proxy'] | 545 | os.environ['https_proxy'] = projects['https_proxy'] |
245 | @@ -534,15 +547,19 @@ | |||
246 | 534 | if 'directory' in projects.keys(): | 547 | if 'directory' in projects.keys(): |
247 | 535 | parent_dir = projects['directory'] | 548 | parent_dir = projects['directory'] |
248 | 536 | 549 | ||
249 | 550 | pip_create_virtualenv(os.path.join(parent_dir, 'venv')) | ||
250 | 551 | |||
251 | 537 | for p in projects['repositories']: | 552 | for p in projects['repositories']: |
252 | 538 | repo = p['repository'] | 553 | repo = p['repository'] |
253 | 539 | branch = p['branch'] | 554 | branch = p['branch'] |
254 | 540 | if p['name'] == 'requirements': | 555 | if p['name'] == 'requirements': |
256 | 541 | repo_dir = _git_clone_and_install_single(repo, branch, parent_dir, | 556 | repo_dir = _git_clone_and_install_single(repo, branch, depth, |
257 | 557 | parent_dir, http_proxy, | ||
258 | 542 | update_requirements=False) | 558 | update_requirements=False) |
259 | 543 | requirements_dir = repo_dir | 559 | requirements_dir = repo_dir |
260 | 544 | else: | 560 | else: |
262 | 545 | repo_dir = _git_clone_and_install_single(repo, branch, parent_dir, | 561 | repo_dir = _git_clone_and_install_single(repo, branch, depth, |
263 | 562 | parent_dir, http_proxy, | ||
264 | 546 | update_requirements=True) | 563 | update_requirements=True) |
265 | 547 | 564 | ||
266 | 548 | os.environ = old_environ | 565 | os.environ = old_environ |
267 | @@ -574,7 +591,8 @@ | |||
268 | 574 | error_out('openstack-origin-git key \'{}\' is missing'.format(key)) | 591 | error_out('openstack-origin-git key \'{}\' is missing'.format(key)) |
269 | 575 | 592 | ||
270 | 576 | 593 | ||
272 | 577 | def _git_clone_and_install_single(repo, branch, parent_dir, update_requirements): | 594 | def _git_clone_and_install_single(repo, branch, depth, parent_dir, http_proxy, |
273 | 595 | update_requirements): | ||
274 | 578 | """ | 596 | """ |
275 | 579 | Clone and install a single git repository. | 597 | Clone and install a single git repository. |
276 | 580 | """ | 598 | """ |
277 | @@ -587,7 +605,8 @@ | |||
278 | 587 | 605 | ||
279 | 588 | if not os.path.exists(dest_dir): | 606 | if not os.path.exists(dest_dir): |
280 | 589 | juju_log('Cloning git repo: {}, branch: {}'.format(repo, branch)) | 607 | juju_log('Cloning git repo: {}, branch: {}'.format(repo, branch)) |
282 | 590 | repo_dir = install_remote(repo, dest=parent_dir, branch=branch) | 608 | repo_dir = install_remote(repo, dest=parent_dir, branch=branch, |
283 | 609 | depth=depth) | ||
284 | 591 | else: | 610 | else: |
285 | 592 | repo_dir = dest_dir | 611 | repo_dir = dest_dir |
286 | 593 | 612 | ||
287 | @@ -598,7 +617,12 @@ | |||
288 | 598 | _git_update_requirements(repo_dir, requirements_dir) | 617 | _git_update_requirements(repo_dir, requirements_dir) |
289 | 599 | 618 | ||
290 | 600 | juju_log('Installing git repo from dir: {}'.format(repo_dir)) | 619 | juju_log('Installing git repo from dir: {}'.format(repo_dir)) |
292 | 601 | pip_install(repo_dir) | 620 | if http_proxy: |
293 | 621 | pip_install(repo_dir, proxy=http_proxy, | ||
294 | 622 | venv=os.path.join(parent_dir, 'venv')) | ||
295 | 623 | else: | ||
296 | 624 | pip_install(repo_dir, | ||
297 | 625 | venv=os.path.join(parent_dir, 'venv')) | ||
298 | 602 | 626 | ||
299 | 603 | return repo_dir | 627 | return repo_dir |
300 | 604 | 628 | ||
301 | @@ -621,16 +645,27 @@ | |||
302 | 621 | os.chdir(orig_dir) | 645 | os.chdir(orig_dir) |
303 | 622 | 646 | ||
304 | 623 | 647 | ||
305 | 648 | def git_pip_venv_dir(projects_yaml): | ||
306 | 649 | """ | ||
307 | 650 | Return the pip virtualenv path. | ||
308 | 651 | """ | ||
309 | 652 | parent_dir = '/mnt/openstack-git' | ||
310 | 653 | |||
311 | 654 | projects = _git_yaml_load(projects_yaml) | ||
312 | 655 | |||
313 | 656 | if 'directory' in projects.keys(): | ||
314 | 657 | parent_dir = projects['directory'] | ||
315 | 658 | |||
316 | 659 | return os.path.join(parent_dir, 'venv') | ||
317 | 660 | |||
318 | 661 | |||
319 | 624 | def git_src_dir(projects_yaml, project): | 662 | def git_src_dir(projects_yaml, project): |
320 | 625 | """ | 663 | """ |
321 | 626 | Return the directory where the specified project's source is located. | 664 | Return the directory where the specified project's source is located. |
322 | 627 | """ | 665 | """ |
323 | 628 | parent_dir = '/mnt/openstack-git' | 666 | parent_dir = '/mnt/openstack-git' |
324 | 629 | 667 | ||
329 | 630 | if not projects_yaml: | 668 | projects = _git_yaml_load(projects_yaml) |
326 | 631 | return | ||
327 | 632 | |||
328 | 633 | projects = yaml.load(projects_yaml) | ||
330 | 634 | 669 | ||
331 | 635 | if 'directory' in projects.keys(): | 670 | if 'directory' in projects.keys(): |
332 | 636 | parent_dir = projects['directory'] | 671 | parent_dir = projects['directory'] |
333 | @@ -640,3 +675,15 @@ | |||
334 | 640 | return os.path.join(parent_dir, os.path.basename(p['repository'])) | 675 | return os.path.join(parent_dir, os.path.basename(p['repository'])) |
335 | 641 | 676 | ||
336 | 642 | return None | 677 | return None |
337 | 678 | |||
338 | 679 | |||
339 | 680 | def git_yaml_value(projects_yaml, key): | ||
340 | 681 | """ | ||
341 | 682 | Return the value in projects_yaml for the specified key. | ||
342 | 683 | """ | ||
343 | 684 | projects = _git_yaml_load(projects_yaml) | ||
344 | 685 | |||
345 | 686 | if key in projects.keys(): | ||
346 | 687 | return projects[key] | ||
347 | 688 | |||
348 | 689 | return None | ||
349 | 643 | 690 | ||
350 | === modified file 'hooks/charmhelpers/contrib/peerstorage/__init__.py' | |||
351 | --- hooks/charmhelpers/contrib/peerstorage/__init__.py 2015-03-11 11:45:09 +0000 | |||
352 | +++ hooks/charmhelpers/contrib/peerstorage/__init__.py 2015-06-04 08:44:44 +0000 | |||
353 | @@ -14,14 +14,19 @@ | |||
354 | 14 | # You should have received a copy of the GNU Lesser General Public License | 14 | # You should have received a copy of the GNU Lesser General Public License |
355 | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. |
356 | 16 | 16 | ||
357 | 17 | import json | ||
358 | 17 | import six | 18 | import six |
359 | 19 | |||
360 | 18 | from charmhelpers.core.hookenv import relation_id as current_relation_id | 20 | from charmhelpers.core.hookenv import relation_id as current_relation_id |
361 | 19 | from charmhelpers.core.hookenv import ( | 21 | from charmhelpers.core.hookenv import ( |
362 | 20 | is_relation_made, | 22 | is_relation_made, |
363 | 21 | relation_ids, | 23 | relation_ids, |
365 | 22 | relation_get, | 24 | relation_get as _relation_get, |
366 | 23 | local_unit, | 25 | local_unit, |
368 | 24 | relation_set, | 26 | relation_set as _relation_set, |
369 | 27 | leader_get as _leader_get, | ||
370 | 28 | leader_set, | ||
371 | 29 | is_leader, | ||
372 | 25 | ) | 30 | ) |
373 | 26 | 31 | ||
374 | 27 | 32 | ||
375 | @@ -54,6 +59,105 @@ | |||
376 | 54 | """ | 59 | """ |
377 | 55 | 60 | ||
378 | 56 | 61 | ||
379 | 62 | def leader_get(attribute=None): | ||
380 | 63 | """Wrapper to ensure that settings are migrated from the peer relation. | ||
381 | 64 | |||
382 | 65 | This is to support upgrading an environment that does not support | ||
383 | 66 | Juju leadership election to one that does. | ||
384 | 67 | |||
385 | 68 | If a setting is not extant in the leader-get but is on the relation-get | ||
386 | 69 | peer rel, it is migrated and marked as such so that it is not re-migrated. | ||
387 | 70 | """ | ||
388 | 71 | migration_key = '__leader_get_migrated_settings__' | ||
389 | 72 | if not is_leader(): | ||
390 | 73 | return _leader_get(attribute=attribute) | ||
391 | 74 | |||
392 | 75 | settings_migrated = False | ||
393 | 76 | leader_settings = _leader_get(attribute=attribute) | ||
394 | 77 | previously_migrated = _leader_get(attribute=migration_key) | ||
395 | 78 | |||
396 | 79 | if previously_migrated: | ||
397 | 80 | migrated = set(json.loads(previously_migrated)) | ||
398 | 81 | else: | ||
399 | 82 | migrated = set([]) | ||
400 | 83 | |||
401 | 84 | try: | ||
402 | 85 | if migration_key in leader_settings: | ||
403 | 86 | del leader_settings[migration_key] | ||
404 | 87 | except TypeError: | ||
405 | 88 | pass | ||
406 | 89 | |||
407 | 90 | if attribute: | ||
408 | 91 | if attribute in migrated: | ||
409 | 92 | return leader_settings | ||
410 | 93 | |||
411 | 94 | # If attribute not present in leader db, check if this unit has set | ||
412 | 95 | # the attribute in the peer relation | ||
413 | 96 | if not leader_settings: | ||
414 | 97 | peer_setting = relation_get(attribute=attribute, unit=local_unit()) | ||
415 | 98 | if peer_setting: | ||
416 | 99 | leader_set(settings={attribute: peer_setting}) | ||
417 | 100 | leader_settings = peer_setting | ||
418 | 101 | |||
419 | 102 | if leader_settings: | ||
420 | 103 | settings_migrated = True | ||
421 | 104 | migrated.add(attribute) | ||
422 | 105 | else: | ||
423 | 106 | r_settings = relation_get(unit=local_unit()) | ||
424 | 107 | if r_settings: | ||
425 | 108 | for key in set(r_settings.keys()).difference(migrated): | ||
426 | 109 | # Leader setting wins | ||
427 | 110 | if not leader_settings.get(key): | ||
428 | 111 | leader_settings[key] = r_settings[key] | ||
429 | 112 | |||
430 | 113 | settings_migrated = True | ||
431 | 114 | migrated.add(key) | ||
432 | 115 | |||
433 | 116 | if settings_migrated: | ||
434 | 117 | leader_set(**leader_settings) | ||
435 | 118 | |||
436 | 119 | if migrated and settings_migrated: | ||
437 | 120 | migrated = json.dumps(list(migrated)) | ||
438 | 121 | leader_set(settings={migration_key: migrated}) | ||
439 | 122 | |||
440 | 123 | return leader_settings | ||
441 | 124 | |||
442 | 125 | |||
443 | 126 | def relation_set(relation_id=None, relation_settings=None, **kwargs): | ||
444 | 127 | """Attempt to use leader-set if supported in the current version of Juju, | ||
445 | 128 | otherwise falls back on relation-set. | ||
446 | 129 | |||
447 | 130 | Note that we only attempt to use leader-set if the provided relation_id is | ||
448 | 131 | a peer relation id or no relation id is provided (in which case we assume | ||
449 | 132 | we are within the peer relation context). | ||
450 | 133 | """ | ||
451 | 134 | try: | ||
452 | 135 | if relation_id in relation_ids('cluster'): | ||
453 | 136 | return leader_set(settings=relation_settings, **kwargs) | ||
454 | 137 | else: | ||
455 | 138 | raise NotImplementedError | ||
456 | 139 | except NotImplementedError: | ||
457 | 140 | return _relation_set(relation_id=relation_id, | ||
458 | 141 | relation_settings=relation_settings, **kwargs) | ||
459 | 142 | |||
460 | 143 | |||
461 | 144 | def relation_get(attribute=None, unit=None, rid=None): | ||
462 | 145 | """Attempt to use leader-get if supported in the current version of Juju, | ||
463 | 146 | otherwise falls back on relation-get. | ||
464 | 147 | |||
465 | 148 | Note that we only attempt to use leader-get if the provided rid is a peer | ||
466 | 149 | relation id or no relation id is provided (in which case we assume we are | ||
467 | 150 | within the peer relation context). | ||
468 | 151 | """ | ||
469 | 152 | try: | ||
470 | 153 | if rid in relation_ids('cluster'): | ||
471 | 154 | return leader_get(attribute) | ||
472 | 155 | else: | ||
473 | 156 | raise NotImplementedError | ||
474 | 157 | except NotImplementedError: | ||
475 | 158 | return _relation_get(attribute=attribute, rid=rid, unit=unit) | ||
476 | 159 | |||
477 | 160 | |||
478 | 57 | def peer_retrieve(key, relation_name='cluster'): | 161 | def peer_retrieve(key, relation_name='cluster'): |
479 | 58 | """Retrieve a named key from peer relation `relation_name`.""" | 162 | """Retrieve a named key from peer relation `relation_name`.""" |
480 | 59 | cluster_rels = relation_ids(relation_name) | 163 | cluster_rels = relation_ids(relation_name) |
481 | @@ -73,6 +177,8 @@ | |||
482 | 73 | exc_list = exc_list if exc_list else [] | 177 | exc_list = exc_list if exc_list else [] |
483 | 74 | peerdb_settings = peer_retrieve('-', relation_name=relation_name) | 178 | peerdb_settings = peer_retrieve('-', relation_name=relation_name) |
484 | 75 | matched = {} | 179 | matched = {} |
485 | 180 | if peerdb_settings is None: | ||
486 | 181 | return matched | ||
487 | 76 | for k, v in peerdb_settings.items(): | 182 | for k, v in peerdb_settings.items(): |
488 | 77 | full_prefix = prefix + delimiter | 183 | full_prefix = prefix + delimiter |
489 | 78 | if k.startswith(full_prefix): | 184 | if k.startswith(full_prefix): |
490 | @@ -96,12 +202,26 @@ | |||
491 | 96 | 'peer relation {}'.format(relation_name)) | 202 | 'peer relation {}'.format(relation_name)) |
492 | 97 | 203 | ||
493 | 98 | 204 | ||
495 | 99 | def peer_echo(includes=None): | 205 | def peer_echo(includes=None, force=False): |
496 | 100 | """Echo filtered attributes back onto the same relation for storage. | 206 | """Echo filtered attributes back onto the same relation for storage. |
497 | 101 | 207 | ||
498 | 102 | This is a requirement to use the peerstorage module - it needs to be called | 208 | This is a requirement to use the peerstorage module - it needs to be called |
499 | 103 | from the peer relation's changed hook. | 209 | from the peer relation's changed hook. |
500 | 210 | |||
501 | 211 | If Juju leader support exists this will be a noop unless force is True. | ||
502 | 104 | """ | 212 | """ |
503 | 213 | try: | ||
504 | 214 | is_leader() | ||
505 | 215 | except NotImplementedError: | ||
506 | 216 | pass | ||
507 | 217 | else: | ||
508 | 218 | if not force: | ||
509 | 219 | return # NOOP if leader-election is supported | ||
510 | 220 | |||
511 | 221 | # Use original non-leader calls | ||
512 | 222 | relation_get = _relation_get | ||
513 | 223 | relation_set = _relation_set | ||
514 | 224 | |||
515 | 105 | rdata = relation_get() | 225 | rdata = relation_get() |
516 | 106 | echo_data = {} | 226 | echo_data = {} |
517 | 107 | if includes is None: | 227 | if includes is None: |
518 | 108 | 228 | ||
519 | === modified file 'hooks/charmhelpers/contrib/python/packages.py' | |||
520 | --- hooks/charmhelpers/contrib/python/packages.py 2015-03-11 11:45:09 +0000 | |||
521 | +++ hooks/charmhelpers/contrib/python/packages.py 2015-06-04 08:44:44 +0000 | |||
522 | @@ -17,8 +17,11 @@ | |||
523 | 17 | # You should have received a copy of the GNU Lesser General Public License | 17 | # You should have received a copy of the GNU Lesser General Public License |
524 | 18 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | 18 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. |
525 | 19 | 19 | ||
526 | 20 | import os | ||
527 | 21 | import subprocess | ||
528 | 22 | |||
529 | 20 | from charmhelpers.fetch import apt_install, apt_update | 23 | from charmhelpers.fetch import apt_install, apt_update |
531 | 21 | from charmhelpers.core.hookenv import log | 24 | from charmhelpers.core.hookenv import charm_dir, log |
532 | 22 | 25 | ||
533 | 23 | try: | 26 | try: |
534 | 24 | from pip import main as pip_execute | 27 | from pip import main as pip_execute |
535 | @@ -51,11 +54,15 @@ | |||
536 | 51 | pip_execute(command) | 54 | pip_execute(command) |
537 | 52 | 55 | ||
538 | 53 | 56 | ||
540 | 54 | def pip_install(package, fatal=False, upgrade=False, **options): | 57 | def pip_install(package, fatal=False, upgrade=False, venv=None, **options): |
541 | 55 | """Install a python package""" | 58 | """Install a python package""" |
543 | 56 | command = ["install"] | 59 | if venv: |
544 | 60 | venv_python = os.path.join(venv, 'bin/pip') | ||
545 | 61 | command = [venv_python, "install"] | ||
546 | 62 | else: | ||
547 | 63 | command = ["install"] | ||
548 | 57 | 64 | ||
550 | 58 | available_options = ('proxy', 'src', 'log', "index-url", ) | 65 | available_options = ('proxy', 'src', 'log', 'index-url', ) |
551 | 59 | for option in parse_options(options, available_options): | 66 | for option in parse_options(options, available_options): |
552 | 60 | command.append(option) | 67 | command.append(option) |
553 | 61 | 68 | ||
554 | @@ -69,7 +76,10 @@ | |||
555 | 69 | 76 | ||
556 | 70 | log("Installing {} package with options: {}".format(package, | 77 | log("Installing {} package with options: {}".format(package, |
557 | 71 | command)) | 78 | command)) |
559 | 72 | pip_execute(command) | 79 | if venv: |
560 | 80 | subprocess.check_call(command) | ||
561 | 81 | else: | ||
562 | 82 | pip_execute(command) | ||
563 | 73 | 83 | ||
564 | 74 | 84 | ||
565 | 75 | def pip_uninstall(package, **options): | 85 | def pip_uninstall(package, **options): |
566 | @@ -94,3 +104,16 @@ | |||
567 | 94 | """Returns the list of current python installed packages | 104 | """Returns the list of current python installed packages |
568 | 95 | """ | 105 | """ |
569 | 96 | return pip_execute(["list"]) | 106 | return pip_execute(["list"]) |
570 | 107 | |||
571 | 108 | |||
572 | 109 | def pip_create_virtualenv(path=None): | ||
573 | 110 | """Create an isolated Python environment.""" | ||
574 | 111 | apt_install('python-virtualenv') | ||
575 | 112 | |||
576 | 113 | if path: | ||
577 | 114 | venv_path = path | ||
578 | 115 | else: | ||
579 | 116 | venv_path = os.path.join(charm_dir(), 'venv') | ||
580 | 117 | |||
581 | 118 | if not os.path.exists(venv_path): | ||
582 | 119 | subprocess.check_call(['virtualenv', venv_path]) | ||
583 | 97 | 120 | ||
584 | === modified file 'hooks/charmhelpers/contrib/unison/__init__.py' | |||
585 | --- hooks/charmhelpers/contrib/unison/__init__.py 2015-03-18 18:59:03 +0000 | |||
586 | +++ hooks/charmhelpers/contrib/unison/__init__.py 2015-06-04 08:44:44 +0000 | |||
587 | @@ -63,6 +63,7 @@ | |||
588 | 63 | from charmhelpers.core.host import ( | 63 | from charmhelpers.core.host import ( |
589 | 64 | adduser, | 64 | adduser, |
590 | 65 | add_user_to_group, | 65 | add_user_to_group, |
591 | 66 | pwgen, | ||
592 | 66 | ) | 67 | ) |
593 | 67 | 68 | ||
594 | 68 | from charmhelpers.core.hookenv import ( | 69 | from charmhelpers.core.hookenv import ( |
595 | @@ -140,7 +141,7 @@ | |||
596 | 140 | ssh_dir = os.path.join(home_dir, '.ssh') | 141 | ssh_dir = os.path.join(home_dir, '.ssh') |
597 | 141 | auth_keys = os.path.join(ssh_dir, 'authorized_keys') | 142 | auth_keys = os.path.join(ssh_dir, 'authorized_keys') |
598 | 142 | log('Syncing authorized_keys @ %s.' % auth_keys) | 143 | log('Syncing authorized_keys @ %s.' % auth_keys) |
600 | 143 | with open(auth_keys, 'wb') as out: | 144 | with open(auth_keys, 'w') as out: |
601 | 144 | for k in keys: | 145 | for k in keys: |
602 | 145 | out.write('%s\n' % k) | 146 | out.write('%s\n' % k) |
603 | 146 | 147 | ||
604 | @@ -152,16 +153,16 @@ | |||
605 | 152 | khosts = [] | 153 | khosts = [] |
606 | 153 | for host in hosts: | 154 | for host in hosts: |
607 | 154 | cmd = ['ssh-keyscan', '-H', '-t', 'rsa', host] | 155 | cmd = ['ssh-keyscan', '-H', '-t', 'rsa', host] |
609 | 155 | remote_key = check_output(cmd).strip() | 156 | remote_key = check_output(cmd, universal_newlines=True).strip() |
610 | 156 | khosts.append(remote_key) | 157 | khosts.append(remote_key) |
611 | 157 | log('Syncing known_hosts @ %s.' % known_hosts) | 158 | log('Syncing known_hosts @ %s.' % known_hosts) |
613 | 158 | with open(known_hosts, 'wb') as out: | 159 | with open(known_hosts, 'w') as out: |
614 | 159 | for host in khosts: | 160 | for host in khosts: |
615 | 160 | out.write('%s\n' % host) | 161 | out.write('%s\n' % host) |
616 | 161 | 162 | ||
617 | 162 | 163 | ||
618 | 163 | def ensure_user(user, group=None): | 164 | def ensure_user(user, group=None): |
620 | 164 | adduser(user) | 165 | adduser(user, pwgen()) |
621 | 165 | if group: | 166 | if group: |
622 | 166 | add_user_to_group(user, group) | 167 | add_user_to_group(user, group) |
623 | 167 | 168 | ||
624 | 168 | 169 | ||
625 | === modified file 'hooks/charmhelpers/core/hookenv.py' | |||
626 | --- hooks/charmhelpers/core/hookenv.py 2015-04-15 15:21:50 +0000 | |||
627 | +++ hooks/charmhelpers/core/hookenv.py 2015-06-04 08:44:44 +0000 | |||
628 | @@ -21,12 +21,14 @@ | |||
629 | 21 | # Charm Helpers Developers <juju@lists.ubuntu.com> | 21 | # Charm Helpers Developers <juju@lists.ubuntu.com> |
630 | 22 | 22 | ||
631 | 23 | from __future__ import print_function | 23 | from __future__ import print_function |
632 | 24 | from functools import wraps | ||
633 | 24 | import os | 25 | import os |
634 | 25 | import json | 26 | import json |
635 | 26 | import yaml | 27 | import yaml |
636 | 27 | import subprocess | 28 | import subprocess |
637 | 28 | import sys | 29 | import sys |
638 | 29 | import errno | 30 | import errno |
639 | 31 | import tempfile | ||
640 | 30 | from subprocess import CalledProcessError | 32 | from subprocess import CalledProcessError |
641 | 31 | 33 | ||
642 | 32 | import six | 34 | import six |
643 | @@ -58,15 +60,17 @@ | |||
644 | 58 | 60 | ||
645 | 59 | will cache the result of unit_get + 'test' for future calls. | 61 | will cache the result of unit_get + 'test' for future calls. |
646 | 60 | """ | 62 | """ |
647 | 63 | @wraps(func) | ||
648 | 61 | def wrapper(*args, **kwargs): | 64 | def wrapper(*args, **kwargs): |
649 | 62 | global cache | 65 | global cache |
650 | 63 | key = str((func, args, kwargs)) | 66 | key = str((func, args, kwargs)) |
651 | 64 | try: | 67 | try: |
652 | 65 | return cache[key] | 68 | return cache[key] |
653 | 66 | except KeyError: | 69 | except KeyError: |
657 | 67 | res = func(*args, **kwargs) | 70 | pass # Drop out of the exception handler scope. |
658 | 68 | cache[key] = res | 71 | res = func(*args, **kwargs) |
659 | 69 | return res | 72 | cache[key] = res |
660 | 73 | return res | ||
661 | 70 | return wrapper | 74 | return wrapper |
662 | 71 | 75 | ||
663 | 72 | 76 | ||
664 | @@ -178,7 +182,7 @@ | |||
665 | 178 | 182 | ||
666 | 179 | def remote_unit(): | 183 | def remote_unit(): |
667 | 180 | """The remote unit for the current relation hook""" | 184 | """The remote unit for the current relation hook""" |
669 | 181 | return os.environ['JUJU_REMOTE_UNIT'] | 185 | return os.environ.get('JUJU_REMOTE_UNIT', None) |
670 | 182 | 186 | ||
671 | 183 | 187 | ||
672 | 184 | def service_name(): | 188 | def service_name(): |
673 | @@ -250,6 +254,12 @@ | |||
674 | 250 | except KeyError: | 254 | except KeyError: |
675 | 251 | return (self._prev_dict or {})[key] | 255 | return (self._prev_dict or {})[key] |
676 | 252 | 256 | ||
677 | 257 | def get(self, key, default=None): | ||
678 | 258 | try: | ||
679 | 259 | return self[key] | ||
680 | 260 | except KeyError: | ||
681 | 261 | return default | ||
682 | 262 | |||
683 | 253 | def keys(self): | 263 | def keys(self): |
684 | 254 | prev_keys = [] | 264 | prev_keys = [] |
685 | 255 | if self._prev_dict is not None: | 265 | if self._prev_dict is not None: |
686 | @@ -353,18 +363,49 @@ | |||
687 | 353 | """Set relation information for the current unit""" | 363 | """Set relation information for the current unit""" |
688 | 354 | relation_settings = relation_settings if relation_settings else {} | 364 | relation_settings = relation_settings if relation_settings else {} |
689 | 355 | relation_cmd_line = ['relation-set'] | 365 | relation_cmd_line = ['relation-set'] |
690 | 366 | accepts_file = "--file" in subprocess.check_output( | ||
691 | 367 | relation_cmd_line + ["--help"], universal_newlines=True) | ||
692 | 356 | if relation_id is not None: | 368 | if relation_id is not None: |
693 | 357 | relation_cmd_line.extend(('-r', relation_id)) | 369 | relation_cmd_line.extend(('-r', relation_id)) |
700 | 358 | for k, v in (list(relation_settings.items()) + list(kwargs.items())): | 370 | settings = relation_settings.copy() |
701 | 359 | if v is None: | 371 | settings.update(kwargs) |
702 | 360 | relation_cmd_line.append('{}='.format(k)) | 372 | for key, value in settings.items(): |
703 | 361 | else: | 373 | # Force value to be a string: it always should be, but some call |
704 | 362 | relation_cmd_line.append('{}={}'.format(k, v)) | 374 | # sites pass in things like dicts or numbers. |
705 | 363 | subprocess.check_call(relation_cmd_line) | 375 | if value is not None: |
706 | 376 | settings[key] = "{}".format(value) | ||
707 | 377 | if accepts_file: | ||
708 | 378 | # --file was introduced in Juju 1.23.2. Use it by default if | ||
709 | 379 | # available, since otherwise we'll break if the relation data is | ||
710 | 380 | # too big. Ideally we should tell relation-set to read the data from | ||
711 | 381 | # stdin, but that feature is broken in 1.23.2: Bug #1454678. | ||
712 | 382 | with tempfile.NamedTemporaryFile(delete=False) as settings_file: | ||
713 | 383 | settings_file.write(yaml.safe_dump(settings).encode("utf-8")) | ||
714 | 384 | subprocess.check_call( | ||
715 | 385 | relation_cmd_line + ["--file", settings_file.name]) | ||
716 | 386 | os.remove(settings_file.name) | ||
717 | 387 | else: | ||
718 | 388 | for key, value in settings.items(): | ||
719 | 389 | if value is None: | ||
720 | 390 | relation_cmd_line.append('{}='.format(key)) | ||
721 | 391 | else: | ||
722 | 392 | relation_cmd_line.append('{}={}'.format(key, value)) | ||
723 | 393 | subprocess.check_call(relation_cmd_line) | ||
724 | 364 | # Flush cache of any relation-gets for local unit | 394 | # Flush cache of any relation-gets for local unit |
725 | 365 | flush(local_unit()) | 395 | flush(local_unit()) |
726 | 366 | 396 | ||
727 | 367 | 397 | ||
728 | 398 | def relation_clear(r_id=None): | ||
729 | 399 | ''' Clears any relation data already set on relation r_id ''' | ||
730 | 400 | settings = relation_get(rid=r_id, | ||
731 | 401 | unit=local_unit()) | ||
732 | 402 | for setting in settings: | ||
733 | 403 | if setting not in ['public-address', 'private-address']: | ||
734 | 404 | settings[setting] = None | ||
735 | 405 | relation_set(relation_id=r_id, | ||
736 | 406 | **settings) | ||
737 | 407 | |||
738 | 408 | |||
739 | 368 | @cached | 409 | @cached |
740 | 369 | def relation_ids(reltype=None): | 410 | def relation_ids(reltype=None): |
741 | 370 | """A list of relation_ids""" | 411 | """A list of relation_ids""" |
742 | @@ -509,6 +550,11 @@ | |||
743 | 509 | return None | 550 | return None |
744 | 510 | 551 | ||
745 | 511 | 552 | ||
746 | 553 | def unit_public_ip(): | ||
747 | 554 | """Get this unit's public IP address""" | ||
748 | 555 | return unit_get('public-address') | ||
749 | 556 | |||
750 | 557 | |||
751 | 512 | def unit_private_ip(): | 558 | def unit_private_ip(): |
752 | 513 | """Get this unit's private IP address""" | 559 | """Get this unit's private IP address""" |
753 | 514 | return unit_get('private-address') | 560 | return unit_get('private-address') |
754 | @@ -605,3 +651,94 @@ | |||
755 | 605 | 651 | ||
756 | 606 | The results set by action_set are preserved.""" | 652 | The results set by action_set are preserved.""" |
757 | 607 | subprocess.check_call(['action-fail', message]) | 653 | subprocess.check_call(['action-fail', message]) |
758 | 654 | |||
759 | 655 | |||
760 | 656 | def status_set(workload_state, message): | ||
761 | 657 | """Set the workload state with a message | ||
762 | 658 | |||
763 | 659 | Use status-set to set the workload state with a message which is visible | ||
764 | 660 | to the user via juju status. If the status-set command is not found then | ||
765 | 661 | assume this is juju < 1.23 and juju-log the message instead. | ||
766 | 662 | |||
767 | 663 | workload_state -- valid juju workload state. | ||
768 | 664 | message -- status update message | ||
769 | 665 | """ | ||
770 | 666 | valid_states = ['maintenance', 'blocked', 'waiting', 'active'] | ||
771 | 667 | if workload_state not in valid_states: | ||
772 | 668 | raise ValueError( | ||
773 | 669 | '{!r} is not a valid workload state'.format(workload_state) | ||
774 | 670 | ) | ||
775 | 671 | cmd = ['status-set', workload_state, message] | ||
776 | 672 | try: | ||
777 | 673 | ret = subprocess.call(cmd) | ||
778 | 674 | if ret == 0: | ||
779 | 675 | return | ||
780 | 676 | except OSError as e: | ||
781 | 677 | if e.errno != errno.ENOENT: | ||
782 | 678 | raise | ||
783 | 679 | log_message = 'status-set failed: {} {}'.format(workload_state, | ||
784 | 680 | message) | ||
785 | 681 | log(log_message, level='INFO') | ||
786 | 682 | |||
787 | 683 | |||
788 | 684 | def status_get(): | ||
789 | 685 | """Retrieve the previously set juju workload state | ||
790 | 686 | |||
791 | 687 | If the status-set command is not found then assume this is juju < 1.23 and | ||
792 | 688 | return 'unknown' | ||
793 | 689 | """ | ||
794 | 690 | cmd = ['status-get'] | ||
795 | 691 | try: | ||
796 | 692 | raw_status = subprocess.check_output(cmd, universal_newlines=True) | ||
797 | 693 | status = raw_status.rstrip() | ||
798 | 694 | return status | ||
799 | 695 | except OSError as e: | ||
800 | 696 | if e.errno == errno.ENOENT: | ||
801 | 697 | return 'unknown' | ||
802 | 698 | else: | ||
803 | 699 | raise | ||
804 | 700 | |||
805 | 701 | |||
806 | 702 | def translate_exc(from_exc, to_exc): | ||
807 | 703 | def inner_translate_exc1(f): | ||
808 | 704 | def inner_translate_exc2(*args, **kwargs): | ||
809 | 705 | try: | ||
810 | 706 | return f(*args, **kwargs) | ||
811 | 707 | except from_exc: | ||
812 | 708 | raise to_exc | ||
813 | 709 | |||
814 | 710 | return inner_translate_exc2 | ||
815 | 711 | |||
816 | 712 | return inner_translate_exc1 | ||
817 | 713 | |||
818 | 714 | |||
819 | 715 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | ||
820 | 716 | def is_leader(): | ||
821 | 717 | """Does the current unit hold the juju leadership | ||
822 | 718 | |||
823 | 719 | Uses juju to determine whether the current unit is the leader of its peers | ||
824 | 720 | """ | ||
825 | 721 | cmd = ['is-leader', '--format=json'] | ||
826 | 722 | return json.loads(subprocess.check_output(cmd).decode('UTF-8')) | ||
827 | 723 | |||
828 | 724 | |||
829 | 725 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | ||
830 | 726 | def leader_get(attribute=None): | ||
831 | 727 | """Juju leader get value(s)""" | ||
832 | 728 | cmd = ['leader-get', '--format=json'] + [attribute or '-'] | ||
833 | 729 | return json.loads(subprocess.check_output(cmd).decode('UTF-8')) | ||
834 | 730 | |||
835 | 731 | |||
836 | 732 | @translate_exc(from_exc=OSError, to_exc=NotImplementedError) | ||
837 | 733 | def leader_set(settings=None, **kwargs): | ||
838 | 734 | """Juju leader set value(s)""" | ||
839 | 735 | log("Juju leader-set '%s'" % (settings), level=DEBUG) | ||
840 | 736 | cmd = ['leader-set'] | ||
841 | 737 | settings = settings or {} | ||
842 | 738 | settings.update(kwargs) | ||
843 | 739 | for k, v in settings.iteritems(): | ||
844 | 740 | if v is None: | ||
845 | 741 | cmd.append('{}='.format(k)) | ||
846 | 742 | else: | ||
847 | 743 | cmd.append('{}={}'.format(k, v)) | ||
848 | 744 | subprocess.check_call(cmd) | ||
849 | 608 | 745 | ||
850 | === modified file 'hooks/charmhelpers/core/host.py' | |||
851 | --- hooks/charmhelpers/core/host.py 2015-03-30 11:43:06 +0000 | |||
852 | +++ hooks/charmhelpers/core/host.py 2015-06-04 08:44:44 +0000 | |||
853 | @@ -90,7 +90,7 @@ | |||
854 | 90 | ['service', service_name, 'status'], | 90 | ['service', service_name, 'status'], |
855 | 91 | stderr=subprocess.STDOUT).decode('UTF-8') | 91 | stderr=subprocess.STDOUT).decode('UTF-8') |
856 | 92 | except subprocess.CalledProcessError as e: | 92 | except subprocess.CalledProcessError as e: |
858 | 93 | return 'unrecognized service' not in e.output | 93 | return b'unrecognized service' not in e.output |
859 | 94 | else: | 94 | else: |
860 | 95 | return True | 95 | return True |
861 | 96 | 96 | ||
862 | 97 | 97 | ||
863 | === modified file 'hooks/charmhelpers/core/services/base.py' | |||
864 | --- hooks/charmhelpers/core/services/base.py 2015-03-11 11:45:09 +0000 | |||
865 | +++ hooks/charmhelpers/core/services/base.py 2015-06-04 08:44:44 +0000 | |||
866 | @@ -15,9 +15,9 @@ | |||
867 | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. | 15 | # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>. |
868 | 16 | 16 | ||
869 | 17 | import os | 17 | import os |
870 | 18 | import re | ||
871 | 19 | import json | 18 | import json |
873 | 20 | from collections import Iterable | 19 | from inspect import getargspec |
874 | 20 | from collections import Iterable, OrderedDict | ||
875 | 21 | 21 | ||
876 | 22 | from charmhelpers.core import host | 22 | from charmhelpers.core import host |
877 | 23 | from charmhelpers.core import hookenv | 23 | from charmhelpers.core import hookenv |
878 | @@ -119,7 +119,7 @@ | |||
879 | 119 | """ | 119 | """ |
880 | 120 | self._ready_file = os.path.join(hookenv.charm_dir(), 'READY-SERVICES.json') | 120 | self._ready_file = os.path.join(hookenv.charm_dir(), 'READY-SERVICES.json') |
881 | 121 | self._ready = None | 121 | self._ready = None |
883 | 122 | self.services = {} | 122 | self.services = OrderedDict() |
884 | 123 | for service in services or []: | 123 | for service in services or []: |
885 | 124 | service_name = service['service'] | 124 | service_name = service['service'] |
886 | 125 | self.services[service_name] = service | 125 | self.services[service_name] = service |
887 | @@ -132,8 +132,8 @@ | |||
888 | 132 | if hook_name == 'stop': | 132 | if hook_name == 'stop': |
889 | 133 | self.stop_services() | 133 | self.stop_services() |
890 | 134 | else: | 134 | else: |
891 | 135 | self.reconfigure_services() | ||
892 | 135 | self.provide_data() | 136 | self.provide_data() |
893 | 136 | self.reconfigure_services() | ||
894 | 137 | cfg = hookenv.config() | 137 | cfg = hookenv.config() |
895 | 138 | if cfg.implicit_save: | 138 | if cfg.implicit_save: |
896 | 139 | cfg.save() | 139 | cfg.save() |
897 | @@ -145,15 +145,36 @@ | |||
898 | 145 | A provider must have a `name` attribute, which indicates which relation | 145 | A provider must have a `name` attribute, which indicates which relation |
899 | 146 | to set data on, and a `provide_data()` method, which returns a dict of | 146 | to set data on, and a `provide_data()` method, which returns a dict of |
900 | 147 | data to set. | 147 | data to set. |
901 | 148 | |||
902 | 149 | The `provide_data()` method can optionally accept two parameters: | ||
903 | 150 | |||
904 | 151 | * ``remote_service`` The name of the remote service that the data will | ||
905 | 152 | be provided to. The `provide_data()` method will be called once | ||
906 | 153 | for each connected service (not unit). This allows the method to | ||
907 | 154 | tailor its data to the given service. | ||
908 | 155 | * ``service_ready`` Whether or not the service definition had all of | ||
909 | 156 | its requirements met, and thus the ``data_ready`` callbacks run. | ||
910 | 157 | |||
911 | 158 | Note that the ``provided_data`` methods are now called **after** the | ||
912 | 159 | ``data_ready`` callbacks are run. This gives the ``data_ready`` callbacks | ||
913 | 160 | a chance to generate any data necessary for the providing to the remote | ||
914 | 161 | services. | ||
915 | 148 | """ | 162 | """ |
918 | 149 | hook_name = hookenv.hook_name() | 163 | for service_name, service in self.services.items(): |
919 | 150 | for service in self.services.values(): | 164 | service_ready = self.is_ready(service_name) |
920 | 151 | for provider in service.get('provided_data', []): | 165 | for provider in service.get('provided_data', []): |
926 | 152 | if re.match(r'{}-relation-(joined|changed)'.format(provider.name), hook_name): | 166 | for relid in hookenv.relation_ids(provider.name): |
927 | 153 | data = provider.provide_data() | 167 | units = hookenv.related_units(relid) |
928 | 154 | _ready = provider._is_ready(data) if hasattr(provider, '_is_ready') else data | 168 | if not units: |
929 | 155 | if _ready: | 169 | continue |
930 | 156 | hookenv.relation_set(None, data) | 170 | remote_service = units[0].split('/')[0] |
931 | 171 | argspec = getargspec(provider.provide_data) | ||
932 | 172 | if len(argspec.args) > 1: | ||
933 | 173 | data = provider.provide_data(remote_service, service_ready) | ||
934 | 174 | else: | ||
935 | 175 | data = provider.provide_data() | ||
936 | 176 | if data: | ||
937 | 177 | hookenv.relation_set(relid, data) | ||
938 | 157 | 178 | ||
939 | 158 | def reconfigure_services(self, *service_names): | 179 | def reconfigure_services(self, *service_names): |
940 | 159 | """ | 180 | """ |
941 | 160 | 181 | ||
942 | === modified file 'hooks/charmhelpers/fetch/__init__.py' | |||
943 | --- hooks/charmhelpers/fetch/__init__.py 2015-03-11 11:45:09 +0000 | |||
944 | +++ hooks/charmhelpers/fetch/__init__.py 2015-06-04 08:44:44 +0000 | |||
945 | @@ -158,7 +158,7 @@ | |||
946 | 158 | 158 | ||
947 | 159 | def apt_cache(in_memory=True): | 159 | def apt_cache(in_memory=True): |
948 | 160 | """Build and return an apt cache""" | 160 | """Build and return an apt cache""" |
950 | 161 | import apt_pkg | 161 | from apt import apt_pkg |
951 | 162 | apt_pkg.init() | 162 | apt_pkg.init() |
952 | 163 | if in_memory: | 163 | if in_memory: |
953 | 164 | apt_pkg.config.set("Dir::Cache::pkgcache", "") | 164 | apt_pkg.config.set("Dir::Cache::pkgcache", "") |
954 | 165 | 165 | ||
955 | === modified file 'hooks/charmhelpers/fetch/giturl.py' | |||
956 | --- hooks/charmhelpers/fetch/giturl.py 2015-03-11 11:45:09 +0000 | |||
957 | +++ hooks/charmhelpers/fetch/giturl.py 2015-06-04 08:44:44 +0000 | |||
958 | @@ -45,14 +45,16 @@ | |||
959 | 45 | else: | 45 | else: |
960 | 46 | return True | 46 | return True |
961 | 47 | 47 | ||
963 | 48 | def clone(self, source, dest, branch): | 48 | def clone(self, source, dest, branch, depth=None): |
964 | 49 | if not self.can_handle(source): | 49 | if not self.can_handle(source): |
965 | 50 | raise UnhandledSource("Cannot handle {}".format(source)) | 50 | raise UnhandledSource("Cannot handle {}".format(source)) |
966 | 51 | 51 | ||
969 | 52 | repo = Repo.clone_from(source, dest) | 52 | if depth: |
970 | 53 | repo.git.checkout(branch) | 53 | Repo.clone_from(source, dest, branch=branch, depth=depth) |
971 | 54 | else: | ||
972 | 55 | Repo.clone_from(source, dest, branch=branch) | ||
973 | 54 | 56 | ||
975 | 55 | def install(self, source, branch="master", dest=None): | 57 | def install(self, source, branch="master", dest=None, depth=None): |
976 | 56 | url_parts = self.parse_url(source) | 58 | url_parts = self.parse_url(source) |
977 | 57 | branch_name = url_parts.path.strip("/").split("/")[-1] | 59 | branch_name = url_parts.path.strip("/").split("/")[-1] |
978 | 58 | if dest: | 60 | if dest: |
979 | @@ -63,7 +65,7 @@ | |||
980 | 63 | if not os.path.exists(dest_dir): | 65 | if not os.path.exists(dest_dir): |
981 | 64 | mkdir(dest_dir, perms=0o755) | 66 | mkdir(dest_dir, perms=0o755) |
982 | 65 | try: | 67 | try: |
984 | 66 | self.clone(source, dest_dir, branch) | 68 | self.clone(source, dest_dir, branch, depth) |
985 | 67 | except GitCommandError as e: | 69 | except GitCommandError as e: |
986 | 68 | raise UnhandledSource(e.message) | 70 | raise UnhandledSource(e.message) |
987 | 69 | except OSError as e: | 71 | except OSError as e: |
988 | 70 | 72 | ||
989 | === modified file 'hooks/keystone_hooks.py' | |||
990 | --- hooks/keystone_hooks.py 2015-04-01 14:39:21 +0000 | |||
991 | +++ hooks/keystone_hooks.py 2015-06-04 08:44:44 +0000 | |||
992 | @@ -2,7 +2,6 @@ | |||
993 | 2 | import hashlib | 2 | import hashlib |
994 | 3 | import json | 3 | import json |
995 | 4 | import os | 4 | import os |
996 | 5 | import stat | ||
997 | 6 | import sys | 5 | import sys |
998 | 7 | 6 | ||
999 | 8 | from subprocess import check_call | 7 | from subprocess import check_call |
1000 | @@ -68,18 +67,18 @@ | |||
1001 | 68 | setup_ipv6, | 67 | setup_ipv6, |
1002 | 69 | send_notifications, | 68 | send_notifications, |
1003 | 70 | check_peer_actions, | 69 | check_peer_actions, |
1004 | 71 | CA_CERT_PATH, | ||
1005 | 72 | ensure_permissions, | ||
1006 | 73 | get_ssl_sync_request_units, | 70 | get_ssl_sync_request_units, |
1007 | 74 | is_ssl_cert_master, | 71 | is_ssl_cert_master, |
1008 | 75 | is_db_ready, | 72 | is_db_ready, |
1009 | 76 | clear_ssl_synced_units, | 73 | clear_ssl_synced_units, |
1010 | 77 | is_db_initialised, | 74 | is_db_initialised, |
1011 | 75 | update_certs_if_available, | ||
1012 | 78 | is_pki_enabled, | 76 | is_pki_enabled, |
1013 | 79 | ensure_ssl_dir, | 77 | ensure_ssl_dir, |
1014 | 80 | ensure_pki_dir_permissions, | 78 | ensure_pki_dir_permissions, |
1015 | 81 | force_ssl_sync, | 79 | force_ssl_sync, |
1016 | 82 | filter_null, | 80 | filter_null, |
1017 | 81 | ensure_ssl_dirs, | ||
1018 | 83 | ) | 82 | ) |
1019 | 84 | 83 | ||
1020 | 85 | from charmhelpers.contrib.hahelpers.cluster import ( | 84 | from charmhelpers.contrib.hahelpers.cluster import ( |
1021 | @@ -149,13 +148,7 @@ | |||
1022 | 149 | 148 | ||
1023 | 150 | check_call(['chmod', '-R', 'g+wrx', '/var/lib/keystone/']) | 149 | check_call(['chmod', '-R', 'g+wrx', '/var/lib/keystone/']) |
1024 | 151 | 150 | ||
1032 | 152 | # Ensure unison can write to certs dir. | 151 | ensure_ssl_dirs() |
1026 | 153 | # FIXME: need to a better way around this e.g. move cert to it's own dir | ||
1027 | 154 | # and give that unison permissions. | ||
1028 | 155 | path = os.path.dirname(CA_CERT_PATH) | ||
1029 | 156 | perms = int(oct(stat.S_IMODE(os.stat(path).st_mode) | | ||
1030 | 157 | (stat.S_IWGRP | stat.S_IXGRP)), base=8) | ||
1031 | 158 | ensure_permissions(path, group='keystone', perms=perms) | ||
1033 | 159 | 152 | ||
1034 | 160 | save_script_rc() | 153 | save_script_rc() |
1035 | 161 | configure_https() | 154 | configure_https() |
1036 | @@ -423,6 +416,7 @@ | |||
1037 | 423 | @hooks.hook('cluster-relation-changed', | 416 | @hooks.hook('cluster-relation-changed', |
1038 | 424 | 'cluster-relation-departed') | 417 | 'cluster-relation-departed') |
1039 | 425 | @restart_on_change(restart_map(), stopstart=True) | 418 | @restart_on_change(restart_map(), stopstart=True) |
1040 | 419 | @update_certs_if_available | ||
1041 | 426 | def cluster_changed(): | 420 | def cluster_changed(): |
1042 | 427 | unison.ssh_authorized_peers(user=SSH_USER, | 421 | unison.ssh_authorized_peers(user=SSH_USER, |
1043 | 428 | group='juju_keystone', | 422 | group='juju_keystone', |
1044 | @@ -430,9 +424,9 @@ | |||
1045 | 430 | ensure_local_user=True) | 424 | ensure_local_user=True) |
1046 | 431 | # NOTE(jamespage) re-echo passwords for peer storage | 425 | # NOTE(jamespage) re-echo passwords for peer storage |
1047 | 432 | echo_whitelist = ['_passwd', 'identity-service:', 'ssl-cert-master', | 426 | echo_whitelist = ['_passwd', 'identity-service:', 'ssl-cert-master', |
1049 | 433 | 'db-initialised'] | 427 | 'db-initialised', 'ssl-cert-available-updates'] |
1050 | 434 | log("Peer echo whitelist: %s" % (echo_whitelist), level=DEBUG) | 428 | log("Peer echo whitelist: %s" % (echo_whitelist), level=DEBUG) |
1052 | 435 | peer_echo(includes=echo_whitelist) | 429 | peer_echo(includes=echo_whitelist, force=True) |
1053 | 436 | 430 | ||
1054 | 437 | check_peer_actions() | 431 | check_peer_actions() |
1055 | 438 | 432 | ||
1056 | @@ -466,6 +460,14 @@ | |||
1057 | 466 | CONFIGS.write_all() | 460 | CONFIGS.write_all() |
1058 | 467 | 461 | ||
1059 | 468 | 462 | ||
1060 | 463 | @hooks.hook('leader-settings-changed') | ||
1061 | 464 | def leader_settings_changed(): | ||
1062 | 465 | log('Firing identity_changed hook for all related services.') | ||
1063 | 466 | for rid in relation_ids('identity-service'): | ||
1064 | 467 | for unit in related_units(rid): | ||
1065 | 468 | identity_changed(relation_id=rid, remote_unit=unit) | ||
1066 | 469 | |||
1067 | 470 | |||
1068 | 469 | @hooks.hook('ha-relation-joined') | 471 | @hooks.hook('ha-relation-joined') |
1069 | 470 | def ha_joined(relation_id=None): | 472 | def ha_joined(relation_id=None): |
1070 | 471 | cluster_config = get_hacluster_config() | 473 | cluster_config = get_hacluster_config() |
1071 | @@ -575,6 +577,8 @@ | |||
1072 | 575 | peer_interface='cluster', | 577 | peer_interface='cluster', |
1073 | 576 | ensure_local_user=True) | 578 | ensure_local_user=True) |
1074 | 577 | 579 | ||
1075 | 580 | ensure_ssl_dirs() | ||
1076 | 581 | |||
1077 | 578 | CONFIGS.write_all() | 582 | CONFIGS.write_all() |
1078 | 579 | update_nrpe_config() | 583 | update_nrpe_config() |
1079 | 580 | 584 | ||
1080 | 581 | 585 | ||
1081 | === modified file 'hooks/keystone_ssl.py' | |||
1082 | --- hooks/keystone_ssl.py 2015-02-18 17:20:23 +0000 | |||
1083 | +++ hooks/keystone_ssl.py 2015-06-04 08:44:44 +0000 | |||
1084 | @@ -5,12 +5,10 @@ | |||
1085 | 5 | import subprocess | 5 | import subprocess |
1086 | 6 | import tarfile | 6 | import tarfile |
1087 | 7 | import tempfile | 7 | import tempfile |
1088 | 8 | import time | ||
1089 | 9 | 8 | ||
1090 | 10 | from charmhelpers.core.hookenv import ( | 9 | from charmhelpers.core.hookenv import ( |
1091 | 11 | log, | 10 | log, |
1092 | 12 | DEBUG, | 11 | DEBUG, |
1093 | 13 | WARNING, | ||
1094 | 14 | ) | 12 | ) |
1095 | 15 | 13 | ||
1096 | 16 | CA_EXPIRY = '365' | 14 | CA_EXPIRY = '365' |
1097 | @@ -312,31 +310,9 @@ | |||
1098 | 312 | if os.path.isfile(crtpath): | 310 | if os.path.isfile(crtpath): |
1099 | 313 | log('Found existing certificate for %s.' % common_name, | 311 | log('Found existing certificate for %s.' % common_name, |
1100 | 314 | level=DEBUG) | 312 | level=DEBUG) |
1126 | 315 | max_retries = 3 | 313 | crt = open(crtpath, 'r').read() |
1127 | 316 | while True: | 314 | key = open(keypath, 'r').read() |
1128 | 317 | mtime = os.path.getmtime(crtpath) | 315 | return crt, key |
1104 | 318 | |||
1105 | 319 | crt = open(crtpath, 'r').read() | ||
1106 | 320 | try: | ||
1107 | 321 | key = open(keypath, 'r').read() | ||
1108 | 322 | except: | ||
1109 | 323 | msg = ('Could not load ssl private key for %s from %s' % | ||
1110 | 324 | (common_name, keypath)) | ||
1111 | 325 | raise Exception(msg) | ||
1112 | 326 | |||
1113 | 327 | # Ensure we are not reading a file that is being written to | ||
1114 | 328 | if mtime != os.path.getmtime(crtpath): | ||
1115 | 329 | max_retries -= 1 | ||
1116 | 330 | if max_retries == 0: | ||
1117 | 331 | msg = ("crt contents changed during read - retry " | ||
1118 | 332 | "failed") | ||
1119 | 333 | raise Exception(msg) | ||
1120 | 334 | |||
1121 | 335 | log("crt contents changed during read - re-reading", | ||
1122 | 336 | level=WARNING) | ||
1123 | 337 | time.sleep(1) | ||
1124 | 338 | else: | ||
1125 | 339 | return crt, key | ||
1129 | 340 | 316 | ||
1130 | 341 | crt, key = self._create_certificate(common_name, common_name) | 317 | crt, key = self._create_certificate(common_name, common_name) |
1131 | 342 | return open(crt, 'r').read(), open(key, 'r').read() | 318 | return open(crt, 'r').read(), open(key, 'r').read() |
1132 | 343 | 319 | ||
1133 | === modified file 'hooks/keystone_utils.py' | |||
1134 | --- hooks/keystone_utils.py 2015-05-08 11:43:00 +0000 | |||
1135 | +++ hooks/keystone_utils.py 2015-06-04 08:44:44 +0000 | |||
1136 | @@ -8,6 +8,7 @@ | |||
1137 | 8 | import re | 8 | import re |
1138 | 9 | import shutil | 9 | import shutil |
1139 | 10 | import subprocess | 10 | import subprocess |
1140 | 11 | import tarfile | ||
1141 | 11 | import threading | 12 | import threading |
1142 | 12 | import time | 13 | import time |
1143 | 13 | import urlparse | 14 | import urlparse |
1144 | @@ -71,7 +72,6 @@ | |||
1145 | 71 | DEBUG, | 72 | DEBUG, |
1146 | 72 | INFO, | 73 | INFO, |
1147 | 73 | WARNING, | 74 | WARNING, |
1148 | 74 | ERROR, | ||
1149 | 75 | ) | 75 | ) |
1150 | 76 | 76 | ||
1151 | 77 | from charmhelpers.fetch import ( | 77 | from charmhelpers.fetch import ( |
1152 | @@ -160,6 +160,8 @@ | |||
1153 | 160 | 160 | ||
1154 | 161 | APACHE_SSL_DIR = '/etc/apache2/ssl/keystone' | 161 | APACHE_SSL_DIR = '/etc/apache2/ssl/keystone' |
1155 | 162 | SYNC_FLAGS_DIR = '/var/lib/keystone/juju_sync_flags/' | 162 | SYNC_FLAGS_DIR = '/var/lib/keystone/juju_sync_flags/' |
1156 | 163 | SYNC_DIR = '/var/lib/keystone/juju_sync/' | ||
1157 | 164 | SSL_SYNC_ARCHIVE = os.path.join(SYNC_DIR, 'juju-ssl-sync.tar') | ||
1158 | 163 | SSL_DIR = '/var/lib/keystone/juju_ssl/' | 165 | SSL_DIR = '/var/lib/keystone/juju_ssl/' |
1159 | 164 | PKI_CERTS_DIR = os.path.join(SSL_DIR, 'pki') | 166 | PKI_CERTS_DIR = os.path.join(SSL_DIR, 'pki') |
1160 | 165 | SSL_CA_NAME = 'Ubuntu Cloud' | 167 | SSL_CA_NAME = 'Ubuntu Cloud' |
1161 | @@ -382,26 +384,20 @@ | |||
1162 | 382 | return | 384 | return |
1163 | 383 | 385 | ||
1164 | 384 | 386 | ||
1165 | 385 | def set_db_initialised(): | ||
1166 | 386 | for rid in relation_ids('cluster'): | ||
1167 | 387 | relation_set(relation_settings={'db-initialised': 'True'}, | ||
1168 | 388 | relation_id=rid) | ||
1169 | 389 | |||
1170 | 390 | |||
1171 | 391 | def is_db_initialised(): | 387 | def is_db_initialised(): |
1180 | 392 | for rid in relation_ids('cluster'): | 388 | if relation_ids('cluster'): |
1181 | 393 | units = related_units(rid) + [local_unit()] | 389 | inited = peer_retrieve('db-initialised') |
1182 | 394 | for unit in units: | 390 | if inited and bool_from_string(inited): |
1183 | 395 | db_initialised = relation_get(attribute='db-initialised', | 391 | log("Database is initialised", level=DEBUG) |
1184 | 396 | unit=unit, rid=rid) | 392 | return True |
1177 | 397 | if db_initialised: | ||
1178 | 398 | log("Database is initialised", level=DEBUG) | ||
1179 | 399 | return True | ||
1185 | 400 | 393 | ||
1186 | 401 | log("Database is NOT initialised", level=DEBUG) | 394 | log("Database is NOT initialised", level=DEBUG) |
1187 | 402 | return False | 395 | return False |
1188 | 403 | 396 | ||
1189 | 404 | 397 | ||
1190 | 398 | # NOTE(jamespage): Retry deals with sync issues during one-shot HA deploys. | ||
1191 | 399 | # mysql might be restarting or suchlike. | ||
1192 | 400 | @retry_on_exception(5, base_delay=3, exc_type=subprocess.CalledProcessError) | ||
1193 | 405 | def migrate_database(): | 401 | def migrate_database(): |
1194 | 406 | """Runs keystone-manage to initialize a new database or migrate existing""" | 402 | """Runs keystone-manage to initialize a new database or migrate existing""" |
1195 | 407 | log('Migrating the keystone database.', level=INFO) | 403 | log('Migrating the keystone database.', level=INFO) |
1196 | @@ -413,7 +409,7 @@ | |||
1197 | 413 | subprocess.check_output(cmd) | 409 | subprocess.check_output(cmd) |
1198 | 414 | service_start('keystone') | 410 | service_start('keystone') |
1199 | 415 | time.sleep(10) | 411 | time.sleep(10) |
1201 | 416 | set_db_initialised() | 412 | peer_store('db-initialised', 'True') |
1202 | 417 | 413 | ||
1203 | 418 | # OLD | 414 | # OLD |
1204 | 419 | 415 | ||
1205 | @@ -768,6 +764,16 @@ | |||
1206 | 768 | return passwd | 764 | return passwd |
1207 | 769 | 765 | ||
1208 | 770 | 766 | ||
1209 | 767 | def ensure_ssl_dirs(): | ||
1210 | 768 | """Ensure unison has access to these dirs.""" | ||
1211 | 769 | for path in [SYNC_FLAGS_DIR, SYNC_DIR]: | ||
1212 | 770 | if not os.path.isdir(path): | ||
1213 | 771 | mkdir(path, SSH_USER, 'juju_keystone', 0o775) | ||
1214 | 772 | else: | ||
1215 | 773 | ensure_permissions(path, user=SSH_USER, group='keystone', | ||
1216 | 774 | perms=0o755) | ||
1217 | 775 | |||
1218 | 776 | |||
1219 | 771 | def ensure_permissions(path, user=None, group=None, perms=None, recurse=False, | 777 | def ensure_permissions(path, user=None, group=None, perms=None, recurse=False, |
1220 | 772 | maxdepth=50): | 778 | maxdepth=50): |
1221 | 773 | """Set chownand chmod for path | 779 | """Set chownand chmod for path |
1222 | @@ -864,7 +870,7 @@ | |||
1223 | 864 | service.strip(), action)) | 870 | service.strip(), action)) |
1224 | 865 | log("Creating action %s" % (flagfile), level=DEBUG) | 871 | log("Creating action %s" % (flagfile), level=DEBUG) |
1225 | 866 | write_file(flagfile, content='', owner=SSH_USER, group='keystone', | 872 | write_file(flagfile, content='', owner=SSH_USER, group='keystone', |
1227 | 867 | perms=0o644) | 873 | perms=0o744) |
1228 | 868 | 874 | ||
1229 | 869 | 875 | ||
1230 | 870 | def create_peer_actions(actions): | 876 | def create_peer_actions(actions): |
1231 | @@ -873,7 +879,7 @@ | |||
1232 | 873 | flagfile = os.path.join(SYNC_FLAGS_DIR, action) | 879 | flagfile = os.path.join(SYNC_FLAGS_DIR, action) |
1233 | 874 | log("Creating action %s" % (flagfile), level=DEBUG) | 880 | log("Creating action %s" % (flagfile), level=DEBUG) |
1234 | 875 | write_file(flagfile, content='', owner=SSH_USER, group='keystone', | 881 | write_file(flagfile, content='', owner=SSH_USER, group='keystone', |
1236 | 876 | perms=0o644) | 882 | perms=0o744) |
1237 | 877 | 883 | ||
1238 | 878 | 884 | ||
1239 | 879 | @retry_on_exception(3, base_delay=2, exc_type=subprocess.CalledProcessError) | 885 | @retry_on_exception(3, base_delay=2, exc_type=subprocess.CalledProcessError) |
1240 | @@ -1011,6 +1017,22 @@ | |||
1241 | 1011 | return True | 1017 | return True |
1242 | 1012 | 1018 | ||
1243 | 1013 | 1019 | ||
1244 | 1020 | def stage_paths_for_sync(paths): | ||
1245 | 1021 | shutil.rmtree(SYNC_DIR) | ||
1246 | 1022 | ensure_ssl_dirs() | ||
1247 | 1023 | with tarfile.open(SSL_SYNC_ARCHIVE, 'w') as fd: | ||
1248 | 1024 | for path in paths: | ||
1249 | 1025 | if os.path.exists(path): | ||
1250 | 1026 | log("Adding path '%s' sync tarball" % (path), level=DEBUG) | ||
1251 | 1027 | fd.add(path) | ||
1252 | 1028 | else: | ||
1253 | 1029 | log("Path '%s' does not exist - not adding to sync " | ||
1254 | 1030 | "tarball" % (path), level=INFO) | ||
1255 | 1031 | |||
1256 | 1032 | ensure_permissions(SYNC_DIR, user=SSH_USER, group='keystone', | ||
1257 | 1033 | perms=0o755, recurse=True) | ||
1258 | 1034 | |||
1259 | 1035 | |||
1260 | 1014 | def is_pki_enabled(): | 1036 | def is_pki_enabled(): |
1261 | 1015 | enable_pki = config('enable-pki') | 1037 | enable_pki = config('enable-pki') |
1262 | 1016 | if enable_pki and bool_from_string(enable_pki): | 1038 | if enable_pki and bool_from_string(enable_pki): |
1263 | @@ -1025,6 +1047,33 @@ | |||
1264 | 1025 | perms=0o755, recurse=True) | 1047 | perms=0o755, recurse=True) |
1265 | 1026 | 1048 | ||
1266 | 1027 | 1049 | ||
1267 | 1050 | def update_certs_if_available(f): | ||
1268 | 1051 | def _inner_update_certs_if_available(*args, **kwargs): | ||
1269 | 1052 | path = None | ||
1270 | 1053 | for rid in relation_ids('cluster'): | ||
1271 | 1054 | path = relation_get(attribute='ssl-cert-available-updates', | ||
1272 | 1055 | rid=rid, unit=local_unit()) | ||
1273 | 1056 | |||
1274 | 1057 | if path and os.path.exists(path): | ||
1275 | 1058 | log("Updating certs from '%s'" % (path), level=DEBUG) | ||
1276 | 1059 | with tarfile.open(path) as fd: | ||
1277 | 1060 | files = ["/%s" % m.name for m in fd.getmembers()] | ||
1278 | 1061 | fd.extractall(path='/') | ||
1279 | 1062 | |||
1280 | 1063 | for syncfile in files: | ||
1281 | 1064 | ensure_permissions(syncfile, user='keystone', group='keystone', | ||
1282 | 1065 | perms=0o744, recurse=True) | ||
1283 | 1066 | |||
1284 | 1067 | # Mark as complete | ||
1285 | 1068 | os.rename(path, "%s.complete" % (path)) | ||
1286 | 1069 | else: | ||
1287 | 1070 | log("No cert updates available", level=DEBUG) | ||
1288 | 1071 | |||
1289 | 1072 | return f(*args, **kwargs) | ||
1290 | 1073 | |||
1291 | 1074 | return _inner_update_certs_if_available | ||
1292 | 1075 | |||
1293 | 1076 | |||
1294 | 1028 | def synchronize_ca(fatal=False): | 1077 | def synchronize_ca(fatal=False): |
1295 | 1029 | """Broadcast service credentials to peers. | 1078 | """Broadcast service credentials to peers. |
1296 | 1030 | 1079 | ||
1297 | @@ -1038,7 +1087,7 @@ | |||
1298 | 1038 | 1087 | ||
1299 | 1039 | Returns a dictionary of settings to be set on the cluster relation. | 1088 | Returns a dictionary of settings to be set on the cluster relation. |
1300 | 1040 | """ | 1089 | """ |
1302 | 1041 | paths_to_sync = [SYNC_FLAGS_DIR] | 1090 | paths_to_sync = [] |
1303 | 1042 | peer_service_actions = {'restart': []} | 1091 | peer_service_actions = {'restart': []} |
1304 | 1043 | peer_actions = [] | 1092 | peer_actions = [] |
1305 | 1044 | 1093 | ||
1306 | @@ -1068,9 +1117,6 @@ | |||
1307 | 1068 | paths_to_sync.append(PKI_CERTS_DIR) | 1117 | paths_to_sync.append(PKI_CERTS_DIR) |
1308 | 1069 | peer_actions.append('ensure-pki-permissions') | 1118 | peer_actions.append('ensure-pki-permissions') |
1309 | 1070 | 1119 | ||
1310 | 1071 | # Ensure unique | ||
1311 | 1072 | paths_to_sync = list(set(paths_to_sync)) | ||
1312 | 1073 | |||
1313 | 1074 | if not paths_to_sync: | 1120 | if not paths_to_sync: |
1314 | 1075 | log("Nothing to sync - skipping", level=DEBUG) | 1121 | log("Nothing to sync - skipping", level=DEBUG) |
1315 | 1076 | return {} | 1122 | return {} |
1316 | @@ -1083,50 +1129,27 @@ | |||
1317 | 1083 | 1129 | ||
1318 | 1084 | create_peer_actions(peer_actions) | 1130 | create_peer_actions(peer_actions) |
1319 | 1085 | 1131 | ||
1362 | 1086 | cluster_rel_settings = {} | 1132 | paths_to_sync = list(set(paths_to_sync)) |
1363 | 1087 | 1133 | stage_paths_for_sync(paths_to_sync) | |
1364 | 1088 | retries = 3 | 1134 | |
1365 | 1089 | while True: | 1135 | hash1 = hashlib.sha256() |
1366 | 1090 | hash1 = hashlib.sha256() | 1136 | for path in paths_to_sync: |
1367 | 1091 | for path in paths_to_sync: | 1137 | update_hash_from_path(hash1, path) |
1368 | 1092 | update_hash_from_path(hash1, path) | 1138 | |
1369 | 1093 | 1139 | cluster_rel_settings = {'ssl-cert-available-updates': SSL_SYNC_ARCHIVE, | |
1370 | 1094 | try: | 1140 | 'sync-hash': hash1.hexdigest()} |
1371 | 1095 | synced_units = unison_sync(paths_to_sync) | 1141 | |
1372 | 1096 | if synced_units: | 1142 | synced_units = unison_sync([SSL_SYNC_ARCHIVE, SYNC_FLAGS_DIR]) |
1373 | 1097 | # Format here needs to match that used when peers request sync | 1143 | if synced_units: |
1374 | 1098 | synced_units = [u.replace('/', '-') for u in synced_units] | 1144 | # Format here needs to match that used when peers request sync |
1375 | 1099 | cluster_rel_settings['ssl-synced-units'] = \ | 1145 | synced_units = [u.replace('/', '-') for u in synced_units] |
1376 | 1100 | json.dumps(synced_units) | 1146 | cluster_rel_settings['ssl-synced-units'] = \ |
1377 | 1101 | except Exception as exc: | 1147 | json.dumps(synced_units) |
1378 | 1102 | if fatal: | 1148 | |
1379 | 1103 | raise | 1149 | trigger = str(uuid.uuid4()) |
1380 | 1104 | else: | 1150 | log("Sending restart-services-trigger=%s to all peers" % (trigger), |
1339 | 1105 | log("Sync failed but fatal=False - %s" % (exc), level=INFO) | ||
1340 | 1106 | return {} | ||
1341 | 1107 | |||
1342 | 1108 | hash2 = hashlib.sha256() | ||
1343 | 1109 | for path in paths_to_sync: | ||
1344 | 1110 | update_hash_from_path(hash2, path) | ||
1345 | 1111 | |||
1346 | 1112 | # Detect whether someone else has synced to this unit while we did our | ||
1347 | 1113 | # transfer. | ||
1348 | 1114 | if hash1.hexdigest() != hash2.hexdigest(): | ||
1349 | 1115 | retries -= 1 | ||
1350 | 1116 | if retries > 0: | ||
1351 | 1117 | log("SSL dir contents changed during sync - retrying unison " | ||
1352 | 1118 | "sync %s more times" % (retries), level=WARNING) | ||
1353 | 1119 | else: | ||
1354 | 1120 | log("SSL dir contents changed during sync - retries failed", | ||
1355 | 1121 | level=ERROR) | ||
1356 | 1122 | return {} | ||
1357 | 1123 | else: | ||
1358 | 1124 | break | ||
1359 | 1125 | |||
1360 | 1126 | hash = hash1.hexdigest() | ||
1361 | 1127 | log("Sending restart-services-trigger=%s to all peers" % (hash), | ||
1381 | 1128 | level=DEBUG) | 1151 | level=DEBUG) |
1383 | 1129 | cluster_rel_settings['restart-services-trigger'] = hash | 1152 | cluster_rel_settings['restart-services-trigger'] = trigger |
1384 | 1130 | 1153 | ||
1385 | 1131 | log("Sync complete", level=DEBUG) | 1154 | log("Sync complete", level=DEBUG) |
1386 | 1132 | return cluster_rel_settings | 1155 | return cluster_rel_settings |
1387 | 1133 | 1156 | ||
1388 | === added symlink 'hooks/leader-settings-changed' | |||
1389 | === target is u'keystone_hooks.py' | |||
1390 | === modified file 'unit_tests/test_keystone_hooks.py' | |||
1391 | --- unit_tests/test_keystone_hooks.py 2015-04-17 12:10:54 +0000 | |||
1392 | +++ unit_tests/test_keystone_hooks.py 2015-06-04 08:44:44 +0000 | |||
1393 | @@ -59,6 +59,9 @@ | |||
1394 | 59 | 'add_service_to_keystone', | 59 | 'add_service_to_keystone', |
1395 | 60 | 'synchronize_ca_if_changed', | 60 | 'synchronize_ca_if_changed', |
1396 | 61 | 'update_nrpe_config', | 61 | 'update_nrpe_config', |
1397 | 62 | 'ensure_ssl_dirs', | ||
1398 | 63 | 'is_db_initialised', | ||
1399 | 64 | 'is_db_ready', | ||
1400 | 62 | # other | 65 | # other |
1401 | 63 | 'check_call', | 66 | 'check_call', |
1402 | 64 | 'execd_preinstall', | 67 | 'execd_preinstall', |
1403 | @@ -237,18 +240,15 @@ | |||
1404 | 237 | configs.write = MagicMock() | 240 | configs.write = MagicMock() |
1405 | 238 | hooks.pgsql_db_changed() | 241 | hooks.pgsql_db_changed() |
1406 | 239 | 242 | ||
1407 | 240 | @patch.object(hooks, 'is_db_initialised') | ||
1408 | 241 | @patch.object(hooks, 'is_db_ready') | ||
1409 | 242 | @patch('keystone_utils.log') | 243 | @patch('keystone_utils.log') |
1410 | 243 | @patch('keystone_utils.ensure_ssl_cert_master') | 244 | @patch('keystone_utils.ensure_ssl_cert_master') |
1411 | 244 | @patch.object(hooks, 'CONFIGS') | 245 | @patch.object(hooks, 'CONFIGS') |
1412 | 245 | @patch.object(hooks, 'identity_changed') | 246 | @patch.object(hooks, 'identity_changed') |
1413 | 246 | def test_db_changed_allowed(self, identity_changed, configs, | 247 | def test_db_changed_allowed(self, identity_changed, configs, |
1414 | 247 | mock_ensure_ssl_cert_master, | 248 | mock_ensure_ssl_cert_master, |
1419 | 248 | mock_log, mock_is_db_ready, | 249 | mock_log): |
1420 | 249 | mock_is_db_initialised): | 250 | self.is_db_initialised.return_value = True |
1421 | 250 | mock_is_db_initialised.return_value = True | 251 | self.is_db_ready.return_value = True |
1418 | 251 | mock_is_db_ready.return_value = True | ||
1422 | 252 | mock_ensure_ssl_cert_master.return_value = False | 252 | mock_ensure_ssl_cert_master.return_value = False |
1423 | 253 | self.relation_ids.return_value = ['identity-service:0'] | 253 | self.relation_ids.return_value = ['identity-service:0'] |
1424 | 254 | self.related_units.return_value = ['unit/0'] | 254 | self.related_units.return_value = ['unit/0'] |
1425 | @@ -262,15 +262,13 @@ | |||
1426 | 262 | relation_id='identity-service:0', | 262 | relation_id='identity-service:0', |
1427 | 263 | remote_unit='unit/0') | 263 | remote_unit='unit/0') |
1428 | 264 | 264 | ||
1429 | 265 | @patch.object(hooks, 'is_db_ready') | ||
1430 | 266 | @patch('keystone_utils.log') | 265 | @patch('keystone_utils.log') |
1431 | 267 | @patch('keystone_utils.ensure_ssl_cert_master') | 266 | @patch('keystone_utils.ensure_ssl_cert_master') |
1432 | 268 | @patch.object(hooks, 'CONFIGS') | 267 | @patch.object(hooks, 'CONFIGS') |
1433 | 269 | @patch.object(hooks, 'identity_changed') | 268 | @patch.object(hooks, 'identity_changed') |
1434 | 270 | def test_db_changed_not_allowed(self, identity_changed, configs, | 269 | def test_db_changed_not_allowed(self, identity_changed, configs, |
1438 | 271 | mock_ensure_ssl_cert_master, mock_log, | 270 | mock_ensure_ssl_cert_master, mock_log): |
1439 | 272 | mock_is_db_ready): | 271 | self.is_db_ready.return_value = False |
1437 | 273 | mock_is_db_ready.return_value = False | ||
1440 | 274 | mock_ensure_ssl_cert_master.return_value = False | 272 | mock_ensure_ssl_cert_master.return_value = False |
1441 | 275 | self.relation_ids.return_value = ['identity-service:0'] | 273 | self.relation_ids.return_value = ['identity-service:0'] |
1442 | 276 | self.related_units.return_value = ['unit/0'] | 274 | self.related_units.return_value = ['unit/0'] |
1443 | @@ -284,15 +282,12 @@ | |||
1444 | 284 | 282 | ||
1445 | 285 | @patch('keystone_utils.log') | 283 | @patch('keystone_utils.log') |
1446 | 286 | @patch('keystone_utils.ensure_ssl_cert_master') | 284 | @patch('keystone_utils.ensure_ssl_cert_master') |
1447 | 287 | @patch.object(hooks, 'is_db_initialised') | ||
1448 | 288 | @patch.object(hooks, 'is_db_ready') | ||
1449 | 289 | @patch.object(hooks, 'CONFIGS') | 285 | @patch.object(hooks, 'CONFIGS') |
1450 | 290 | @patch.object(hooks, 'identity_changed') | 286 | @patch.object(hooks, 'identity_changed') |
1451 | 291 | def test_postgresql_db_changed(self, identity_changed, configs, | 287 | def test_postgresql_db_changed(self, identity_changed, configs, |
1452 | 292 | mock_is_db_ready, mock_is_db_initialised, | ||
1453 | 293 | mock_ensure_ssl_cert_master, mock_log): | 288 | mock_ensure_ssl_cert_master, mock_log): |
1456 | 294 | mock_is_db_initialised.return_value = True | 289 | self.is_db_initialised.return_value = True |
1457 | 295 | mock_is_db_ready.return_value = True | 290 | self.is_db_ready.return_value = True |
1458 | 296 | mock_ensure_ssl_cert_master.return_value = False | 291 | mock_ensure_ssl_cert_master.return_value = False |
1459 | 297 | self.relation_ids.return_value = ['identity-service:0'] | 292 | self.relation_ids.return_value = ['identity-service:0'] |
1460 | 298 | self.related_units.return_value = ['unit/0'] | 293 | self.related_units.return_value = ['unit/0'] |
1461 | @@ -309,15 +304,13 @@ | |||
1462 | 309 | @patch.object(hooks, 'git_install_requested') | 304 | @patch.object(hooks, 'git_install_requested') |
1463 | 310 | @patch('keystone_utils.log') | 305 | @patch('keystone_utils.log') |
1464 | 311 | @patch('keystone_utils.ensure_ssl_cert_master') | 306 | @patch('keystone_utils.ensure_ssl_cert_master') |
1465 | 307 | @patch('keystone_utils.ensure_ssl_dirs') | ||
1466 | 312 | @patch.object(hooks, 'ensure_pki_dir_permissions') | 308 | @patch.object(hooks, 'ensure_pki_dir_permissions') |
1467 | 313 | @patch.object(hooks, 'ensure_ssl_dir') | 309 | @patch.object(hooks, 'ensure_ssl_dir') |
1468 | 314 | @patch.object(hooks, 'is_pki_enabled') | 310 | @patch.object(hooks, 'is_pki_enabled') |
1469 | 315 | @patch.object(hooks, 'is_ssl_cert_master') | 311 | @patch.object(hooks, 'is_ssl_cert_master') |
1470 | 316 | @patch.object(hooks, 'send_ssl_sync_request') | 312 | @patch.object(hooks, 'send_ssl_sync_request') |
1471 | 317 | @patch.object(hooks, 'is_db_initialised') | ||
1472 | 318 | @patch.object(hooks, 'is_db_ready') | ||
1473 | 319 | @patch.object(hooks, 'peer_units') | 313 | @patch.object(hooks, 'peer_units') |
1474 | 320 | @patch.object(hooks, 'ensure_permissions') | ||
1475 | 321 | @patch.object(hooks, 'admin_relation_changed') | 314 | @patch.object(hooks, 'admin_relation_changed') |
1476 | 322 | @patch.object(hooks, 'cluster_joined') | 315 | @patch.object(hooks, 'cluster_joined') |
1477 | 323 | @patch.object(unison, 'ensure_user') | 316 | @patch.object(unison, 'ensure_user') |
1478 | @@ -331,22 +324,20 @@ | |||
1479 | 331 | ensure_user, | 324 | ensure_user, |
1480 | 332 | cluster_joined, | 325 | cluster_joined, |
1481 | 333 | admin_relation_changed, | 326 | admin_relation_changed, |
1482 | 334 | ensure_permissions, | ||
1483 | 335 | mock_peer_units, | 327 | mock_peer_units, |
1484 | 336 | mock_is_db_ready, | ||
1485 | 337 | mock_is_db_initialised, | ||
1486 | 338 | mock_send_ssl_sync_request, | 328 | mock_send_ssl_sync_request, |
1487 | 339 | mock_is_ssl_cert_master, | 329 | mock_is_ssl_cert_master, |
1488 | 340 | mock_is_pki_enabled, | 330 | mock_is_pki_enabled, |
1489 | 341 | mock_ensure_ssl_dir, | 331 | mock_ensure_ssl_dir, |
1490 | 342 | mock_ensure_pki_dir_permissions, | 332 | mock_ensure_pki_dir_permissions, |
1491 | 333 | mock_ensure_ssl_dirs, | ||
1492 | 343 | mock_ensure_ssl_cert_master, | 334 | mock_ensure_ssl_cert_master, |
1493 | 344 | mock_log, git_requested): | 335 | mock_log, git_requested): |
1494 | 345 | git_requested.return_value = False | 336 | git_requested.return_value = False |
1495 | 346 | mock_is_pki_enabled.return_value = True | 337 | mock_is_pki_enabled.return_value = True |
1496 | 347 | mock_is_ssl_cert_master.return_value = True | 338 | mock_is_ssl_cert_master.return_value = True |
1499 | 348 | mock_is_db_initialised.return_value = True | 339 | self.is_db_initialised.return_value = True |
1500 | 349 | mock_is_db_ready.return_value = True | 340 | self.is_db_ready.return_value = True |
1501 | 350 | self.openstack_upgrade_available.return_value = False | 341 | self.openstack_upgrade_available.return_value = False |
1502 | 351 | self.is_elected_leader.return_value = True | 342 | self.is_elected_leader.return_value = True |
1503 | 352 | # avoid having to mock syncer | 343 | # avoid having to mock syncer |
1504 | @@ -374,13 +365,13 @@ | |||
1505 | 374 | @patch.object(hooks, 'git_install_requested') | 365 | @patch.object(hooks, 'git_install_requested') |
1506 | 375 | @patch('keystone_utils.log') | 366 | @patch('keystone_utils.log') |
1507 | 376 | @patch('keystone_utils.ensure_ssl_cert_master') | 367 | @patch('keystone_utils.ensure_ssl_cert_master') |
1508 | 368 | @patch('keystone_utils.ensure_ssl_dirs') | ||
1509 | 377 | @patch.object(hooks, 'update_all_identity_relation_units') | 369 | @patch.object(hooks, 'update_all_identity_relation_units') |
1510 | 378 | @patch.object(hooks, 'ensure_pki_dir_permissions') | 370 | @patch.object(hooks, 'ensure_pki_dir_permissions') |
1511 | 379 | @patch.object(hooks, 'ensure_ssl_dir') | 371 | @patch.object(hooks, 'ensure_ssl_dir') |
1512 | 380 | @patch.object(hooks, 'is_pki_enabled') | 372 | @patch.object(hooks, 'is_pki_enabled') |
1513 | 381 | @patch.object(hooks, 'peer_units') | 373 | @patch.object(hooks, 'peer_units') |
1514 | 382 | @patch.object(hooks, 'is_ssl_cert_master') | 374 | @patch.object(hooks, 'is_ssl_cert_master') |
1515 | 383 | @patch.object(hooks, 'ensure_permissions') | ||
1516 | 384 | @patch.object(hooks, 'cluster_joined') | 375 | @patch.object(hooks, 'cluster_joined') |
1517 | 385 | @patch.object(unison, 'ensure_user') | 376 | @patch.object(unison, 'ensure_user') |
1518 | 386 | @patch.object(unison, 'get_homedir') | 377 | @patch.object(unison, 'get_homedir') |
1519 | @@ -391,13 +382,13 @@ | |||
1520 | 391 | identity_changed, | 382 | identity_changed, |
1521 | 392 | configs, get_homedir, | 383 | configs, get_homedir, |
1522 | 393 | ensure_user, cluster_joined, | 384 | ensure_user, cluster_joined, |
1523 | 394 | ensure_permissions, | ||
1524 | 395 | mock_is_ssl_cert_master, | 385 | mock_is_ssl_cert_master, |
1525 | 396 | mock_peer_units, | 386 | mock_peer_units, |
1526 | 397 | mock_is_pki_enabled, | 387 | mock_is_pki_enabled, |
1527 | 398 | mock_ensure_ssl_dir, | 388 | mock_ensure_ssl_dir, |
1528 | 399 | mock_ensure_pki_permissions, | 389 | mock_ensure_pki_permissions, |
1529 | 400 | mock_update_all_id_rel_units, | 390 | mock_update_all_id_rel_units, |
1530 | 391 | ensure_ssl_dirs, | ||
1531 | 401 | mock_ensure_ssl_cert_master, | 392 | mock_ensure_ssl_cert_master, |
1532 | 402 | mock_log, git_requested): | 393 | mock_log, git_requested): |
1533 | 403 | git_requested.return_value = False | 394 | git_requested.return_value = False |
1534 | @@ -423,15 +414,13 @@ | |||
1535 | 423 | @patch.object(hooks, 'git_install_requested') | 414 | @patch.object(hooks, 'git_install_requested') |
1536 | 424 | @patch('keystone_utils.log') | 415 | @patch('keystone_utils.log') |
1537 | 425 | @patch('keystone_utils.ensure_ssl_cert_master') | 416 | @patch('keystone_utils.ensure_ssl_cert_master') |
1538 | 417 | @patch('keystone_utils.ensure_ssl_dirs') | ||
1539 | 426 | @patch.object(hooks, 'ensure_pki_dir_permissions') | 418 | @patch.object(hooks, 'ensure_pki_dir_permissions') |
1540 | 427 | @patch.object(hooks, 'ensure_ssl_dir') | 419 | @patch.object(hooks, 'ensure_ssl_dir') |
1541 | 428 | @patch.object(hooks, 'is_pki_enabled') | 420 | @patch.object(hooks, 'is_pki_enabled') |
1542 | 429 | @patch.object(hooks, 'is_ssl_cert_master') | 421 | @patch.object(hooks, 'is_ssl_cert_master') |
1543 | 430 | @patch.object(hooks, 'send_ssl_sync_request') | 422 | @patch.object(hooks, 'send_ssl_sync_request') |
1544 | 431 | @patch.object(hooks, 'is_db_initialised') | ||
1545 | 432 | @patch.object(hooks, 'is_db_ready') | ||
1546 | 433 | @patch.object(hooks, 'peer_units') | 423 | @patch.object(hooks, 'peer_units') |
1547 | 434 | @patch.object(hooks, 'ensure_permissions') | ||
1548 | 435 | @patch.object(hooks, 'admin_relation_changed') | 424 | @patch.object(hooks, 'admin_relation_changed') |
1549 | 436 | @patch.object(hooks, 'cluster_joined') | 425 | @patch.object(hooks, 'cluster_joined') |
1550 | 437 | @patch.object(unison, 'ensure_user') | 426 | @patch.object(unison, 'ensure_user') |
1551 | @@ -444,22 +433,20 @@ | |||
1552 | 444 | configs, get_homedir, | 433 | configs, get_homedir, |
1553 | 445 | ensure_user, cluster_joined, | 434 | ensure_user, cluster_joined, |
1554 | 446 | admin_relation_changed, | 435 | admin_relation_changed, |
1555 | 447 | ensure_permissions, | ||
1556 | 448 | mock_peer_units, | 436 | mock_peer_units, |
1557 | 449 | mock_is_db_ready, | ||
1558 | 450 | mock_is_db_initialised, | ||
1559 | 451 | mock_send_ssl_sync_request, | 437 | mock_send_ssl_sync_request, |
1560 | 452 | mock_is_ssl_cert_master, | 438 | mock_is_ssl_cert_master, |
1561 | 453 | mock_is_pki_enabled, | 439 | mock_is_pki_enabled, |
1562 | 454 | mock_ensure_ssl_dir, | 440 | mock_ensure_ssl_dir, |
1563 | 455 | mock_ensure_pki_permissions, | 441 | mock_ensure_pki_permissions, |
1564 | 442 | mock_ensure_ssl_dirs, | ||
1565 | 456 | mock_ensure_ssl_cert_master, | 443 | mock_ensure_ssl_cert_master, |
1566 | 457 | mock_log, git_requested): | 444 | mock_log, git_requested): |
1567 | 458 | git_requested.return_value = False | 445 | git_requested.return_value = False |
1568 | 459 | mock_is_pki_enabled.return_value = True | 446 | mock_is_pki_enabled.return_value = True |
1569 | 460 | mock_is_ssl_cert_master.return_value = True | 447 | mock_is_ssl_cert_master.return_value = True |
1572 | 461 | mock_is_db_ready.return_value = True | 448 | self.is_db_ready.return_value = True |
1573 | 462 | mock_is_db_initialised.return_value = True | 449 | self.is_db_initialised.return_value = True |
1574 | 463 | self.openstack_upgrade_available.return_value = True | 450 | self.openstack_upgrade_available.return_value = True |
1575 | 464 | self.is_elected_leader.return_value = True | 451 | self.is_elected_leader.return_value = True |
1576 | 465 | # avoid having to mock syncer | 452 | # avoid having to mock syncer |
1577 | @@ -496,7 +483,6 @@ | |||
1578 | 496 | @patch.object(hooks, 'is_db_initialised') | 483 | @patch.object(hooks, 'is_db_initialised') |
1579 | 497 | @patch.object(hooks, 'is_db_ready') | 484 | @patch.object(hooks, 'is_db_ready') |
1580 | 498 | @patch.object(hooks, 'peer_units') | 485 | @patch.object(hooks, 'peer_units') |
1581 | 499 | @patch.object(hooks, 'ensure_permissions') | ||
1582 | 500 | @patch.object(hooks, 'admin_relation_changed') | 486 | @patch.object(hooks, 'admin_relation_changed') |
1583 | 501 | @patch.object(hooks, 'cluster_joined') | 487 | @patch.object(hooks, 'cluster_joined') |
1584 | 502 | @patch.object(unison, 'ensure_user') | 488 | @patch.object(unison, 'ensure_user') |
1585 | @@ -508,7 +494,7 @@ | |||
1586 | 508 | identity_changed, | 494 | identity_changed, |
1587 | 509 | configs, get_homedir, ensure_user, | 495 | configs, get_homedir, ensure_user, |
1588 | 510 | cluster_joined, admin_relation_changed, | 496 | cluster_joined, admin_relation_changed, |
1590 | 511 | ensure_permissions, mock_peer_units, | 497 | mock_peer_units, |
1591 | 512 | mock_is_db_ready, | 498 | mock_is_db_ready, |
1592 | 513 | mock_is_db_initialised, | 499 | mock_is_db_initialised, |
1593 | 514 | mock_send_ssl_sync_request, | 500 | mock_send_ssl_sync_request, |
1594 | @@ -546,18 +532,15 @@ | |||
1595 | 546 | self.assertFalse(self.openstack_upgrade_available.called) | 532 | self.assertFalse(self.openstack_upgrade_available.called) |
1596 | 547 | self.assertFalse(self.do_openstack_upgrade.called) | 533 | self.assertFalse(self.do_openstack_upgrade.called) |
1597 | 548 | 534 | ||
1598 | 549 | @patch.object(hooks, 'is_db_initialised') | ||
1599 | 550 | @patch.object(hooks, 'is_db_ready') | ||
1600 | 551 | @patch('keystone_utils.log') | 535 | @patch('keystone_utils.log') |
1601 | 552 | @patch('keystone_utils.ensure_ssl_cert_master') | 536 | @patch('keystone_utils.ensure_ssl_cert_master') |
1602 | 553 | @patch.object(hooks, 'hashlib') | 537 | @patch.object(hooks, 'hashlib') |
1603 | 554 | @patch.object(hooks, 'send_notifications') | 538 | @patch.object(hooks, 'send_notifications') |
1604 | 555 | def test_identity_changed_leader(self, mock_send_notifications, | 539 | def test_identity_changed_leader(self, mock_send_notifications, |
1605 | 556 | mock_hashlib, mock_ensure_ssl_cert_master, | 540 | mock_hashlib, mock_ensure_ssl_cert_master, |
1610 | 557 | mock_log, mock_is_db_ready, | 541 | mock_log): |
1611 | 558 | mock_is_db_initialised): | 542 | self.is_db_initialised.return_value = True |
1612 | 559 | mock_is_db_initialised.return_value = True | 543 | self.is_db_ready.return_value = True |
1609 | 560 | mock_is_db_ready.return_value = True | ||
1613 | 561 | mock_ensure_ssl_cert_master.return_value = False | 544 | mock_ensure_ssl_cert_master.return_value = False |
1614 | 562 | hooks.identity_changed( | 545 | hooks.identity_changed( |
1615 | 563 | relation_id='identity-service:0', | 546 | relation_id='identity-service:0', |
1616 | @@ -597,6 +580,7 @@ | |||
1617 | 597 | @patch.object(hooks, 'get_ssl_sync_request_units') | 580 | @patch.object(hooks, 'get_ssl_sync_request_units') |
1618 | 598 | @patch.object(hooks, 'is_ssl_cert_master') | 581 | @patch.object(hooks, 'is_ssl_cert_master') |
1619 | 599 | @patch.object(hooks, 'peer_units') | 582 | @patch.object(hooks, 'peer_units') |
1620 | 583 | @patch('keystone_utils.relation_ids') | ||
1621 | 600 | @patch('keystone_utils.config') | 584 | @patch('keystone_utils.config') |
1622 | 601 | @patch('keystone_utils.log') | 585 | @patch('keystone_utils.log') |
1623 | 602 | @patch('keystone_utils.ensure_ssl_cert_master') | 586 | @patch('keystone_utils.ensure_ssl_cert_master') |
1624 | @@ -607,7 +591,8 @@ | |||
1625 | 607 | def test_cluster_changed(self, configs, ssh_authorized_peers, | 591 | def test_cluster_changed(self, configs, ssh_authorized_peers, |
1626 | 608 | check_peer_actions, mock_synchronize_ca, | 592 | check_peer_actions, mock_synchronize_ca, |
1627 | 609 | mock_ensure_ssl_cert_master, | 593 | mock_ensure_ssl_cert_master, |
1629 | 610 | mock_log, mock_config, mock_peer_units, | 594 | mock_log, mock_config, mock_relation_ids, |
1630 | 595 | mock_peer_units, | ||
1631 | 611 | mock_is_ssl_cert_master, | 596 | mock_is_ssl_cert_master, |
1632 | 612 | mock_get_ssl_sync_request_units, | 597 | mock_get_ssl_sync_request_units, |
1633 | 613 | mock_update_all_identity_relation_units): | 598 | mock_update_all_identity_relation_units): |
1634 | @@ -618,6 +603,7 @@ | |||
1635 | 618 | mock_is_ssl_cert_master.return_value = False | 603 | mock_is_ssl_cert_master.return_value = False |
1636 | 619 | mock_peer_units.return_value = ['unit/0'] | 604 | mock_peer_units.return_value = ['unit/0'] |
1637 | 620 | mock_ensure_ssl_cert_master.return_value = False | 605 | mock_ensure_ssl_cert_master.return_value = False |
1638 | 606 | mock_relation_ids.return_value = [] | ||
1639 | 621 | self.is_elected_leader.return_value = False | 607 | self.is_elected_leader.return_value = False |
1640 | 622 | 608 | ||
1641 | 623 | def fake_rel_get(attribute=None, *args, **kwargs): | 609 | def fake_rel_get(attribute=None, *args, **kwargs): |
1642 | @@ -632,8 +618,8 @@ | |||
1643 | 632 | 618 | ||
1644 | 633 | hooks.cluster_changed() | 619 | hooks.cluster_changed() |
1645 | 634 | whitelist = ['_passwd', 'identity-service:', 'ssl-cert-master', | 620 | whitelist = ['_passwd', 'identity-service:', 'ssl-cert-master', |
1648 | 635 | 'db-initialised'] | 621 | 'db-initialised', 'ssl-cert-available-updates'] |
1649 | 636 | self.peer_echo.assert_called_with(includes=whitelist) | 622 | self.peer_echo.assert_called_with(force=True, includes=whitelist) |
1650 | 637 | ssh_authorized_peers.assert_called_with( | 623 | ssh_authorized_peers.assert_called_with( |
1651 | 638 | user=self.ssh_user, group='juju_keystone', | 624 | user=self.ssh_user, group='juju_keystone', |
1652 | 639 | peer_interface='cluster', ensure_local_user=True) | 625 | peer_interface='cluster', ensure_local_user=True) |
1653 | @@ -733,18 +719,14 @@ | |||
1654 | 733 | 719 | ||
1655 | 734 | @patch('keystone_utils.log') | 720 | @patch('keystone_utils.log') |
1656 | 735 | @patch('keystone_utils.ensure_ssl_cert_master') | 721 | @patch('keystone_utils.ensure_ssl_cert_master') |
1657 | 736 | @patch.object(hooks, 'is_db_ready') | ||
1658 | 737 | @patch.object(hooks, 'is_db_initialised') | ||
1659 | 738 | @patch.object(hooks, 'identity_changed') | 722 | @patch.object(hooks, 'identity_changed') |
1660 | 739 | @patch.object(hooks, 'CONFIGS') | 723 | @patch.object(hooks, 'CONFIGS') |
1661 | 740 | def test_ha_relation_changed_clustered_leader(self, configs, | 724 | def test_ha_relation_changed_clustered_leader(self, configs, |
1662 | 741 | identity_changed, | 725 | identity_changed, |
1663 | 742 | mock_is_db_initialised, | ||
1664 | 743 | mock_is_db_ready, | ||
1665 | 744 | mock_ensure_ssl_cert_master, | 726 | mock_ensure_ssl_cert_master, |
1666 | 745 | mock_log): | 727 | mock_log): |
1669 | 746 | mock_is_db_initialised.return_value = True | 728 | self.is_db_initialised.return_value = True |
1670 | 747 | mock_is_db_ready.return_value = True | 729 | self.is_db_ready.return_value = True |
1671 | 748 | mock_ensure_ssl_cert_master.return_value = False | 730 | mock_ensure_ssl_cert_master.return_value = False |
1672 | 749 | self.relation_get.return_value = True | 731 | self.relation_get.return_value = True |
1673 | 750 | self.is_elected_leader.return_value = True | 732 | self.is_elected_leader.return_value = True |
1674 | @@ -807,8 +789,8 @@ | |||
1675 | 807 | mock_is_elected_leader, | 789 | mock_is_elected_leader, |
1676 | 808 | mock_relation_ids, | 790 | mock_relation_ids, |
1677 | 809 | mock_log, | 791 | mock_log, |
1678 | 792 | mock_is_db_initialised, | ||
1679 | 810 | mock_is_db_ready, | 793 | mock_is_db_ready, |
1680 | 811 | mock_is_db_initialised, | ||
1681 | 812 | git_requested): | 794 | git_requested): |
1682 | 813 | mock_is_db_initialised.return_value = True | 795 | mock_is_db_initialised.return_value = True |
1683 | 814 | mock_is_db_ready.return_value = True | 796 | mock_is_db_ready.return_value = True |