Merge lp:~gandelman-a/charm-helpers/os_templating_contexts into lp:charm-helpers
- os_templating_contexts
- Merge into devel
Proposed by
Adam Gandelman
Status: | Merged |
---|---|
Merged at revision: | 50 |
Proposed branch: | lp:~gandelman-a/charm-helpers/os_templating_contexts |
Merge into: | lp:charm-helpers |
Diff against target: |
1601 lines (+1374/-36) 10 files modified
charmhelpers/contrib/openstack/context.py (+271/-0) charmhelpers/contrib/openstack/openstack_utils.py (+40/-27) charmhelpers/contrib/openstack/templates/__init__.py (+2/-0) charmhelpers/contrib/openstack/templates/ceph.conf (+11/-0) charmhelpers/contrib/openstack/templates/haproxy.cfg (+37/-0) charmhelpers/contrib/openstack/templates/openstack_https_frontend (+23/-0) charmhelpers/contrib/openstack/templating.py (+261/-0) tests/contrib/openstack/test_openstack_utils.py (+43/-9) tests/contrib/openstack/test_os_contexts.py (+454/-0) tests/contrib/openstack/test_os_templating.py (+232/-0) |
To merge this branch: | bzr merge lp:~gandelman-a/charm-helpers/os_templating_contexts |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Adam Gandelman (community) | Needs Resubmitting | ||
James Page | Needs Fixing | ||
Review via email:
|
Commit message
A common templating system and context generators for OpenStack charms.
Description of the change
To post a comment you must log in.
- 54. By Adam Gandelman
-
Capitalize openstack constants, add icehouse.
- 55. By Adam Gandelman
-
Convert openstack release constants to ordered dict. Generate template search list from release constants.
- 56. By Adam Gandelman
-
Use logging constants in os templating.py
- 57. By Adam Gandelman
-
Add some docs to templating.py
Revision history for this message
![](/+icing/build/overlay/assets/skins/sam/images/close.gif)
Adam Gandelman (gandelman-a) wrote : | # |
Hey James-
Fixed up logging, added some docs.
Good call on constructing the template loader from releases listed in openstack_utils. I needed to adjust those constants to be OrderedDicts.
review:
Needs Resubmitting
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === added file 'charmhelpers/contrib/openstack/context.py' |
2 | --- charmhelpers/contrib/openstack/context.py 1970-01-01 00:00:00 +0000 |
3 | +++ charmhelpers/contrib/openstack/context.py 2013-07-11 20:23:23 +0000 |
4 | @@ -0,0 +1,271 @@ |
5 | +import os |
6 | + |
7 | +from base64 import b64decode |
8 | + |
9 | +from subprocess import ( |
10 | + check_call |
11 | +) |
12 | + |
13 | +from charmhelpers.core.hookenv import ( |
14 | + config, |
15 | + local_unit, |
16 | + log, |
17 | + relation_get, |
18 | + relation_ids, |
19 | + related_units, |
20 | + unit_get, |
21 | +) |
22 | + |
23 | +from charmhelpers.contrib.hahelpers.cluster_utils import ( |
24 | + determine_api_port, |
25 | + determine_haproxy_port, |
26 | + https, |
27 | + is_clustered, |
28 | + peer_units, |
29 | +) |
30 | + |
31 | +from charmhelpers.contrib.hahelpers.apache_utils import ( |
32 | + get_cert, |
33 | + get_ca_cert, |
34 | +) |
35 | + |
36 | +CA_CERT_PATH = '/usr/local/share/ca-certificates/keystone_juju_ca_cert.crt' |
37 | + |
38 | + |
39 | +class OSContextError(Exception): |
40 | + pass |
41 | + |
42 | + |
43 | +def context_complete(ctxt): |
44 | + _missing = [] |
45 | + for k, v in ctxt.iteritems(): |
46 | + if v is None or v == '': |
47 | + _missing.append(k) |
48 | + if _missing: |
49 | + log('Missing required data: %s' % ' '.join(_missing), level='INFO') |
50 | + return False |
51 | + return True |
52 | + |
53 | + |
54 | +class OSContextGenerator(object): |
55 | + interfaces = [] |
56 | + |
57 | + def __call__(self): |
58 | + raise NotImplementedError |
59 | + |
60 | + |
61 | +class SharedDBContext(OSContextGenerator): |
62 | + interfaces = ['shared-db'] |
63 | + |
64 | + def __call__(self): |
65 | + log('Generating template context for shared-db') |
66 | + conf = config() |
67 | + try: |
68 | + database = conf['database'] |
69 | + username = conf['database-user'] |
70 | + except KeyError as e: |
71 | + log('Could not generate shared_db context. ' |
72 | + 'Missing required charm config options: %s.' % e) |
73 | + raise OSContextError |
74 | + ctxt = {} |
75 | + for rid in relation_ids('shared-db'): |
76 | + for unit in related_units(rid): |
77 | + ctxt = { |
78 | + 'database_host': relation_get('db_host', rid=rid, |
79 | + unit=unit), |
80 | + 'database': database, |
81 | + 'database_user': username, |
82 | + 'database_password': relation_get('password', rid=rid, |
83 | + unit=unit) |
84 | + } |
85 | + if not context_complete(ctxt): |
86 | + return {} |
87 | + return ctxt |
88 | + |
89 | + |
90 | +class IdentityServiceContext(OSContextGenerator): |
91 | + interfaces = ['identity-service'] |
92 | + |
93 | + def __call__(self): |
94 | + log('Generating template context for identity-service') |
95 | + ctxt = {} |
96 | + |
97 | + for rid in relation_ids('identity-service'): |
98 | + for unit in related_units(rid): |
99 | + ctxt = { |
100 | + 'service_port': relation_get('service_port', rid=rid, |
101 | + unit=unit), |
102 | + 'service_host': relation_get('service_host', rid=rid, |
103 | + unit=unit), |
104 | + 'auth_host': relation_get('auth_host', rid=rid, unit=unit), |
105 | + 'auth_port': relation_get('auth_port', rid=rid, unit=unit), |
106 | + 'admin_tenant_name': relation_get('service_tenant', |
107 | + rid=rid, unit=unit), |
108 | + 'admin_user': relation_get('service_username', rid=rid, |
109 | + unit=unit), |
110 | + 'admin_password': relation_get('service_password', rid=rid, |
111 | + unit=unit), |
112 | + # XXX: Hard-coded http. |
113 | + 'service_protocol': 'http', |
114 | + 'auth_protocol': 'http', |
115 | + } |
116 | + if not context_complete(ctxt): |
117 | + return {} |
118 | + return ctxt |
119 | + |
120 | + |
121 | +class AMQPContext(OSContextGenerator): |
122 | + interfaces = ['amqp'] |
123 | + |
124 | + def __call__(self): |
125 | + log('Generating template context for amqp') |
126 | + conf = config() |
127 | + try: |
128 | + username = conf['rabbit-user'] |
129 | + vhost = conf['rabbit-vhost'] |
130 | + except KeyError as e: |
131 | + log('Could not generate shared_db context. ' |
132 | + 'Missing required charm config options: %s.' % e) |
133 | + raise OSContextError |
134 | + |
135 | + ctxt = {} |
136 | + for rid in relation_ids('amqp'): |
137 | + for unit in related_units(rid): |
138 | + if relation_get('clustered', rid=rid, unit=unit): |
139 | + rabbitmq_host = relation_get('vip', rid=rid, unit=unit) |
140 | + else: |
141 | + rabbitmq_host = relation_get('private-address', |
142 | + rid=rid, unit=unit) |
143 | + ctxt = { |
144 | + 'rabbitmq_host': rabbitmq_host, |
145 | + 'rabbitmq_user': username, |
146 | + 'rabbitmq_password': relation_get('password', rid=rid, |
147 | + unit=unit), |
148 | + 'rabbitmq_virtual_host': vhost, |
149 | + } |
150 | + if not context_complete(ctxt): |
151 | + return {} |
152 | + return ctxt |
153 | + |
154 | + |
155 | +class CephContext(OSContextGenerator): |
156 | + interfaces = ['ceph'] |
157 | + |
158 | + def __call__(self): |
159 | + '''This generates context for /etc/ceph/ceph.conf templates''' |
160 | + log('Generating tmeplate context for ceph') |
161 | + mon_hosts = [] |
162 | + auth = None |
163 | + for rid in relation_ids('ceph'): |
164 | + for unit in related_units(rid): |
165 | + mon_hosts.append(relation_get('private-address', rid=rid, |
166 | + unit=unit)) |
167 | + auth = relation_get('auth', rid=rid, unit=unit) |
168 | + |
169 | + ctxt = { |
170 | + 'mon_hosts': ' '.join(mon_hosts), |
171 | + 'auth': auth, |
172 | + } |
173 | + if not context_complete(ctxt): |
174 | + return {} |
175 | + return ctxt |
176 | + |
177 | + |
178 | +class HAProxyContext(OSContextGenerator): |
179 | + interfaces = ['cluster'] |
180 | + |
181 | + def __call__(self): |
182 | + ''' |
183 | + Builds half a context for the haproxy template, which describes |
184 | + all peers to be included in the cluster. Each charm needs to include |
185 | + its own context generator that describes the port mapping. |
186 | + ''' |
187 | + if not relation_ids('cluster'): |
188 | + return {} |
189 | + |
190 | + cluster_hosts = {} |
191 | + l_unit = local_unit().replace('/', '-') |
192 | + cluster_hosts[l_unit] = unit_get('private-address') |
193 | + |
194 | + for rid in relation_ids('cluster'): |
195 | + for unit in related_units(rid): |
196 | + _unit = unit.replace('/', '-') |
197 | + addr = relation_get('private-address', rid=rid, unit=unit) |
198 | + cluster_hosts[_unit] = addr |
199 | + |
200 | + ctxt = { |
201 | + 'units': cluster_hosts, |
202 | + } |
203 | + if len(cluster_hosts.keys()) > 1: |
204 | + # Enable haproxy when we have enough peers. |
205 | + log('Ensuring haproxy enabled in /etc/default/haproxy.') |
206 | + with open('/etc/default/haproxy', 'w') as out: |
207 | + out.write('ENABLED=1\n') |
208 | + return ctxt |
209 | + log('HAProxy context is incomplete, this unit has no peers.') |
210 | + return {} |
211 | + |
212 | + |
213 | +class ApacheSSLContext(OSContextGenerator): |
214 | + """ |
215 | + Generates a context for an apache vhost configuration that configures |
216 | + HTTPS reverse proxying for one or many endpoints. Generated context |
217 | + looks something like: |
218 | + { |
219 | + 'namespace': 'cinder', |
220 | + 'private_address': 'iscsi.mycinderhost.com', |
221 | + 'endpoints': [(8776, 8766), (8777, 8767)] |
222 | + } |
223 | + |
224 | + The endpoints list consists of tuples mapping external ports |
225 | + to internal ports. |
226 | + """ |
227 | + interfaces = ['https'] |
228 | + |
229 | + # charms should inherit this context and set external ports |
230 | + # and service namespace accordingly. |
231 | + external_ports = [] |
232 | + service_namespace = None |
233 | + |
234 | + def enable_modules(self): |
235 | + cmd = ['a2enmod', 'ssl', 'proxy', 'proxy_http'] |
236 | + check_call(cmd) |
237 | + |
238 | + def configure_cert(self): |
239 | + if not os.path.isdir('/etc/apache2/ssl'): |
240 | + os.mkdir('/etc/apache2/ssl') |
241 | + ssl_dir = os.path.join('/etc/apache2/ssl/', self.service_namespace) |
242 | + if not os.path.isdir(ssl_dir): |
243 | + os.mkdir(ssl_dir) |
244 | + cert, key = get_cert() |
245 | + with open(os.path.join(ssl_dir, 'cert'), 'w') as cert_out: |
246 | + cert_out.write(b64decode(cert)) |
247 | + with open(os.path.join(ssl_dir, 'key'), 'w') as key_out: |
248 | + key_out.write(b64decode(key)) |
249 | + ca_cert = get_ca_cert() |
250 | + if ca_cert: |
251 | + with open(CA_CERT_PATH, 'w') as ca_out: |
252 | + ca_out.write(b64decode(ca_cert)) |
253 | + |
254 | + def __call__(self): |
255 | + if isinstance(self.external_ports, basestring): |
256 | + self.external_ports = [self.external_ports] |
257 | + if (not self.external_ports or not https()): |
258 | + return {} |
259 | + |
260 | + self.configure_cert() |
261 | + self.enable_modules() |
262 | + |
263 | + ctxt = { |
264 | + 'namespace': self.service_namespace, |
265 | + 'private_address': unit_get('private-address'), |
266 | + 'endpoints': [] |
267 | + } |
268 | + for ext_port in self.external_ports: |
269 | + if peer_units() or is_clustered(): |
270 | + int_port = determine_haproxy_port(ext_port) |
271 | + else: |
272 | + int_port = determine_api_port(ext_port) |
273 | + portmap = (int(ext_port), int(int_port)) |
274 | + ctxt['endpoints'].append(portmap) |
275 | + return ctxt |
276 | |
277 | === modified file 'charmhelpers/contrib/openstack/openstack_utils.py' |
278 | --- charmhelpers/contrib/openstack/openstack_utils.py 2013-07-09 18:48:53 +0000 |
279 | +++ charmhelpers/contrib/openstack/openstack_utils.py 2013-07-11 20:23:23 +0000 |
280 | @@ -2,6 +2,8 @@ |
281 | |
282 | # Common python helper functions used for OpenStack charms. |
283 | |
284 | +from collections import OrderedDict |
285 | + |
286 | import apt_pkg as apt |
287 | import subprocess |
288 | import os |
289 | @@ -18,24 +20,26 @@ |
290 | CLOUD_ARCHIVE_URL = "http://ubuntu-cloud.archive.canonical.com/ubuntu" |
291 | CLOUD_ARCHIVE_KEY_ID = '5EDB1B62EC4926EA' |
292 | |
293 | -ubuntu_openstack_release = { |
294 | - 'oneiric': 'diablo', |
295 | - 'precise': 'essex', |
296 | - 'quantal': 'folsom', |
297 | - 'raring': 'grizzly', |
298 | -} |
299 | - |
300 | - |
301 | -openstack_codenames = { |
302 | - '2011.2': 'diablo', |
303 | - '2012.1': 'essex', |
304 | - '2012.2': 'folsom', |
305 | - '2013.1': 'grizzly', |
306 | - '2013.2': 'havana', |
307 | -} |
308 | +UBUNTU_OPENSTACK_RELEASE = OrderedDict([ |
309 | + ('oneiric', 'diablo'), |
310 | + ('precise', 'essex'), |
311 | + ('quantal', 'folsom'), |
312 | + ('raring', 'grizzly'), |
313 | + ('saucy', 'havana'), |
314 | +]) |
315 | + |
316 | + |
317 | +OPENSTACK_CODENAMES = OrderedDict([ |
318 | + ('2011.2', 'diablo'), |
319 | + ('2012.1', 'essex'), |
320 | + ('2012.2', 'folsom'), |
321 | + ('2013.1', 'grizzly'), |
322 | + ('2013.2', 'havana'), |
323 | + ('2014.1', 'icehouse'), |
324 | +]) |
325 | |
326 | # The ugly duckling |
327 | -swift_codenames = { |
328 | +SWIFT_CODENAMES = { |
329 | '1.4.3': 'diablo', |
330 | '1.4.8': 'essex', |
331 | '1.7.4': 'folsom', |
332 | @@ -60,7 +64,7 @@ |
333 | rel = '' |
334 | if src == 'distro': |
335 | try: |
336 | - rel = ubuntu_openstack_release[ubuntu_rel] |
337 | + rel = UBUNTU_OPENSTACK_RELEASE[ubuntu_rel] |
338 | except KeyError: |
339 | e = 'Could not derive openstack release for '\ |
340 | 'this Ubuntu release: %s' % ubuntu_rel |
341 | @@ -74,7 +78,7 @@ |
342 | |
343 | # Best guess match based on deb string provided |
344 | if src.startswith('deb') or src.startswith('ppa'): |
345 | - for k, v in openstack_codenames.iteritems(): |
346 | + for k, v in OPENSTACK_CODENAMES.iteritems(): |
347 | if v in src: |
348 | return v |
349 | |
350 | @@ -87,7 +91,7 @@ |
351 | def get_os_codename_version(vers): |
352 | '''Determine OpenStack codename from version number.''' |
353 | try: |
354 | - return openstack_codenames[vers] |
355 | + return OPENSTACK_CODENAMES[vers] |
356 | except KeyError: |
357 | e = 'Could not determine OpenStack codename for version %s' % vers |
358 | error_out(e) |
359 | @@ -95,7 +99,7 @@ |
360 | |
361 | def get_os_version_codename(codename): |
362 | '''Determine OpenStack version number from codename.''' |
363 | - for k, v in openstack_codenames.iteritems(): |
364 | + for k, v in OPENSTACK_CODENAMES.iteritems(): |
365 | if v == codename: |
366 | return k |
367 | e = 'Could not derive OpenStack version for '\ |
368 | @@ -103,17 +107,26 @@ |
369 | error_out(e) |
370 | |
371 | |
372 | -def get_os_codename_package(pkg, fatal=True): |
373 | +def get_os_codename_package(package, fatal=True): |
374 | '''Derive OpenStack release codename from an installed package.''' |
375 | apt.init() |
376 | cache = apt.Cache() |
377 | |
378 | try: |
379 | - pkg = cache[pkg] |
380 | + pkg = cache[package] |
381 | except: |
382 | if not fatal: |
383 | return None |
384 | - e = 'Could not determine version of installed package: %s' % pkg |
385 | + # the package is unknown to the current apt cache. |
386 | + e = 'Could not determine version of package with no installation '\ |
387 | + 'candidate: %s' % package |
388 | + error_out(e) |
389 | + |
390 | + if not pkg.current_ver: |
391 | + if not fatal: |
392 | + return None |
393 | + # package is known, but no version is currently installed. |
394 | + e = 'Could not determine version of uninstalled package: %s' % package |
395 | error_out(e) |
396 | |
397 | vers = apt.UpstreamVersion(pkg.current_ver.ver_str) |
398 | @@ -121,10 +134,10 @@ |
399 | try: |
400 | if 'swift' in pkg.name: |
401 | vers = vers[:5] |
402 | - return swift_codenames[vers] |
403 | + return SWIFT_CODENAMES[vers] |
404 | else: |
405 | vers = vers[:6] |
406 | - return openstack_codenames[vers] |
407 | + return OPENSTACK_CODENAMES[vers] |
408 | except KeyError: |
409 | e = 'Could not determine OpenStack codename for version %s' % vers |
410 | error_out(e) |
411 | @@ -138,9 +151,9 @@ |
412 | return None |
413 | |
414 | if 'swift' in pkg: |
415 | - vers_map = swift_codenames |
416 | + vers_map = SWIFT_CODENAMES |
417 | else: |
418 | - vers_map = openstack_codenames |
419 | + vers_map = OPENSTACK_CODENAMES |
420 | |
421 | for version, cname in vers_map.iteritems(): |
422 | if cname == codename: |
423 | |
424 | === added directory 'charmhelpers/contrib/openstack/templates' |
425 | === added file 'charmhelpers/contrib/openstack/templates/__init__.py' |
426 | --- charmhelpers/contrib/openstack/templates/__init__.py 1970-01-01 00:00:00 +0000 |
427 | +++ charmhelpers/contrib/openstack/templates/__init__.py 2013-07-11 20:23:23 +0000 |
428 | @@ -0,0 +1,2 @@ |
429 | +# dummy __init__.py to fool syncer into thinking this is a syncable python |
430 | +# module |
431 | |
432 | === added file 'charmhelpers/contrib/openstack/templates/ceph.conf' |
433 | --- charmhelpers/contrib/openstack/templates/ceph.conf 1970-01-01 00:00:00 +0000 |
434 | +++ charmhelpers/contrib/openstack/templates/ceph.conf 2013-07-11 20:23:23 +0000 |
435 | @@ -0,0 +1,11 @@ |
436 | +############################################################################### |
437 | +# [ WARNING ] |
438 | +# cinder configuration file maintained by Juju |
439 | +# local changes may be overwritten. |
440 | +############################################################################### |
441 | +{% if auth %} |
442 | +[global] |
443 | + auth_supported = {{ auth }} |
444 | + keyring = /etc/ceph/$cluster.$name.keyring |
445 | + mon host = {{ mon_hosts }} |
446 | +{% endif %} |
447 | |
448 | === added file 'charmhelpers/contrib/openstack/templates/haproxy.cfg' |
449 | --- charmhelpers/contrib/openstack/templates/haproxy.cfg 1970-01-01 00:00:00 +0000 |
450 | +++ charmhelpers/contrib/openstack/templates/haproxy.cfg 2013-07-11 20:23:23 +0000 |
451 | @@ -0,0 +1,37 @@ |
452 | +global |
453 | + log 127.0.0.1 local0 |
454 | + log 127.0.0.1 local1 notice |
455 | + maxconn 20000 |
456 | + user haproxy |
457 | + group haproxy |
458 | + spread-checks 0 |
459 | + |
460 | +defaults |
461 | + log global |
462 | + mode http |
463 | + option httplog |
464 | + option dontlognull |
465 | + retries 3 |
466 | + timeout queue 1000 |
467 | + timeout connect 1000 |
468 | + timeout client 30000 |
469 | + timeout server 30000 |
470 | + |
471 | +listen stats :8888 |
472 | + mode http |
473 | + stats enable |
474 | + stats hide-version |
475 | + stats realm Haproxy\ Statistics |
476 | + stats uri / |
477 | + stats auth admin:password |
478 | + |
479 | +{% if units %} |
480 | +{% for service, ports in service_ports.iteritems() -%} |
481 | +listen {{ service }} 0.0.0.0:{{ ports[0] }} |
482 | + balance roundrobin |
483 | + option tcplog |
484 | + {% for unit, address in units.iteritems() -%} |
485 | + server {{ unit }} {{ address }}:{{ ports[1] }} check |
486 | + {% endfor %} |
487 | +{% endfor %} |
488 | +{% endif %} |
489 | |
490 | === added file 'charmhelpers/contrib/openstack/templates/openstack_https_frontend' |
491 | --- charmhelpers/contrib/openstack/templates/openstack_https_frontend 1970-01-01 00:00:00 +0000 |
492 | +++ charmhelpers/contrib/openstack/templates/openstack_https_frontend 2013-07-11 20:23:23 +0000 |
493 | @@ -0,0 +1,23 @@ |
494 | +{% if endpoints %} |
495 | +{% for ext, int in endpoints %} |
496 | +Listen {{ ext }} |
497 | +NameVirtualHost *:{{ ext }} |
498 | +<VirtualHost *:{{ ext }}> |
499 | + ServerName {{ private_address }} |
500 | + SSLEngine on |
501 | + SSLCertificateFile /etc/apache2/ssl/{{ namespace }}/cert |
502 | + SSLCertificateKeyFile /etc/apache2/ssl/{{ namespace }}/key |
503 | + ProxyPass / http://localhost:{{ int }}/ |
504 | + ProxyPassReverse / http://localhost:{{ int }}/ |
505 | + ProxyPreserveHost on |
506 | +</VirtualHost> |
507 | +<Proxy *> |
508 | + Order deny,allow |
509 | + Allow from all |
510 | +</Proxy> |
511 | +<Location /> |
512 | + Order allow,deny |
513 | + Allow from all |
514 | +</Location> |
515 | +{% endfor %} |
516 | +{% endif %} |
517 | |
518 | === added file 'charmhelpers/contrib/openstack/templating.py' |
519 | --- charmhelpers/contrib/openstack/templating.py 1970-01-01 00:00:00 +0000 |
520 | +++ charmhelpers/contrib/openstack/templating.py 2013-07-11 20:23:23 +0000 |
521 | @@ -0,0 +1,261 @@ |
522 | +import os |
523 | + |
524 | +from charmhelpers.core.host import apt_install |
525 | + |
526 | +from charmhelpers.core.hookenv import ( |
527 | + log, |
528 | + ERROR, |
529 | + INFO |
530 | +) |
531 | + |
532 | +from charmhelpers.contrib.openstack.openstack_utils import OPENSTACK_CODENAMES |
533 | + |
534 | +try: |
535 | + from jinja2 import FileSystemLoader, ChoiceLoader, Environment |
536 | +except ImportError: |
537 | + # python-jinja2 may not be installed yet, or we're running unittests. |
538 | + FileSystemLoader = ChoiceLoader = Environment = None |
539 | + |
540 | + |
541 | +class OSConfigException(Exception): |
542 | + pass |
543 | + |
544 | + |
545 | +def get_loader(templates_dir, os_release): |
546 | + """ |
547 | + Create a jinja2.ChoiceLoader containing template dirs up to |
548 | + and including os_release. If a release's template directory |
549 | + is missing at templates_dir, it will be omitted from the loader. |
550 | + templates_dir is added to the bottom of the search list as a base |
551 | + loading dir. |
552 | + |
553 | + A charm may also ship a templates dir with this module |
554 | + and it will be appended to the bottom of the search list, eg: |
555 | + hooks/charmhelpers/contrib/openstack/templates. |
556 | + |
557 | + :param templates_dir: str: Base template directory containing release |
558 | + sub-directories. |
559 | + :param os_release : str: OpenStack release codename to construct template |
560 | + loader. |
561 | + |
562 | + :returns : jinja2.ChoiceLoader constructed with a list of |
563 | + jinja2.FilesystemLoaders, ordered in descending |
564 | + order by OpenStack release. |
565 | + """ |
566 | + tmpl_dirs = [(rel, os.path.join(templates_dir, rel)) |
567 | + for rel in OPENSTACK_CODENAMES.itervalues()] |
568 | + |
569 | + if not os.path.isdir(templates_dir): |
570 | + log('Templates directory not found @ %s.' % templates_dir, |
571 | + level=ERROR) |
572 | + raise OSConfigException |
573 | + |
574 | + # the bottom contains templates_dir and possibly a common templates dir |
575 | + # shipped with the helper. |
576 | + loaders = [FileSystemLoader(templates_dir)] |
577 | + helper_templates = os.path.join(os.path.dirname(__file__), 'templates') |
578 | + if os.path.isdir(helper_templates): |
579 | + loaders.append(FileSystemLoader(helper_templates)) |
580 | + |
581 | + for rel, tmpl_dir in tmpl_dirs: |
582 | + if os.path.isdir(tmpl_dir): |
583 | + loaders.insert(0, FileSystemLoader(tmpl_dir)) |
584 | + if rel == os_release: |
585 | + break |
586 | + log('Creating choice loader with dirs: %s' % |
587 | + [l.searchpath for l in loaders], level=INFO) |
588 | + return ChoiceLoader(loaders) |
589 | + |
590 | + |
591 | +class OSConfigTemplate(object): |
592 | + """ |
593 | + Associates a config file template with a list of context generators. |
594 | + Responsible for constructing a template context based on those generators. |
595 | + """ |
596 | + def __init__(self, config_file, contexts): |
597 | + self.config_file = config_file |
598 | + |
599 | + if hasattr(contexts, '__call__'): |
600 | + self.contexts = [contexts] |
601 | + else: |
602 | + self.contexts = contexts |
603 | + |
604 | + self._complete_contexts = [] |
605 | + |
606 | + def context(self): |
607 | + ctxt = {} |
608 | + for context in self.contexts: |
609 | + _ctxt = context() |
610 | + if _ctxt: |
611 | + ctxt.update(_ctxt) |
612 | + # track interfaces for every complete context. |
613 | + [self._complete_contexts.append(interface) |
614 | + for interface in context.interfaces |
615 | + if interface not in self._complete_contexts] |
616 | + return ctxt |
617 | + |
618 | + def complete_contexts(self): |
619 | + ''' |
620 | + Return a list of interfaces that have satisfied contexts. |
621 | + ''' |
622 | + if self._complete_contexts: |
623 | + return self._complete_contexts |
624 | + self.context() |
625 | + return self._complete_contexts |
626 | + |
627 | + |
628 | +class OSConfigRenderer(object): |
629 | + """ |
630 | + This class provides a common templating system to be used by OpenStack |
631 | + charms. It is intended to help charms share common code and templates, |
632 | + and ease the burden of managing config templates across multiple OpenStack |
633 | + releases. |
634 | + |
635 | + Basic usage: |
636 | + # import some common context generators from charmhelpers |
637 | + from charmhelpers.contrib.openstack import context |
638 | + |
639 | + # Create a renderer object for a specific OS release. |
640 | + configs = OSConfigRenderer(templates_dir='/tmp/templates', |
641 | + openstack_release='folsom') |
642 | + # register some config files with context generators. |
643 | + configs.register(config_file='/etc/nova/nova.conf', |
644 | + contexts=[context.SharedDBContext(), |
645 | + context.AMQPContext()]) |
646 | + configs.register(config_file='/etc/nova/api-paste.ini', |
647 | + contexts=[context.IdentityServiceContext()]) |
648 | + configs.register(config_file='/etc/haproxy/haproxy.conf', |
649 | + contexts=[context.HAProxyContext()]) |
650 | + # write out a single config |
651 | + configs.write('/etc/nova/nova.conf') |
652 | + # write out all registered configs |
653 | + configs.write_all() |
654 | + |
655 | + Details: |
656 | + |
657 | + OpenStack Releases and template loading |
658 | + --------------------------------------- |
659 | + When the object is instantiated, it is associated with a specific OS |
660 | + release. This dictates how the template loader will be constructed. |
661 | + |
662 | + The constructed loader attempts to load the template from several places |
663 | + in the following order: |
664 | + - from the most recent OS release-specific template dir (if one exists) |
665 | + - the base templates_dir |
666 | + - a template directory shipped in the charm with this helper file. |
667 | + |
668 | + |
669 | + For the example above, '/tmp/templates' contains the following structure: |
670 | + /tmp/templates/nova.conf |
671 | + /tmp/templates/api-paste.ini |
672 | + /tmp/templates/grizzly/api-paste.ini |
673 | + /tmp/templates/havana/api-paste.ini |
674 | + |
675 | + Since it was registered with the grizzly release, it first searches |
676 | + the grizzly directory for nova.conf, then the templates dir. |
677 | + |
678 | + When writing api-paste.ini, it will find the template in the grizzly |
679 | + directory. |
680 | + |
681 | + If the object were created with folsom, it would fall back to the |
682 | + base templates dir for its api-paste.ini template. |
683 | + |
684 | + This system should help manage changes in config files through |
685 | + openstack releases, allowing charms to fall back to the most recently |
686 | + updated config template for a given release |
687 | + |
688 | + The haproxy.conf, since it is not shipped in the templates dir, will |
689 | + be loaded from the module directory's template directory, eg |
690 | + $CHARM/hooks/charmhelpers/contrib/openstack/templates. This allows |
691 | + us to ship common templates (haproxy, apache) with the helpers. |
692 | + |
693 | + Context generators |
694 | + --------------------------------------- |
695 | + Context generators are used to generate template contexts during hook |
696 | + execution. Doing so may require inspecting service relations, charm |
697 | + config, etc. When registered, a config file is associated with a list |
698 | + of generators. When a template is rendered and written, all context |
699 | + generators are called in a chain to generate the context dictionary |
700 | + passed to the jinja2 template. See context.py for more info. |
701 | + """ |
702 | + def __init__(self, templates_dir, openstack_release): |
703 | + if not os.path.isdir(templates_dir): |
704 | + log('Could not locate templates dir %s' % templates_dir, |
705 | + level=ERROR) |
706 | + raise OSConfigException |
707 | + |
708 | + self.templates_dir = templates_dir |
709 | + self.openstack_release = openstack_release |
710 | + self.templates = {} |
711 | + self._tmpl_env = None |
712 | + |
713 | + if None in [Environment, ChoiceLoader, FileSystemLoader]: |
714 | + # if this code is running, the object is created pre-install hook. |
715 | + # jinja2 shouldn't get touched until the module is reloaded on next |
716 | + # hook execution, with proper jinja2 bits successfully imported. |
717 | + apt_install('python-jinja2') |
718 | + |
719 | + def register(self, config_file, contexts): |
720 | + """ |
721 | + Register a config file with a list of context generators to be called |
722 | + during rendering. |
723 | + """ |
724 | + self.templates[config_file] = OSConfigTemplate(config_file=config_file, |
725 | + contexts=contexts) |
726 | + log('Registered config file: %s' % config_file, level=INFO) |
727 | + |
728 | + def _get_tmpl_env(self): |
729 | + if not self._tmpl_env: |
730 | + loader = get_loader(self.templates_dir, self.openstack_release) |
731 | + self._tmpl_env = Environment(loader=loader) |
732 | + |
733 | + def _get_template(self, template): |
734 | + self._get_tmpl_env() |
735 | + template = self._tmpl_env.get_template(template) |
736 | + log('Loaded template from %s' % template.filename, level=INFO) |
737 | + return template |
738 | + |
739 | + def render(self, config_file): |
740 | + if config_file not in self.templates: |
741 | + log('Config not registered: %s' % config_file, level=ERROR) |
742 | + raise OSConfigException |
743 | + ctxt = self.templates[config_file].context() |
744 | + _tmpl = os.path.basename(config_file) |
745 | + log('Rendering from template: %s' % _tmpl, level=INFO) |
746 | + template = self._get_template(_tmpl) |
747 | + return template.render(ctxt) |
748 | + |
749 | + def write(self, config_file): |
750 | + """ |
751 | + Write a single config file, raises if config file is not registered. |
752 | + """ |
753 | + if config_file not in self.templates: |
754 | + log('Config not registered: %s' % config_file, level=ERROR) |
755 | + raise OSConfigException |
756 | + with open(config_file, 'wb') as out: |
757 | + out.write(self.render(config_file)) |
758 | + log('Wrote template %s.' % config_file, level=INFO) |
759 | + |
760 | + def write_all(self): |
761 | + """ |
762 | + Write out all registered config files. |
763 | + """ |
764 | + [self.write(k) for k in self.templates.iterkeys()] |
765 | + |
766 | + def set_release(self, openstack_release): |
767 | + """ |
768 | + Resets the template environment and generates a new template loader |
769 | + based on a the new openstack release. |
770 | + """ |
771 | + self._tmpl_env = None |
772 | + self.openstack_release = openstack_release |
773 | + self._get_tmpl_env() |
774 | + |
775 | + def complete_contexts(self): |
776 | + ''' |
777 | + Returns a list of context interfaces that yield a complete context. |
778 | + ''' |
779 | + interfaces = [] |
780 | + [interfaces.extend(i.complete_contexts()) |
781 | + for i in self.templates.itervalues()] |
782 | + return interfaces |
783 | |
784 | === modified file 'tests/contrib/openstack/test_openstack_utils.py' |
785 | --- tests/contrib/openstack/test_openstack_utils.py 2013-07-09 18:48:53 +0000 |
786 | +++ tests/contrib/openstack/test_openstack_utils.py 2013-07-11 20:23:23 +0000 |
787 | @@ -37,11 +37,12 @@ |
788 | 'os_release': 'grizzly', |
789 | 'os_version': '1.7.7' |
790 | }, |
791 | + # a package thats available in the cache but is not installed |
792 | 'cinder-common': { |
793 | - 'pkg_vers': '1:2013.2-0ubuntu1~cloud0', |
794 | 'os_release': 'havana', |
795 | 'os_version': '2013.2' |
796 | }, |
797 | + # poorly formed openstack version |
798 | 'bad-version': { |
799 | 'pkg_vers': '1:2016.1-0ubuntu1.1~cloud0', |
800 | 'os_release': None, |
801 | @@ -66,11 +67,15 @@ |
802 | # mocks out the apt cache |
803 | def cache_get(package): |
804 | pkg = MagicMock() |
805 | - if package in FAKE_REPO: |
806 | + if package in FAKE_REPO and 'pkg_vers' in FAKE_REPO[package]: |
807 | pkg.name = package |
808 | pkg.current_ver.ver_str = FAKE_REPO[package]['pkg_vers'] |
809 | + elif (package in FAKE_REPO and |
810 | + 'pkg_vers' not in FAKE_REPO[package]): |
811 | + pkg.name = package |
812 | + pkg.current_ver = None |
813 | else: |
814 | - raise |
815 | + raise KeyError |
816 | return pkg |
817 | cache = MagicMock() |
818 | cache.__getitem__.side_effect = cache_get |
819 | @@ -152,8 +157,11 @@ |
820 | with patch('apt_pkg.Cache') as cache: |
821 | cache.return_value = self._apt_cache() |
822 | for pkg, vers in FAKE_REPO.iteritems(): |
823 | + # test fake repo for all "installed" packages |
824 | if pkg.startswith('bad-'): |
825 | continue |
826 | + if 'pkg_vers' not in vers: |
827 | + continue |
828 | self.assertEquals(openstack.get_os_codename_package(pkg), |
829 | vers['os_release']) |
830 | |
831 | @@ -168,7 +176,7 @@ |
832 | |
833 | @patch('charmhelpers.contrib.openstack.openstack_utils.error_out') |
834 | def test_os_codename_from_bad_package(self, mocked_error): |
835 | - '''Test deriving OpenStack codename from an uninstalled package''' |
836 | + '''Test deriving OpenStack codename from an unavailable package''' |
837 | with patch('apt_pkg.Cache') as cache: |
838 | cache.return_value = self._apt_cache() |
839 | try: |
840 | @@ -177,11 +185,12 @@ |
841 | # ignore exceptions that raise when error_out is mocked |
842 | # and doesn't sys.exit(1) |
843 | pass |
844 | - _err = 'Could not determine version of installed package: foo' |
845 | - mocked_error.assert_called_with(_err) |
846 | + e = 'Could not determine version of package with no installation '\ |
847 | + 'candidate: foo' |
848 | + mocked_error.assert_called_with(e) |
849 | |
850 | def test_os_codename_from_bad_package_nonfatal(self): |
851 | - '''Test OpenStack codename from an uninstalled package is non-fatal''' |
852 | + '''Test OpenStack codename from an unavailable package is non-fatal''' |
853 | with patch('apt_pkg.Cache') as cache: |
854 | cache.return_value = self._apt_cache() |
855 | self.assertEquals( |
856 | @@ -190,6 +199,28 @@ |
857 | ) |
858 | |
859 | @patch('charmhelpers.contrib.openstack.openstack_utils.error_out') |
860 | + def test_os_codename_from_uninstalled_package(self, mock_error): |
861 | + '''Test OpenStack codename from an available but uninstalled pkg''' |
862 | + with patch('apt_pkg.Cache') as cache: |
863 | + cache.return_value = self._apt_cache() |
864 | + try: |
865 | + openstack.get_os_codename_package('cinder-common', fatal=True) |
866 | + except: |
867 | + pass |
868 | + e = ('Could not determine version of uninstalled package: ' |
869 | + 'cinder-common') |
870 | + mock_error.assert_called_with(e) |
871 | + |
872 | + def test_os_codename_from_uninstalled_package_nonfatal(self): |
873 | + '''Test OpenStack codename from avail uninstalled pkg is non fatal''' |
874 | + with patch('apt_pkg.Cache') as cache: |
875 | + cache.return_value = self._apt_cache() |
876 | + self.assertEquals( |
877 | + None, |
878 | + openstack.get_os_codename_package('cinder-common', fatal=False) |
879 | + ) |
880 | + |
881 | + @patch('charmhelpers.contrib.openstack.openstack_utils.error_out') |
882 | def test_os_version_from_package(self, mocked_error): |
883 | '''Test deriving OpenStack version from an installed package''' |
884 | with patch('apt_pkg.Cache') as cache: |
885 | @@ -197,6 +228,8 @@ |
886 | for pkg, vers in FAKE_REPO.iteritems(): |
887 | if pkg.startswith('bad-'): |
888 | continue |
889 | + if 'pkg_vers' not in vers: |
890 | + continue |
891 | self.assertEquals(openstack.get_os_version_package(pkg), |
892 | vers['os_version']) |
893 | |
894 | @@ -211,8 +244,9 @@ |
895 | # ignore exceptions that raise when error_out is mocked |
896 | # and doesn't sys.exit(1) |
897 | pass |
898 | - _err = 'Could not determine version of installed package: foo' |
899 | - mocked_error.assert_called_with(_err) |
900 | + e = 'Could not determine version of package with no installation '\ |
901 | + 'candidate: foo' |
902 | + mocked_error.assert_called_with(e) |
903 | |
904 | def test_os_version_from_bad_package_nonfatal(self): |
905 | '''Test OpenStack version from an uninstalled package is non-fatal''' |
906 | |
907 | === added file 'tests/contrib/openstack/test_os_contexts.py' |
908 | --- tests/contrib/openstack/test_os_contexts.py 1970-01-01 00:00:00 +0000 |
909 | +++ tests/contrib/openstack/test_os_contexts.py 2013-07-11 20:23:23 +0000 |
910 | @@ -0,0 +1,454 @@ |
911 | +import unittest |
912 | + |
913 | +from mock import patch, MagicMock, call |
914 | + |
915 | +from contextlib import contextmanager |
916 | +from copy import copy |
917 | + |
918 | +import charmhelpers.contrib.openstack.context as context |
919 | + |
920 | + |
921 | +@contextmanager |
922 | +def patch_open(): |
923 | + '''Patch open() to allow mocking both open() itself and the file that is |
924 | + yielded. |
925 | + |
926 | + Yields the mock for "open" and "file", respectively.''' |
927 | + mock_open = MagicMock(spec=open) |
928 | + mock_file = MagicMock(spec=file) |
929 | + |
930 | + @contextmanager |
931 | + def stub_open(*args, **kwargs): |
932 | + mock_open(*args, **kwargs) |
933 | + yield mock_file |
934 | + |
935 | + with patch('__builtin__.open', stub_open): |
936 | + yield mock_open, mock_file |
937 | + |
938 | + |
939 | +class FakeRelation(object): |
940 | + ''' |
941 | + A fake relation class. Lets tests specify simple relation data |
942 | + for a default relation + unit (foo:0, foo/0, set in setUp()), eg: |
943 | + |
944 | + rel = { |
945 | + 'private-address': 'foo', |
946 | + 'password': 'passwd', |
947 | + } |
948 | + relation = FakeRelation(rel) |
949 | + self.relation_get.side_effect = relation.get |
950 | + passwd = self.relation_get('password') |
951 | + |
952 | + or more complex relations meant to be addressed by explicit relation id |
953 | + + unit id combos: |
954 | + |
955 | + rel = { |
956 | + 'mysql:0': { |
957 | + 'mysql/0': { |
958 | + 'private-address': 'foo', |
959 | + 'password': 'passwd', |
960 | + } |
961 | + } |
962 | + } |
963 | + relation = FakeRelation(rel) |
964 | + self.relation_get.side_effect = relation.get |
965 | + passwd = self.relation_get('password', rid='mysql:0', unit='mysql/0') |
966 | + ''' |
967 | + def __init__(self, relation_data): |
968 | + self.relation_data = relation_data |
969 | + |
970 | + def get(self, attr=None, unit=None, rid=None): |
971 | + if not rid or rid == 'foo:0': |
972 | + if attr is None: |
973 | + return self.relation_data |
974 | + elif attr in self.relation_data: |
975 | + return self.relation_data[attr] |
976 | + return None |
977 | + else: |
978 | + if rid not in self.relation_data: |
979 | + return None |
980 | + try: |
981 | + relation = self.relation_data[rid][unit] |
982 | + except KeyError: |
983 | + return None |
984 | + if attr in relation: |
985 | + return relation[attr] |
986 | + return None |
987 | + |
988 | + def relation_ids(self, relation): |
989 | + return self.relation_data.keys() |
990 | + |
991 | + def relation_units(self, relation_id): |
992 | + if relation_id not in self.relation_data: |
993 | + return None |
994 | + return self.relation_data[relation_id].keys() |
995 | + |
996 | +SHARED_DB_RELATION = { |
997 | + 'db_host': 'dbserver.local', |
998 | + 'password': 'foo', |
999 | +} |
1000 | + |
1001 | +SHARED_DB_CONFIG = { |
1002 | + 'database-user': 'adam', |
1003 | + 'database': 'foodb', |
1004 | +} |
1005 | + |
1006 | +IDENTITY_SERVICE_RELATION = { |
1007 | + 'service_port': '5000', |
1008 | + 'service_host': 'keystonehost.local', |
1009 | + 'auth_host': 'keystone-host.local', |
1010 | + 'auth_port': '35357', |
1011 | + 'service_tenant': 'admin', |
1012 | + 'service_password': 'foo', |
1013 | + 'service_username': 'adam', |
1014 | +} |
1015 | + |
1016 | +AMQP_RELATION = { |
1017 | + 'private-address': 'rabbithost', |
1018 | + 'password': 'foobar', |
1019 | + 'vip': '10.0.0.1', |
1020 | +} |
1021 | + |
1022 | +AMQP_CONFIG = { |
1023 | + 'rabbit-user': 'adam', |
1024 | + 'rabbit-vhost': 'foo', |
1025 | +} |
1026 | + |
1027 | +CEPH_RELATION = { |
1028 | + 'ceph:0': { |
1029 | + 'ceph/0': { |
1030 | + 'private-address': 'ceph_node1', |
1031 | + 'auth': 'foo', |
1032 | + }, |
1033 | + 'ceph/1': { |
1034 | + 'private-address': 'ceph_node2', |
1035 | + 'auth': 'foo', |
1036 | + }, |
1037 | + } |
1038 | +} |
1039 | + |
1040 | + |
1041 | +# Imported in contexts.py and needs patching in setUp() |
1042 | +TO_PATCH = [ |
1043 | + 'b64decode', |
1044 | + 'check_call', |
1045 | + 'get_cert', |
1046 | + 'get_ca_cert', |
1047 | + 'log', |
1048 | + 'config', |
1049 | + 'relation_get', |
1050 | + 'relation_ids', |
1051 | + 'related_units', |
1052 | + 'unit_get', |
1053 | + 'https', |
1054 | + 'determine_api_port', |
1055 | + 'determine_haproxy_port', |
1056 | + 'peer_units', |
1057 | + 'is_clustered', |
1058 | +] |
1059 | + |
1060 | + |
1061 | +class ContextTests(unittest.TestCase): |
1062 | + def setUp(self): |
1063 | + for m in TO_PATCH: |
1064 | + setattr(self, m, self._patch(m)) |
1065 | + # mock at least a single relation + unit |
1066 | + self.relation_ids.return_value = ['foo:0'] |
1067 | + self.related_units.return_value = ['foo/0'] |
1068 | + |
1069 | + def _patch(self, method): |
1070 | + _m = patch('charmhelpers.contrib.openstack.context.' + method) |
1071 | + mock = _m.start() |
1072 | + self.addCleanup(_m.stop) |
1073 | + return mock |
1074 | + |
1075 | + def test_base_class_not_implemented(self): |
1076 | + base = context.OSContextGenerator() |
1077 | + self.assertRaises(NotImplementedError, base) |
1078 | + |
1079 | + def test_shared_db_context_with_data(self): |
1080 | + '''Test shared-db context with all required data''' |
1081 | + relation = FakeRelation(relation_data=SHARED_DB_RELATION) |
1082 | + self.relation_get.side_effect = relation.get |
1083 | + self.config.return_value = SHARED_DB_CONFIG |
1084 | + shared_db = context.SharedDBContext() |
1085 | + result = shared_db() |
1086 | + expected = { |
1087 | + 'database_host': 'dbserver.local', |
1088 | + 'database': 'foodb', |
1089 | + 'database_user': 'adam', |
1090 | + 'database_password': 'foo', |
1091 | + } |
1092 | + self.assertEquals(result, expected) |
1093 | + |
1094 | + def test_shared_db_context_with_missing_relation(self): |
1095 | + '''Test shared-db context missing relation data''' |
1096 | + incomplete_relation = copy(SHARED_DB_RELATION) |
1097 | + incomplete_relation['password'] = None |
1098 | + relation = FakeRelation(relation_data=incomplete_relation) |
1099 | + self.relation_get.side_effect = relation.get |
1100 | + self.config.return_value = SHARED_DB_CONFIG |
1101 | + shared_db = context.SharedDBContext() |
1102 | + result = shared_db() |
1103 | + self.assertEquals(result, {}) |
1104 | + |
1105 | + def test_shared_db_context_with_missing_config(self): |
1106 | + '''Test shared-db context missing config data''' |
1107 | + incomplete_config = copy(SHARED_DB_CONFIG) |
1108 | + del incomplete_config['database-user'] |
1109 | + relation = FakeRelation(relation_data=SHARED_DB_RELATION) |
1110 | + self.relation_get.side_effect = relation.get |
1111 | + self.config.return_value = incomplete_config |
1112 | + shared_db = context.SharedDBContext() |
1113 | + self.assertRaises(context.OSContextError, shared_db) |
1114 | + |
1115 | + def test_identity_service_context_with_data(self): |
1116 | + '''Test identity-service context with all required data''' |
1117 | + relation = FakeRelation(relation_data=IDENTITY_SERVICE_RELATION) |
1118 | + self.relation_get.side_effect = relation.get |
1119 | + identity_service = context.IdentityServiceContext() |
1120 | + result = identity_service() |
1121 | + expected = { |
1122 | + 'admin_password': 'foo', |
1123 | + 'admin_tenant_name': 'admin', |
1124 | + 'admin_user': 'adam', |
1125 | + 'auth_host': 'keystone-host.local', |
1126 | + 'auth_port': '35357', |
1127 | + 'auth_protocol': 'http', |
1128 | + 'service_host': 'keystonehost.local', |
1129 | + 'service_port': '5000', |
1130 | + 'service_protocol': 'http' |
1131 | + } |
1132 | + self.assertEquals(result, expected) |
1133 | + |
1134 | + def test_identity_service_context_with_missing_relation(self): |
1135 | + '''Test identity-service context missing relation data''' |
1136 | + incomplete_relation = copy(IDENTITY_SERVICE_RELATION) |
1137 | + incomplete_relation['service_password'] = None |
1138 | + relation = FakeRelation(relation_data=incomplete_relation) |
1139 | + self.relation_get.side_effect = relation.get |
1140 | + identity_service = context.IdentityServiceContext() |
1141 | + result = identity_service() |
1142 | + self.assertEquals(result, {}) |
1143 | + |
1144 | + def test_amqp_context_with_data(self): |
1145 | + '''Test amqp context with all required data''' |
1146 | + relation = FakeRelation(relation_data=AMQP_RELATION) |
1147 | + self.relation_get.side_effect = relation.get |
1148 | + self.config.return_value = AMQP_CONFIG |
1149 | + amqp = context.AMQPContext() |
1150 | + result = amqp() |
1151 | + |
1152 | + expected = { |
1153 | + 'rabbitmq_host': 'rabbithost', |
1154 | + 'rabbitmq_password': 'foobar', |
1155 | + 'rabbitmq_user': 'adam', |
1156 | + 'rabbitmq_virtual_host': 'foo' |
1157 | + } |
1158 | + self.assertEquals(result, expected) |
1159 | + |
1160 | + def test_amqp_context_with_data_clustered(self): |
1161 | + '''Test amqp context with all required data with clustered rabbit''' |
1162 | + relation_data = copy(AMQP_RELATION) |
1163 | + relation_data['clustered'] = 'yes' |
1164 | + relation = FakeRelation(relation_data=relation_data) |
1165 | + self.relation_get.side_effect = relation.get |
1166 | + self.config.return_value = AMQP_CONFIG |
1167 | + amqp = context.AMQPContext() |
1168 | + result = amqp() |
1169 | + |
1170 | + expected = { |
1171 | + 'rabbitmq_host': relation_data['vip'], |
1172 | + 'rabbitmq_password': 'foobar', |
1173 | + 'rabbitmq_user': 'adam', |
1174 | + 'rabbitmq_virtual_host': 'foo' |
1175 | + } |
1176 | + self.assertEquals(result, expected) |
1177 | + |
1178 | + def test_amqp_context_with_missing_relation(self): |
1179 | + '''Test amqp context missing relation data''' |
1180 | + incomplete_relation = copy(AMQP_RELATION) |
1181 | + incomplete_relation['password'] = '' |
1182 | + relation = FakeRelation(relation_data=incomplete_relation) |
1183 | + self.relation_get.side_effect = relation.get |
1184 | + self.config.return_value = AMQP_CONFIG |
1185 | + amqp = context.AMQPContext() |
1186 | + result = amqp() |
1187 | + self.assertEquals({}, result) |
1188 | + |
1189 | + def test_amqp_context_with_missing_config(self): |
1190 | + '''Test amqp context missing config data''' |
1191 | + incomplete_config = copy(AMQP_CONFIG) |
1192 | + del incomplete_config['rabbit-user'] |
1193 | + relation = FakeRelation(relation_data=AMQP_RELATION) |
1194 | + self.relation_get.side_effect = relation.get |
1195 | + self.config.return_value = incomplete_config |
1196 | + amqp = context.AMQPContext() |
1197 | + self.assertRaises(context.OSContextError, amqp) |
1198 | + |
1199 | + def test_ceph_context_with_data(self): |
1200 | + '''Test ceph context with all relation data''' |
1201 | + relation = FakeRelation(relation_data=CEPH_RELATION) |
1202 | + self.relation_get.side_effect = relation.get |
1203 | + self.relation_ids.side_effect = relation.relation_ids |
1204 | + self.related_units.side_effect = relation.relation_units |
1205 | + ceph = context.CephContext() |
1206 | + result = ceph() |
1207 | + expected = { |
1208 | + 'mon_hosts': 'ceph_node2 ceph_node1', |
1209 | + 'auth': 'foo' |
1210 | + } |
1211 | + self.assertEquals(result, expected) |
1212 | + |
1213 | + def test_ceph_context_with_missing_data(self): |
1214 | + '''Test ceph context with missing relation data''' |
1215 | + relation = copy(CEPH_RELATION) |
1216 | + for k, v in relation.iteritems(): |
1217 | + for u in v.iterkeys(): |
1218 | + del relation[k][u]['auth'] |
1219 | + relation = FakeRelation(relation_data=relation) |
1220 | + self.relation_get.side_effect = relation.get |
1221 | + self.relation_ids.side_effect = relation.relation_ids |
1222 | + self.related_units.side_effect = relation.relation_units |
1223 | + ceph = context.CephContext() |
1224 | + result = ceph() |
1225 | + self.assertEquals(result, {}) |
1226 | + |
1227 | + @patch('charmhelpers.contrib.openstack.context.unit_get') |
1228 | + @patch('charmhelpers.contrib.openstack.context.local_unit') |
1229 | + def test_haproxy_context_with_data(self, local_unit, unit_get): |
1230 | + '''Test haproxy context with all relation data''' |
1231 | + cluster_relation = { |
1232 | + 'cluster:0': { |
1233 | + 'peer/1': { |
1234 | + 'private-address': 'cluster-peer1.localnet', |
1235 | + }, |
1236 | + 'peer/2': { |
1237 | + 'private-address': 'cluster-peer2.localnet', |
1238 | + }, |
1239 | + }, |
1240 | + } |
1241 | + local_unit.return_value = 'peer/0' |
1242 | + unit_get.return_value = 'cluster-peer0.localnet' |
1243 | + relation = FakeRelation(cluster_relation) |
1244 | + self.relation_ids.side_effect = relation.relation_ids |
1245 | + self.relation_get.side_effect = relation.get |
1246 | + self.related_units.side_effect = relation.relation_units |
1247 | + haproxy = context.HAProxyContext() |
1248 | + with patch_open() as (_open, _file): |
1249 | + result = haproxy() |
1250 | + ex = { |
1251 | + 'units': { |
1252 | + 'peer-0': 'cluster-peer0.localnet', |
1253 | + 'peer-1': 'cluster-peer1.localnet', |
1254 | + 'peer-2': 'cluster-peer2.localnet' |
1255 | + } |
1256 | + } |
1257 | + # the context gets generated. |
1258 | + self.assertEquals(ex, result) |
1259 | + # and /etc/default/haproxy is updated. |
1260 | + self.assertEquals(_file.write.call_args_list, |
1261 | + [call('ENABLED=1\n')]) |
1262 | + |
1263 | + def test_haproxy_context_with_missing_data(self): |
1264 | + '''Test haproxy context with missing relation data''' |
1265 | + self.relation_ids.return_value = [] |
1266 | + haproxy = context.HAProxyContext() |
1267 | + self.assertEquals({}, haproxy()) |
1268 | + |
1269 | + @patch('charmhelpers.contrib.openstack.context.unit_get') |
1270 | + @patch('charmhelpers.contrib.openstack.context.local_unit') |
1271 | + def test_haproxy_context_with_no_peers(self, local_unit, unit_get): |
1272 | + '''Test haproxy context with single unit''' |
1273 | + # peer relations always show at least one peer relation, even |
1274 | + # if unit is alone. should be an incomplete context. |
1275 | + cluster_relation = { |
1276 | + 'cluster:0': { |
1277 | + 'peer/0': { |
1278 | + 'private-address': 'lonely.clusterpeer.howsad', |
1279 | + }, |
1280 | + }, |
1281 | + } |
1282 | + local_unit.return_value = 'peer/0' |
1283 | + unit_get.return_value = 'lonely.clusterpeer.howsad' |
1284 | + relation = FakeRelation(cluster_relation) |
1285 | + self.relation_ids.side_effect = relation.relation_ids |
1286 | + self.relation_get.side_effect = relation.get |
1287 | + self.related_units.side_effect = relation.relation_units |
1288 | + haproxy = context.HAProxyContext() |
1289 | + self.assertEquals({}, haproxy()) |
1290 | + |
1291 | + def test_https_context_with_no_https(self): |
1292 | + '''Test apache2 https when no https data available''' |
1293 | + apache = context.ApacheSSLContext() |
1294 | + self.https.return_value = False |
1295 | + self.assertEquals({}, apache()) |
1296 | + |
1297 | + def _test_https_context(self, apache, is_clustered, peer_units): |
1298 | + self.https.return_value = True |
1299 | + |
1300 | + if is_clustered or peer_units: |
1301 | + self.determine_api_port.return_value = 8756 |
1302 | + self.determine_haproxy_port.return_value = 8766 |
1303 | + else: |
1304 | + self.determine_api_port.return_value = 8766 |
1305 | + |
1306 | + self.unit_get.return_value = 'cinderhost1' |
1307 | + self.is_clustered.return_value = is_clustered |
1308 | + self.peer_units.return_value = peer_units |
1309 | + apache = context.ApacheSSLContext() |
1310 | + apache.configure_cert = MagicMock |
1311 | + apache.enable_modules = MagicMock |
1312 | + apache.external_ports = '8776' |
1313 | + apache.service_namespace = 'cinder' |
1314 | + |
1315 | + ex = { |
1316 | + 'private_address': 'cinderhost1', |
1317 | + 'namespace': 'cinder', |
1318 | + 'endpoints': [(8776, 8766)], |
1319 | + } |
1320 | + self.assertEquals(ex, apache()) |
1321 | + self.assertTrue(apache.configure_cert.called) |
1322 | + self.assertTrue(apache.enable_modules.called) |
1323 | + |
1324 | + def test_https_context_no_peers_no_cluster(self): |
1325 | + '''Test apache2 https on a single, unclustered unit''' |
1326 | + apache = context.ApacheSSLContext() |
1327 | + self._test_https_context(apache, is_clustered=False, peer_units=None) |
1328 | + |
1329 | + def test_https_context_wth_peers_no_cluster(self): |
1330 | + '''Test apache2 https on a unclustered unit with peers''' |
1331 | + apache = context.ApacheSSLContext() |
1332 | + self._test_https_context(apache, is_clustered=False, peer_units=[1, 2]) |
1333 | + |
1334 | + def test_https_context_loads_correct_apache_mods(self): |
1335 | + '''Test apache2 context also loads required apache modules''' |
1336 | + apache = context.ApacheSSLContext() |
1337 | + apache.enable_modules() |
1338 | + ex_cmd = ['a2enmod', 'ssl', 'proxy', 'proxy_http'] |
1339 | + self.check_call.assert_called_with(ex_cmd) |
1340 | + |
1341 | + @patch('__builtin__.open') |
1342 | + @patch('os.mkdir') |
1343 | + @patch('os.path.isdir') |
1344 | + def test_https_configure_cert(self, isdir, mkdir, _open): |
1345 | + '''Test apache2 properly installs certs and keys to disk''' |
1346 | + isdir.return_value = False |
1347 | + self.get_cert.return_value = ('SSL_CERT', 'SSL_KEY') |
1348 | + self.get_ca_cert.return_value = 'CA_CERT' |
1349 | + apache = context.ApacheSSLContext() |
1350 | + apache.service_namespace = 'cinder' |
1351 | + apache.configure_cert() |
1352 | + # appropriate directories are created. |
1353 | + dirs = [call('/etc/apache2/ssl'), call('/etc/apache2/ssl/cinder')] |
1354 | + self.assertEquals(dirs, mkdir.call_args_list) |
1355 | + # appropriate files are opened for writing. |
1356 | + _ca = '/usr/local/share/ca-certificates/keystone_juju_ca_cert.crt' |
1357 | + files = [call('/etc/apache2/ssl/cinder/cert', 'w'), |
1358 | + call('/etc/apache2/ssl/cinder/key', 'w'), |
1359 | + call(_ca, 'w')] |
1360 | + for f in files: |
1361 | + self.assertIn(f, _open.call_args_list) |
1362 | + # appropriate bits are b64decoded. |
1363 | + decode = [call('SSL_CERT'), call('SSL_KEY'), call('CA_CERT')] |
1364 | + self.assertEquals(decode, self.b64decode.call_args_list) |
1365 | |
1366 | === added file 'tests/contrib/openstack/test_os_templating.py' |
1367 | --- tests/contrib/openstack/test_os_templating.py 1970-01-01 00:00:00 +0000 |
1368 | +++ tests/contrib/openstack/test_os_templating.py 2013-07-11 20:23:23 +0000 |
1369 | @@ -0,0 +1,232 @@ |
1370 | + |
1371 | +import os |
1372 | + |
1373 | +import unittest |
1374 | +from mock import patch, call, MagicMock |
1375 | + |
1376 | +import charmhelpers.contrib.openstack.templating as templating |
1377 | + |
1378 | + |
1379 | +class FakeContextGenerator(object): |
1380 | + interfaces = None |
1381 | + |
1382 | + def set(self, interfaces, context): |
1383 | + self.interfaces = interfaces |
1384 | + self.context = context |
1385 | + |
1386 | + def __call__(self): |
1387 | + return self.context |
1388 | + |
1389 | + |
1390 | +class FakeLoader(object): |
1391 | + def set(self, template): |
1392 | + self.template = template |
1393 | + |
1394 | + def get(self, name): |
1395 | + return self.template |
1396 | + |
1397 | + |
1398 | +class MockFSLoader(object): |
1399 | + def __init__(self, dirs): |
1400 | + self.searchpath = [dirs] |
1401 | + |
1402 | + |
1403 | +class MockChoiceLoader(object): |
1404 | + def __init__(self, loaders): |
1405 | + self.loaders = loaders |
1406 | + |
1407 | + |
1408 | +def MockTemplate(): |
1409 | + templ = MagicMock() |
1410 | + templ.render = MagicMock() |
1411 | + return templ |
1412 | + |
1413 | + |
1414 | +class TemplatingTests(unittest.TestCase): |
1415 | + def setUp(self): |
1416 | + path = os.path.dirname(__file__) |
1417 | + self.loader = FakeLoader() |
1418 | + self.context = FakeContextGenerator() |
1419 | + |
1420 | + self.addCleanup(patch.object(templating, 'apt_install').start().stop()) |
1421 | + self.addCleanup(patch.object(templating, 'log').start().stop()) |
1422 | + |
1423 | + templating.FileSystemLoader = MockFSLoader |
1424 | + templating.ChoiceLoader = MockChoiceLoader |
1425 | + templating.Environment = MagicMock |
1426 | + |
1427 | + self.renderer = templating.OSConfigRenderer(templates_dir=path, |
1428 | + openstack_release='folsom') |
1429 | + |
1430 | + @patch.object(templating, 'apt_install') |
1431 | + def test_initializing_a_render_ensures_jinja2_present(self, apt): |
1432 | + '''Creating a new renderer object installs jinja2 if needed''' |
1433 | + # temp. undo the patching from setUp |
1434 | + templating.FileSystemLoader = None |
1435 | + templating.ChoiceLoader = None |
1436 | + templating.Environment = None |
1437 | + templating.OSConfigRenderer(templates_dir='/tmp', |
1438 | + openstack_release='foo') |
1439 | + templating.FileSystemLoader = MockFSLoader |
1440 | + templating.ChoiceLoader = MockChoiceLoader |
1441 | + templating.Environment = MagicMock |
1442 | + apt.assert_called_with('python-jinja2') |
1443 | + |
1444 | + def test_create_renderer_invalid_templates_dir(self): |
1445 | + '''Ensure OSConfigRenderer checks templates_dir''' |
1446 | + self.assertRaises(templating.OSConfigException, |
1447 | + templating.OSConfigRenderer, |
1448 | + templates_dir='/tmp/foooo0', |
1449 | + openstack_release='grizzly') |
1450 | + |
1451 | + def test_render_unregistered_config(self): |
1452 | + '''Ensure cannot render an unregistered config file''' |
1453 | + self.assertRaises(templating.OSConfigException, |
1454 | + self.renderer.render, |
1455 | + config_file='/tmp/foo') |
1456 | + |
1457 | + def test_write_unregistered_config(self): |
1458 | + '''Ensure cannot write an unregistered config file''' |
1459 | + self.assertRaises(templating.OSConfigException, |
1460 | + self.renderer.write, |
1461 | + config_file='/tmp/foo') |
1462 | + |
1463 | + def test_render_complete_context(self): |
1464 | + '''It renders a template when provided a complete context''' |
1465 | + self.loader.set('{{ foo }}') |
1466 | + self.context.set(interfaces=['fooservice'], context={'foo': 'bar'}) |
1467 | + self.renderer.register('/tmp/foo', [self.context]) |
1468 | + with patch.object(self.renderer, '_get_template') as _get_t: |
1469 | + fake_tmpl = MockTemplate() |
1470 | + _get_t.return_value = fake_tmpl |
1471 | + self.renderer.render('/tmp/foo') |
1472 | + fake_tmpl.render.assert_called_with(self.context()) |
1473 | + self.assertIn('fooservice', self.renderer.complete_contexts()) |
1474 | + |
1475 | + def test_render_incomplete_context_with_template(self): |
1476 | + '''It renders a template when provided an incomplete context''' |
1477 | + self.context.set(interfaces=['fooservice'], context={}) |
1478 | + self.renderer.register('/tmp/foo', [self.context]) |
1479 | + with patch.object(self.renderer, '_get_template') as _get_t: |
1480 | + fake_tmpl = MockTemplate() |
1481 | + _get_t.return_value = fake_tmpl |
1482 | + self.renderer.render('/tmp/foo') |
1483 | + fake_tmpl.render.assert_called_with({}) |
1484 | + self.assertNotIn('fooservice', self.renderer.complete_contexts()) |
1485 | + |
1486 | + @patch('__builtin__.open') |
1487 | + @patch.object(templating, 'get_loader') |
1488 | + def test_write_out_config(self, loader, _open): |
1489 | + '''It writes a templated config when provided a complete context''' |
1490 | + self.context.set(interfaces=['fooservice'], context={'foo': 'bar'}) |
1491 | + self.renderer.register('/tmp/foo', [self.context]) |
1492 | + with patch.object(self.renderer, '_get_template') as _get_t: |
1493 | + fake_tmpl = MockTemplate() |
1494 | + _get_t.return_value = fake_tmpl |
1495 | + self.renderer.write('/tmp/foo') |
1496 | + _open.assert_called_with('/tmp/foo', 'wb') |
1497 | + |
1498 | + def test_write_all(self): |
1499 | + '''It writes out all configuration files at once''' |
1500 | + self.context.set(interfaces=['fooservice'], context={'foo': 'bar'}) |
1501 | + self.renderer.register('/tmp/foo', [self.context]) |
1502 | + self.renderer.register('/tmp/bar', [self.context]) |
1503 | + ex_calls = [ |
1504 | + call('/tmp/bar'), |
1505 | + call('/tmp/foo'), |
1506 | + ] |
1507 | + with patch.object(self.renderer, 'write') as _write: |
1508 | + self.renderer.write_all() |
1509 | + self.assertEquals(ex_calls, _write.call_args_list) |
1510 | + pass |
1511 | + |
1512 | + @patch.object(templating, 'get_loader') |
1513 | + def test_reset_template_loader_for_new_os_release(self, loader): |
1514 | + self.loader.set('') |
1515 | + self.context.set(interfaces=['fooservice'], context={}) |
1516 | + loader.return_value = MockFSLoader('/tmp/foo') |
1517 | + self.renderer.register('/tmp/foo', [self.context]) |
1518 | + self.renderer.render('/tmp/foo') |
1519 | + loader.assert_called_with(os.path.dirname(__file__), 'folsom') |
1520 | + self.renderer.set_release(openstack_release='grizzly') |
1521 | + self.renderer.render('/tmp/foo') |
1522 | + loader.assert_called_with(os.path.dirname(__file__), 'grizzly') |
1523 | + |
1524 | + @patch.object(templating, 'get_loader') |
1525 | + def test_incomplete_context_not_reported_complete(self, loader): |
1526 | + '''It does not recognize an incomplete context as a complete context''' |
1527 | + self.context.set(interfaces=['fooservice'], context={}) |
1528 | + self.renderer.register('/tmp/foo', [self.context]) |
1529 | + self.assertNotIn('fooservice', self.renderer.complete_contexts()) |
1530 | + |
1531 | + @patch.object(templating, 'get_loader') |
1532 | + def test_complete_context_reported_complete(self, loader): |
1533 | + '''It recognizes a complete context as a complete context''' |
1534 | + self.context.set(interfaces=['fooservice'], context={'foo': 'bar'}) |
1535 | + self.renderer.register('/tmp/foo', [self.context]) |
1536 | + self.assertIn('fooservice', self.renderer.complete_contexts()) |
1537 | + |
1538 | + @patch('os.path.isdir') |
1539 | + def test_get_loader_no_templates_dir(self, isdir): |
1540 | + '''Ensure getting loader fails with no template dir''' |
1541 | + isdir.return_value = False |
1542 | + self.assertRaises(templating.OSConfigException, |
1543 | + templating.get_loader, |
1544 | + templates_dir='/tmp/foo', os_release='foo') |
1545 | + |
1546 | + @patch('os.path.isdir') |
1547 | + def test_get_loader_all_search_paths(self, isdir): |
1548 | + '''Ensure loader reverse searches of all release template dirs''' |
1549 | + isdir.return_value = True |
1550 | + choice_loader = templating.get_loader('/tmp/foo', |
1551 | + os_release='icehouse') |
1552 | + dirs = [l.searchpath for l in choice_loader.loaders] |
1553 | + |
1554 | + common_tmplts = os.path.join(os.path.dirname(templating.__file__), |
1555 | + 'templates') |
1556 | + expected = [['/tmp/foo/icehouse'], |
1557 | + ['/tmp/foo/havana'], |
1558 | + ['/tmp/foo/grizzly'], |
1559 | + ['/tmp/foo/folsom'], |
1560 | + ['/tmp/foo/essex'], |
1561 | + ['/tmp/foo/diablo'], |
1562 | + ['/tmp/foo'], |
1563 | + [common_tmplts]] |
1564 | + self.assertEquals(dirs, expected) |
1565 | + |
1566 | + @patch('os.path.isdir') |
1567 | + def test_get_loader_some_search_paths(self, isdir): |
1568 | + '''Ensure loader reverse searches of some release template dirs''' |
1569 | + isdir.return_value = True |
1570 | + choice_loader = templating.get_loader('/tmp/foo', os_release='grizzly') |
1571 | + dirs = [l.searchpath for l in choice_loader.loaders] |
1572 | + |
1573 | + common_tmplts = os.path.join(os.path.dirname(templating.__file__), |
1574 | + 'templates') |
1575 | + |
1576 | + expected = [['/tmp/foo/grizzly'], |
1577 | + ['/tmp/foo/folsom'], |
1578 | + ['/tmp/foo/essex'], |
1579 | + ['/tmp/foo/diablo'], |
1580 | + ['/tmp/foo'], |
1581 | + [common_tmplts]] |
1582 | + self.assertEquals(dirs, expected) |
1583 | + |
1584 | + def test_register_template_with_list_of_contexts(self): |
1585 | + '''Ensure registering a template with a list of context generators''' |
1586 | + def _c1(): |
1587 | + pass |
1588 | + |
1589 | + def _c2(): |
1590 | + pass |
1591 | + tmpl = templating.OSConfigTemplate(config_file='/tmp/foo', |
1592 | + contexts=[_c1, _c2]) |
1593 | + self.assertEquals(tmpl.contexts, [_c1, _c2]) |
1594 | + |
1595 | + def test_register_template_with_single_context(self): |
1596 | + '''Ensure registering a template with a single non-list context''' |
1597 | + def _c1(): |
1598 | + pass |
1599 | + tmpl = templating.OSConfigTemplate(config_file='/tmp/foo', |
1600 | + contexts=_c1) |
1601 | + self.assertEquals(tmpl.contexts, [_c1]) |
More feedback:
1) templating.py/get_loader
tmpl_dirs = (
    ('essex', os.path.join(templates_dir, 'essex')),
    ('folsom', os.path.join(templates_dir, 'folsom')),
    ('grizzly', os.path.join(templates_dir, 'grizzly')),
    ('havana', os.path.join(templates_dir, 'havana')),
    ('icehouse', os.path.join(templates_dir, 'icehouse')),
)
Can we drive this off a list of supported releases please; if this is a for loop or suchlike, then adding a new release is just done in openstack_utils.py
2) templating.py/general
level='ERROR'; hookenv has a constant for this - please use.
3) templating.py/OSConfigTemplate & OSConfigRenderer
Please could these classes grow some pydoc - took me a while to figure out how to use them.